diff --git a/secp256k1-zkp-sys/Cargo.toml b/secp256k1-zkp-sys/Cargo.toml index 0a35639a..1511f7ae 100644 --- a/secp256k1-zkp-sys/Cargo.toml +++ b/secp256k1-zkp-sys/Cargo.toml @@ -13,7 +13,7 @@ description = "FFI for `libsecp256k1-zkp` library." keywords = [ "secp256k1", "libsecp256k1-zkp", "ffi" ] readme = "README.md" build = "build.rs" -links = "rustsecp256k1zkp_v0_10_0" +links = "rustsecp256k1zkp_v0_10_1" # Should make docs.rs show all functions, even those behind non-default features [package.metadata.docs.rs] diff --git a/secp256k1-zkp-sys/build.rs b/secp256k1-zkp-sys/build.rs index 05dc3e72..756108d5 100644 --- a/secp256k1-zkp-sys/build.rs +++ b/secp256k1-zkp-sys/build.rs @@ -39,6 +39,9 @@ fn main() { .define("ENABLE_MODULE_RANGEPROOF", Some("1")) .define("ENABLE_MODULE_ECDSA_ADAPTOR", Some("1")) .define("ENABLE_MODULE_WHITELIST", Some("1")) + .define("ENABLE_MODULE_EXTRAKEYS", Some("1")) + .define("ENABLE_MODULE_SCHNORRSIG", Some("1")) + .define("ENABLE_MODULE_SCHNORR_ADAPTOR", Some("1")) .define("ECMULT_GEN_PREC_BITS", Some("4")) // TODO these three should be changed to use libgmp, at least until secp PR 290 is merged .define("USE_NUM_NONE", Some("1")) diff --git a/secp256k1-zkp-sys/depend/secp256k1-HEAD-revision.txt b/secp256k1-zkp-sys/depend/secp256k1-HEAD-revision.txt index 3cc9fed5..e9742020 100644 --- a/secp256k1-zkp-sys/depend/secp256k1-HEAD-revision.txt +++ b/secp256k1-zkp-sys/depend/secp256k1-HEAD-revision.txt @@ -1,2 +1,2 @@ # This file was automatically created by vendor-libsecp.sh -6152622613fdf1c5af6f31f74c427c4e9ee120ce +c2f270985d2344c47d2922ff36648cfa1f83545e diff --git a/secp256k1-zkp-sys/depend/secp256k1/.github/workflows/ci.yml b/secp256k1-zkp-sys/depend/secp256k1/.github/workflows/ci.yml index 36293f13..7c988a1b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/.github/workflows/ci.yml +++ b/secp256k1-zkp-sys/depend/secp256k1/.github/workflows/ci.yml @@ -41,6 +41,7 @@ env: ECDSAADAPTOR: 'no' BPPP: 'no' SCHNORRSIG_HALFAGG: 'no' + 
SCHNORRADAPTOR: 'no' ### test options SECP256K1_TEST_ITERS: BENCH: 'yes' @@ -79,14 +80,14 @@ jobs: matrix: configuration: - env_vars: { WIDEMUL: 'int64', RECOVERY: 'yes' } - - env_vars: { WIDEMUL: 'int64', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes'} + - env_vars: { WIDEMUL: 'int64', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes'} - env_vars: { WIDEMUL: 'int128' } - env_vars: { WIDEMUL: 'int128_struct', ELLSWIFT: 'yes' } - env_vars: { WIDEMUL: 'int128', RECOVERY: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes' } - - env_vars: { WIDEMUL: 'int128', ECDH: 'yes', SCHNORRSIG: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes'} + - env_vars: { WIDEMUL: 'int128', ECDH: 'yes', SCHNORRSIG: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes'} - env_vars: { WIDEMUL: 'int128', ASM: 'x86_64', ELLSWIFT: 'yes' } - - env_vars: { RECOVERY: 'yes', SCHNORRSIG: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes'} - - env_vars: { CTIMETESTS: 'no', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', CPPFLAGS: '-DVERIFY' } + - env_vars: { RECOVERY: 'yes', SCHNORRSIG: 'yes', 
EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes'} + - env_vars: { CTIMETESTS: 'no', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes', CPPFLAGS: '-DVERIFY' } - env_vars: { BUILD: 'distcheck', WITH_VALGRIND: 'no', CTIMETESTS: 'no', BENCH: 'no' } - env_vars: { CPPFLAGS: '-DDETERMINISTIC' } - env_vars: { CFLAGS: '-O0', CTIMETESTS: 'no' } @@ -158,6 +159,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CC: ${{ matrix.cc }} steps: @@ -211,6 +213,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' steps: @@ -271,6 +274,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' steps: @@ -325,6 +329,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' strategy: @@ -389,6 +394,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' steps: @@ -450,6 +456,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' SECP256K1_TEST_ITERS: 2 @@ -510,6 +517,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' CFLAGS: '-fsanitize=undefined,address -g' UBSAN_OPTIONS: 'print_stacktrace=1:halt_on_error=1' @@ -576,6 +584,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'yes' CC: 'clang' SECP256K1_TEST_ITERS: 32 @@ -632,6 +641,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' CTIMETESTS: 'no' 
strategy: @@ -688,15 +698,15 @@ jobs: fail-fast: false matrix: env_vars: - - { WIDEMUL: 'int64', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes' } + - { WIDEMUL: 'int64', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes' } - { WIDEMUL: 'int128_struct', ECMULTGENPRECISION: 2, ECMULTWINDOW: 4 } - - { WIDEMUL: 'int128', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes' } + - { WIDEMUL: 'int128', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes' } - { WIDEMUL: 'int128', RECOVERY: 'yes' } - - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes' } - - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', CC: 'gcc' } - - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 
'yes', SCHNORRSIG_HALFAGG: 'yes', WRAPPER_CMD: 'valgrind --error-exitcode=42', SECP256K1_TEST_ITERS: 2 } - - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', CC: 'gcc', WRAPPER_CMD: 'valgrind --error-exitcode=42', SECP256K1_TEST_ITERS: 2 } - - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', CPPFLAGS: '-DVERIFY', CTIMETESTS: 'no' } + - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes' } + - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes', CC: 'gcc' } + - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes', WRAPPER_CMD: 'valgrind --error-exitcode=42', SECP256K1_TEST_ITERS: 2 } + - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes', CC: 'gcc', WRAPPER_CMD: 'valgrind 
--error-exitcode=42', SECP256K1_TEST_ITERS: 2 } + - { WIDEMUL: 'int128', RECOVERY: 'yes', ECDH: 'yes', SCHNORRSIG: 'yes', ELLSWIFT: 'yes', EXPERIMENTAL: 'yes', ECDSA_S2C: 'yes', RANGEPROOF: 'yes', WHITELIST: 'yes', GENERATOR: 'yes', MUSIG: 'yes', ECDSAADAPTOR: 'yes', BPPP: 'yes', SCHNORRSIG_HALFAGG: 'yes', SCHNORRADAPTOR: 'yes', CPPFLAGS: '-DVERIFY', CTIMETESTS: 'no' } - BUILD: 'distcheck' steps: @@ -816,6 +826,7 @@ jobs: ECDSAADAPTOR: 'yes' BPPP: 'yes' SCHNORRSIG_HALFAGG: 'yes' + SCHNORRADAPTOR: 'yes' steps: - name: Checkout diff --git a/secp256k1-zkp-sys/depend/secp256k1/.gitignore b/secp256k1-zkp-sys/depend/secp256k1/.gitignore index b3ae618d..32edfb2b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/.gitignore +++ b/secp256k1-zkp-sys/depend/secp256k1/.gitignore @@ -66,6 +66,7 @@ libsecp256k1.pc contrib/gh-pr-create.sh musig_example +schnorr_adaptor_example ### CMake /CMakeUserPresets.json diff --git a/secp256k1-zkp-sys/depend/secp256k1/CONTRIBUTING.md b/secp256k1-zkp-sys/depend/secp256k1/CONTRIBUTING.md index 67076011..4a9068b8 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/CONTRIBUTING.md +++ b/secp256k1-zkp-sys/depend/secp256k1/CONTRIBUTING.md @@ -44,11 +44,11 @@ The Contributor Workflow & Peer Review in libsecp256k1 are similar to Bitcoin Co In addition, libsecp256k1 tries to maintain the following coding conventions: -* No runtime heap allocation (e.g., no `malloc`) unless explicitly requested by the caller (via `rustsecp256k1zkp_v0_10_0_context_create` or `rustsecp256k1zkp_v0_10_0_scratch_space_create`, for example). Moreover, it should be possible to use the library without any heap allocations. +* No runtime heap allocation (e.g., no `malloc`) unless explicitly requested by the caller (via `rustsecp256k1zkp_v0_10_1_context_create` or `rustsecp256k1zkp_v0_10_1_scratch_space_create`, for example). Moreover, it should be possible to use the library without any heap allocations. 
* The tests should cover all lines and branches of the library (see [Test coverage](#coverage)). * Operations involving secret data should be tested for being constant time with respect to the secrets (see [src/ctime_tests.c](src/ctime_tests.c)). * Local variables containing secret data should be cleared explicitly to try to delete secrets from memory. -* Use `rustsecp256k1zkp_v0_10_0_memcmp_var` instead of `memcmp` (see [#823](https://github.com/bitcoin-core/secp256k1/issues/823)). +* Use `rustsecp256k1zkp_v0_10_1_memcmp_var` instead of `memcmp` (see [#823](https://github.com/bitcoin-core/secp256k1/issues/823)). #### Style conventions @@ -71,7 +71,7 @@ In addition, libsecp256k1 tries to maintain the following coding conventions: * Use `void *ptr` instead of `void* ptr`. * Arguments of the publicly-facing API must have a specific order defined in [include/secp256k1.h](include/secp256k1.h). * User-facing comment lines in headers should be limited to 80 chars if possible. -* All identifiers in file scope should start with `rustsecp256k1zkp_v0_10_0_`. +* All identifiers in file scope should start with `rustsecp256k1zkp_v0_10_1_`. * Avoid trailing whitespace. 
### Tests diff --git a/secp256k1-zkp-sys/depend/secp256k1/Makefile.am b/secp256k1-zkp-sys/depend/secp256k1/Makefile.am index 764fe917..6abec55b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/Makefile.am +++ b/secp256k1-zkp-sys/depend/secp256k1/Makefile.am @@ -6,7 +6,7 @@ AM_CFLAGS = $(SECP_CFLAGS) lib_LTLIBRARIES = libsecp256k1.la include_HEADERS = include/secp256k1.h -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_preallocated.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_preallocated.h noinst_HEADERS = noinst_HEADERS += src/scalar.h noinst_HEADERS += src/scalar_4x64.h @@ -65,22 +65,22 @@ noinst_HEADERS += src/hash_impl.h noinst_HEADERS += src/field.h noinst_HEADERS += src/field_impl.h noinst_HEADERS += src/bench.h -noinst_HEADERS += src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.h +noinst_HEADERS += src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.h noinst_HEADERS += contrib/lax_der_parsing.h noinst_HEADERS += contrib/lax_der_parsing.c noinst_HEADERS += contrib/lax_der_privatekey_parsing.h noinst_HEADERS += contrib/lax_der_privatekey_parsing.c noinst_HEADERS += examples/examples_util.h -PRECOMPUTED_LIB = librustsecp256k1zkp_v0_10_0_precomputed.la +PRECOMPUTED_LIB = librustsecp256k1zkp_v0_10_1_precomputed.la noinst_LTLIBRARIES = $(PRECOMPUTED_LIB) -librustsecp256k1zkp_v0_10_0_precomputed_la_SOURCES = src/precomputed_ecmult.c src/precomputed_ecmult_gen.c -# We need `-I$(top_srcdir)/src` in VPATH builds if librustsecp256k1zkp_v0_10_0_precomputed_la_SOURCES have been recreated in the build tree. +librustsecp256k1zkp_v0_10_1_precomputed_la_SOURCES = src/precomputed_ecmult.c src/precomputed_ecmult_gen.c +# We need `-I$(top_srcdir)/src` in VPATH builds if librustsecp256k1zkp_v0_10_1_precomputed_la_SOURCES have been recreated in the build tree. # This helps users and packagers who insist on recreating the precomputed files (e.g., Gentoo). 
-librustsecp256k1zkp_v0_10_0_precomputed_la_CPPFLAGS = -I$(top_srcdir)/src $(SECP_CONFIG_DEFINES) +librustsecp256k1zkp_v0_10_1_precomputed_la_CPPFLAGS = -I$(top_srcdir)/src $(SECP_CONFIG_DEFINES) if USE_EXTERNAL_ASM -COMMON_LIB = librustsecp256k1zkp_v0_10_0_common.la +COMMON_LIB = librustsecp256k1zkp_v0_10_1_common.la else COMMON_LIB = endif @@ -91,14 +91,14 @@ pkgconfig_DATA = libsecp256k1.pc if USE_EXTERNAL_ASM if USE_ASM_ARM -librustsecp256k1zkp_v0_10_0_common_la_SOURCES = src/asm/field_10x26_arm.s +librustsecp256k1zkp_v0_10_1_common_la_SOURCES = src/asm/field_10x26_arm.s endif endif -librustsecp256k1zkp_v0_10_0_la_SOURCES = src/secp256k1.c -librustsecp256k1zkp_v0_10_0_la_CPPFLAGS = $(SECP_CONFIG_DEFINES) -librustsecp256k1zkp_v0_10_0_la_LIBADD = $(COMMON_LIB) $(PRECOMPUTED_LIB) -librustsecp256k1zkp_v0_10_0_la_LDFLAGS = -no-undefined -version-info $(LIB_VERSION_CURRENT):$(LIB_VERSION_REVISION):$(LIB_VERSION_AGE) +librustsecp256k1zkp_v0_10_1_la_SOURCES = src/secp256k1.c +librustsecp256k1zkp_v0_10_1_la_CPPFLAGS = $(SECP_CONFIG_DEFINES) +librustsecp256k1zkp_v0_10_1_la_LIBADD = $(COMMON_LIB) $(PRECOMPUTED_LIB) +librustsecp256k1zkp_v0_10_1_la_LDFLAGS = -no-undefined -version-info $(LIB_VERSION_CURRENT):$(LIB_VERSION_REVISION):$(LIB_VERSION_AGE) noinst_PROGRAMS = if USE_BENCHMARK @@ -195,6 +195,17 @@ musig_example_LDFLAGS += -lbcrypt endif TESTS += musig_example endif +if ENABLE_MODULE_SCHNORR_ADAPTOR +noinst_PROGRAMS += schnorr_adaptor_example +schnorr_adaptor_example_SOURCES = examples/schnorr_adaptor.c +schnorr_adaptor_example_CPPFLAGS = -I$(top_srcdir)/include -DSECP256K1_STATIC +schnorr_adaptor_example_LDADD = libsecp256k1.la +schnorr_adaptor_example_LDFLAGS = -static +if BUILD_WINDOWS +schnorr_adaptor_example_LDFLAGS += -lbcrypt +endif +TESTS += schnorr_adaptor_example +endif endif ### Precomputed tables @@ -236,11 +247,11 @@ maintainer-clean-local: clean-precomp ### Pregenerated test vectors ### (see the comments in the previous section for detailed rationale) 
-TESTVECTORS = src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.h +TESTVECTORS = src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.h -src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.h: +src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.h: mkdir -p $(@D) - python3 $(top_srcdir)/tools/tests_wycheproof_generate.py $(top_srcdir)/src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.json > $@ + python3 $(top_srcdir)/tools/tests_wycheproof_generate.py $(top_srcdir)/src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.json > $@ testvectors: $(TESTVECTORS) @@ -259,10 +270,10 @@ EXTRA_DIST += sage/gen_exhaustive_groups.sage EXTRA_DIST += sage/gen_split_lambda_constants.sage EXTRA_DIST += sage/group_prover.sage EXTRA_DIST += sage/prove_group_implementations.sage -EXTRA_DIST += sage/rustsecp256k1zkp_v0_10_0_params.sage +EXTRA_DIST += sage/rustsecp256k1zkp_v0_10_1_params.sage EXTRA_DIST += sage/weierstrass_prover.sage EXTRA_DIST += src/wycheproof/WYCHEPROOF_COPYING -EXTRA_DIST += src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.json +EXTRA_DIST += src/wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.json EXTRA_DIST += tools/tests_wycheproof_generate.py if ENABLE_MODULE_SCHNORRSIG_HALFAGG @@ -320,3 +331,7 @@ endif if ENABLE_MODULE_ECDSA_ADAPTOR include src/modules/ecdsa_adaptor/Makefile.am.include endif + +if ENABLE_MODULE_SCHNORR_ADAPTOR +include src/modules/schnorr_adaptor/Makefile.am.include +endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/README.md b/secp256k1-zkp-sys/depend/secp256k1/README.md index 88bdb2ba..80d2b0ee 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/README.md +++ b/secp256k1-zkp-sys/depend/secp256k1/README.md @@ -12,6 +12,7 @@ Added features: * Experimental module for Confidential Assets (Pedersen commitments, range proofs, and [surjection proofs](src/modules/surjection/surjection.md)).
* Experimental module for Bulletproofs++ range proofs. * Experimental module for [address whitelisting](src/modules/whitelist/whitelist.md). +* Experimental module for Schnorr adaptor signatures. Experimental features are made available for testing and review by the community. The APIs of these features should not be considered stable. @@ -72,6 +73,7 @@ Usage examples Usage examples can be found in the [examples](examples) directory. To compile them you need to configure with `--enable-examples`. * [ECDSA example](examples/ecdsa.c) * [Schnorr signatures example](examples/schnorr.c) + * [Schnorr adaptor signatures example](examples/schnorr_adaptor.c) * [Deriving a shared secret (ECDH) example](examples/ecdh.c) * [MuSig example](examples/musig.c) diff --git a/secp256k1-zkp-sys/depend/secp256k1/ci/ci.sh b/secp256k1-zkp-sys/depend/secp256k1/ci/ci.sh index 47c4ae67..c7101554 100755 --- a/secp256k1-zkp-sys/depend/secp256k1/ci/ci.sh +++ b/secp256k1-zkp-sys/depend/secp256k1/ci/ci.sh @@ -13,8 +13,8 @@ print_environment() { # does not rely on bash. 
for var in WERROR_CFLAGS MAKEFLAGS BUILD \ ECMULTWINDOW ECMULTGENPRECISION ASM WIDEMUL WITH_VALGRIND EXTRAFLAGS \ - EXPERIMENTAL ECDH RECOVERY SCHNORRSIG SCHNORRSIG_HALFAGG ELLSWIFT \ - ECDSA_S2C GENERATOR RANGEPROOF WHITELIST MUSIG ECDSAADAPTOR BPPP \ + EXPERIMENTAL ECDH RECOVERY SCHNORRSIG SCHNORRSIG_HALFAGG SCHNORRADAPTOR \ + ELLSWIFT ECDSA_S2C GENERATOR RANGEPROOF WHITELIST MUSIG ECDSAADAPTOR BPPP \ SECP256K1_TEST_ITERS BENCH SECP256K1_BENCH_ITERS CTIMETESTS\ EXAMPLES \ HOST WRAPPER_CMD \ @@ -83,6 +83,7 @@ esac --enable-module-schnorrsig="$SCHNORRSIG" --enable-module-musig="$MUSIG" --enable-module-ecdsa-adaptor="$ECDSAADAPTOR" \ --enable-module-schnorrsig="$SCHNORRSIG" \ --enable-module-schnorrsig-halfagg="$SCHNORRSIG_HALFAGG" \ + --enable-module-schnorr-adaptor="$SCHNORRADAPTOR" \ --enable-examples="$EXAMPLES" \ --enable-ctime-tests="$CTIMETESTS" \ --with-valgrind="$WITH_VALGRIND" \ diff --git a/secp256k1-zkp-sys/depend/secp256k1/cmake/TryAppendCFlags.cmake b/secp256k1-zkp-sys/depend/secp256k1/cmake/TryAppendCFlags.cmake index bdda0a34..c47c767a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/cmake/TryAppendCFlags.cmake +++ b/secp256k1-zkp-sys/depend/secp256k1/cmake/TryAppendCFlags.cmake @@ -1,6 +1,6 @@ include(CheckCCompilerFlag) -function(rustsecp256k1zkp_v0_10_0_check_c_flags_internal flags output) +function(rustsecp256k1zkp_v0_10_1_check_c_flags_internal flags output) string(MAKE_C_IDENTIFIER "${flags}" result) string(TOUPPER "${result}" result) set(result "C_SUPPORTS_${result}") @@ -17,7 +17,7 @@ endfunction() # Append flags to the COMPILE_OPTIONS directory property if CC accepts them. 
macro(try_append_c_flags) - rustsecp256k1zkp_v0_10_0_check_c_flags_internal("${ARGV}" result) + rustsecp256k1zkp_v0_10_1_check_c_flags_internal("${ARGV}" result) if(result) add_compile_options(${ARGV}) endif() diff --git a/secp256k1-zkp-sys/depend/secp256k1/configure.ac b/secp256k1-zkp-sys/depend/secp256k1/configure.ac index 4d2a6e67..260f5076 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/configure.ac +++ b/secp256k1-zkp-sys/depend/secp256k1/configure.ac @@ -188,6 +188,10 @@ AC_ARG_ENABLE(module_schnorrsig_halfagg, AS_HELP_STRING([--enable-module-schnorrsig-halfagg],[enable schnorrsig half-aggregation module (experimental) [default=no]]), [], [SECP_SET_DEFAULT([enable_module_schnorrsig_halfagg], [no], [yes])]) +AC_ARG_ENABLE(module_schnorr_adaptor, + AS_HELP_STRING([--enable-module-schnorr-adaptor],[enable Schnorr adaptor module (experimental) [default=no]]), [], + [SECP_SET_DEFAULT([enable_module_schnorr_adaptor], [no], [yes])]) + AC_ARG_ENABLE(module_ellswift, AS_HELP_STRING([--enable-module-ellswift],[enable ElligatorSwift module [default=yes]]), [], [SECP_SET_DEFAULT([enable_module_ellswift], [yes], [yes])]) @@ -454,6 +458,14 @@ if test x"$enable_module_schnorrsig_halfagg" = x"yes"; then enable_module_schnorrsig=yes fi +if test x"$enable_module_schnorr_adaptor" = x"yes"; then + if test x"$enable_module_schnorrsig" = x"no"; then + AC_MSG_ERROR([Module dependency error: You have disabled the schnorrsig module explicitly, but it is required by the schnorr adaptor module.]) + fi + SECP_CONFIG_DEFINES="$SECP_CONFIG_DEFINES -DENABLE_MODULE_SCHNORR_ADAPTOR=1" + enable_module_schnorrsig=yes +fi + if test x"$enable_module_bppp" = x"yes"; then if test x"$enable_module_generator" = x"no"; then AC_MSG_ERROR([Module dependency error: You have disabled the generator module explicitly, but it is required by the bppp module.]) @@ -555,6 +567,9 @@ else if test x"$enable_module_schnorrsig_halfagg" = x"yes"; then AC_MSG_ERROR([Schnorrsig Half-Aggregation module is experimental.
Use --enable-experimental to allow.]) fi + if test x"$enable_module_schnorr_adaptor" = x"yes"; then + AC_MSG_ERROR([Schnorr adaptor signatures module is experimental. Use --enable-experimental to allow.]) + fi if test x"$enable_module_bppp" = x"yes"; then AC_MSG_ERROR([Bulletproofs++ module is experimental. Use --enable-experimental to allow.]) fi @@ -611,6 +626,7 @@ AM_CONDITIONAL([ENABLE_MODULE_ECDSA_S2C], [test x"$enable_module_ecdsa_s2c" = x" AM_CONDITIONAL([ENABLE_MODULE_ECDSA_ADAPTOR], [test x"$enable_module_ecdsa_adaptor" = x"yes"]) AM_CONDITIONAL([ENABLE_MODULE_BPPP], [test x"$enable_module_bppp" = x"yes"]) AM_CONDITIONAL([ENABLE_MODULE_SCHNORRSIG_HALFAGG], [test x"$enable_module_schnorrsig_halfagg" = x"yes"]) +AM_CONDITIONAL([ENABLE_MODULE_SCHNORR_ADAPTOR], [test x"$enable_module_schnorr_adaptor" = x"yes"]) AM_CONDITIONAL([USE_REDUCED_SURJECTION_PROOF_SIZE], [test x"$use_reduced_surjection_proof_size" = x"yes"]) AM_CONDITIONAL([USE_EXTERNAL_ASM], [test x"$enable_external_asm" = x"yes"]) AM_CONDITIONAL([USE_ASM_ARM], [test x"$set_asm" = x"arm32"]) @@ -651,6 +667,7 @@ echo " module ecdsa-s2c = $enable_module_ecdsa_s2c" echo " module ecdsa-adaptor = $enable_module_ecdsa_adaptor" echo " module bppp = $enable_module_bppp" echo " module schnorrsig-halfagg = $enable_module_schnorrsig_halfagg" +echo " module schnorr-adaptor = $enable_module_schnorr_adaptor" echo echo " asm = $set_asm" echo " ecmult window size = $set_ecmult_window" diff --git a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.c b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.c index ac138836..5583d3f5 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.c +++ b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.c @@ -8,7 +8,7 @@ #include "lax_der_parsing.h" -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der_lax(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const unsigned char *input, size_t 
inputlen) { +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der_lax(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) { size_t rpos, rlen, spos, slen; size_t pos = 0; size_t lenbyte; @@ -16,7 +16,7 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der_lax(const rustsecp256k1zk int overflow = 0; /* Hack to initialize sig with a correctly-parsed but invalid signature. */ - rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig); /* Sequence tag byte */ if (pos == inputlen || input[pos] != 0x30) { @@ -137,11 +137,11 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der_lax(const rustsecp256k1zk } if (!overflow) { - overflow = !rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig); + overflow = !rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig); } if (overflow) { memset(tmpsig, 0, 64); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(ctx, sig, tmpsig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(ctx, sig, tmpsig); } return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.h b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.h index fb09234c..3d9e4c45 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.h +++ b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_parsing.h @@ -26,8 +26,8 @@ * certain violations are easily supported. You may need to adapt it. * * Do not use this for new systems. Use well-defined DER or compact signatures - * instead if you have the choice (see rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der and - * rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact). + * instead if you have the choice (see rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der and + * rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact). 
* * The supported violations are: * - All numbers are parsed as nonnegative integers, even though X.609-0207 @@ -83,9 +83,9 @@ extern "C" { * encoded numbers are out of range, signature validation with it is * guaranteed to fail for every message and public key. */ -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der_lax( - const rustsecp256k1zkp_v0_10_0_context* ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der_lax( + const rustsecp256k1zkp_v0_10_1_context* ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char *input, size_t inputlen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); diff --git a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.c b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.c index 7152c32e..4651d33e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.c +++ b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.c @@ -8,7 +8,7 @@ #include "lax_der_privatekey_parsing.h" -int ec_privkey_import_der(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *out32, const unsigned char *privkey, size_t privkeylen) { +int ec_privkey_import_der(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *out32, const unsigned char *privkey, size_t privkeylen) { const unsigned char *end = privkey + privkeylen; int lenb = 0; int len = 0; @@ -45,17 +45,17 @@ int ec_privkey_import_der(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned return 0; } if (privkey[1]) memcpy(out32 + 32 - privkey[1], privkey + 2, privkey[1]); - if (!rustsecp256k1zkp_v0_10_0_ec_seckey_verify(ctx, out32)) { + if (!rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, out32)) { memset(out32, 0, 32); return 0; } return 1; } -int ec_privkey_export_der(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *privkey, size_t *privkeylen, const unsigned char *key32, int compressed) { - 
rustsecp256k1zkp_v0_10_0_pubkey pubkey; +int ec_privkey_export_der(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *privkey, size_t *privkeylen, const unsigned char *key32, int compressed) { + rustsecp256k1zkp_v0_10_1_pubkey pubkey; size_t pubkeylen = 0; - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey, key32)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey, key32)) { *privkeylen = 0; return 0; } @@ -79,7 +79,7 @@ int ec_privkey_export_der(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned memcpy(ptr, key32, 32); ptr += 32; memcpy(ptr, middle, sizeof(middle)); ptr += sizeof(middle); pubkeylen = 33; - rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED); + rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED); ptr += pubkeylen; *privkeylen = ptr - privkey; } else { @@ -104,7 +104,7 @@ int ec_privkey_export_der(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned memcpy(ptr, key32, 32); ptr += 32; memcpy(ptr, middle, sizeof(middle)); ptr += sizeof(middle); pubkeylen = 65; - rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_UNCOMPRESSED); + rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, ptr, &pubkeylen, &pubkey, SECP256K1_EC_UNCOMPRESSED); ptr += pubkeylen; *privkeylen = ptr - privkey; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.h b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.h index 8fdea1e5..c2a02720 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.h +++ b/secp256k1-zkp-sys/depend/secp256k1/contrib/lax_der_privatekey_parsing.h @@ -43,7 +43,7 @@ extern "C" { /** Export a private key in DER format. * * Returns: 1 if the private key was valid. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). 
+ * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: privkey: pointer to an array for storing the private key in BER. * Should have space for 279 bytes, and cannot be NULL. * privkeylen: Pointer to an int where the length of the private key in @@ -57,10 +57,10 @@ extern "C" { * simple 32-byte private keys are sufficient. * * Note that this function does not guarantee correct DER output. It is - * guaranteed to be parsable by rustsecp256k1zkp_v0_10_0_ec_privkey_import_der + * guaranteed to be parsable by rustsecp256k1zkp_v0_10_1_ec_privkey_import_der */ SECP256K1_WARN_UNUSED_RESULT int ec_privkey_export_der( - const rustsecp256k1zkp_v0_10_0_context* ctx, + const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *privkey, size_t *privkeylen, const unsigned char *seckey, @@ -82,7 +82,7 @@ SECP256K1_WARN_UNUSED_RESULT int ec_privkey_export_der( * key. */ SECP256K1_WARN_UNUSED_RESULT int ec_privkey_import_der( - const rustsecp256k1zkp_v0_10_0_context* ctx, + const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const unsigned char *privkey, size_t privkeylen diff --git a/secp256k1-zkp-sys/depend/secp256k1/doc/ellswift.md b/secp256k1-zkp-sys/depend/secp256k1/doc/ellswift.md index 7dec7b90..86f665d1 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/doc/ellswift.md +++ b/secp256k1-zkp-sys/depend/secp256k1/doc/ellswift.md @@ -144,8 +144,8 @@ but the approach here is simple enough and gives fairly uniform output even in t **Note**: in the paper these conditions result in $\infty$ as output, due to the use of projective coordinates there. We wish to avoid the need for callers to deal with this special case. -This is implemented in `rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_frac_var` (which decodes to an x-coordinate represented as a fraction), and -in `rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_var` (which outputs the actual x-coordinate). 
+This is implemented in `rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_frac_var` (which decodes to an x-coordinate represented as a fraction), and +in `rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_var` (which outputs the actual x-coordinate). ## 3. The encoding function @@ -247,7 +247,7 @@ the loop can be simplified to only compute one of the inverses instead of all of * Let $t = G_{c,u}(x).$ * If $t \neq \bot$, return $(u, t)$; restart loop otherwise. -This is implemented in `rustsecp256k1zkp_v0_10_0_ellswift_xelligatorswift_var`. +This is implemented in `rustsecp256k1zkp_v0_10_1_ellswift_xelligatorswift_var`. ### 3.3 Finding the inverse @@ -388,7 +388,7 @@ Specialized for odd-ordered $a=0$ curves: * If $c \in \\{4, 6\\}:$ return $w(\frac{-\sqrt{-3}+1}{2}u + v).$ * If $c \in \\{5, 7\\}:$ return $w(\frac{-\sqrt{-3}-1}{2}u - v).$ -This is implemented in `rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var`. +This is implemented in `rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_inv_var`. And the x-only ElligatorSwift encoding algorithm is still: @@ -471,11 +471,11 @@ as decoder: * Let $y = \sqrt{g(x)}.$ * Return $(x, y)$ if $sign(y) = sign(t)$; $(x, -y)$ otherwise. -This is implemented in `rustsecp256k1zkp_v0_10_0_ellswift_swiftec_var`. The used $sign(x)$ function is the parity of $x$ when represented as in integer in $[0,q).$ +This is implemented in `rustsecp256k1zkp_v0_10_1_ellswift_swiftec_var`. The used $sign(x)$ function is the parity of $x$ when represented as in integer in $[0,q).$ The corresponding encoder would invoke the x-only one, but negating the output $t$ if $sign(t) \neq sign(y).$ -This is implemented in `rustsecp256k1zkp_v0_10_0_ellswift_elligatorswift_var`. +This is implemented in `rustsecp256k1zkp_v0_10_1_ellswift_elligatorswift_var`. Note that this is only intended for encoding points where both the x-coordinate and y-coordinate are unpredictable. 
When encoding x-only points where the y-coordinate is implicitly even (or implicitly square, or implicitly in $[0,q/2]$), the encoder in diff --git a/secp256k1-zkp-sys/depend/secp256k1/examples/ecdh.c b/secp256k1-zkp-sys/depend/secp256k1/examples/ecdh.c index d36eee75..174e0878 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/examples/ecdh.c +++ b/secp256k1-zkp-sys/depend/secp256k1/examples/ecdh.c @@ -26,19 +26,19 @@ int main(void) { unsigned char randomize[32]; int return_val; size_t len; - rustsecp256k1zkp_v0_10_0_pubkey pubkey1; - rustsecp256k1zkp_v0_10_0_pubkey pubkey2; + rustsecp256k1zkp_v0_10_1_pubkey pubkey1; + rustsecp256k1zkp_v0_10_1_pubkey pubkey2; /* Before we can call actual API functions, we need to create a "context". */ - rustsecp256k1zkp_v0_10_0_context* ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + rustsecp256k1zkp_v0_10_1_context* ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (!fill_random(randomize, sizeof(randomize))) { printf("Failed to generate randomness\n"); return 1; } /* Randomizing the context is recommended to protect against side-channel - * leakage See `rustsecp256k1zkp_v0_10_0_context_randomize` in secp256k1.h for more + * leakage See `rustsecp256k1zkp_v0_10_1_context_randomize` in secp256k1.h for more * information about it. This should never fail. 
*/ - return_val = rustsecp256k1zkp_v0_10_0_context_randomize(ctx, randomize); + return_val = rustsecp256k1zkp_v0_10_1_context_randomize(ctx, randomize); assert(return_val); /*** Key Generation ***/ @@ -51,27 +51,27 @@ int main(void) { printf("Failed to generate randomness\n"); return 1; } - if (rustsecp256k1zkp_v0_10_0_ec_seckey_verify(ctx, seckey1) && rustsecp256k1zkp_v0_10_0_ec_seckey_verify(ctx, seckey2)) { + if (rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, seckey1) && rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, seckey2)) { break; } } /* Public key creation using a valid context with a verified secret key should never fail */ - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey1, seckey1); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey1, seckey1); assert(return_val); - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey2, seckey2); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey2, seckey2); assert(return_val); /* Serialize pubkey1 in a compressed form (33 bytes), should always return 1 */ len = sizeof(compressed_pubkey1); - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, compressed_pubkey1, &len, &pubkey1, SECP256K1_EC_COMPRESSED); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, compressed_pubkey1, &len, &pubkey1, SECP256K1_EC_COMPRESSED); assert(return_val); /* Should be the same size as the size of the output, because we passed a 33 byte array. */ assert(len == sizeof(compressed_pubkey1)); /* Serialize pubkey2 in a compressed form (33 bytes) */ len = sizeof(compressed_pubkey2); - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, compressed_pubkey2, &len, &pubkey2, SECP256K1_EC_COMPRESSED); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, compressed_pubkey2, &len, &pubkey2, SECP256K1_EC_COMPRESSED); assert(return_val); /* Should be the same size as the size of the output, because we passed a 33 byte array. 
*/ assert(len == sizeof(compressed_pubkey2)); @@ -80,12 +80,12 @@ int main(void) { /* Perform ECDH with seckey1 and pubkey2. Should never fail with a verified * seckey and valid pubkey */ - return_val = rustsecp256k1zkp_v0_10_0_ecdh(ctx, shared_secret1, &pubkey2, seckey1, NULL, NULL); + return_val = rustsecp256k1zkp_v0_10_1_ecdh(ctx, shared_secret1, &pubkey2, seckey1, NULL, NULL); assert(return_val); /* Perform ECDH with seckey2 and pubkey1. Should never fail with a verified * seckey and valid pubkey */ - return_val = rustsecp256k1zkp_v0_10_0_ecdh(ctx, shared_secret2, &pubkey1, seckey2, NULL, NULL); + return_val = rustsecp256k1zkp_v0_10_1_ecdh(ctx, shared_secret2, &pubkey1, seckey2, NULL, NULL); assert(return_val); /* Both parties should end up with the same shared secret */ @@ -104,7 +104,7 @@ int main(void) { print_hex(shared_secret1, sizeof(shared_secret1)); /* This will clear everything from the context and free the memory */ - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); /* It's best practice to try to clear secrets from memory after using them. * This is done because some bugs can allow an attacker to leak memory, for diff --git a/secp256k1-zkp-sys/depend/secp256k1/examples/ecdsa.c b/secp256k1-zkp-sys/depend/secp256k1/examples/ecdsa.c index 479d0503..94f9e8b0 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/examples/ecdsa.c +++ b/secp256k1-zkp-sys/depend/secp256k1/examples/ecdsa.c @@ -34,18 +34,18 @@ int main(void) { size_t len; int is_signature_valid, is_signature_valid2; int return_val; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; /* Before we can call actual API functions, we need to create a "context". 
*/ - rustsecp256k1zkp_v0_10_0_context* ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + rustsecp256k1zkp_v0_10_1_context* ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (!fill_random(randomize, sizeof(randomize))) { printf("Failed to generate randomness\n"); return 1; } /* Randomizing the context is recommended to protect against side-channel - * leakage See `rustsecp256k1zkp_v0_10_0_context_randomize` in secp256k1.h for more + * leakage See `rustsecp256k1zkp_v0_10_1_context_randomize` in secp256k1.h for more * information about it. This should never fail. */ - return_val = rustsecp256k1zkp_v0_10_0_context_randomize(ctx, randomize); + return_val = rustsecp256k1zkp_v0_10_1_context_randomize(ctx, randomize); assert(return_val); /*** Key Generation ***/ @@ -58,18 +58,18 @@ int main(void) { printf("Failed to generate randomness\n"); return 1; } - if (rustsecp256k1zkp_v0_10_0_ec_seckey_verify(ctx, seckey)) { + if (rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, seckey)) { break; } } /* Public key creation using a valid context with a verified secret key should never fail */ - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey, seckey); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey, seckey); assert(return_val); /* Serialize the pubkey in a compressed form(33 bytes). Should always return 1. */ len = sizeof(compressed_pubkey); - return_val = rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, compressed_pubkey, &len, &pubkey, SECP256K1_EC_COMPRESSED); + return_val = rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, compressed_pubkey, &len, &pubkey, SECP256K1_EC_COMPRESSED); assert(return_val); /* Should be the same size as the size of the output, because we passed a 33 byte array. */ assert(len == sizeof(compressed_pubkey)); @@ -80,31 +80,31 @@ int main(void) { * custom nonce function, passing `NULL` will use the RFC-6979 safe default. 
* Signing with a valid context, verified secret key * and the default nonce function should never fail. */ - return_val = rustsecp256k1zkp_v0_10_0_ecdsa_sign(ctx, &sig, msg_hash, seckey, NULL, NULL); + return_val = rustsecp256k1zkp_v0_10_1_ecdsa_sign(ctx, &sig, msg_hash, seckey, NULL, NULL); assert(return_val); /* Serialize the signature in a compact form. Should always return 1 * according to the documentation in secp256k1.h. */ - return_val = rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(ctx, serialized_signature, &sig); + return_val = rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(ctx, serialized_signature, &sig); assert(return_val); /*** Verification ***/ /* Deserialize the signature. This will return 0 if the signature can't be parsed correctly. */ - if (!rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(ctx, &sig, serialized_signature)) { + if (!rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(ctx, &sig, serialized_signature)) { printf("Failed parsing the signature\n"); return 1; } /* Deserialize the public key. This will return 0 if the public key can't be parsed correctly. */ - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(ctx, &pubkey, compressed_pubkey, sizeof(compressed_pubkey))) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(ctx, &pubkey, compressed_pubkey, sizeof(compressed_pubkey))) { printf("Failed parsing the public key\n"); return 1; } /* Verify a signature. This will return 1 if it's valid and 0 if it's not. */ - is_signature_valid = rustsecp256k1zkp_v0_10_0_ecdsa_verify(ctx, &sig, msg_hash, &pubkey); + is_signature_valid = rustsecp256k1zkp_v0_10_1_ecdsa_verify(ctx, &sig, msg_hash, &pubkey); printf("Is the signature valid? %s\n", is_signature_valid ? 
"true" : "false"); printf("Secret Key: "); @@ -115,14 +115,14 @@ int main(void) { print_hex(serialized_signature, sizeof(serialized_signature)); /* This will clear everything from the context and free the memory */ - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); /* Bonus example: if all we need is signature verification (and no key generation or signing), we don't need to use a context created via - rustsecp256k1zkp_v0_10_0_context_create(). We can simply use the static (i.e., global) - context rustsecp256k1zkp_v0_10_0_context_static. See its description in + rustsecp256k1zkp_v0_10_1_context_create(). We can simply use the static (i.e., global) + context rustsecp256k1zkp_v0_10_1_context_static. See its description in include/secp256k1.h for details. */ - is_signature_valid2 = rustsecp256k1zkp_v0_10_0_ecdsa_verify(rustsecp256k1zkp_v0_10_0_context_static, + is_signature_valid2 = rustsecp256k1zkp_v0_10_1_ecdsa_verify(rustsecp256k1zkp_v0_10_1_context_static, &sig, msg_hash, &pubkey); assert(is_signature_valid2 == is_signature_valid); diff --git a/secp256k1-zkp-sys/depend/secp256k1/examples/musig.c b/secp256k1-zkp-sys/depend/secp256k1/examples/musig.c index ff6beb4e..63ac5d74 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/examples/musig.c +++ b/secp256k1-zkp-sys/depend/secp256k1/examples/musig.c @@ -9,7 +9,7 @@ /** This file demonstrates how to use the MuSig module to create a * 3-of-3 multisignature. Additionally, see the documentation in - * include/rustsecp256k1zkp_v0_10_0_musig.h and src/modules/musig/musig.md. + * include/rustsecp256k1zkp_v0_10_1_musig.h and src/modules/musig/musig.md. 
*/ #include @@ -23,31 +23,31 @@ #include "examples_util.h" struct signer_secrets { - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; }; struct signer { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; }; /* Number of public keys involved in creating the aggregate signature */ #define N_SIGNERS 3 /* Create a key pair, store it in signer_secrets->keypair and signer->pubkey */ -static int create_keypair(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secrets *signer_secrets, struct signer *signer) { +static int create_keypair(const rustsecp256k1zkp_v0_10_1_context* ctx, struct signer_secrets *signer_secrets, struct signer *signer) { unsigned char seckey[32]; while (1) { if (!fill_random(seckey, sizeof(seckey))) { printf("Failed to generate randomness\n"); return 1; } - if (rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &signer_secrets->keypair, seckey)) { + if (rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &signer_secrets->keypair, seckey)) { break; } } - if (!rustsecp256k1zkp_v0_10_0_keypair_pub(ctx, &signer->pubkey, &signer_secrets->keypair)) { + if (!rustsecp256k1zkp_v0_10_1_keypair_pub(ctx, &signer->pubkey, &signer_secrets->keypair)) { return 0; } return 1; @@ -55,66 +55,66 @@ static int create_keypair(const rustsecp256k1zkp_v0_10_0_context* ctx, struct si /* Tweak the pubkey corresponding to the provided keyagg cache, update the cache * and return the tweaked aggregate pk. 
*/ -static int tweak(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_xonly_pubkey *agg_pk, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *cache) { - rustsecp256k1zkp_v0_10_0_pubkey output_pk; +static int tweak(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_xonly_pubkey *agg_pk, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *cache) { + rustsecp256k1zkp_v0_10_1_pubkey output_pk; unsigned char plain_tweak[32] = "this could be a BIP32 tweak...."; unsigned char xonly_tweak[32] = "this could be a taproot tweak.."; /* Plain tweaking which, for example, allows deriving multiple child * public keys from a single aggregate key using BIP32 */ - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add(ctx, NULL, cache, plain_tweak)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add(ctx, NULL, cache, plain_tweak)) { return 0; } /* Note that we did not provided an output_pk argument, because the * resulting pk is also saved in the cache and so if one is just interested * in signing the output_pk argument is unnecessary. On the other hand, if * one is not interested in signing, the same output_pk can be obtained by - * calling `rustsecp256k1zkp_v0_10_0_musig_pubkey_get` right after key aggregation to get - * the full pubkey and then call `rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add`. */ + * calling `rustsecp256k1zkp_v0_10_1_musig_pubkey_get` right after key aggregation to get + * the full pubkey and then call `rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add`. 
*/ /* Xonly tweaking which, for example, allows creating taproot commitments */ - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add(ctx, &output_pk, cache, xonly_tweak)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add(ctx, &output_pk, cache, xonly_tweak)) { return 0; } /* Note that if we wouldn't care about signing, we can arrive at the same * output_pk by providing the untweaked public key to - * `rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add` (after converting it to an xonly pubkey - * if necessary with `rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey`). */ + * `rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add` (after converting it to an xonly pubkey + * if necessary with `rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey`). */ /* Now we convert the output_pk to an xonly pubkey to allow to later verify * the Schnorr signature against it. For this purpose we can ignore the * `pk_parity` output argument; we would need it if we would have to open * the taproot commitment. 
*/ - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(ctx, agg_pk, NULL, &output_pk)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(ctx, agg_pk, NULL, &output_pk)) { return 0; } return 1; } /* Sign a message hash with the given key pairs and store the result in sig */ -static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secrets *signer_secrets, struct signer *signer, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *cache, const unsigned char *msg32, unsigned char *sig64) { +static int sign(const rustsecp256k1zkp_v0_10_1_context* ctx, struct signer_secrets *signer_secrets, struct signer *signer, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *cache, const unsigned char *msg32, unsigned char *sig64) { int i; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonces[N_SIGNERS]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sigs[N_SIGNERS]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonces[N_SIGNERS]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sigs[N_SIGNERS]; /* The same for all signers */ - rustsecp256k1zkp_v0_10_0_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_session session; for (i = 0; i < N_SIGNERS; i++) { unsigned char seckey[32]; unsigned char session_id[32]; /* Create random session ID. It is absolutely necessary that the session ID - * is unique for every call of rustsecp256k1zkp_v0_10_0_musig_nonce_gen. Otherwise + * is unique for every call of rustsecp256k1zkp_v0_10_1_musig_nonce_gen. Otherwise * it's trivial for an attacker to extract the secret key! */ if (!fill_random(session_id, sizeof(session_id))) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_keypair_sec(ctx, seckey, &signer_secrets[i].keypair)) { + if (!rustsecp256k1zkp_v0_10_1_keypair_sec(ctx, seckey, &signer_secrets[i].keypair)) { return 0; } /* Initialize session and create secret nonce for signing and public * nonce to send to the other signers. 
*/ - if (!rustsecp256k1zkp_v0_10_0_musig_nonce_gen(ctx, &signer_secrets[i].secnonce, &signer[i].pubnonce, session_id, seckey, &signer[i].pubkey, msg32, NULL, NULL)) { + if (!rustsecp256k1zkp_v0_10_1_musig_nonce_gen(ctx, &signer_secrets[i].secnonce, &signer[i].pubnonce, session_id, seckey, &signer[i].pubkey, msg32, NULL, NULL)) { return 0; } pubnonces[i] = &signer[i].pubnonce; @@ -122,20 +122,20 @@ static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secre /* Communication round 1: A production system would exchange public nonces * here before moving on. */ for (i = 0; i < N_SIGNERS; i++) { - rustsecp256k1zkp_v0_10_0_musig_aggnonce agg_pubnonce; + rustsecp256k1zkp_v0_10_1_musig_aggnonce agg_pubnonce; /* Create aggregate nonce and initialize the session */ - if (!rustsecp256k1zkp_v0_10_0_musig_nonce_agg(ctx, &agg_pubnonce, pubnonces, N_SIGNERS)) { + if (!rustsecp256k1zkp_v0_10_1_musig_nonce_agg(ctx, &agg_pubnonce, pubnonces, N_SIGNERS)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_musig_nonce_process(ctx, &session, &agg_pubnonce, msg32, cache, NULL)) { + if (!rustsecp256k1zkp_v0_10_1_musig_nonce_process(ctx, &session, &agg_pubnonce, msg32, cache, NULL)) { return 0; } /* partial_sign will clear the secnonce by setting it to 0. That's because * you must _never_ reuse the secnonce (or use the same session_id to * create a secnonce). If you do, you effectively reuse the nonce and * leak the secret key. 
*/ - if (!rustsecp256k1zkp_v0_10_0_musig_partial_sign(ctx, &signer[i].partial_sig, &signer_secrets[i].secnonce, &signer_secrets[i].keypair, cache, &session)) { + if (!rustsecp256k1zkp_v0_10_1_musig_partial_sign(ctx, &signer[i].partial_sig, &signer_secrets[i].secnonce, &signer_secrets[i].keypair, cache, &session)) { return 0; } partial_sigs[i] = &signer[i].partial_sig; @@ -145,7 +145,7 @@ static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secre for (i = 0; i < N_SIGNERS; i++) { /* To check whether signing was successful, it suffices to either verify * the aggregate signature with the aggregate public key using - * rustsecp256k1zkp_v0_10_0_schnorrsig_verify, or verify all partial signatures of all + * rustsecp256k1zkp_v0_10_1_schnorrsig_verify, or verify all partial signatures of all * signers individually. Verifying the aggregate signature is cheaper but * verifying the individual partial signatures has the advantage that it * can be used to determine which of the partial signatures are invalid @@ -154,26 +154,26 @@ static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secre * fine to first verify the aggregate sig, and only verify the individual * sigs if it does not work. 
*/ - if (!rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(ctx, &signer[i].partial_sig, &signer[i].pubnonce, &signer[i].pubkey, cache, &session)) { + if (!rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(ctx, &signer[i].partial_sig, &signer[i].pubnonce, &signer[i].pubkey, cache, &session)) { return 0; } } - return rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(ctx, sig64, &session, partial_sigs, N_SIGNERS); + return rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(ctx, sig64, &session, partial_sigs, N_SIGNERS); } int main(void) { - rustsecp256k1zkp_v0_10_0_context* ctx; + rustsecp256k1zkp_v0_10_1_context* ctx; int i; struct signer_secrets signer_secrets[N_SIGNERS]; struct signer signers[N_SIGNERS]; - const rustsecp256k1zkp_v0_10_0_pubkey *pubkeys_ptr[N_SIGNERS]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache cache; + const rustsecp256k1zkp_v0_10_1_pubkey *pubkeys_ptr[N_SIGNERS]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache cache; unsigned char msg[32] = "this_could_be_the_hash_of_a_msg!"; unsigned char sig[64]; /* Create a secp256k1 context */ - ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); printf("Creating key pairs......"); for (i = 0; i < N_SIGNERS; i++) { if (!create_keypair(ctx, &signer_secrets[i], &signers[i])) { @@ -185,7 +185,7 @@ static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secre printf("ok\n"); printf("Combining public keys..."); /* If you just want to aggregate and not sign the cache can be NULL */ - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(ctx, NULL, &agg_pk, &cache, pubkeys_ptr, N_SIGNERS)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(ctx, NULL, &agg_pk, &cache, pubkeys_ptr, N_SIGNERS)) { printf("FAILED\n"); return 1; } @@ -204,11 +204,11 @@ static int sign(const rustsecp256k1zkp_v0_10_0_context* ctx, struct signer_secre 
} printf("ok\n"); printf("Verifying signature....."); - if (!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(ctx, sig, msg, 32, &agg_pk)) { + if (!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(ctx, sig, msg, 32, &agg_pk)) { printf("FAILED\n"); return 1; } printf("ok\n"); - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); return 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr.c b/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr.c index 93db74e7..bdb46174 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr.c +++ b/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr.c @@ -28,18 +28,18 @@ int main(void) { unsigned char signature[64]; int is_signature_valid, is_signature_valid2; int return_val; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_keypair keypair; /* Before we can call actual API functions, we need to create a "context". */ - rustsecp256k1zkp_v0_10_0_context* ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + rustsecp256k1zkp_v0_10_1_context* ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (!fill_random(randomize, sizeof(randomize))) { printf("Failed to generate randomness\n"); return 1; } /* Randomizing the context is recommended to protect against side-channel - * leakage See `rustsecp256k1zkp_v0_10_0_context_randomize` in secp256k1.h for more + * leakage See `rustsecp256k1zkp_v0_10_1_context_randomize` in secp256k1.h for more * information about it. This should never fail. */ - return_val = rustsecp256k1zkp_v0_10_0_context_randomize(ctx, randomize); + return_val = rustsecp256k1zkp_v0_10_1_context_randomize(ctx, randomize); assert(return_val); /*** Key Generation ***/ @@ -54,21 +54,21 @@ int main(void) { } /* Try to create a keypair with a valid context, it should only fail if * the secret key is zero or out of range. 
*/ - if (rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair, seckey)) { + if (rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, seckey)) { break; } } /* Extract the X-only public key from the keypair. We pass NULL for * `pk_parity` as the parity isn't needed for signing or verification. - * `rustsecp256k1zkp_v0_10_0_keypair_xonly_pub` supports returning the parity for + * `rustsecp256k1zkp_v0_10_1_keypair_xonly_pub` supports returning the parity for * other use cases such as tests or verifying Taproot tweaks. * This should never fail with a valid context and public key. */ - return_val = rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(ctx, &pubkey, NULL, &keypair); + return_val = rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(ctx, &pubkey, NULL, &keypair); assert(return_val); /* Serialize the public key. Should always return 1 for a valid public key. */ - return_val = rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, serialized_pubkey, &pubkey); + return_val = rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, serialized_pubkey, &pubkey); assert(return_val); /*** Signing ***/ @@ -76,7 +76,7 @@ int main(void) { /* Instead of signing (possibly very long) messages directly, we sign a * 32-byte hash of the message in this example. * - * We use rustsecp256k1zkp_v0_10_0_tagged_sha256 to create this hash. This function expects + * We use rustsecp256k1zkp_v0_10_1_tagged_sha256 to create this hash. This function expects * a context-specific "tag", which restricts the context in which the signed * messages should be considered valid. For example, if protocol A mandates * to use the tag "my_fancy_protocol" and protocol B mandates to use the tag @@ -87,7 +87,7 @@ int main(void) { * message that has intended consequences in the intended context (e.g., * protocol A) but would have unintended consequences if it were valid in * some other context (e.g., protocol B). 
*/ - return_val = rustsecp256k1zkp_v0_10_0_tagged_sha256(ctx, msg_hash, tag, sizeof(tag), msg, sizeof(msg)); + return_val = rustsecp256k1zkp_v0_10_1_tagged_sha256(ctx, msg_hash, tag, sizeof(tag), msg, sizeof(msg)); assert(return_val); /* Generate 32 bytes of randomness to use with BIP-340 schnorr signing. */ @@ -98,30 +98,30 @@ int main(void) { /* Generate a Schnorr signature. * - * We use the rustsecp256k1zkp_v0_10_0_schnorrsig_sign32 function that provides a simple + * We use the rustsecp256k1zkp_v0_10_1_schnorrsig_sign32 function that provides a simple * interface for signing 32-byte messages (which in our case is a hash of * the actual message). BIP-340 recommends passing 32 bytes of randomness * to the signing function to improve security against side-channel attacks. * Signing with a valid context, a 32-byte message, a verified keypair, and * any 32 bytes of auxiliary random data should never fail. */ - return_val = rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(ctx, signature, msg_hash, &keypair, auxiliary_rand); + return_val = rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(ctx, signature, msg_hash, &keypair, auxiliary_rand); assert(return_val); /*** Verification ***/ /* Deserialize the public key. This will return 0 if the public key can't * be parsed correctly */ - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(ctx, &pubkey, serialized_pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(ctx, &pubkey, serialized_pubkey)) { printf("Failed parsing the public key\n"); return 1; } /* Compute the tagged hash on the received messages using the same tag as the signer. */ - return_val = rustsecp256k1zkp_v0_10_0_tagged_sha256(ctx, msg_hash, tag, sizeof(tag), msg, sizeof(msg)); + return_val = rustsecp256k1zkp_v0_10_1_tagged_sha256(ctx, msg_hash, tag, sizeof(tag), msg, sizeof(msg)); assert(return_val); /* Verify a signature. This will return 1 if it's valid and 0 if it's not. 
*/ - is_signature_valid = rustsecp256k1zkp_v0_10_0_schnorrsig_verify(ctx, signature, msg_hash, 32, &pubkey); + is_signature_valid = rustsecp256k1zkp_v0_10_1_schnorrsig_verify(ctx, signature, msg_hash, 32, &pubkey); printf("Is the signature valid? %s\n", is_signature_valid ? "true" : "false"); @@ -133,14 +133,14 @@ int main(void) { print_hex(signature, sizeof(signature)); /* This will clear everything from the context and free the memory */ - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); /* Bonus example: if all we need is signature verification (and no key generation or signing), we don't need to use a context created via - rustsecp256k1zkp_v0_10_0_context_create(). We can simply use the static (i.e., global) - context rustsecp256k1zkp_v0_10_0_context_static. See its description in + rustsecp256k1zkp_v0_10_1_context_create(). We can simply use the static (i.e., global) + context rustsecp256k1zkp_v0_10_1_context_static. See its description in include/secp256k1.h for details. */ - is_signature_valid2 = rustsecp256k1zkp_v0_10_0_schnorrsig_verify(rustsecp256k1zkp_v0_10_0_context_static, + is_signature_valid2 = rustsecp256k1zkp_v0_10_1_schnorrsig_verify(rustsecp256k1zkp_v0_10_1_context_static, signature, msg_hash, 32, &pubkey); assert(is_signature_valid2 == is_signature_valid); diff --git a/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr_adaptor.c b/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr_adaptor.c new file mode 100644 index 00000000..f7111ebc --- /dev/null +++ b/secp256k1-zkp-sys/depend/secp256k1/examples/schnorr_adaptor.c @@ -0,0 +1,184 @@ +/************************************************************************* + * Written in 2024 by Sivaram Dhakshinamoorthy * + * To the extent possible under law, the author(s) have dedicated all * + * copyright and related and neighboring rights to the software in this * + * file to the public domain worldwide. 
This software is distributed * + * without any warranty. For the CC0 Public Domain Dedication, see * + * EXAMPLES_COPYING or https://creativecommons.org/publicdomain/zero/1.0 * + *************************************************************************/ + +#include +#include +#include + +#include +#include +#include + +#include "examples_util.h" + +/** This example implements the Multi-hop Locks protocol described in + * https://github.com/BlockstreamResearch/scriptless-scripts/blob/master/md/multi-hop-locks.md, + * using the Schnorr adaptor module. + * + * In this example, Alice (sender) sends a payment to Carol (recipient) + * via Bob (intermediate hop). The protocol ensures that Alice exchanges + * her coins for a proof of payment from Carol, and Bob securely forwards + * the payment without being able to access its details. + * + * Carol provides Alice with a point (z*G), which acts as the proof of + * payment. Alice sets up cryptographic locks with Bob, and Bob forwards + * the payment to Carol. When Carol reveals the secret z to claim the + * payment, Alice learns the proof of payment. 
+ */ + +static int create_keypair(const rustsecp256k1zkp_v0_10_1_context *ctx, rustsecp256k1zkp_v0_10_1_keypair *keypair, rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey) { + unsigned char seckey[32]; + while (1) { + if (!fill_random(seckey, sizeof(seckey))) { + printf("Failed to generate randomness\n"); + return 0; + } + if (rustsecp256k1zkp_v0_10_1_keypair_create(ctx, keypair, seckey)) { + break; + } + } + if(!rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(ctx, pubkey, NULL, keypair)){ + return 0; + } + return 1; +} + +/* Creates the locks required for multi-hop payments */ +static int create_hop_locks(const rustsecp256k1zkp_v0_10_1_context *ctx, rustsecp256k1zkp_v0_10_1_pubkey *left_lock, rustsecp256k1zkp_v0_10_1_pubkey *right_lock, rustsecp256k1zkp_v0_10_1_pubkey *adaptor_pop, unsigned char *tweak_sum, unsigned char *tweak1, unsigned char *tweak2) { + while (1) { + if (!fill_random(tweak1, 32)) { + printf("Failed to generate randomness\n"); + return 0; + } + if (!fill_random(tweak2, 32)) { + printf("Failed to generate randomness\n"); + return 0; + } + if (rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, tweak1) && rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, tweak2)) { + break; + } + } + /* Create left lock = (z + tweak1)*G */ + memcpy(left_lock, adaptor_pop, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + if(!rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(ctx, left_lock, tweak1)) { + return 0; + } + + /* Create right lock = (z + tweak1 + tweak2)*G */ + memcpy(tweak_sum, tweak1, 32); + if(!rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, tweak_sum, tweak2)) { + return 0; + } + memcpy(right_lock, adaptor_pop, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + if(!rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(ctx, right_lock, tweak_sum)) { + return 0; + } + + return 1; +} + +int main(void) { + unsigned char tx_ab[32] = "alice sends a payment to bob...."; + unsigned char tx_bc[32] = "bob sends a payment to carol...."; + unsigned char presig_ab[65]; + unsigned char presig_bc[65]; + 
unsigned char sig_ab[64]; + unsigned char sig_bc[64]; + unsigned char tmp[32]; + unsigned char tweak1[32]; + unsigned char tweak2[32]; + unsigned char tweak_sum[32]; + unsigned char secret_pop[32]; /* Carol's secret proof of payment */ + rustsecp256k1zkp_v0_10_1_pubkey adaptor_pop; + rustsecp256k1zkp_v0_10_1_pubkey left_lock; + rustsecp256k1zkp_v0_10_1_pubkey right_lock; + rustsecp256k1zkp_v0_10_1_pubkey tmp_pubkey; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkey_a, pubkey_b; + rustsecp256k1zkp_v0_10_1_keypair keypair_a, keypair_b; + int ret; + + rustsecp256k1zkp_v0_10_1_context* ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); + + /* Generate keypairs for Alice and Bob */ + ret = create_keypair(ctx, &keypair_a, &pubkey_a); + assert(ret); + ret = create_keypair(ctx, &keypair_b, &pubkey_b); + assert(ret); + + /* Carol setup: creates a proof of payment (z*G) */ + if (!fill_random(secret_pop, sizeof(secret_pop))) { + printf("Failed to generate randomness\n"); + return 1; + } + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &adaptor_pop, secret_pop); + assert(ret); + + /* Alice's setup: Generates tweak1, tweak2, left lock, and right lock + * for the payment. She shares the following: + * + * 1. With Bob: tweak2, left lock, right lock + * 2. 
With Carol: tweak1 + tweak2, right lock + */ + if (!create_hop_locks(ctx, &left_lock, &right_lock, &adaptor_pop, tweak_sum, tweak1, tweak2)) { + return 1; + } + /* Alice sends a pre-signature to Bob */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(ctx, presig_ab, tx_ab, &keypair_a, &left_lock, NULL); + assert(ret); + + /* Bob setup: extracts the left lock from Alice's pre-signature and verifies it */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(ctx, &tmp_pubkey, presig_ab, tx_ab, &pubkey_a); + assert(ret); + assert(memcmp(&tmp_pubkey, &left_lock, sizeof(left_lock)) == 0); + /* Bob creates a pre-signature that forwards the payment to Carol */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(ctx, presig_bc, tx_bc, &keypair_b, &right_lock, NULL); + assert(ret); + + /* Carol extracts the right lock from Bob's pre-signature and verifies it */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(ctx, &tmp_pubkey, presig_bc, tx_bc, &pubkey_b); + assert(ret); + assert(memcmp(&tmp_pubkey, &right_lock, sizeof(right_lock)) == 0); + /* Carol claims her payment by adapting Bob's pre-signature with the + * secret = z + tweak1 + tweak2, to produce a valid BIP340 Schnorr + * signature. */ + memcpy(tmp, secret_pop, sizeof(secret_pop)); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, tmp, tweak_sum); + assert(ret); + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(ctx, sig_bc, presig_bc, tmp); + assert(ret); + assert(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(ctx, sig_bc, tx_bc, sizeof(tx_bc), &pubkey_b)); + + /* Bob extracts the secret = z + tweak1 + tweak2 from his pre-signature + * and the BIP340 signature created by Carol. */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(ctx, tmp, presig_bc, sig_bc); + assert(ret); + /* Bob claims his payment by adapting Alice's pre-signature with the + * secret = z + tweak1, to produce a valid BIP340 Schnorr signature. 
*/ + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_negate(ctx, tweak2); + assert(ret); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, tmp, tweak2); + assert(ret); + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(ctx, sig_ab, presig_ab, tmp); + assert(ret); + assert(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(ctx, sig_ab, tx_ab, sizeof(tx_ab), &pubkey_a)); + + /* Alice extracts the proof of payment = z from her pre-signature + * and the BIP340 signature created by Bob. */ + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(ctx, tmp, presig_ab, sig_ab); + assert(ret); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_negate(ctx, tweak1); + assert(ret); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, tmp, tweak1); + assert(ret); + assert(memcmp(tmp, secret_pop, sizeof(secret_pop)) == 0); + + printf("Multi-hop locks protocol successfully executed!!!\n"); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); + return 0; +} diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1.h index 3cca2c75..c2be54be 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1.h @@ -29,25 +29,25 @@ extern "C" { * The primary purpose of context objects is to store randomization data for * enhanced protection against side-channel leakage. This protection is only * effective if the context is randomized after its creation. See - * rustsecp256k1zkp_v0_10_0_context_create for creation of contexts and - * rustsecp256k1zkp_v0_10_0_context_randomize for randomization. + * rustsecp256k1zkp_v0_10_1_context_create for creation of contexts and + * rustsecp256k1zkp_v0_10_1_context_randomize for randomization. * * A secondary purpose of context objects is to store pointers to callback * functions that the library will call when certain error states arise. 
See - * rustsecp256k1zkp_v0_10_0_context_set_error_callback as well as - * rustsecp256k1zkp_v0_10_0_context_set_illegal_callback for details. Future library versions + * rustsecp256k1zkp_v0_10_1_context_set_error_callback as well as + * rustsecp256k1zkp_v0_10_1_context_set_illegal_callback for details. Future library versions * may use context objects for additional purposes. * * A constructed context can safely be used from multiple threads * simultaneously, but API calls that take a non-const pointer to a context * need exclusive access to it. In particular this is the case for - * rustsecp256k1zkp_v0_10_0_context_destroy, rustsecp256k1zkp_v0_10_0_context_preallocated_destroy, - * and rustsecp256k1zkp_v0_10_0_context_randomize. + * rustsecp256k1zkp_v0_10_1_context_destroy, rustsecp256k1zkp_v0_10_1_context_preallocated_destroy, + * and rustsecp256k1zkp_v0_10_1_context_randomize. * * Regarding randomization, either do it once at creation time (in which case * you do not need any locking for the other calls), or use a read-write lock. */ -typedef struct rustsecp256k1zkp_v0_10_0_context_struct rustsecp256k1zkp_v0_10_0_context; +typedef struct rustsecp256k1zkp_v0_10_1_context_struct rustsecp256k1zkp_v0_10_1_context; /** Opaque data structure that holds rewritable "scratch space" * @@ -60,7 +60,7 @@ typedef struct rustsecp256k1zkp_v0_10_0_context_struct rustsecp256k1zkp_v0_10_0_ * Unlike the context object, this cannot safely be shared between threads * without additional synchronization logic. */ -typedef struct rustsecp256k1zkp_v0_10_0_scratch_space_struct rustsecp256k1zkp_v0_10_0_scratch_space; +typedef struct rustsecp256k1zkp_v0_10_1_scratch_space_struct rustsecp256k1zkp_v0_10_1_scratch_space; /** Opaque data structure that holds a parsed and valid public key. * @@ -68,12 +68,12 @@ typedef struct rustsecp256k1zkp_v0_10_0_scratch_space_struct rustsecp256k1zkp_v0 * guaranteed to be portable between different platforms or versions. 
It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage or transmission, - * use rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize and rustsecp256k1zkp_v0_10_0_ec_pubkey_parse. To - * compare keys, use rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp. + * use rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize and rustsecp256k1zkp_v0_10_1_ec_pubkey_parse. To + * compare keys, use rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp. */ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_pubkey; +} rustsecp256k1zkp_v0_10_1_pubkey; /** Opaque data structured that holds a parsed ECDSA signature. * @@ -81,12 +81,12 @@ typedef struct { * guaranteed to be portable between different platforms or versions. It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage, transmission, or - * comparison, use the rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_* and - * rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_* functions. + * comparison, use the rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_* and + * rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_* functions. */ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_ecdsa_signature; +} rustsecp256k1zkp_v0_10_1_ecdsa_signature; /** A pointer to a function to deterministically generate a nonce. * @@ -104,7 +104,7 @@ typedef struct { * Except for test cases, this function should compute some cryptographic hash of * the message, the algorithm, the key and the attempt. 
*/ -typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function)( +typedef int (*rustsecp256k1zkp_v0_10_1_nonce_function)( unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, @@ -200,8 +200,8 @@ typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function)( #define SECP256K1_FLAGS_BIT_CONTEXT_DECLASSIFY (1 << 10) #define SECP256K1_FLAGS_BIT_COMPRESSION (1 << 8) -/** Context flags to pass to rustsecp256k1zkp_v0_10_0_context_create, rustsecp256k1zkp_v0_10_0_context_preallocated_size, and - * rustsecp256k1zkp_v0_10_0_context_preallocated_create. */ +/** Context flags to pass to rustsecp256k1zkp_v0_10_1_context_create, rustsecp256k1zkp_v0_10_1_context_preallocated_size, and + * rustsecp256k1zkp_v0_10_1_context_preallocated_create. */ #define SECP256K1_CONTEXT_NONE (SECP256K1_FLAGS_TYPE_CONTEXT) /** Deprecated context flags. These flags are treated equivalent to SECP256K1_CONTEXT_NONE. */ @@ -211,7 +211,7 @@ typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function)( /* Testing flag. Do not use. */ #define SECP256K1_CONTEXT_DECLASSIFY (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_DECLASSIFY) -/** Flag to pass to rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize. */ +/** Flag to pass to rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize. */ #define SECP256K1_EC_COMPRESSED (SECP256K1_FLAGS_TYPE_COMPRESSION | SECP256K1_FLAGS_BIT_COMPRESSION) #define SECP256K1_EC_UNCOMPRESSED (SECP256K1_FLAGS_TYPE_COMPRESSION) @@ -223,23 +223,23 @@ typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function)( #define SECP256K1_TAG_PUBKEY_HYBRID_ODD 0x07 /** A built-in constant secp256k1 context object with static storage duration, to be - * used in conjunction with rustsecp256k1zkp_v0_10_0_selftest. + * used in conjunction with rustsecp256k1zkp_v0_10_1_selftest. * * This context object offers *only limited functionality* , i.e., it cannot be used * for API functions that perform computations involving secret keys, e.g., signing * and public key generation. 
If this restriction applies to a specific API function, - * it is mentioned in its documentation. See rustsecp256k1zkp_v0_10_0_context_create if you need a + * it is mentioned in its documentation. See rustsecp256k1zkp_v0_10_1_context_create if you need a * full context object that supports all functionality offered by the library. * - * It is highly recommended to call rustsecp256k1zkp_v0_10_0_selftest before using this context. + * It is highly recommended to call rustsecp256k1zkp_v0_10_1_selftest before using this context. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_static; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_static; -/** Deprecated alias for rustsecp256k1zkp_v0_10_0_context_static. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_no_precomp -SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_context_static instead"); +/** Deprecated alias for rustsecp256k1zkp_v0_10_1_context_static. */ +SECP256K1_API const rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_no_precomp +SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_1_context_static instead"); -/** Perform basic self tests (to be used in conjunction with rustsecp256k1zkp_v0_10_0_context_static) +/** Perform basic self tests (to be used in conjunction with rustsecp256k1zkp_v0_10_1_context_static) * * This function performs self tests that detect some serious usage errors and * similar conditions, e.g., when the library is compiled for the wrong endianness. @@ -247,15 +247,15 @@ SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_context_static instead"); * very rudimentary and are not intended as a replacement for running the test * binaries. * - * It is highly recommended to call this before using rustsecp256k1zkp_v0_10_0_context_static. + * It is highly recommended to call this before using rustsecp256k1zkp_v0_10_1_context_static. 
* It is not necessary to call this function before using a context created with - * rustsecp256k1zkp_v0_10_0_context_create (or rustsecp256k1zkp_v0_10_0_context_preallocated_create), which will + * rustsecp256k1zkp_v0_10_1_context_create (or rustsecp256k1zkp_v0_10_1_context_preallocated_create), which will * take care of performing the self tests. * * If the tests fail, this function will call the default error handler to abort the - * program (see rustsecp256k1zkp_v0_10_0_context_set_error_callback). + * program (see rustsecp256k1zkp_v0_10_1_context_set_error_callback). */ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_selftest(void); +SECP256K1_API void rustsecp256k1zkp_v0_10_1_selftest(void); /** Set a callback function to be called when an illegal argument is passed to * an API call. It will only trigger for violations that are mentioned @@ -278,11 +278,11 @@ SECP256K1_API void rustsecp256k1zkp_v0_10_0_selftest(void); * USE_EXTERNAL_DEFAULT_CALLBACKS is defined, which is the case if the build * has been configured with --enable-external-default-callbacks. Then the * following two symbols must be provided to link against: - * - void rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn(const char *message, void *data); - * - void rustsecp256k1zkp_v0_10_0_default_error_callback_fn(const char *message, void *data); + * - void rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn(const char *message, void *data); + * - void rustsecp256k1zkp_v0_10_1_default_error_callback_fn(const char *message, void *data); * The library can call these default handlers even before a proper callback data - * pointer could have been set using rustsecp256k1zkp_v0_10_0_context_set_illegal_callback or - * rustsecp256k1zkp_v0_10_0_context_set_error_callback, e.g., when the creation of a context + * pointer could have been set using rustsecp256k1zkp_v0_10_1_context_set_illegal_callback or + * rustsecp256k1zkp_v0_10_1_context_set_error_callback, e.g., when the creation of a context * fails. 
In this case, the corresponding default handler will be called with * the data pointer argument set to NULL. * @@ -292,10 +292,10 @@ SECP256K1_API void rustsecp256k1zkp_v0_10_0_selftest(void); * (NULL restores the default handler.) * data: the opaque pointer to pass to fun above, must be NULL for the default handler. * - * See also rustsecp256k1zkp_v0_10_0_context_set_error_callback. + * See also rustsecp256k1zkp_v0_10_1_context_set_error_callback. */ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_context_set_illegal_callback( - rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API void rustsecp256k1zkp_v0_10_1_context_set_illegal_callback( + rustsecp256k1zkp_v0_10_1_context *ctx, void (*fun)(const char *message, void *data), const void *data ) SECP256K1_ARG_NONNULL(1); @@ -309,21 +309,21 @@ SECP256K1_API void rustsecp256k1zkp_v0_10_0_context_set_illegal_callback( * This can only trigger in case of a hardware failure, miscompilation, * memory corruption, serious bug in the library, or other error would can * otherwise result in undefined behaviour. It will not trigger due to mere - * incorrect usage of the API (see rustsecp256k1zkp_v0_10_0_context_set_illegal_callback + * incorrect usage of the API (see rustsecp256k1zkp_v0_10_1_context_set_illegal_callback * for that). After this callback returns, anything may happen, including * crashing. * * Args: ctx: pointer to a context object. * In: fun: pointer to a function to call when an internal error occurs, * taking a message and an opaque pointer (NULL restores the - * default handler, see rustsecp256k1zkp_v0_10_0_context_set_illegal_callback + * default handler, see rustsecp256k1zkp_v0_10_1_context_set_illegal_callback * for details). * data: the opaque pointer to pass to fun above, must be NULL for the default handler. * - * See also rustsecp256k1zkp_v0_10_0_context_set_illegal_callback. + * See also rustsecp256k1zkp_v0_10_1_context_set_illegal_callback. 
*/ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_context_set_error_callback( - rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API void rustsecp256k1zkp_v0_10_1_context_set_error_callback( + rustsecp256k1zkp_v0_10_1_context *ctx, void (*fun)(const char *message, void *data), const void *data ) SECP256K1_ARG_NONNULL(1); @@ -342,9 +342,9 @@ SECP256K1_API void rustsecp256k1zkp_v0_10_0_context_set_error_callback( * 0x03), uncompressed (65 bytes, header byte 0x04), or hybrid (65 bytes, header * byte 0x06 or 0x07) format public keys. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *input, size_t inputlen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -359,16 +359,16 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * In/Out: outputlen: pointer to an integer which is initially set to the * size of output, and is overwritten with the written * size. - * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_0_pubkey containing an + * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_1_pubkey containing an * initialized public key. * flags: SECP256K1_EC_COMPRESSED if serialization should be in * compressed format, otherwise SECP256K1_EC_UNCOMPRESSED. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, size_t *outputlen, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, unsigned int flags ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -381,10 +381,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize( * In: pubkey1: first public key to compare * pubkey2: second public key to compare */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey1, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey2 +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey1, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey2 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Parse an ECDSA signature in compact (64 bytes) format. @@ -402,9 +402,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * S are zero, the resulting sig value is guaranteed to fail verification for * any message and public key. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *input64 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -423,9 +423,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact( * encoded numbers are out of range, signature verification with it is * guaranteed to fail for every message and public key. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *input, size_t inputlen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -441,11 +441,11 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der( * if 0 was returned). * In: sig: pointer to an initialized signature object */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, size_t *outputlen, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Serialize an ECDSA signature in compact (64 byte) format. 
@@ -455,12 +455,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der( * Out: output64: pointer to a 64-byte array to store the compact serialization * In: sig: pointer to an initialized signature object * - * See rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact for details about the encoding. + * See rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact for details about the encoding. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output64, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Verify an ECDSA signature. @@ -483,16 +483,16 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact( * form are accepted. * * If you need to accept ECDSA signatures from sources that do not obey this - * rule, apply rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize to the signature prior to + * rule, apply rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize to the signature prior to * verification, but be aware that doing so results in malleable signatures. * * For details, see the comments for that function. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdsa_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msghash32, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Convert a signature to a normalized lower-S form. @@ -531,50 +531,50 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_ve * accept various non-unique encodings, so care should be taken when this * property is required for an application. * - * The rustsecp256k1zkp_v0_10_0_ecdsa_sign function will by default create signatures in the - * lower-S form, and rustsecp256k1zkp_v0_10_0_ecdsa_verify will not accept others. In case + * The rustsecp256k1zkp_v0_10_1_ecdsa_sign function will by default create signatures in the + * lower-S form, and rustsecp256k1zkp_v0_10_1_ecdsa_verify will not accept others. In case * signatures come from a system that cannot enforce this property, - * rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize must be called before verification. + * rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize must be called before verification. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sigout, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sigin +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sigout, + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sigin ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(3); /** An implementation of RFC6979 (using HMAC-SHA256) as nonce generation function. * If a data pointer is passed, it is assumed to be a pointer to 32 bytes of * extra entropy. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function rustsecp256k1zkp_v0_10_0_nonce_function_rfc6979; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_nonce_function rustsecp256k1zkp_v0_10_1_nonce_function_rfc6979; -/** A default safe nonce generation function (currently equal to rustsecp256k1zkp_v0_10_0_nonce_function_rfc6979). */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function rustsecp256k1zkp_v0_10_0_nonce_function_default; +/** A default safe nonce generation function (currently equal to rustsecp256k1zkp_v0_10_1_nonce_function_rfc6979). */ +SECP256K1_API const rustsecp256k1zkp_v0_10_1_nonce_function rustsecp256k1zkp_v0_10_1_nonce_function_default; /** Create an ECDSA signature. * * Returns: 1: signature created * 0: the nonce generation function failed, or the secret key was invalid. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: sig: pointer to an array where the signature will be placed. * In: msghash32: the 32-byte message hash being signed. * seckey: pointer to a 32-byte secret key. * noncefp: pointer to a nonce generation function. If NULL, - * rustsecp256k1zkp_v0_10_0_nonce_function_default is used. 
+ * rustsecp256k1zkp_v0_10_1_nonce_function_default is used. * ndata: pointer to arbitrary data used by the nonce generation function * (can be NULL). If it is non-NULL and - * rustsecp256k1zkp_v0_10_0_nonce_function_default is used, then ndata must be a + * rustsecp256k1zkp_v0_10_1_nonce_function_default is used, then ndata must be a * pointer to 32-bytes of additional data. * * The created signature is always in lower-S form. See - * rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize for more details. + * rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize for more details. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msghash32, const unsigned char *seckey, - rustsecp256k1zkp_v0_10_0_nonce_function noncefp, + rustsecp256k1zkp_v0_10_1_nonce_function noncefp, const void *ndata ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -590,8 +590,8 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_sign( * Args: ctx: pointer to a context object. * In: seckey: pointer to a 32-byte secret key. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_seckey_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_seckey_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *seckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); @@ -599,38 +599,38 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_secke * * Returns: 1: secret was valid, public key stores. * 0: secret was invalid, try again. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). 
+ * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: pubkey: pointer to the created public key. * In: seckey: pointer to a 32-byte secret key. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_create( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_create( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *seckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Negates a secret key in place. * * Returns: 0 if the given secret key is invalid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify. 1 otherwise + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify. 1 otherwise * Args: ctx: pointer to a context object * In/Out: seckey: pointer to the 32-byte secret key to be negated. If the * secret key is invalid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this function returns 0 and + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this function returns 0 and * seckey will be set to some unspecified value. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_seckey_negate( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_seckey_negate( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); -/** Same as rustsecp256k1zkp_v0_10_0_ec_seckey_negate, but DEPRECATED. Will be removed in +/** Same as rustsecp256k1zkp_v0_10_1_ec_seckey_negate, but DEPRECATED. Will be removed in * future versions. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privkey_negate( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_privkey_negate( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) - SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_ec_seckey_negate instead"); + SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_1_ec_seckey_negate instead"); /** Negates a public key in place. * @@ -638,9 +638,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privk * Args: ctx: pointer to a context object * In/Out: pubkey: pointer to the public key to be negated. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_negate( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_negate( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); /** Tweak a secret key by adding tweak to it. @@ -650,28 +650,28 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * otherwise. * Args: ctx: pointer to a context object. * In/Out: seckey: pointer to a 32-byte secret key. If the secret key is - * invalid according to rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this + * invalid according to rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this * function returns 0. seckey will be set to some unspecified * value if this function returns 0. * In: tweak32: pointer to a 32-byte tweak, which must be valid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify or 32 zero bytes. For uniformly + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify or 32 zero bytes. 
For uniformly * random 32-byte tweaks, the chance of being invalid is * negligible (around 1 in 2^128). */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); -/** Same as rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add, but DEPRECATED. Will be removed in +/** Same as rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add, but DEPRECATED. Will be removed in * future versions. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) - SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add instead"); + SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add instead"); /** Tweak a public key by adding tweak times the generator to it. * @@ -682,13 +682,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privk * In/Out: pubkey: pointer to a public key object. pubkey will be set to an * invalid value if this function returns 0. * In: tweak32: pointer to a 32-byte tweak, which must be valid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify or 32 zero bytes. For uniformly + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify or 32 zero bytes. For uniformly * random 32-byte tweaks, the chance of being invalid is * negligible (around 1 in 2^128). 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -697,28 +697,28 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * Returns: 0 if the arguments are invalid. 1 otherwise. * Args: ctx: pointer to a context object. * In/Out: seckey: pointer to a 32-byte secret key. If the secret key is - * invalid according to rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this + * invalid according to rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this * function returns 0. seckey will be set to some unspecified * value if this function returns 0. * In: tweak32: pointer to a 32-byte tweak. If the tweak is invalid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this function returns 0. For + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this function returns 0. For * uniformly random 32-byte arrays the chance of being invalid * is negligible (around 1 in 2^128). */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); -/** Same as rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul, but DEPRECATED. Will be removed in +/** Same as rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul, but DEPRECATED. Will be removed in * future versions. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_mul( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_mul( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) - SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul instead"); + SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul instead"); /** Tweak a public key by multiplying it by a tweak value. * @@ -727,13 +727,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_privk * In/Out: pubkey: pointer to a public key object. pubkey will be set to an * invalid value if this function returns 0. * In: tweak32: pointer to a 32-byte tweak. If the tweak is invalid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this function returns 0. For + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this function returns 0. For * uniformly random 32-byte arrays the chance of being invalid * is negligible (around 1 in 2^128). */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -741,7 +741,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * * Returns: 1: randomization successful * 0: error - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). 
+ * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * In: seed32: pointer to a 32-byte random seed (NULL resets to initial state). * * While secp256k1 code is written and tested to be constant-time no matter what @@ -752,25 +752,25 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * certain computations which involve secret keys. * * It is highly recommended to call this function on contexts returned from - * rustsecp256k1zkp_v0_10_0_context_create or rustsecp256k1zkp_v0_10_0_context_clone (or from the corresponding - * functions in rustsecp256k1zkp_v0_10_0_preallocated.h) before using these contexts to call API + * rustsecp256k1zkp_v0_10_1_context_create or rustsecp256k1zkp_v0_10_1_context_clone (or from the corresponding + * functions in rustsecp256k1zkp_v0_10_1_preallocated.h) before using these contexts to call API * functions that perform computations involving secret keys, e.g., signing and * public key generation. It is possible to call this function more than once on * the same context, and doing so before every few computations involving secret * keys is recommended as a defense-in-depth measure. Randomization of the static - * context rustsecp256k1zkp_v0_10_0_context_static is not supported. + * context rustsecp256k1zkp_v0_10_1_context_static is not supported. * * Currently, the random seed is mainly used for blinding multiplications of a * secret scalar with the elliptic curve base point. Multiplications of this * kind are performed by exactly those API functions which are documented to - * require a context that is not rustsecp256k1zkp_v0_10_0_context_static. As a rule of thumb, + * require a context that is not rustsecp256k1zkp_v0_10_1_context_static. As a rule of thumb, * these are all functions which take a secret key (or a keypair) as an input. 
* A notable exception to that rule is the ECDH module, which relies on a different * kind of elliptic curve point multiplication and thus does not benefit from * enhanced protection against side-channel leakage currently. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_context_randomize( - rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_context_randomize( + rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *seed32 ) SECP256K1_ARG_NONNULL(1); @@ -783,10 +783,10 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_context_ * In: ins: pointer to array of pointers to public keys. * n: the number of public keys to add together (must be at least 1). */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubkey_combine( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *out, - const rustsecp256k1zkp_v0_10_0_pubkey * const *ins, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ec_pubkey_combine( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *out, + const rustsecp256k1zkp_v0_10_1_pubkey * const *ins, size_t n ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -806,8 +806,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ec_pubke * msg: pointer to an array containing the message * msglen: length of the message array */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_tagged_sha256( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_tagged_sha256( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *hash32, const unsigned char *tag, size_t taglen, diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_bppp.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_bppp.h index 
3405551e..2518a54f 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_bppp.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_bppp.h @@ -10,7 +10,7 @@ extern "C" { #include /** Opaque structure representing a large number of NUMS generators */ -typedef struct rustsecp256k1zkp_v0_10_0_bppp_generators rustsecp256k1zkp_v0_10_0_bppp_generators; +typedef struct rustsecp256k1zkp_v0_10_1_bppp_generators rustsecp256k1zkp_v0_10_1_bppp_generators; /** Allocates and initializes a list of NUMS generators. * Returns a list of generators, or calls the error callback if the allocation fails. @@ -21,19 +21,19 @@ typedef struct rustsecp256k1zkp_v0_10_0_bppp_generators rustsecp256k1zkp_v0_10_0 * points. We will later use G = H0(required for compatibility with pedersen_commitment DS) * in a separate commit to make review easier. */ -SECP256K1_API rustsecp256k1zkp_v0_10_0_bppp_generators *rustsecp256k1zkp_v0_10_0_bppp_generators_create( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API rustsecp256k1zkp_v0_10_1_bppp_generators *rustsecp256k1zkp_v0_10_1_bppp_generators_create( + const rustsecp256k1zkp_v0_10_1_context *ctx, size_t n ) SECP256K1_ARG_NONNULL(1); /** Allocates a list of generators from a static array * Returns a list of generators or NULL in case of failure. 
* Args: ctx: pointer to a context object - * In: data: data that came from `rustsecp256k1zkp_v0_10_0_bppp_generators_serialize` + * In: data: data that came from `rustsecp256k1zkp_v0_10_1_bppp_generators_serialize` * data_len: the length of the `data` buffer */ -SECP256K1_API rustsecp256k1zkp_v0_10_0_bppp_generators *rustsecp256k1zkp_v0_10_0_bppp_generators_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API rustsecp256k1zkp_v0_10_1_bppp_generators *rustsecp256k1zkp_v0_10_1_bppp_generators_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *data, size_t data_len ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); @@ -49,9 +49,9 @@ SECP256K1_API rustsecp256k1zkp_v0_10_0_bppp_generators *rustsecp256k1zkp_v0_10_0 * TODO: For ease of review, this setting G = H0 is not included in this commit. We will * add it in the follow-up rangeproof PR. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_bppp_generators_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_bppp_generators *gen, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_bppp_generators_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_bppp_generators *gen, unsigned char *data, size_t *data_len ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -61,9 +61,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_bppp_generators_serialize( * gen: pointer to the generator set to be destroyed * (can be NULL, in which case this function is a no-op) */ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_bppp_generators_destroy( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_bppp_generators *gen +SECP256K1_API void rustsecp256k1zkp_v0_10_1_bppp_generators_destroy( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_bppp_generators *gen ) SECP256K1_ARG_NONNULL(1); # ifdef __cplusplus diff --git 
a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdh.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdh.h index 0d2942fe..4c324d7c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdh.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdh.h @@ -10,15 +10,15 @@ extern "C" { /** A pointer to a function that hashes an EC point to obtain an ECDH secret * * Returns: 1 if the point was successfully hashed. - * 0 will cause rustsecp256k1zkp_v0_10_0_ecdh to fail and return 0. + * 0 will cause rustsecp256k1zkp_v0_10_1_ecdh to fail and return 0. * Other return values are not allowed, and the behaviour of - * rustsecp256k1zkp_v0_10_0_ecdh is undefined for other return values. + * rustsecp256k1zkp_v0_10_1_ecdh is undefined for other return values. * Out: output: pointer to an array to be filled by the function * In: x32: pointer to a 32-byte x coordinate * y32: pointer to a 32-byte y coordinate * data: arbitrary data pointer that is passed through */ -typedef int (*rustsecp256k1zkp_v0_10_0_ecdh_hash_function)( +typedef int (*rustsecp256k1zkp_v0_10_1_ecdh_hash_function)( unsigned char *output, const unsigned char *x32, const unsigned char *y32, @@ -27,11 +27,11 @@ typedef int (*rustsecp256k1zkp_v0_10_0_ecdh_hash_function)( /** An implementation of SHA256 hash function that applies to compressed public key. * Populates the output parameter with 32 bytes. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_ecdh_hash_function rustsecp256k1zkp_v0_10_0_ecdh_hash_function_sha256; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_ecdh_hash_function rustsecp256k1zkp_v0_10_1_ecdh_hash_function_sha256; -/** A default ECDH hash function (currently equal to rustsecp256k1zkp_v0_10_0_ecdh_hash_function_sha256). +/** A default ECDH hash function (currently equal to rustsecp256k1zkp_v0_10_1_ecdh_hash_function_sha256). * Populates the output parameter with 32 bytes. 
*/ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_ecdh_hash_function rustsecp256k1zkp_v0_10_0_ecdh_hash_function_default; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_ecdh_hash_function rustsecp256k1zkp_v0_10_1_ecdh_hash_function_default; /** Compute an EC Diffie-Hellman secret in constant time * @@ -39,20 +39,20 @@ SECP256K1_API const rustsecp256k1zkp_v0_10_0_ecdh_hash_function rustsecp256k1zkp * 0: scalar was invalid (zero or overflow) or hashfp returned 0 * Args: ctx: pointer to a context object. * Out: output: pointer to an array to be filled by hashfp. - * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_0_pubkey containing an initialized public key. + * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_1_pubkey containing an initialized public key. * seckey: a 32-byte scalar with which to multiply the point. * hashfp: pointer to a hash function. If NULL, - * rustsecp256k1zkp_v0_10_0_ecdh_hash_function_sha256 is used + * rustsecp256k1zkp_v0_10_1_ecdh_hash_function_sha256 is used * (in which case, 32 bytes will be written to output). * data: arbitrary data pointer that is passed through to hashfp - * (can be NULL for rustsecp256k1zkp_v0_10_0_ecdh_hash_function_sha256). + * (can be NULL for rustsecp256k1zkp_v0_10_1_ecdh_hash_function_sha256). 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdh( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdh( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *seckey, - rustsecp256k1zkp_v0_10_0_ecdh_hash_function hashfp, + rustsecp256k1zkp_v0_10_1_ecdh_hash_function hashfp, void *data ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_adaptor.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_adaptor.h index 4bb9d54d..8e124943 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_adaptor.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_adaptor.h @@ -31,7 +31,7 @@ extern "C" { /** A pointer to a function to deterministically generate a nonce. * - * Same as rustsecp256k1zkp_v0_10_0_nonce_function_hardened with the exception of using the + * Same as rustsecp256k1zkp_v0_10_1_nonce_function_hardened with the exception of using the * compressed 33-byte encoding for the pubkey argument. * * Returns: 1 if a nonce was successfully generated. 0 will cause signing to @@ -47,7 +47,7 @@ extern "C" { * Except for test cases, this function should compute some cryptographic hash of * the message, the key, the pubkey, the algorithm description, and data. */ -typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor)( +typedef int (*rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor)( unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, @@ -61,7 +61,7 @@ typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor)( * assumed to be a pointer to 32 bytes of auxiliary random data as defined in BIP-340. 
* The hash will be tagged with algo after removing all terminating null bytes. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor; /** Encrypted Signing * @@ -71,27 +71,27 @@ SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adapt * this file and applied the suggested countermeasures. * * Returns: 1 on success, 0 on failure - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: adaptor_sig162: pointer to 162 byte to store the returned signature * In: seckey32: pointer to 32 byte secret key that will be used for * signing * enckey: pointer to the encryption public key * msg32: pointer to the 32-byte message hash to sign * noncefp: pointer to a nonce generation function. If NULL, - * rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor is used + * rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor is used * ndata: pointer to arbitrary data used by the nonce generation * function (can be NULL). If it is non-NULL and - * rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor is used, then + * rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor is used, then * ndata must be a pointer to 32-byte auxiliary randomness * as per BIP-340. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *adaptor_sig162, unsigned char *seckey32, - const rustsecp256k1zkp_v0_10_0_pubkey *enckey, + const rustsecp256k1zkp_v0_10_1_pubkey *enckey, const unsigned char *msg32, - rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor noncefp, + rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); @@ -108,12 +108,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt( * msg32: pointer to the 32-byte message hash being verified * enckey: pointer to the adaptor encryption public key */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *adaptor_sig162, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_pubkey *enckey + const rustsecp256k1zkp_v0_10_1_pubkey *enckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); /** Signature Decryption @@ -127,9 +127,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify( * encryption public key * adaptor_sig162: pointer to 162-byte adaptor sig */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt( + const rustsecp256k1zkp_v0_10_1_context *ctx, + 
rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *deckey32, const unsigned char *adaptor_sig162 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -140,7 +140,7 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt( * signature. * * Returns: 1 on success, 0 on failure - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: deckey32: pointer to 32-byte adaptor decryption key for the adaptor * encryption public key * In: sig: pointer to ECDSA signature to recover the adaptor decryption @@ -149,12 +149,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt( * decryption key from * enckey: pointer to the adaptor encryption public key */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *deckey32, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *adaptor_sig162, - const rustsecp256k1zkp_v0_10_0_pubkey *enckey + const rustsecp256k1zkp_v0_10_1_pubkey *enckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_s2c.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_s2c.h index d9083afe..d3efd031 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_s2c.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ecdsa_s2c.h @@ -23,11 +23,11 @@ extern "C" { * guaranteed to be portable between different platforms or versions. 
It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage, transmission, or - * comparison, use rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize and rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse. + * comparison, use rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize and rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse. */ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening; +} rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening; /** Parse a sign-to-contract opening. * @@ -39,9 +39,9 @@ typedef struct { * In: input33: pointer to 33-byte array with a serialized opening * */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *opening, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *opening, const unsigned char *input33 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -51,29 +51,29 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2 * 0 if the opening could not be serialized * Args: ctx: pointer to a context object * Out: output33: pointer to a 33-byte array to place the serialized opening in - * In: opening: pointer to an initialized `rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening` + * In: opening: pointer to an initialized `rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening` */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output33, - const 
rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *opening + const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *opening ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); -/** Same as rustsecp256k1zkp_v0_10_0_ecdsa_sign, but s2c_data32 is committed to inside the nonce +/** Same as rustsecp256k1zkp_v0_10_1_ecdsa_sign, but s2c_data32 is committed to inside the nonce * * Returns: 1: signature created * 0: the nonce generation function failed, or the private key was invalid. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: sig: pointer to an array where the signature will be placed (cannot be NULL) - * s2c_opening: if non-NULL, pointer to an rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening structure to populate + * s2c_opening: if non-NULL, pointer to an rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening structure to populate * In: msg32: the 32-byte message hash being signed (cannot be NULL) * seckey: pointer to a 32-byte secret key (cannot be NULL) * s2c_data32: pointer to a 32-byte data to commit to in the nonce (cannot be NULL) */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *s2c_opening, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *s2c_opening, const unsigned char *msg32, const unsigned char *seckey, const unsigned char *s2c_data32 @@ -89,11 +89,11 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign( * data32: the 32-byte data that was committed to (cannot be NULL) * opening: pointer to the opening created during signing (cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int 
rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *data32, - const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *opening + const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *opening ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -113,9 +113,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2 * keys, or the signing device to bias the nonce despite the host's contributions, * the host and client must engage in a commit-reveal protocol as follows: * 1. The host draws randomness `rho` and computes a sha256 commitment to it using - * `rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit`. It sends this to the signing device. + * `rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit`. It sends this to the signing device. * 2. The signing device computes a public nonce `R` using the host's commitment - * as auxiliary randomness, using `rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit`. + * as auxiliary randomness, using `rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit`. * The signing device sends the resulting `R` to the host as a s2c_opening. * * If, at any point from this step onward, the hardware device fails, it is @@ -135,10 +135,10 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2 * EVER, they should change hardware vendors and perhaps sweep their coins. * * 3. The host replies with `rho` generated in step 1. - * 4. The device signs with `rustsecp256k1zkp_v0_10_0_anti_exfil_sign`, using `rho` as `host_data32`, + * 4. 
The device signs with `rustsecp256k1zkp_v0_10_1_anti_exfil_sign`, using `rho` as `host_data32`, * and sends the signature to the host. * 5. The host verifies that the signature's public nonce matches the opening from - * step 2 and its original randomness `rho`, using `rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify`. + * step 2 and its original randomness `rho`, using `rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify`. * * Rationale: * - The reason for having a host commitment is to allow the signing device to @@ -164,8 +164,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_s2 * be revealed to the client until after the host has received the client * commitment. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *rand_commitment32, const unsigned char *rand32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -173,35 +173,35 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit( /** Compute signer's original nonce. Part of the ECDSA Anti-Exfil Protocol. * * Returns 1 on success, 0 on failure. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: s2c_opening: pointer to an s2c_opening where the signer's public nonce will be * placed. 
(cannot be NULL) * In: msg32: the 32-byte message hash to be signed (cannot be NULL) * seckey32: the 32-byte secret key used for signing (cannot be NULL) * rand_commitment32: the 32-byte randomness commitment from the host (cannot be NULL) */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *s2c_opening, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *s2c_opening, const unsigned char *msg32, const unsigned char *seckey32, const unsigned char *rand_commitment32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); -/** Same as rustsecp256k1zkp_v0_10_0_ecdsa_sign, but commits to host randomness in the nonce. Part of the +/** Same as rustsecp256k1zkp_v0_10_1_ecdsa_sign, but commits to host randomness in the nonce. Part of the * ECDSA Anti-Exfil Protocol. * * Returns: 1: signature created * 0: the nonce generation function failed, or the private key was invalid. 
- * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: sig: pointer to an array where the signature will be placed (cannot be NULL) * In: msg32: the 32-byte message hash being signed (cannot be NULL) * seckey: pointer to a 32-byte secret key (cannot be NULL) * host_data32: pointer to 32-byte host-provided randomness (cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_anti_exfil_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_anti_exfil_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msg32, const unsigned char *seckey, const unsigned char *host_data32 @@ -218,13 +218,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_anti_exf * host_data32: the 32-byte data provided by the host (cannot be NULL) * opening: the s2c opening provided by the signer (cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *host_data32, - const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *opening + const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *opening ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5) 
SECP256K1_ARG_NONNULL(6); #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ellswift.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ellswift.h index 346ffece..6b1a6f65 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ellswift.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_ellswift.h @@ -45,13 +45,13 @@ extern "C" { * For mathematical background about the scheme, see the doc/ellswift.md file. */ -/** A pointer to a function used by rustsecp256k1zkp_v0_10_0_ellswift_xdh to hash the shared X +/** A pointer to a function used by rustsecp256k1zkp_v0_10_1_ellswift_xdh to hash the shared X * coordinate along with the encoded public keys to a uniform shared secret. * * Returns: 1 if a shared secret was successfully computed. - * 0 will cause rustsecp256k1zkp_v0_10_0_ellswift_xdh to fail and return 0. + * 0 will cause rustsecp256k1zkp_v0_10_1_ellswift_xdh to fail and return 0. * Other return values are not allowed, and the behaviour of - * rustsecp256k1zkp_v0_10_0_ellswift_xdh is undefined for other return values. + * rustsecp256k1zkp_v0_10_1_ellswift_xdh is undefined for other return values. 
* Out: output: pointer to an array to be filled by the function * In: x32: pointer to the 32-byte serialized X coordinate * of the resulting shared point (will not be NULL) @@ -61,7 +61,7 @@ extern "C" { * (will not be NULL) * data: arbitrary data pointer that is passed through */ -typedef int (*rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function)( +typedef int (*rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function)( unsigned char *output, const unsigned char *x32, const unsigned char *ell_a64, @@ -69,25 +69,25 @@ typedef int (*rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function)( void *data ); -/** An implementation of an rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function which uses +/** An implementation of an rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function which uses * SHA256(prefix64 || ell_a64 || ell_b64 || x32), where prefix64 is the 64-byte * array pointed to by data. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_prefix; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_prefix; -/** An implementation of an rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function compatible with +/** An implementation of an rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function compatible with * BIP324. It returns H_tag(ell_a64 || ell_b64 || x32), where H_tag is the * BIP340 tagged hash function with tag "bip324_ellswift_xonly_ecdh". Equivalent - * to rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_prefix with prefix64 set to + * to rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_prefix with prefix64 set to * SHA256("bip324_ellswift_xonly_ecdh")||SHA256("bip324_ellswift_xonly_ecdh"). * The data argument is ignored. 
*/ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324; /** Construct a 64-byte ElligatorSwift encoding of a given pubkey. * * Returns: 1 always. * Args: ctx: pointer to a context object * Out: ell64: pointer to a 64-byte array to be filled - * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_0_pubkey containing an + * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_1_pubkey containing an * initialized public key * rnd32: pointer to 32 bytes of randomness * @@ -104,10 +104,10 @@ SECP256K1_API const rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function rustsecp * * This function runs in variable time. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ellswift_encode( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ellswift_encode( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *ell64, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *rnd32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -115,14 +115,14 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ellswift_encode( * * Returns: always 1 * Args: ctx: pointer to a context object - * Out: pubkey: pointer to a rustsecp256k1zkp_v0_10_0_pubkey that will be filled + * Out: pubkey: pointer to a rustsecp256k1zkp_v0_10_1_pubkey that will be filled * In: ell64: pointer to a 64-byte array to decode * * This function runs in variable time. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ellswift_decode( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ellswift_decode( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *ell64 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -141,18 +141,18 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ellswift_decode( * It is recommended that auxrnd32 contains 32 uniformly random bytes, though * it is optional (and does result in encodings that are indistinguishable from * uniform even without any auxrnd32). It differs from the (mandatory) rnd32 - * argument to rustsecp256k1zkp_v0_10_0_ellswift_encode in this regard. + * argument to rustsecp256k1zkp_v0_10_1_ellswift_encode in this regard. * - * This function can be used instead of calling rustsecp256k1zkp_v0_10_0_ec_pubkey_create - * followed by rustsecp256k1zkp_v0_10_0_ellswift_encode. It is safer, as it uses the secret + * This function can be used instead of calling rustsecp256k1zkp_v0_10_1_ec_pubkey_create + * followed by rustsecp256k1zkp_v0_10_1_ellswift_encode. It is safer, as it uses the secret * key as entropy for the encoding (supplemented with auxrnd32, if provided). * - * Like rustsecp256k1zkp_v0_10_0_ellswift_encode, this function does not guarantee that the + * Like rustsecp256k1zkp_v0_10_1_ellswift_encode, this function does not guarantee that the * computed encoding is stable across versions of the library, even if all * arguments (including auxrnd32) are the same. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ellswift_create( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ellswift_create( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *ell64, const unsigned char *seckey32, const unsigned char *auxrnd32 @@ -182,14 +182,14 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ellswift * This function is more efficient than decoding the public keys, and performing * ECDH on them. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ellswift_xdh( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ellswift_xdh( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, const unsigned char *ell_a64, const unsigned char *ell_b64, const unsigned char *seckey32, int party, - rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function hashfp, + rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function hashfp, void *data ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5) SECP256K1_ARG_NONNULL(7); diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_extrakeys.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_extrakeys.h index b1e483bf..1d40f7f5 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_extrakeys.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_extrakeys.h @@ -16,12 +16,12 @@ extern "C" { * guaranteed to be portable between different platforms or versions. It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage, transmission, use - * use rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize and rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse. To - * compare keys, use rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp. 
+ * use rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize and rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse. To + * compare keys, use rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp. */ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_xonly_pubkey; +} rustsecp256k1zkp_v0_10_1_xonly_pubkey; /** Opaque data structure that holds a keypair consisting of a secret and a * public key. @@ -32,7 +32,7 @@ typedef struct { */ typedef struct { unsigned char data[96]; -} rustsecp256k1zkp_v0_10_0_keypair; +} rustsecp256k1zkp_v0_10_1_keypair; /** Parse a 32-byte sequence into a xonly_pubkey object. * @@ -44,9 +44,9 @@ typedef struct { * parsed version of input. If not, it's set to an invalid value. * In: input32: pointer to a serialized xonly_pubkey. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey, const unsigned char *input32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -56,12 +56,12 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pu * * Args: ctx: pointer to a context object. * Out: output32: pointer to a 32-byte array to place the serialized key in. - * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_0_xonly_pubkey containing an initialized public key. + * In: pubkey: pointer to a rustsecp256k1zkp_v0_10_1_xonly_pubkey containing an initialized public key. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output32, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Compare two x-only public keys using lexicographic order @@ -73,13 +73,13 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize( * In: pubkey1: first public key to compare * pubkey2: second public key to compare */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pk1, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pk2 +SECP256K1_API int rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pk1, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pk2 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); -/** Converts a rustsecp256k1zkp_v0_10_0_pubkey into a rustsecp256k1zkp_v0_10_0_xonly_pubkey. +/** Converts a rustsecp256k1zkp_v0_10_1_pubkey into a rustsecp256k1zkp_v0_10_1_xonly_pubkey. * * Returns: 1 always. * @@ -90,11 +90,11 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp( * the negation of the pubkey and set to 0 otherwise. * In: pubkey: pointer to a public key that is converted. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_xonly_pubkey *xonly_pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_xonly_pubkey *xonly_pubkey, int *pk_parity, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(4); /** Tweak an x-only public key by adding the generator multiplied with tweak32 @@ -102,7 +102,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pu * * Note that the resulting point can not in general be represented by an x-only * pubkey because it may have an odd Y coordinate. Instead, the output_pubkey - * is a normal rustsecp256k1zkp_v0_10_0_pubkey. + * is a normal rustsecp256k1zkp_v0_10_1_pubkey. * * Returns: 0 if the arguments are invalid or the resulting public key would be * invalid (only when the tweak is the negation of the corresponding @@ -113,23 +113,23 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pu * to an invalid value if this function returns 0. * In: internal_pubkey: pointer to an x-only pubkey to apply the tweak to. * tweak32: pointer to a 32-byte tweak, which must be valid - * according to rustsecp256k1zkp_v0_10_0_ec_seckey_verify or 32 zero + * according to rustsecp256k1zkp_v0_10_1_ec_seckey_verify or 32 zero * bytes. For uniformly random 32-byte tweaks, the chance of * being invalid is negligible (around 1 in 2^128). 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *internal_pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *internal_pubkey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Checks that a tweaked pubkey is the result of calling - * rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add with internal_pubkey and tweak32. + * rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add with internal_pubkey and tweak32. * * The tweaked pubkey is represented by its 32-byte x-only serialization and * its pk_parity, which can both be obtained by converting the result of - * tweak_add to a rustsecp256k1zkp_v0_10_0_xonly_pubkey. + * tweak_add to a rustsecp256k1zkp_v0_10_1_xonly_pubkey. * * Note that this alone does _not_ verify that the tweaked pubkey is a * commitment. If the tweak is not chosen in a specific way, the tweaked pubkey @@ -142,16 +142,16 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pu * tweaked_pk_parity: the parity of the tweaked pubkey (whose serialization * is passed in as tweaked_pubkey32). This must match the * pk_parity value that is returned when calling - * rustsecp256k1zkp_v0_10_0_xonly_pubkey with the tweaked pubkey, or + * rustsecp256k1zkp_v0_10_1_xonly_pubkey with the tweaked pubkey, or * this function will fail. * internal_pubkey: pointer to an x-only public key object to apply the tweak to. * tweak32: pointer to a 32-byte tweak. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check( + const rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *tweaked_pubkey32, int tweaked_pk_parity, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *internal_pubkey, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *internal_pubkey, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); @@ -159,13 +159,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_xonly_pu * * Returns: 1: secret was valid, keypair is ready to use * 0: secret was invalid, try again with a different secret - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: keypair: pointer to the created keypair. * In: seckey: pointer to a 32-byte secret key. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_create( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_keypair *keypair, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_keypair_create( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *seckey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -176,10 +176,10 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_ * Out: seckey: pointer to a 32-byte buffer for the secret key. * In: keypair: pointer to a keypair. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_sec( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_keypair_sec( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *seckey, - const rustsecp256k1zkp_v0_10_0_keypair *keypair + const rustsecp256k1zkp_v0_10_1_keypair *keypair ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Get the public key from a keypair. @@ -189,38 +189,38 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_ * Out: pubkey: pointer to a pubkey object, set to the keypair public key. * In: keypair: pointer to a keypair. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_pub( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, - const rustsecp256k1zkp_v0_10_0_keypair *keypair +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_keypair_pub( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_keypair *keypair ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Get the x-only public key from a keypair. * - * This is the same as calling rustsecp256k1zkp_v0_10_0_keypair_pub and then - * rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey. + * This is the same as calling rustsecp256k1zkp_v0_10_1_keypair_pub and then + * rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey. * * Returns: 1 always. * Args: ctx: pointer to a context object. * Out: pubkey: pointer to an xonly_pubkey object, set to the keypair * public key after converting it to an xonly_pubkey. * pk_parity: Ignored if NULL. Otherwise, pointer to an integer that will be set to the - * pk_parity argument of rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey. + * pk_parity argument of rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey. 
* In: keypair: pointer to a keypair. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_xonly_pub( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_keypair_xonly_pub( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey, int *pk_parity, - const rustsecp256k1zkp_v0_10_0_keypair *keypair + const rustsecp256k1zkp_v0_10_1_keypair *keypair ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(4); /** Tweak a keypair by adding tweak32 to the secret key and updating the public * key accordingly. * - * Calling this function and then rustsecp256k1zkp_v0_10_0_keypair_pub results in the same - * public key as calling rustsecp256k1zkp_v0_10_0_keypair_xonly_pub and then - * rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add. + * Calling this function and then rustsecp256k1zkp_v0_10_1_keypair_pub results in the same + * public key as calling rustsecp256k1zkp_v0_10_1_keypair_xonly_pub and then + * rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add. * * Returns: 0 if the arguments are invalid or the resulting keypair would be * invalid (only when the tweak is the negation of the keypair's @@ -230,13 +230,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_ * In/Out: keypair: pointer to a keypair to apply the tweak to. Will be set to * an invalid value if this function returns 0. * In: tweak32: pointer to a 32-byte tweak, which must be valid according to - * rustsecp256k1zkp_v0_10_0_ec_seckey_verify or 32 zero bytes. For uniformly + * rustsecp256k1zkp_v0_10_1_ec_seckey_verify or 32 zero bytes. For uniformly * random 32-byte tweaks, the chance of being invalid is * negligible (around 1 in 2^128). 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_keypair *keypair, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -249,9 +249,9 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_keypair_ * In: pubkeys: array of pointers to pubkeys to sort * n_pubkeys: number of elements in the pubkeys array */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_pubkey_sort( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_pubkey **pubkeys, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_pubkey_sort( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_pubkey **pubkeys, size_t n_pubkeys ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_generator.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_generator.h index afa2c0b1..2d2aff6c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_generator.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_generator.h @@ -15,16 +15,16 @@ extern "C" { * guaranteed to be portable between different platforms or versions. It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage, transmission, or - * comparison, use rustsecp256k1zkp_v0_10_0_generator_serialize and rustsecp256k1zkp_v0_10_0_generator_parse. + * comparison, use rustsecp256k1zkp_v0_10_1_generator_serialize and rustsecp256k1zkp_v0_10_1_generator_parse. 
*/ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_generator; +} rustsecp256k1zkp_v0_10_1_generator; /** * Static constant generator 'h' maintained for historical reasons. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_generator *rustsecp256k1zkp_v0_10_0_generator_h; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_generator *rustsecp256k1zkp_v0_10_1_generator_h; /** Parse a 33-byte generator byte sequence into a generator object. * @@ -33,9 +33,9 @@ SECP256K1_API const rustsecp256k1zkp_v0_10_0_generator *rustsecp256k1zkp_v0_10_0 * Out: gen: pointer to the output generator object * In: input: pointer to a 33-byte serialized generator */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generator_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_generator *gen, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_generator_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_generator *gen, const unsigned char *input ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -46,10 +46,10 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generato * Out: output: pointer to a 33-byte byte array * In: gen: pointer to a generator object */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_generator_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_generator_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, - const rustsecp256k1zkp_v0_10_0_generator *gen + const rustsecp256k1zkp_v0_10_1_generator *gen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Generate a generator for the curve. @@ -65,9 +65,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_generator_serialize( * known discrete logarithm with respect to any other generator produced, * or to the base generator G. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generator_generate( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_generator *gen, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_generator_generate( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_generator *gen, const unsigned char *seed32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -75,18 +75,18 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generato * * Returns: 0 in the highly unlikely case the seed is not acceptable or when * blind is out of range. 1 otherwise. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: gen: pointer to a generator object * In: seed32: 32-byte seed * blind32: 32-byte secret value to blind the generator with. * - * The result is equivalent to first calling rustsecp256k1zkp_v0_10_0_generator_generate, - * converting the result to a public key, calling rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add, + * The result is equivalent to first calling rustsecp256k1zkp_v0_10_1_generator_generate, + * converting the result to a public key, calling rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add, * and then converting back to generator form. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generator_generate_blinded( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_generator *gen, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_generator_generate_blinded( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_generator *gen, const unsigned char *seed32, const unsigned char *blind32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -97,12 +97,12 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_generato * guaranteed to be portable between different platforms or versions. It is * however guaranteed to be 64 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage, transmission, or - * comparison, use rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize and - * rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse. + * comparison, use rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize and + * rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse. */ typedef struct { unsigned char data[64]; -} rustsecp256k1zkp_v0_10_0_pedersen_commitment; +} rustsecp256k1zkp_v0_10_1_pedersen_commitment; /** Parse a 33-byte commitment into a commitment object. 
* @@ -111,9 +111,9 @@ typedef struct { * Out: commit: pointer to the output commitment object * In: input: pointer to a 33-byte serialized commitment key */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *input ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -122,13 +122,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen * Returns: 1 always. * Args: ctx: pointer to a context object * Out: output: pointer to a 33-byte byte array - * In: commit: pointer to a rustsecp256k1zkp_v0_10_0_pedersen_commitment containing an + * In: commit: pointer to a rustsecp256k1zkp_v0_10_1_pedersen_commitment containing an * initialized commitment */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Generate a pedersen commitment. @@ -136,7 +136,7 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize( * 0: Error. The blinding factor is larger than the group order * (probability for random 32 byte number < 2^-127) or results in the * point at infinity. Retry with a different factor. 
- * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * blind: pointer to a 32-byte blinding factor (cannot be NULL) * value: unsigned 64-bit integer value to commit to. * gen: additional generator 'h' @@ -144,12 +144,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize( * * Blinding factors can be generated and verified in the same way as secp256k1 private keys for ECDSA. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen_commit( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_pedersen_commit( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *blind, uint64_t value, - const rustsecp256k1zkp_v0_10_0_generator *gen + const rustsecp256k1zkp_v0_10_1_generator *gen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(5); /** Computes the sum of multiple positive and negative blinding factors. @@ -163,8 +163,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen * npositive: how many of the initial factors should be treated with a positive sign. 
* Out: blind_out: pointer to a 32-byte array for the sum (cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen_blind_sum( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_pedersen_blind_sum( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *blind_out, const unsigned char * const *blinds, size_t n, @@ -187,11 +187,11 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen * A all blinding factors and all values must sum to zero. * */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen_verify_tally( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment * const *commits, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_pedersen_verify_tally( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment * const *commits, size_t pcnt, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment * const *ncommits, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment * const *ncommits, size_t ncnt ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(4); @@ -225,8 +225,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen * May not be NULL unless `n_total` is 0. * the last value will be modified to get the total sum to zero. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum( + const rustsecp256k1zkp_v0_10_1_context *ctx, const uint64_t *value, const unsigned char * const *generator_blind, unsigned char * const *blinding_factor, diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_musig.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_musig.h index 7e244817..2d5cbd5c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_musig.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_musig.h @@ -41,7 +41,7 @@ extern "C" { */ typedef struct { unsigned char data[197]; -} rustsecp256k1zkp_v0_10_0_musig_keyagg_cache; +} rustsecp256k1zkp_v0_10_1_musig_keyagg_cache; /** Opaque data structure that holds a signer's _secret_ nonce. * @@ -60,7 +60,7 @@ typedef struct { */ typedef struct { unsigned char data[132]; -} rustsecp256k1zkp_v0_10_0_musig_secnonce; +} rustsecp256k1zkp_v0_10_1_musig_secnonce; /** Opaque data structure that holds a signer's public nonce. * @@ -69,7 +69,7 @@ typedef struct { */ typedef struct { unsigned char data[132]; -} rustsecp256k1zkp_v0_10_0_musig_pubnonce; +} rustsecp256k1zkp_v0_10_1_musig_pubnonce; /** Opaque data structure that holds an aggregate public nonce. * @@ -79,7 +79,7 @@ typedef struct { */ typedef struct { unsigned char data[132]; -} rustsecp256k1zkp_v0_10_0_musig_aggnonce; +} rustsecp256k1zkp_v0_10_1_musig_aggnonce; /** Opaque data structure that holds a MuSig session. * @@ -89,7 +89,7 @@ typedef struct { */ typedef struct { unsigned char data[133]; -} rustsecp256k1zkp_v0_10_0_musig_session; +} rustsecp256k1zkp_v0_10_1_musig_session; /** Opaque data structure that holds a partial MuSig signature. 
* @@ -98,7 +98,7 @@ typedef struct { */ typedef struct { unsigned char data[36]; -} rustsecp256k1zkp_v0_10_0_musig_partial_sig; +} rustsecp256k1zkp_v0_10_1_musig_partial_sig; /** Parse a signer's public nonce. * @@ -107,9 +107,9 @@ typedef struct { * Out: nonce: pointer to a nonce object * In: in66: pointer to the 66-byte nonce to be parsed */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_pubnonce *nonce, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_pubnonce *nonce, const unsigned char *in66 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -120,10 +120,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse( * Out: out66: pointer to a 66-byte array to store the serialized nonce * In: nonce: pointer to the nonce */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *out66, - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *nonce + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *nonce ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Parse an aggregate public nonce. 
@@ -133,9 +133,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize( * Out: nonce: pointer to a nonce object * In: in66: pointer to the 66-byte nonce to be parsed */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_aggnonce *nonce, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_aggnonce *nonce, const unsigned char *in66 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -146,10 +146,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse( * Out: out66: pointer to a 66-byte array to store the serialized nonce * In: nonce: pointer to the nonce */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *out66, - const rustsecp256k1zkp_v0_10_0_musig_aggnonce *nonce + const rustsecp256k1zkp_v0_10_1_musig_aggnonce *nonce ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Serialize a MuSig partial signature @@ -159,10 +159,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize( * Out: out32: pointer to a 32-byte array to store the serialized signature * In: sig: pointer to the signature */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *out32, - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *sig + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *sig ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Parse a MuSig partial signature. 
@@ -176,9 +176,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize( * encoded numbers are out of range, signature verification with it is * guaranteed to fail for every message and public key. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_partial_sig *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_partial_sig *sig, const unsigned char *in32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -186,7 +186,7 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse( * * Different orders of `pubkeys` result in different `agg_pk`s. * - * Before aggregating, the pubkeys can be sorted with `rustsecp256k1zkp_v0_10_0_pubkey_sort` + * Before aggregating, the pubkeys can be sorted with `rustsecp256k1zkp_v0_10_1_pubkey_sort` * which ensures the same `agg_pk` result for the same multiset of pubkeys. * This is useful to do before `pubkey_agg`, such that the order of pubkeys * does not affect the aggregate public key. @@ -210,12 +210,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse( * aggregate public key. * n_pubkeys: length of pubkeys array. Must be greater than 0. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_scratch_space *scratch, - rustsecp256k1zkp_v0_10_0_xonly_pubkey *agg_pk, - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, - const rustsecp256k1zkp_v0_10_0_pubkey * const *pubkeys, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_pubkey_agg( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_scratch_space *scratch, + rustsecp256k1zkp_v0_10_1_xonly_pubkey *agg_pk, + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, + const rustsecp256k1zkp_v0_10_1_pubkey * const *pubkeys, size_t n_pubkeys ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(5); @@ -231,31 +231,31 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg( * In: keyagg_cache: pointer to a `musig_keyagg_cache` struct initialized by * `musig_pubkey_agg` */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pubkey_get( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *agg_pk, - const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_pubkey_get( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *agg_pk, + const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Apply plain "EC" tweaking to a public key in a given keyagg_cache by * adding the generator multiplied with `tweak32` to it. This is useful for * deriving child keys from an aggregate public key via BIP32. * - * The tweaking method is the same as `rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add`. So after + * The tweaking method is the same as `rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add`. So after * the following pseudocode buf and buf2 have identical contents (absent * earlier failures). 
* - * rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(..., keyagg_cache, pubkeys, ...) - * rustsecp256k1zkp_v0_10_0_musig_pubkey_get(..., agg_pk, keyagg_cache) - * rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add(..., output_pk, tweak32, keyagg_cache) - * rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(..., buf, output_pk) - * rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(..., agg_pk, tweak32) - * rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(..., buf2, agg_pk) + * rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(..., keyagg_cache, pubkeys, ...) + * rustsecp256k1zkp_v0_10_1_musig_pubkey_get(..., agg_pk, keyagg_cache) + * rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add(..., output_pk, tweak32, keyagg_cache) + * rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(..., buf, output_pk) + * rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(..., agg_pk, tweak32) + * rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(..., buf2, agg_pk) * * This function is required if you want to _sign_ for a tweaked aggregate key. * On the other hand, if you are only computing a public key, but not intending * to create a signature for it, you can just use - * `rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add`. + * `rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add`. * * Returns: 0 if the arguments are invalid or the resulting public key would be * invalid (only when the tweak is the negation of the corresponding @@ -267,15 +267,15 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pu * In/Out: keyagg_cache: pointer to a `musig_keyagg_cache` struct initialized by * `musig_pubkey_agg` * In: tweak32: pointer to a 32-byte tweak. If the tweak is invalid - * according to `rustsecp256k1zkp_v0_10_0_ec_seckey_verify`, this function + * according to `rustsecp256k1zkp_v0_10_1_ec_seckey_verify`, this function * returns 0. For uniformly random 32-byte arrays the * chance of being invalid is negligible (around 1 in * 2^128). 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -283,19 +283,19 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pu * generator multiplied with `tweak32` to it. This is useful for creating * Taproot outputs. * - * The tweaking method is the same as `rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add`. So in + * The tweaking method is the same as `rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add`. So in * the following pseudocode xonly_pubkey_tweak_add_check (absent earlier * failures) returns 1. * - * rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(..., agg_pk, keyagg_cache, pubkeys, ...) - * rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add(..., output_pk, tweak32, keyagg_cache) - * rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(..., buf, output_pk) - * rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(..., buf, ..., agg_pk, tweak32) + * rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(..., agg_pk, keyagg_cache, pubkeys, ...) + * rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add(..., output_pk, tweak32, keyagg_cache) + * rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(..., buf, output_pk) + * rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(..., buf, ..., agg_pk, tweak32) * * This function is required if you want to _sign_ for a tweaked aggregate key. 
* On the other hand, if you are only computing a public key, but not intending * to create a signature for it, you can just use - * `rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add`. + * `rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add`. * * Returns: 0 if the arguments are invalid or the resulting public key would be * invalid (only when the tweak is the negation of the corresponding @@ -307,15 +307,15 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pu * In/Out: keyagg_cache: pointer to a `musig_keyagg_cache` struct initialized by * `musig_pubkey_agg` * In: tweak32: pointer to a 32-byte tweak. If the tweak is invalid - * according to rustsecp256k1zkp_v0_10_0_ec_seckey_verify, this function + * according to rustsecp256k1zkp_v0_10_1_ec_seckey_verify, this function * returns 0. For uniformly random 32-byte arrays the * chance of being invalid is negligible (around 1 in * 2^128). */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -345,11 +345,11 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pu * Note that using the same seckey for multiple MuSig sessions is fine. 
* * Returns: 0 if the arguments are invalid and 1 otherwise - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * Out: secnonce: pointer to a structure to store the secret nonce * pubnonce: pointer to a structure to store the public nonce * In: session_id32: a 32-byte session_id32 as explained above. Must be unique to this - * call to rustsecp256k1zkp_v0_10_0_musig_nonce_gen and must be uniformly random + * call to rustsecp256k1zkp_v0_10_1_musig_nonce_gen and must be uniformly random * unless you really know what you are doing. * seckey: the 32-byte secret key that will later be used for signing, if * already known (can be NULL) @@ -364,15 +364,15 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pu * extra_input32: an optional 32-byte array that is input to the nonce * derivation function (can be NULL) */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_gen( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, - rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_nonce_gen( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, + rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce, const unsigned char *session_id32, const unsigned char *seckey, - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, + const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *extra_input32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(6); @@ -392,10 +392,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_gen( * n_pubnonces: number of 
elements in the pubnonces array. Must be * greater than 0. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_agg( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_aggnonce *aggnonce, - const rustsecp256k1zkp_v0_10_0_musig_pubnonce * const *pubnonces, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_nonce_agg( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_aggnonce *aggnonce, + const rustsecp256k1zkp_v0_10_1_musig_pubnonce * const *pubnonces, size_t n_pubnonces ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -420,13 +420,13 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_agg( * key if this signing session is part of an adaptor * signature protocol (can be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_nonce_process( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_session *session, - const rustsecp256k1zkp_v0_10_0_musig_aggnonce *aggnonce, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_nonce_process( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_session *session, + const rustsecp256k1zkp_v0_10_1_musig_aggnonce *aggnonce, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, - const rustsecp256k1zkp_v0_10_0_pubkey *adaptor + const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, + const rustsecp256k1zkp_v0_10_1_pubkey *adaptor ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); /** Produces a partial signature @@ -444,7 +444,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_no * * This function does not verify the output partial signature, deviating from * the BIP 327 specification. 
It is recommended to verify the output partial - * signature with `rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify` to prevent random or + * signature with `rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify` to prevent random or * adversarially provoked computation errors. * * Returns: 0 if the arguments are invalid or the provided secnonce has already @@ -461,13 +461,13 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_no * session: pointer to the session that was created with * musig_nonce_process */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig, - rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, - const rustsecp256k1zkp_v0_10_0_keypair *keypair, - const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, - const rustsecp256k1zkp_v0_10_0_musig_session *session +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_partial_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig, + rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, + const rustsecp256k1zkp_v0_10_1_keypair *keypair, + const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, + const rustsecp256k1zkp_v0_10_1_musig_session *session ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5) SECP256K1_ARG_NONNULL(6); /** Verifies an individual signer's partial signature @@ -502,13 +502,13 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sign( * session: pointer to the session that was created with * `musig_nonce_process` */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig, - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce, - const 
rustsecp256k1zkp_v0_10_0_pubkey *pubkey, - const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, - const rustsecp256k1zkp_v0_10_0_musig_session *session +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig, + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce, + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, + const rustsecp256k1zkp_v0_10_1_musig_session *session ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5) SECP256K1_ARG_NONNULL(6); /** Aggregates partial signatures @@ -523,11 +523,11 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_pa * n_sigs: number of elements in the partial_sigs array. Must be * greater than 0. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, - const rustsecp256k1zkp_v0_10_0_musig_session *session, - const rustsecp256k1zkp_v0_10_0_musig_partial_sig * const *partial_sigs, + const rustsecp256k1zkp_v0_10_1_musig_session *session, + const rustsecp256k1zkp_v0_10_1_musig_partial_sig * const *partial_sigs, size_t n_sigs ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -543,10 +543,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg( * In: session: pointer to the session that was created with * musig_nonce_process */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_parity( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_musig_nonce_parity( + const rustsecp256k1zkp_v0_10_1_context 
*ctx, int *nonce_parity, - const rustsecp256k1zkp_v0_10_0_musig_session *session + const rustsecp256k1zkp_v0_10_1_musig_session *session ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Creates a signature from a pre-signature and an adaptor. @@ -565,8 +565,8 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_musig_nonce_parity( * nonce_parity: the output of `musig_nonce_parity` called with the * session used for producing the pre-signature */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_adapt( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_adapt( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, const unsigned char *pre_sig64, const unsigned char *sec_adaptor32, @@ -594,8 +594,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_ad * nonce_parity: the output of `musig_nonce_parity` called with the * session used for producing sig64 */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_musig_extract_adaptor( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_musig_extract_adaptor( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sec_adaptor32, const unsigned char *sig64, const unsigned char *pre_sig64, diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_preallocated.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_preallocated.h index 9809ccf0..a076b3af 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_preallocated.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_preallocated.h @@ -16,8 +16,8 @@ extern "C" { * objects created by functions in secp256k1.h, i.e., they can be passed to any * API function that expects a context object (see secp256k1.h for details). 
The * only exception is that context objects created by functions in this module - * must be destroyed using rustsecp256k1zkp_v0_10_0_context_preallocated_destroy (in this - * module) instead of rustsecp256k1zkp_v0_10_0_context_destroy (in secp256k1.h). + * must be destroyed using rustsecp256k1zkp_v0_10_1_context_preallocated_destroy (in this + * module) instead of rustsecp256k1zkp_v0_10_1_context_destroy (in secp256k1.h). * * It is guaranteed that functions in this module will not call malloc or its * friends realloc, calloc, and free. @@ -27,24 +27,24 @@ extern "C" { * caller-provided memory. * * The purpose of this function is to determine how much memory must be provided - * to rustsecp256k1zkp_v0_10_0_context_preallocated_create. + * to rustsecp256k1zkp_v0_10_1_context_preallocated_create. * * Returns: the required size of the caller-provided memory block * In: flags: which parts of the context to initialize. */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_context_preallocated_size( +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_context_preallocated_size( unsigned int flags ) SECP256K1_WARN_UNUSED_RESULT; /** Create a secp256k1 context object in caller-provided memory. * * The caller must provide a pointer to a rewritable contiguous block of memory - * of size at least rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags) bytes, suitably + * of size at least rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags) bytes, suitably * aligned to hold an object of any type. * * The block of memory is exclusively owned by the created context object during * the lifetime of this context object, which begins with the call to this - * function and ends when a call to rustsecp256k1zkp_v0_10_0_context_preallocated_destroy + * function and ends when a call to rustsecp256k1zkp_v0_10_1_context_preallocated_destroy * (which destroys the context object again) returns. 
During the lifetime of the * context object, the caller is obligated not to access this block of memory, * i.e., the caller may not read or write the memory, e.g., by copying the memory @@ -54,16 +54,16 @@ SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_context_preallocated_size( * * Returns: pointer to newly created context object. * In: prealloc: pointer to a rewritable contiguous block of memory of - * size at least rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags) + * size at least rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags) * bytes, as detailed above. * flags: which parts of the context to initialize. * - * See rustsecp256k1zkp_v0_10_0_context_create (in secp256k1.h) for further details. + * See rustsecp256k1zkp_v0_10_1_context_create (in secp256k1.h) for further details. * - * See also rustsecp256k1zkp_v0_10_0_context_randomize (in secp256k1.h) - * and rustsecp256k1zkp_v0_10_0_context_preallocated_destroy. + * See also rustsecp256k1zkp_v0_10_1_context_randomize (in secp256k1.h) + * and rustsecp256k1zkp_v0_10_1_context_preallocated_destroy. */ -SECP256K1_API rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_preallocated_create( +SECP256K1_API rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_preallocated_create( void *prealloc, unsigned int flags ) SECP256K1_ARG_NONNULL(1) SECP256K1_WARN_UNUSED_RESULT; @@ -74,31 +74,31 @@ SECP256K1_API rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context * Returns: the required size of the caller-provided memory block. * In: ctx: pointer to a context to copy. */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_context_preallocated_clone_size( - const rustsecp256k1zkp_v0_10_0_context *ctx +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_context_preallocated_clone_size( + const rustsecp256k1zkp_v0_10_1_context *ctx ) SECP256K1_ARG_NONNULL(1) SECP256K1_WARN_UNUSED_RESULT; /** Copy a secp256k1 context object into caller-provided memory. 
* * The caller must provide a pointer to a rewritable contiguous block of memory - * of size at least rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags) bytes, suitably + * of size at least rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags) bytes, suitably * aligned to hold an object of any type. * * The block of memory is exclusively owned by the created context object during * the lifetime of this context object, see the description of - * rustsecp256k1zkp_v0_10_0_context_preallocated_create for details. + * rustsecp256k1zkp_v0_10_1_context_preallocated_create for details. * - * Cloning rustsecp256k1zkp_v0_10_0_context_static is not possible, and should not be emulated by + * Cloning rustsecp256k1zkp_v0_10_1_context_static is not possible, and should not be emulated by * the caller (e.g., using memcpy). Create a new context instead. * * Returns: pointer to a newly created context object. - * Args: ctx: pointer to a context to copy (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context to copy (not rustsecp256k1zkp_v0_10_1_context_static). * In: prealloc: pointer to a rewritable contiguous block of memory of - * size at least rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags) + * size at least rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags) * bytes, as detailed above. */ -SECP256K1_API rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_preallocated_clone( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_preallocated_clone( + const rustsecp256k1zkp_v0_10_1_context *ctx, void *prealloc ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_WARN_UNUSED_RESULT; @@ -108,23 +108,23 @@ SECP256K1_API rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context * The context pointer may not be used afterwards. 
* * The context to destroy must have been created using - * rustsecp256k1zkp_v0_10_0_context_preallocated_create or rustsecp256k1zkp_v0_10_0_context_preallocated_clone. - * If the context has instead been created using rustsecp256k1zkp_v0_10_0_context_create or - * rustsecp256k1zkp_v0_10_0_context_clone, the behaviour is undefined. In that case, - * rustsecp256k1zkp_v0_10_0_context_destroy must be used instead. + * rustsecp256k1zkp_v0_10_1_context_preallocated_create or rustsecp256k1zkp_v0_10_1_context_preallocated_clone. + * If the context has instead been created using rustsecp256k1zkp_v0_10_1_context_create or + * rustsecp256k1zkp_v0_10_1_context_clone, the behaviour is undefined. In that case, + * rustsecp256k1zkp_v0_10_1_context_destroy must be used instead. * * If required, it is the responsibility of the caller to deallocate the block * of memory properly after this function returns, e.g., by calling free on the - * preallocated pointer given to rustsecp256k1zkp_v0_10_0_context_preallocated_create or - * rustsecp256k1zkp_v0_10_0_context_preallocated_clone. + * preallocated pointer given to rustsecp256k1zkp_v0_10_1_context_preallocated_create or + * rustsecp256k1zkp_v0_10_1_context_preallocated_clone. * * Args: ctx: pointer to a context to destroy, constructed using - * rustsecp256k1zkp_v0_10_0_context_preallocated_create or - * rustsecp256k1zkp_v0_10_0_context_preallocated_clone - * (i.e., not rustsecp256k1zkp_v0_10_0_context_static). + * rustsecp256k1zkp_v0_10_1_context_preallocated_create or + * rustsecp256k1zkp_v0_10_1_context_preallocated_clone + * (i.e., not rustsecp256k1zkp_v0_10_1_context_static). 
*/ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_context_preallocated_destroy( - rustsecp256k1zkp_v0_10_0_context *ctx +SECP256K1_API void rustsecp256k1zkp_v0_10_1_context_preallocated_destroy( + rustsecp256k1zkp_v0_10_1_context *ctx ) SECP256K1_ARG_NONNULL(1); #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_rangeproof.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_rangeproof.h index 9a24e184..7e9cd0bd 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_rangeproof.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_rangeproof.h @@ -22,7 +22,7 @@ extern "C" { /** Verify a proof that a committed value is within a range. * Returns 1: Value is within the range [0..2^64), the specifically proven range is in the min/max value outputs. * 0: Proof failed or other error. - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * commit: the commitment being proved. (cannot be NULL) * proof: pointer to character array with the proof. (cannot be NULL) * plen: length of proof in bytes. @@ -32,22 +32,22 @@ extern "C" { * Out: min_value: pointer to a unsigned int64 which will be updated with the minimum value that commit could have. (cannot be NULL) * max_value: pointer to a unsigned int64 which will be updated with the maximum value that commit could have. 
(cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangeproof_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_rangeproof_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, uint64_t *min_value, uint64_t *max_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, - const rustsecp256k1zkp_v0_10_0_generator *gen + const rustsecp256k1zkp_v0_10_1_generator *gen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5) SECP256K1_ARG_NONNULL(9); /** Verify a range proof proof and rewind the proof to recover information sent by its author. * Returns 1: Value is within the range [0..2^64), the specifically proven range is in the min/max value outputs, and the value and blinding were recovered. * 0: Proof failed, rewind failed, or other error. - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * commit: the commitment being proved. (cannot be NULL) * proof: pointer to character array with the proof. (cannot be NULL) * plen: length of proof in bytes. @@ -64,8 +64,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro * min_value: pointer to an unsigned int64 which will be updated with the minimum value that commit could have. (cannot be NULL) * max_value: pointer to an unsigned int64 which will be updated with the maximum value that commit could have. 
(cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangeproof_rewind( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_rangeproof_rewind( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *blind_out, uint64_t *value_out, unsigned char *message_out, @@ -73,18 +73,18 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro const unsigned char *nonce, uint64_t *min_value, uint64_t *max_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, - const rustsecp256k1zkp_v0_10_0_generator *gen + const rustsecp256k1zkp_v0_10_1_generator *gen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(6) SECP256K1_ARG_NONNULL(7) SECP256K1_ARG_NONNULL(8) SECP256K1_ARG_NONNULL(9) SECP256K1_ARG_NONNULL(10) SECP256K1_ARG_NONNULL(14); /** Author a proof that a committed value is within a range. * Returns 1: Proof successfully created. * 0: Error - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * proof: pointer to array to receive the proof, can be up to 5134 bytes. (cannot be NULL) * min_value: constructs a proof where the verifer can tell the minimum value is at least the specified amount. * commit: the commitment being proved. @@ -109,12 +109,12 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro * This can randomly fail with probability around one in 2^100. If this happens, buy a lottery ticket and retry with a different nonce or blinding. 
* */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangeproof_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_rangeproof_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *proof, size_t *plen, uint64_t min_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *blind, const unsigned char *nonce, int exp, @@ -124,7 +124,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, - const rustsecp256k1zkp_v0_10_0_generator *gen + const rustsecp256k1zkp_v0_10_1_generator *gen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(5) SECP256K1_ARG_NONNULL(6) SECP256K1_ARG_NONNULL(7) SECP256K1_ARG_NONNULL(15); /** Extract some basic information from a range-proof. @@ -138,8 +138,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro * min_value: pointer to an unsigned int64 which will be updated with the minimum value that commit could have. (cannot be NULL) * max_value: pointer to an unsigned int64 which will be updated with the maximum value that commit could have. 
(cannot be NULL) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangeproof_info( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_rangeproof_info( + const rustsecp256k1zkp_v0_10_1_context *ctx, int *exp, int *mantissa, uint64_t *min_value, @@ -152,7 +152,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro * * An actual rangeproof may be smaller, for example if the actual value * is less than both the provided `max_value` and 2^`min_bits`, or if - * the `exp` parameter to `rustsecp256k1zkp_v0_10_0_rangeproof_sign` is set such that + * the `exp` parameter to `rustsecp256k1zkp_v0_10_1_rangeproof_sign` is set such that * the proven range is compressed. In particular this function will always * overestimate the size of single-value proofs. Also, if `min_value` * is set to 0 in the proof, the result will usually, but not always, @@ -169,8 +169,8 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_rangepro * max_value: the maximum value that might be passed for `value` for the proof. * min_bits: the value that will be passed as `min_bits` for the proof. */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT size_t rustsecp256k1zkp_v0_10_0_rangeproof_max_size( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT size_t rustsecp256k1zkp_v0_10_1_rangeproof_max_size( + const rustsecp256k1zkp_v0_10_1_context *ctx, uint64_t max_value, int min_bits ) SECP256K1_ARG_NONNULL(1); diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_recovery.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_recovery.h index deed80b7..d50b25c6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_recovery.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_recovery.h @@ -14,8 +14,8 @@ extern "C" { * guaranteed to be portable between different platforms or versions. 
It is * however guaranteed to be 65 bytes in size, and can be safely copied/moved. * If you need to convert to a format suitable for storage or transmission, use - * the rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_* and - * rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_* functions. + * the rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_* and + * rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_* functions. * * Furthermore, it is guaranteed that identical signatures (including their * recoverability) will have identical representation, so they can be @@ -23,7 +23,7 @@ extern "C" { */ typedef struct { unsigned char data[65]; -} rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature; +} rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature; /** Parse a compact ECDSA signature (64 bytes + recovery id). * @@ -33,9 +33,9 @@ typedef struct { * In: input64: pointer to a 64-byte compact signature * recid: the recovery id (0, 1, 2 or 3) */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *sig, const unsigned char *input64, int recid ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -47,10 +47,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_com * Out: sig: pointer to a normal signature. * In: sigin: pointer to a recoverable signature. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, - const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *sigin +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, + const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *sigin ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); /** Serialize an ECDSA signature in compact format (64 bytes + recovery id). @@ -61,32 +61,32 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert( * recid: pointer to an integer to hold the recovery id. * In: sig: pointer to an initialized signature object. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output64, int *recid, - const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *sig + const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *sig ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Create a recoverable ECDSA signature. * * Returns: 1: signature created * 0: the nonce generation function failed, or the secret key was invalid. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: sig: pointer to an array where the signature will be placed. * In: msghash32: the 32-byte message hash being signed. * seckey: pointer to a 32-byte secret key. * noncefp: pointer to a nonce generation function. 
If NULL, - * rustsecp256k1zkp_v0_10_0_nonce_function_default is used. + * rustsecp256k1zkp_v0_10_1_nonce_function_default is used. * ndata: pointer to arbitrary data used by the nonce generation function - * (can be NULL for rustsecp256k1zkp_v0_10_0_nonce_function_default). + * (can be NULL for rustsecp256k1zkp_v0_10_1_nonce_function_default). */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *sig, const unsigned char *msghash32, const unsigned char *seckey, - rustsecp256k1zkp_v0_10_0_nonce_function noncefp, + rustsecp256k1zkp_v0_10_1_nonce_function noncefp, const void *ndata ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); @@ -99,10 +99,10 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable( * In: sig: pointer to initialized signature that supports pubkey recovery. * msghash32: the 32-byte message hash assumed to be signed. 
*/ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_ecdsa_recover( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_pubkey *pubkey, - const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *sig, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_ecdsa_recover( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_pubkey *pubkey, + const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *sig, const unsigned char *msghash32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorr_adaptor.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorr_adaptor.h new file mode 100644 index 00000000..ad6ba91c --- /dev/null +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorr_adaptor.h @@ -0,0 +1,215 @@ +#ifndef SECP256K1_SCHNORR_ADAPTOR_H +#define SECP256K1_SCHNORR_ADAPTOR_H + +#include "secp256k1.h" +#include "secp256k1_extrakeys.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** This module provides an experimental implementation of a Schnorr adaptor + * signature protocol variant. + * + * The test vectors have been generated and cross-verified using a Python + * implementation of this adaptor signature variant available at [0]. + * + * The protocol involves two parties, Alice and Bob. The general sequence of + * their interaction is as follows: + * 1. Alice calls the `schnorr_adaptor_presign` function for an adaptor point T + * and sends the pre-signature to Bob. + * 2. Bob extracts the adaptor point T from the pre-signature using + * `schnorr_adaptor_extract`. + * 3. Bob provides the pre-signature and the discrete logarithm of T to + * `schnorr_adaptor_adapt` which outputs a valid BIP 340 Schnorr signature. + * 4. 
Alice extracts the discrete logarithm of T from the pre-signature and the
+ * BIP 340 signature using `schnorr_adaptor_extract_sec`.
+ *
+ * In contrast to common descriptions of adaptor signature protocols, this
+ * module does not provide a verification algorithm for pre-signatures.
+ * Instead, `schnorr_adaptor_extract` returns the adaptor point encoded by a
+ * pre-signature, reducing communication cost. If a verification function for
+ * pre-signatures is needed, it can be easily simulated with
+ * `schnorr_adaptor_extract`.
+ *
+ * Assuming that BIP 340 Schnorr signatures satisfy strong unforgeability under
+ * chosen message attack, the Schnorr adaptor signature scheme fulfills the
+ * following properties as formalized by [1].
+ *
+ * - Witness extractability:
+ * If Alice
+ * 1. creates a pre-signature with `schnorr_adaptor_presign` for message m
+ * and adaptor point T and
+ * 2. receives a Schnorr signature for message m that she hasn't created
+ * herself,
+ * then Alice is able to obtain the discrete logarithm of T with
+ * `schnorr_adaptor_extract_sec`.
+ *
+ * - Pre-signature adaptability:
+ * If Bob
+ * 1. receives a pre-signature and extracts an adaptor point T using
+ * `schnorr_adaptor_extract`, and
+ * 2. obtains the discrete logarithm of the adaptor point T
+ * Then Bob is able to adapt the received pre-signature to a valid BIP
+ * 340 Schnorr signature using `schnorr_adaptor_adapt`.
+ *
+ * - Existential Unforgeability:
+ * Bob is not able to create a BIP 340 signature from a pre-signature for
+ * adaptor T without knowing the discrete logarithm of T.
+ *
+ * - Pre-signature existential unforgeability:
+ * Only Alice can create a pre-signature for her public key.
+ *
+ * [0] https://github.com/ZhePang/Python_Specification_for_Schnorr_Adaptor
+ * [1] https://eprint.iacr.org/2020/476.pdf
+ */
+
+/** A pointer to a function to deterministically generate a nonce.
+ * + * In addition to the features of rustsecp256k1zkp_v0_10_1_nonce_function_hardened, + * this function introduces an extra argument for a compressed 33-byte + * adaptor point. + * + * Returns: 1 if a nonce was successfully generated. 0 will cause signing to + * return an error. + * Out: nonce32: pointer to a 32-byte array to be filled by the function + * In: msg32: the 32-byte message being verified (will not be NULL) + * key32: pointer to a 32-byte secret key (will not be NULL) +* adaptor33: the 33-byte serialized adaptor point (will not be NULL) + * xonly_pk32: the 32-byte serialized xonly pubkey corresponding to key32 + * (will not be NULL) + * algo: pointer to an array describing the signature + * algorithm (will not be NULL) + * algolen: the length of the algo array + * data: arbitrary data pointer that is passed through + * + * Except for test cases, this function should compute some cryptographic hash of + * the message, the key, the adaptor point, the pubkey, the algorithm description, and data. + */ +typedef int (*rustsecp256k1zkp_v0_10_1_nonce_function_hardened_schnorr_adaptor)( + unsigned char *nonce32, + const unsigned char *msg32, + const unsigned char *key32, + const unsigned char *adaptor33, + const unsigned char *xonly_pk32, + const unsigned char *algo, + size_t algolen, + void *data +); + +/** A modified BIP-340 nonce generation function. If a data pointer is passed, it is + * assumed to be a pointer to 32 bytes of auxiliary random data as defined in BIP-340. + * If the data pointer is NULL, the nonce derivation procedure uses a zeroed 32-byte + * auxiliary random data. The hash will be tagged with algo after removing all + * terminating null bytes. + */ +SECP256K1_API const rustsecp256k1zkp_v0_10_1_nonce_function_hardened_schnorr_adaptor rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor; + +/** Creates a pre-signature for a given message and adaptor point. 
+ * + * The pre-signature can be converted into a valid BIP-340 Schnorr signature + * (using `schnorr_adaptor_adapt`) by combining it with the discrete logarithm + * of the adaptor point. + * + * This function only signs 32-byte messages. If you have messages of a + * different size (or the same size but without a context-specific tag + * prefix), it is recommended to create a 32-byte message hash with + * rustsecp256k1zkp_v0_10_1_tagged_sha256 and then sign the hash. Tagged hashing allows + * providing an context-specific tag for domain separation. This prevents + * signatures from being valid in multiple contexts by accident. + * + * Returns 1 on success, 0 on failure. + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). + * Out: pre_sig65: pointer to a 65-byte array to store the pre-signature. + * In: msg32: the 32-byte message being signed. + * keypair: pointer to an initialized keypair. + * adaptor: pointer to an adaptor point encoded as a public key. + * aux_rand32: pointer to arbitrary data used by the nonce generation + * function (can be NULL). If it is non-NULL and + * rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor is used, then + * aux_rand32 must be a pointer to 32-byte auxiliary randomness + * as per BIP-340. + */ +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + unsigned char *pre_sig65, + const unsigned char *msg32, + const rustsecp256k1zkp_v0_10_1_keypair *keypair, + const rustsecp256k1zkp_v0_10_1_pubkey *adaptor, + const unsigned char *aux_rand32 +) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5); + +/** Extracts the adaptor point from a pre-signature. + * + * This function assumes that pre_sig65 was created using the corresponding + * msg32, pubkey, and a valid adaptor point, which it will extract. 
If these
+ * inputs are not related (e.g., if pre_sig65 was generated with a different
+ * key or message), the extracted adaptor point will be incorrect. However,
+ * the function will still return 1 to indicate a successful extraction.
+ *
+ * Returns 1 on success, 0 on failure.
+ * Args: ctx: pointer to a context object.
+ * Out: adaptor: pointer to store the adaptor point.
+ * In: pre_sig65: pointer to a 65-byte pre-signature.
+ * msg32: the 32-byte message associated with pre_sig65
+ * pubkey: pointer to the x-only public key associated with pre_sig65
+ */
+SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(
+ const rustsecp256k1zkp_v0_10_1_context *ctx,
+ rustsecp256k1zkp_v0_10_1_pubkey *adaptor,
+ const unsigned char *pre_sig65,
+ const unsigned char *msg32,
+ const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey
+) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(5);
+
+/** Adapts the pre-signature to produce a BIP-340 Schnorr signature.
+ *
+ * The output BIP-340 signature is not verified by this function.
+ * To verify it, use `rustsecp256k1zkp_v0_10_1_schnorrsig_verify`.
+ *
+ * If the pre_sig65 and sec_adaptor32 values are not related, the
+ * output signature will be invalid. In this case, the function will
+ * still return 1 to indicate successful execution.
+ *
+ * Returns 1 on success, 0 on failure.
+ * Args: ctx: pointer to a context object.
+ * Out: sig64: pointer to a 64-byte array to store the adapted
+ * pre-signature. This pointer may point to the same
+ * memory area as `pre_sig65`.
+ * In: pre_sig65: pointer to a 65-byte pre-signature.
+ * sec_adaptor32: pointer to a 32-byte secret adaptor associated with pre_sig65 + */ +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt( + const rustsecp256k1zkp_v0_10_1_context *ctx, + unsigned char *sig64, + const unsigned char *pre_sig65, + const unsigned char *sec_adaptor32 +) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); + +/** Extracts the secret adaptor (discrete logarithm of the adaptor point) + * from a pre-signature and the corresponding BIP-340 signature. + * + * This function assumes that the sig64 was created by adapting pre_sig65. + * If these inputs are not related, the extracted secret adaptor will be + * incorrect. However, the function will still return 1 to indicate successful + * extraction. + * + * Returns 1 on success, 0 on failure. + * Args: ctx: pointer to a context object. + * Out: sec_adaptor32: pointer to a 32-byte array to store the secret adaptor. + * In: pre_sig65: pointer to a 65-byte pre-signature. + * sig64: pointer to a valid 64-byte BIP-340 Schnorr signature + * associated with pre_sig65. + */ +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec( + const rustsecp256k1zkp_v0_10_1_context *ctx, + unsigned char *sec_adaptor32, + const unsigned char *pre_sig65, + const unsigned char *sig64 +) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); + +#ifdef __cplusplus +} +#endif + +#endif /* SECP256K1_SCHNORR_ADAPTOR_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig.h index d10ce0dd..4494de35 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig.h @@ -15,7 +15,7 @@ extern "C" { /** A pointer to a function to deterministically generate a nonce. 
* - * Same as rustsecp256k1zkp_v0_10_0_nonce function with the exception of accepting an + * Same as rustsecp256k1zkp_v0_10_1_nonce function with the exception of accepting an * additional pubkey argument and not requiring an attempt argument. The pubkey * argument can protect signature schemes with key-prefixed challenge hash * inputs against reusing the nonce when signing with the wrong precomputed @@ -38,7 +38,7 @@ extern "C" { * Except for test cases, this function should compute some cryptographic hash of * the message, the key, the pubkey, the algorithm description, and data. */ -typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function_hardened)( +typedef int (*rustsecp256k1zkp_v0_10_1_nonce_function_hardened)( unsigned char *nonce32, const unsigned char *msg, size_t msglen, @@ -61,7 +61,7 @@ typedef int (*rustsecp256k1zkp_v0_10_0_nonce_function_hardened)( * Therefore, to create BIP-340 compliant signatures, algo must be set to * "BIP0340/nonce" and algolen to 13. */ -SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function_hardened rustsecp256k1zkp_v0_10_0_nonce_function_bip340; +SECP256K1_API const rustsecp256k1zkp_v0_10_1_nonce_function_hardened rustsecp256k1zkp_v0_10_1_nonce_function_bip340; /** Data structure that contains additional arguments for schnorrsig_sign_custom. * @@ -73,17 +73,17 @@ SECP256K1_API const rustsecp256k1zkp_v0_10_0_nonce_function_hardened rustsecp256 * and has no other function than making sure the object is * initialized. * noncefp: pointer to a nonce generation function. If NULL, - * rustsecp256k1zkp_v0_10_0_nonce_function_bip340 is used + * rustsecp256k1zkp_v0_10_1_nonce_function_bip340 is used * ndata: pointer to arbitrary data used by the nonce generation function * (can be NULL). If it is non-NULL and - * rustsecp256k1zkp_v0_10_0_nonce_function_bip340 is used, then ndata must be a + * rustsecp256k1zkp_v0_10_1_nonce_function_bip340 is used, then ndata must be a * pointer to 32-byte auxiliary randomness as per BIP-340. 
*/ typedef struct { unsigned char magic[4]; - rustsecp256k1zkp_v0_10_0_nonce_function_hardened noncefp; + rustsecp256k1zkp_v0_10_1_nonce_function_hardened noncefp; void *ndata; -} rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams; +} rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams; #define SECP256K1_SCHNORRSIG_EXTRAPARAMS_MAGIC { 0xda, 0x6f, 0xb3, 0x8c } #define SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT {\ @@ -95,18 +95,18 @@ typedef struct { /** Create a Schnorr signature. * * Does _not_ strictly follow BIP-340 because it does not verify the resulting - * signature. Instead, you can manually use rustsecp256k1zkp_v0_10_0_schnorrsig_verify and + * signature. Instead, you can manually use rustsecp256k1zkp_v0_10_1_schnorrsig_verify and * abort if it fails. * * This function only signs 32-byte messages. If you have messages of a * different size (or the same size but without a context-specific tag * prefix), it is recommended to create a 32-byte message hash with - * rustsecp256k1zkp_v0_10_0_tagged_sha256 and then sign the hash. Tagged hashing allows + * rustsecp256k1zkp_v0_10_1_tagged_sha256 and then sign the hash. Tagged hashing allows * providing an context-specific tag for domain separation. This prevents * signatures from being valid in multiple contexts by accident. * * Returns 1 on success, 0 on failure. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: sig64: pointer to a 64-byte array to store the serialized signature. * In: msg32: the 32-byte message being signed. * keypair: pointer to an initialized keypair. @@ -116,53 +116,53 @@ typedef struct { * BIP-340 "Default Signing" for a full explanation of this * argument and for guidance if randomness is expensive. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_sign32( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorrsig_sign32( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_keypair *keypair, + const rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *aux_rand32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); -/** Same as rustsecp256k1zkp_v0_10_0_schnorrsig_sign32, but DEPRECATED. Will be removed in +/** Same as rustsecp256k1zkp_v0_10_1_schnorrsig_sign32, but DEPRECATED. Will be removed in * future versions. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorrsig_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, const unsigned char *msg32, - const rustsecp256k1zkp_v0_10_0_keypair *keypair, + const rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *aux_rand32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) - SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_0_schnorrsig_sign32 instead"); + SECP256K1_DEPRECATED("Use rustsecp256k1zkp_v0_10_1_schnorrsig_sign32 instead"); /** Create a Schnorr signature with a more flexible API. * - * Same arguments as rustsecp256k1zkp_v0_10_0_schnorrsig_sign except that it allows signing + * Same arguments as rustsecp256k1zkp_v0_10_1_schnorrsig_sign except that it allows signing * variable length messages and accepts a pointer to an extraparams object that * allows customizing signing by passing additional arguments. 
* - * Equivalent to rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(..., aux_rand32) if msglen is 32 + * Equivalent to rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(..., aux_rand32) if msglen is 32 * and extraparams is initialized as follows: * ``` - * rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; + * rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; * extraparams.ndata = (unsigned char*)aux_rand32; * ``` * * Returns 1 on success, 0 on failure. - * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static). + * Args: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static). * Out: sig64: pointer to a 64-byte array to store the serialized signature. * In: msg: the message being signed. Can only be NULL if msglen is 0. * msglen: length of the message. * keypair: pointer to an initialized keypair. * extraparams: pointer to an extraparams object (can be NULL). */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, const unsigned char *msg, size_t msglen, - const rustsecp256k1zkp_v0_10_0_keypair *keypair, - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams *extraparams + const rustsecp256k1zkp_v0_10_1_keypair *keypair, + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams *extraparams ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(5); /** Verify a Schnorr signature. 
@@ -175,12 +175,12 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom( * msglen: length of the message * pubkey: pointer to an x-only public key to verify with */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_schnorrsig_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_schnorrsig_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, const unsigned char *sig64, const unsigned char *msg, size_t msglen, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(5); #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig_halfagg.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig_halfagg.h index 8e4b42b1..4c1cbc22 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig_halfagg.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_schnorrsig_halfagg.h @@ -40,11 +40,11 @@ extern "C" { * n_new: Number of signatures that should now be added * to the aggregate signature. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *aggsig, size_t *aggsig_len, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey* all_pubkeys, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey* all_pubkeys, const unsigned char *all_msgs32, const unsigned char *new_sigs64, size_t n_before, @@ -67,11 +67,11 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate( * Can only be NULL if n is 0. * n: number of signatures to be aggregated. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *aggsig, size_t *aggsig_len, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkeys, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkeys, const unsigned char *msgs32, const unsigned char *sigs64, size_t n @@ -91,9 +91,9 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate( * aggsig_len: Size of the aggregate signature in bytes. * Should be aggsig_len = 32*(n+1) */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkeys, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkeys, const unsigned char *msgs32, size_t n, const unsigned char *aggsig, diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_surjectionproof.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_surjectionproof.h index a9fc20f5..94af8b3d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_surjectionproof.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_surjectionproof.h @@ -32,8 +32,8 @@ extern "C" { * will have identical representation. (That is, memcmp may return nonzero * even for identical proofs.) * - * To obtain these properties, instead use rustsecp256k1zkp_v0_10_0_surjectionproof_parse - * and rustsecp256k1zkp_v0_10_0_surjectionproof_serialize to encode/decode proofs into a + * To obtain these properties, instead use rustsecp256k1zkp_v0_10_1_surjectionproof_parse + * and rustsecp256k1zkp_v0_10_1_surjectionproof_serialize to encode/decode proofs into a * well-defined format. 
* * The representation is exposed to allow creation of these objects on the @@ -41,7 +41,7 @@ extern "C" { */ typedef struct { #ifdef VERIFY - /** Mark whether this proof has gone through `rustsecp256k1zkp_v0_10_0_surjectionproof_initialize` */ + /** Mark whether this proof has gone through `rustsecp256k1zkp_v0_10_1_surjectionproof_initialize` */ int initialized; #endif /** Total number of input asset tags */ @@ -50,7 +50,7 @@ typedef struct { unsigned char used_inputs[SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS / 8]; /** Borromean signature: e0, scalars */ unsigned char data[32 * (1 + SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS)]; -} rustsecp256k1zkp_v0_10_0_surjectionproof; +} rustsecp256k1zkp_v0_10_1_surjectionproof; #ifndef USE_REDUCED_SURJECTION_PROOF_SIZE /** Parse a surjection proof @@ -68,9 +68,9 @@ typedef struct { * - `m` big-endian 32-byte borromean signature s values, where `m` * is the number of set bits in the bitmap */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_surjectionproof_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_surjectionproof *proof, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_surjectionproof_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_surjectionproof *proof, const unsigned char *input, size_t inputlen ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -85,13 +85,13 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_surjectionproof_parse( * of output, and is overwritten with the written size. * In: proof: pointer to an initialized proof object * - * See rustsecp256k1zkp_v0_10_0_surjectionproof_parse for details about the encoding. + * See rustsecp256k1zkp_v0_10_1_surjectionproof_parse for details about the encoding. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_surjectionproof_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_surjectionproof_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, size_t *outputlen, - const rustsecp256k1zkp_v0_10_0_surjectionproof *proof + const rustsecp256k1zkp_v0_10_1_surjectionproof *proof ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Data structure that holds a fixed asset tag. @@ -102,7 +102,7 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_surjectionproof_serialize( */ typedef struct { unsigned char data[32]; -} rustsecp256k1zkp_v0_10_0_fixed_asset_tag; +} rustsecp256k1zkp_v0_10_1_fixed_asset_tag; /** Returns the total number of inputs a proof expects to be over. * @@ -110,9 +110,9 @@ typedef struct { * In: ctx: pointer to a context object * proof: pointer to a proof object */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_surjectionproof *proof +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_surjectionproof *proof ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); /** Returns the actual number of inputs that a proof uses @@ -121,9 +121,9 @@ SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs( * In: ctx: pointer to a context object * proof: pointer to a proof object */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_surjectionproof *proof +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_surjectionproof *proof ) SECP256K1_ARG_NONNULL(1) 
SECP256K1_ARG_NONNULL(2); /** Returns the total size this proof would take, in bytes, when serialized @@ -132,13 +132,13 @@ SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs( * In: ctx: pointer to a context object * proof: pointer to a proof object */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_surjectionproof *proof +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_surjectionproof *proof ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2); /** Surjection proof initialization function; decides on inputs to use - * To be used to initialize stack-allocated rustsecp256k1zkp_v0_10_0_surjectionproof struct + * To be used to initialize stack-allocated rustsecp256k1zkp_v0_10_1_surjectionproof struct * Returns 0: inputs could not be selected * n: inputs were selected after n iterations of random selection * @@ -160,14 +160,14 @@ SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size( * the state of the proof is undefined. 
* input_index: The index of the actual input that is secretly mapped to the output */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjectionproof_initialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_surjectionproof *proof, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_surjectionproof_initialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_surjectionproof *proof, size_t *input_index, - const rustsecp256k1zkp_v0_10_0_fixed_asset_tag *fixed_input_tags, + const rustsecp256k1zkp_v0_10_1_fixed_asset_tag *fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, - const rustsecp256k1zkp_v0_10_0_fixed_asset_tag *fixed_output_tag, + const rustsecp256k1zkp_v0_10_1_fixed_asset_tag *fixed_output_tag, const size_t n_max_iterations, const unsigned char *random_seed32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(7); @@ -178,7 +178,7 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjecti * n: inputs were selected after n iterations of random selection * * In: ctx: pointer to a context object - * proof_out_p: pointer to a pointer to `rustsecp256k1zkp_v0_10_0_surjectionproof*`. + * proof_out_p: pointer to a pointer to `rustsecp256k1zkp_v0_10_1_surjectionproof*`. * The newly-allocated struct pointer will be saved here. * fixed_input_tags: fixed input tags `A_i` for all inputs. (If the fixed tag is not known, * e.g. in a coinjoin with others' inputs, an ephemeral tag can be given; @@ -196,46 +196,46 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjecti * In case of failure, the pointer will be NULL. 
* input_index: The index of the actual input that is secretly mapped to the output */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_surjectionproof **proof_out_p, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_surjectionproof **proof_out_p, size_t *input_index, - const rustsecp256k1zkp_v0_10_0_fixed_asset_tag *fixed_input_tags, + const rustsecp256k1zkp_v0_10_1_fixed_asset_tag *fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, - const rustsecp256k1zkp_v0_10_0_fixed_asset_tag *fixed_output_tag, + const rustsecp256k1zkp_v0_10_1_fixed_asset_tag *fixed_output_tag, const size_t n_max_iterations, const unsigned char *random_seed32 ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(7); /** Surjection proof destroy function - * deallocates the struct that was allocated with rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized + * deallocates the struct that was allocated with rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized * - * In: proof: pointer to rustsecp256k1zkp_v0_10_0_surjectionproof struct + * In: proof: pointer to rustsecp256k1zkp_v0_10_1_surjectionproof struct */ -SECP256K1_API void rustsecp256k1zkp_v0_10_0_surjectionproof_destroy( - rustsecp256k1zkp_v0_10_0_surjectionproof *proof +SECP256K1_API void rustsecp256k1zkp_v0_10_1_surjectionproof_destroy( + rustsecp256k1zkp_v0_10_1_surjectionproof *proof ) SECP256K1_ARG_NONNULL(1); /** Surjection proof generation function * Returns 0: proof could not be created * 1: proof was successfully created * - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object 
(not rustsecp256k1zkp_v0_10_1_context_static) * ephemeral_input_tags: the ephemeral asset tag of all inputs * n_ephemeral_input_tags: the number of entries in the ephemeral_input_tags array * ephemeral_output_tag: the ephemeral asset tag of the output * input_index: the index of the input that actually maps to the output * input_blinding_key: the blinding key of the input * output_blinding_key: the blinding key of the output - * In/Out: proof: The produced surjection proof. Must have already gone through `rustsecp256k1zkp_v0_10_0_surjectionproof_initialize` + * In/Out: proof: The produced surjection proof. Must have already gone through `rustsecp256k1zkp_v0_10_1_surjectionproof_initialize` */ -SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjectionproof_generate( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_surjectionproof *proof, - const rustsecp256k1zkp_v0_10_0_generator *ephemeral_input_tags, +SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_1_surjectionproof_generate( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_surjectionproof *proof, + const rustsecp256k1zkp_v0_10_1_generator *ephemeral_input_tags, size_t n_ephemeral_input_tags, - const rustsecp256k1zkp_v0_10_0_generator *ephemeral_output_tag, + const rustsecp256k1zkp_v0_10_1_generator *ephemeral_output_tag, size_t input_index, const unsigned char *input_blinding_key, const unsigned char *output_blinding_key @@ -247,18 +247,18 @@ SECP256K1_API SECP256K1_WARN_UNUSED_RESULT int rustsecp256k1zkp_v0_10_0_surjecti * Returns 0: proof was invalid * 1: proof was valid * - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * proof: proof to be verified * ephemeral_input_tags: the ephemeral asset tag of all inputs * n_ephemeral_input_tags: the number of entries in the ephemeral_input_tags array * 
ephemeral_output_tag: the ephemeral asset tag of the output */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_surjectionproof_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_surjectionproof *proof, - const rustsecp256k1zkp_v0_10_0_generator *ephemeral_input_tags, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_surjectionproof_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_surjectionproof *proof, + const rustsecp256k1zkp_v0_10_1_generator *ephemeral_input_tags, size_t n_ephemeral_input_tags, - const rustsecp256k1zkp_v0_10_0_generator *ephemeral_output_tag + const rustsecp256k1zkp_v0_10_1_generator *ephemeral_output_tag ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(5); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_whitelist.h b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_whitelist.h index e12be56e..4ccb29b5 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_whitelist.h +++ b/secp256k1-zkp-sys/depend/secp256k1/include/secp256k1_whitelist.h @@ -23,19 +23,19 @@ extern "C" { * will have identical representation. (That is, memcmp may return nonzero * even for identical signatures.) * - * To obtain these properties, instead use rustsecp256k1zkp_v0_10_0_whitelist_signature_parse - * and rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize to encode/decode signatures + * To obtain these properties, instead use rustsecp256k1zkp_v0_10_1_whitelist_signature_parse + * and rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize to encode/decode signatures * into a well-defined format. * * The representation is exposed to allow creation of these objects on the * stack; please *do not* use these internals directly. To learn the number - * of keys for a signature, use `rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys`. 
+ * of keys for a signature, use `rustsecp256k1zkp_v0_10_1_whitelist_signature_n_keys`. */ typedef struct { size_t n_keys; /* e0, scalars */ unsigned char data[32 * (1 + SECP256K1_WHITELIST_MAX_N_KEYS)]; -} rustsecp256k1zkp_v0_10_0_whitelist_signature; +} rustsecp256k1zkp_v0_10_1_whitelist_signature; /** Parse a whitelist signature * @@ -57,9 +57,9 @@ typedef struct { * scalar values overflow or are zero, the resulting sig value is guaranteed * to fail validation for any set of keys. */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_signature_parse( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_whitelist_signature_parse( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, const unsigned char *input, size_t input_len ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3); @@ -69,8 +69,8 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_signature_parse( * Returns: the number of keys for the given signature * In: sig: pointer to a signature object */ -SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys( - const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig +SECP256K1_API size_t rustsecp256k1zkp_v0_10_1_whitelist_signature_n_keys( + const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig ) SECP256K1_ARG_NONNULL(1); /** Serialize a whitelist signature @@ -81,19 +81,19 @@ SECP256K1_API size_t rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys( * In/Out: output_len: length of the above array, updated with the actual serialized length * In: sig: pointer to an initialized signature object * - * See rustsecp256k1zkp_v0_10_0_whitelist_signature_parse for details about the encoding. + * See rustsecp256k1zkp_v0_10_1_whitelist_signature_parse for details about the encoding. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize( - const rustsecp256k1zkp_v0_10_0_context *ctx, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize( + const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, size_t *output_len, - const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig + const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4); /** Compute a whitelist signature * Returns 1: signature was successfully created * 0: signature was not successfully created - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * online_pubkeys: list of all online pubkeys * offline_pubkeys: list of all offline pubkeys * n_keys: the number of entries in each of the above two arrays @@ -110,13 +110,13 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize( * for each public key pair (offline_i, offline_i). Here H means sha256 of the * compressed serialization of the key. 
*/ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_sign( - const rustsecp256k1zkp_v0_10_0_context *ctx, - rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, - const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, - const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_whitelist_sign( + const rustsecp256k1zkp_v0_10_1_context *ctx, + rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, + const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, + const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const size_t n_keys, - const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey, + const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey, const unsigned char *online_seckey, const unsigned char *summed_seckeyx, const size_t index @@ -125,20 +125,20 @@ SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_sign( /** Verify a whitelist signature * Returns 1: signature is valid * 0: signature is not valid - * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_0_context_static) + * In: ctx: pointer to a context object (not rustsecp256k1zkp_v0_10_1_context_static) * sig: the signature to be verified * online_pubkeys: list of all online pubkeys * offline_pubkeys: list of all offline pubkeys * n_keys: the number of entries in each of the above two arrays * sub_pubkey: the key to be whitelisted */ -SECP256K1_API int rustsecp256k1zkp_v0_10_0_whitelist_verify( - const rustsecp256k1zkp_v0_10_0_context *ctx, - const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, - const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, - const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, +SECP256K1_API int rustsecp256k1zkp_v0_10_1_whitelist_verify( + const rustsecp256k1zkp_v0_10_1_context *ctx, + const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, + const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, + const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const size_t n_keys, - const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey + 
const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey ) SECP256K1_ARG_NONNULL(1) SECP256K1_ARG_NONNULL(2) SECP256K1_ARG_NONNULL(3) SECP256K1_ARG_NONNULL(4) SECP256K1_ARG_NONNULL(6); #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/sage/gen_exhaustive_groups.sage b/secp256k1-zkp-sys/depend/secp256k1/sage/gen_exhaustive_groups.sage index d2826076..7ed9b9c3 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/sage/gen_exhaustive_groups.sage +++ b/secp256k1-zkp-sys/depend/secp256k1/sage/gen_exhaustive_groups.sage @@ -1,4 +1,4 @@ -load("rustsecp256k1zkp_v0_10_0_params.sage") +load("rustsecp256k1zkp_v0_10_1_params.sage") MAX_ORDER = 1000 @@ -124,7 +124,7 @@ for f in sorted(solutions.keys()): print(f"# {'if' if first else 'elif'} EXHAUSTIVE_TEST_ORDER == {f}") first = False print() - print(f"static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G_ORDER_{f};") + print(f"static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G_ORDER_{f};") output_b(b) print() print("# else") @@ -132,7 +132,7 @@ print("# error No known generator for the specified exhaustive test group ord print("# endif") print("#else") print() -print("static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G;") +print("static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G;") output_b(7) print() print("#endif") diff --git a/secp256k1-zkp-sys/depend/secp256k1/sage/gen_split_lambda_constants.sage b/secp256k1-zkp-sys/depend/secp256k1/sage/gen_split_lambda_constants.sage index 69715250..aa9282a9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/sage/gen_split_lambda_constants.sage +++ b/secp256k1-zkp-sys/depend/secp256k1/sage/gen_split_lambda_constants.sage @@ -1,9 +1,9 @@ -""" Generates the constants used in rustsecp256k1zkp_v0_10_0_scalar_split_lambda. +""" Generates the constants used in rustsecp256k1zkp_v0_10_1_scalar_split_lambda. 
-See the comments for rustsecp256k1zkp_v0_10_0_scalar_split_lambda in src/scalar_impl.h for detailed explanations. +See the comments for rustsecp256k1zkp_v0_10_1_scalar_split_lambda in src/scalar_impl.h for detailed explanations. """ -load("rustsecp256k1zkp_v0_10_0_params.sage") +load("rustsecp256k1zkp_v0_10_1_params.sage") def inf_norm(v): """Returns the infinity norm of a vector.""" @@ -24,17 +24,17 @@ def gauss_reduction(i1, i2): v2[1] -= m*v1[1] def find_split_constants_gauss(): - """Find constants for rustsecp256k1zkp_v0_10_0_scalar_split_lamdba using gauss reduction.""" + """Find constants for rustsecp256k1zkp_v0_10_1_scalar_split_lamdba using gauss reduction.""" (v11, v12), (v21, v22) = gauss_reduction([0, N], [1, int(LAMBDA)]) - # We use related vectors in rustsecp256k1zkp_v0_10_0_scalar_split_lambda. + # We use related vectors in rustsecp256k1zkp_v0_10_1_scalar_split_lambda. A1, B1 = -v21, -v11 A2, B2 = v22, -v21 return A1, B1, A2, B2 def find_split_constants_explicit_tof(): - """Find constants for rustsecp256k1zkp_v0_10_0_scalar_split_lamdba using the trace of Frobenius. + """Find constants for rustsecp256k1zkp_v0_10_1_scalar_split_lamdba using the trace of Frobenius. See Benjamin Smith: "Easy scalar decompositions for efficient scalar multiplication on elliptic curves and genus 2 Jacobians" (https://eprint.iacr.org/2013/672), Example 2 @@ -51,7 +51,7 @@ def find_split_constants_explicit_tof(): A2 = Integer((t + c)/2 - 1) B2 = Integer(1 - (t - c)/2) - # We use a negated b values in rustsecp256k1zkp_v0_10_0_scalar_split_lambda. + # We use a negated b values in rustsecp256k1zkp_v0_10_1_scalar_split_lambda. 
B1, B2 = -B1, -B2 return A1, B1, A2, B2 @@ -90,7 +90,7 @@ def rnddiv2(v): return v >> 1 def scalar_lambda_split(k): - """Equivalent to rustsecp256k1zkp_v0_10_0_scalar_lambda_split().""" + """Equivalent to rustsecp256k1zkp_v0_10_1_scalar_lambda_split().""" c1 = rnddiv2((k * G1) >> 383) c2 = rnddiv2((k * G2) >> 383) c1 = (c1 * -B1) % N diff --git a/secp256k1-zkp-sys/depend/secp256k1/sage/prove_group_implementations.sage b/secp256k1-zkp-sys/depend/secp256k1/sage/prove_group_implementations.sage index 89419b80..f179381e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/sage/prove_group_implementations.sage +++ b/secp256k1-zkp-sys/depend/secp256k1/sage/prove_group_implementations.sage @@ -5,8 +5,8 @@ import sys load("group_prover.sage") load("weierstrass_prover.sage") -def formula_rustsecp256k1zkp_v0_10_0_gej_double_var(a): - """libsecp256k1's rustsecp256k1zkp_v0_10_0_gej_double_var, used by various addition functions""" +def formula_rustsecp256k1zkp_v0_10_1_gej_double_var(a): + """libsecp256k1's rustsecp256k1zkp_v0_10_1_gej_double_var, used by various addition functions""" rz = a.Z * a.Y s = a.Y^2 l = a.X^2 @@ -24,8 +24,8 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_double_var(a): ry = -ry return jacobianpoint(rx, ry, rz) -def formula_rustsecp256k1zkp_v0_10_0_gej_add_var(branch, a, b): - """libsecp256k1's rustsecp256k1zkp_v0_10_0_gej_add_var""" +def formula_rustsecp256k1zkp_v0_10_1_gej_add_var(branch, a, b): + """libsecp256k1's rustsecp256k1zkp_v0_10_1_gej_add_var""" if branch == 0: return (constraints(), constraints(nonzero={a.Infinity : 'a_infinite'}), b) if branch == 1: @@ -43,7 +43,7 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_var(branch, a, b): i = -s2 i = i + s1 if branch == 2: - r = formula_rustsecp256k1zkp_v0_10_0_gej_double_var(a) + r = formula_rustsecp256k1zkp_v0_10_1_gej_double_var(a) return (constraints(), constraints(zero={h : 'h=0', i : 'i=0', a.Infinity : 'a_finite', b.Infinity : 'b_finite'}), r) if branch == 3: return (constraints(), 
constraints(zero={h : 'h=0', a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={i : 'i!=0'}), point_at_infinity()) @@ -63,8 +63,8 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_var(branch, a, b): ry = ry + h3 return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz)) -def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_var(branch, a, b): - """libsecp256k1's rustsecp256k1zkp_v0_10_0_gej_add_ge_var, which assume bz==1""" +def formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_var(branch, a, b): + """libsecp256k1's rustsecp256k1zkp_v0_10_1_gej_add_ge_var, which assume bz==1""" if branch == 0: return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(nonzero={a.Infinity : 'a_infinite'}), b) if branch == 1: @@ -80,7 +80,7 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_var(branch, a, b): i = -s2 i = i + s1 if (branch == 2): - r = formula_rustsecp256k1zkp_v0_10_0_gej_double_var(a) + r = formula_rustsecp256k1zkp_v0_10_1_gej_double_var(a) return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0', i : 'i=0'}), r) if (branch == 3): return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0'}, nonzero={i : 'i!=0'}), point_at_infinity()) @@ -99,8 +99,8 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_var(branch, a, b): ry = ry + h3 return (constraints(zero={b.Z - 1 : 'b.z=1'}), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz)) -def formula_rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(branch, a, b): - """libsecp256k1's rustsecp256k1zkp_v0_10_0_gej_add_zinv_var""" +def formula_rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(branch, a, b): + """libsecp256k1's rustsecp256k1zkp_v0_10_1_gej_add_zinv_var""" bzinv = b.Z^(-1) if branch == 0: rinf = b.Infinity @@ -124,7 +124,7 @@ def 
formula_rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(branch, a, b): i = -s2 i = i + s1 if branch == 2: - r = formula_rustsecp256k1zkp_v0_10_0_gej_double_var(a) + r = formula_rustsecp256k1zkp_v0_10_1_gej_double_var(a) return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0', i : 'i=0'}), r) if branch == 3: return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite', h : 'h=0'}, nonzero={i : 'i!=0'}), point_at_infinity()) @@ -143,8 +143,8 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(branch, a, b): ry = ry + h3 return (constraints(), constraints(zero={a.Infinity : 'a_finite', b.Infinity : 'b_finite'}, nonzero={h : 'h!=0'}), jacobianpoint(rx, ry, rz)) -def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge(branch, a, b): - """libsecp256k1's rustsecp256k1zkp_v0_10_0_gej_add_ge""" +def formula_rustsecp256k1zkp_v0_10_1_gej_add_ge(branch, a, b): + """libsecp256k1's rustsecp256k1zkp_v0_10_1_gej_add_ge""" zeroes = {} nonzeroes = {} a_infinity = False @@ -205,8 +205,8 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge(branch, a, b): nonzeroes.update({rz : 'r.z != 0'}) return (constraints(zero={b.Z - 1 : 'b.z=1', b.Infinity : 'b_finite'}), constraints(zero=zeroes, nonzero=nonzeroes), jacobianpoint(rx, ry, rz)) -def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_old(branch, a, b): - """libsecp256k1's old rustsecp256k1zkp_v0_10_0_gej_add_ge, which fails when ay+by=0 but ax!=bx""" +def formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_old(branch, a, b): + """libsecp256k1's old rustsecp256k1zkp_v0_10_1_gej_add_ge, which fails when ay+by=0 but ax!=bx""" a_infinity = (branch & 1) != 0 zero = {} nonzero = {} @@ -269,17 +269,17 @@ def formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_old(branch, a, b): if __name__ == "__main__": success = True - success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_var) - success = success & 
check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_var) - success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_zinv_var) - success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge", 0, 7, 8, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge) - success = success & (not check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_old)) + success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_var) + success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_var) + success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_zinv_var) + success = success & check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge", 0, 7, 8, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge) + success = success & (not check_symbolic_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_old)) if len(sys.argv) >= 2 and sys.argv[1] == "--exhaustive": - success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_var, 43) - success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_var, 43) - success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_0_gej_add_zinv_var, 43) - success = 
success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge", 0, 7, 8, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge, 43) - success = success & (not check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_0_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1zkp_v0_10_0_gej_add_ge_old, 43)) + success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_var, 43) + success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_var, 43) + success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_zinv_var", 0, 7, 5, formula_rustsecp256k1zkp_v0_10_1_gej_add_zinv_var, 43) + success = success & check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge", 0, 7, 8, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge, 43) + success = success & (not check_exhaustive_jacobian_weierstrass("rustsecp256k1zkp_v0_10_1_gej_add_ge_old [should fail]", 0, 7, 4, formula_rustsecp256k1zkp_v0_10_1_gej_add_ge_old, 43)) sys.exit(int(not success)) diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/asm/field_10x26_arm.s b/secp256k1-zkp-sys/depend/secp256k1/src/asm/field_10x26_arm.s index e6e7b24e..f041792a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/asm/field_10x26_arm.s +++ b/secp256k1-zkp-sys/depend/secp256k1/src/asm/field_10x26_arm.s @@ -27,9 +27,9 @@ Note: .set field_not_M, 0xfc000000 @ ~M = ~0x3ffffff .align 2 - .global rustsecp256k1zkp_v0_10_0_fe_mul_inner - .type rustsecp256k1zkp_v0_10_0_fe_mul_inner, %function - .hidden rustsecp256k1zkp_v0_10_0_fe_mul_inner + .global rustsecp256k1zkp_v0_10_1_fe_mul_inner + .type rustsecp256k1zkp_v0_10_1_fe_mul_inner, %function + .hidden rustsecp256k1zkp_v0_10_1_fe_mul_inner @ Arguments: @ r0 r Restrict: can overlap with a, not with b @ r1 a @@ -37,7 +37,7 @@ Note: @ Stack (total 4+10*4 = 
44) @ sp + #0 saved 'r' pointer @ sp + #4 + 4*X t0,t1,t2,t3,t4,t5,t6,t7,u8,t9 -rustsecp256k1zkp_v0_10_0_fe_mul_inner: +rustsecp256k1zkp_v0_10_1_fe_mul_inner: stmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, r14} sub sp, sp, #48 @ frame=44 + alignment str r0, [sp, #0] @ save result address, we need it only at the end @@ -512,19 +512,19 @@ rustsecp256k1zkp_v0_10_0_fe_mul_inner: add sp, sp, #48 ldmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, pc} - .size rustsecp256k1zkp_v0_10_0_fe_mul_inner, .-rustsecp256k1zkp_v0_10_0_fe_mul_inner + .size rustsecp256k1zkp_v0_10_1_fe_mul_inner, .-rustsecp256k1zkp_v0_10_1_fe_mul_inner .align 2 - .global rustsecp256k1zkp_v0_10_0_fe_sqr_inner - .type rustsecp256k1zkp_v0_10_0_fe_sqr_inner, %function - .hidden rustsecp256k1zkp_v0_10_0_fe_sqr_inner + .global rustsecp256k1zkp_v0_10_1_fe_sqr_inner + .type rustsecp256k1zkp_v0_10_1_fe_sqr_inner, %function + .hidden rustsecp256k1zkp_v0_10_1_fe_sqr_inner @ Arguments: @ r0 r Can overlap with a @ r1 a @ Stack (total 4+10*4 = 44) @ sp + #0 saved 'r' pointer @ sp + #4 + 4*X t0,t1,t2,t3,t4,t5,t6,t7,u8,t9 -rustsecp256k1zkp_v0_10_0_fe_sqr_inner: +rustsecp256k1zkp_v0_10_1_fe_sqr_inner: stmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, r14} sub sp, sp, #48 @ frame=44 + alignment str r0, [sp, #0] @ save result address, we need it only at the end @@ -911,6 +911,6 @@ rustsecp256k1zkp_v0_10_0_fe_sqr_inner: add sp, sp, #48 ldmfd sp!, {r4, r5, r6, r7, r8, r9, r10, r11, pc} - .size rustsecp256k1zkp_v0_10_0_fe_sqr_inner, .-rustsecp256k1zkp_v0_10_0_fe_sqr_inner + .size rustsecp256k1zkp_v0_10_1_fe_sqr_inner, .-rustsecp256k1zkp_v0_10_1_fe_sqr_inner .section .note.GNU-stack,"",%progbits diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/assumptions.h b/secp256k1-zkp-sys/depend/secp256k1/src/assumptions.h index 754041a0..985c836c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/assumptions.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/assumptions.h @@ -24,7 +24,7 @@ __attribute__((__unavailable__("Don't call this function. 
It only exists because STATIC_ASSERT cannot be used outside a function."))) # endif #endif -static void rustsecp256k1zkp_v0_10_0_assumption_checker(void) { +static void rustsecp256k1zkp_v0_10_1_assumption_checker(void) { /* Bytes are 8 bits. */ STATIC_ASSERT(CHAR_BIT == 8); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench.c index 1014c344..794d20ec 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench.c @@ -67,7 +67,7 @@ static void help(int default_iters) { } typedef struct { - rustsecp256k1zkp_v0_10_0_context *ctx; + rustsecp256k1zkp_v0_10_1_context *ctx; unsigned char msg[32]; unsigned char key[32]; unsigned char sig[72]; @@ -81,14 +81,14 @@ static void bench_verify(void* arg, int iters) { bench_data* data = (bench_data*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; data->sig[data->siglen - 1] ^= (i & 0xFF); data->sig[data->siglen - 2] ^= ((i >> 8) & 0xFF); data->sig[data->siglen - 3] ^= ((i >> 16) & 0xFF); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(data->ctx, &pubkey, data->pubkey, data->pubkeylen) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(data->ctx, &sig, data->sig, data->siglen) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(data->ctx, &sig, data->msg, &pubkey) == (i == 0)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(data->ctx, &pubkey, data->pubkey, data->pubkeylen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(data->ctx, &sig, data->sig, data->siglen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(data->ctx, &sig, data->msg, &pubkey) == (i == 0)); data->sig[data->siglen - 1] ^= (i & 0xFF); data->sig[data->siglen - 2] ^= ((i >> 8) & 0xFF); data->sig[data->siglen - 3] ^= ((i >> 16) & 0xFF); @@ -115,9 +115,9 @@ 
static void bench_sign_run(void* arg, int iters) { for (i = 0; i < iters; i++) { size_t siglen = 74; int j; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(data->ctx, &signature, data->msg, data->key, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(data->ctx, sig, &siglen, &signature)); + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(data->ctx, &signature, data->msg, data->key, NULL, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(data->ctx, sig, &siglen, &signature)); for (j = 0; j < 32; j++) { data->msg[j] = sig[j]; data->key[j] = sig[j + 32]; @@ -141,9 +141,9 @@ static void bench_keygen_run(void *arg, int iters) { for (i = 0; i < iters; i++) { unsigned char pub33[33]; size_t len = 33; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(data->ctx, &pubkey, data->key)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(data->ctx, pub33, &len, &pubkey, SECP256K1_EC_COMPRESSED)); + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(data->ctx, &pubkey, data->key)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(data->ctx, pub33, &len, &pubkey, SECP256K1_EC_COMPRESSED)); memcpy(data->key, pub33 + 1, 32); } } @@ -167,8 +167,8 @@ static void bench_keygen_run(void *arg, int iters) { int main(int argc, char** argv) { int i; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; bench_data data; int d = argc == 1; @@ -232,7 +232,7 @@ int main(int argc, char** argv) { #endif /* ECDSA benchmark */ - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); for (i = 0; i < 32; i++) { data.msg[i] = 1 + i; @@ -241,11 +241,11 
@@ int main(int argc, char** argv) { data.key[i] = 33 + i; } data.siglen = 72; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(data.ctx, &sig, data.msg, data.key, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(data.ctx, data.sig, &data.siglen, &sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(data.ctx, &pubkey, data.key)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(data.ctx, &sig, data.msg, data.key, NULL, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(data.ctx, data.sig, &data.siglen, &sig)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(data.ctx, &pubkey, data.key)); data.pubkeylen = 33; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(data.ctx, data.pubkey, &data.pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(data.ctx, data.pubkey, &data.pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED) == 1); print_output_table_header_row(); if (d || have_flag(argc, argv, "ecdsa") || have_flag(argc, argv, "verify") || have_flag(argc, argv, "ecdsa_verify")) run_benchmark("ecdsa_verify", bench_verify, NULL, NULL, &data, 10, iters); @@ -253,7 +253,7 @@ int main(int argc, char** argv) { if (d || have_flag(argc, argv, "ecdsa") || have_flag(argc, argv, "sign") || have_flag(argc, argv, "ecdsa_sign")) run_benchmark("ecdsa_sign", bench_sign_run, bench_sign_setup, NULL, &data, 10, iters); if (d || have_flag(argc, argv, "ec") || have_flag(argc, argv, "keygen") || have_flag(argc, argv, "ec_keygen")) run_benchmark("ec_keygen", bench_keygen_run, bench_keygen_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); #ifdef ENABLE_MODULE_ECDH /* ECDH benchmarks */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_bppp.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_bppp.c index b17951c1..30ad59ca 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_bppp.c +++ 
b/secp256k1-zkp-sys/depend/secp256k1/src/bench_bppp.c @@ -11,7 +11,7 @@ #include "bench.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context* ctx; + rustsecp256k1zkp_v0_10_1_context* ctx; } bench_bppp_data; static void bench_bppp_setup(void* arg) { @@ -29,10 +29,10 @@ int main(void) { bench_bppp_data data; int iters = get_iters(32); - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); run_benchmark("bppp_verify_bit", bench_bppp, bench_bppp_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); return 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_ecmult.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_ecmult.c index d4b9fdfa..1c71dee9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_ecmult.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench_ecmult.c @@ -35,14 +35,14 @@ static void help(char **argv) { typedef struct { /* Setup once in advance */ - rustsecp256k1zkp_v0_10_0_context* ctx; - rustsecp256k1zkp_v0_10_0_scratch_space* scratch; - rustsecp256k1zkp_v0_10_0_scalar* scalars; - rustsecp256k1zkp_v0_10_0_ge* pubkeys; - rustsecp256k1zkp_v0_10_0_gej* pubkeys_gej; - rustsecp256k1zkp_v0_10_0_scalar* seckeys; - rustsecp256k1zkp_v0_10_0_gej* expected_output; - rustsecp256k1zkp_v0_10_0_ecmult_multi_func ecmult_multi; + rustsecp256k1zkp_v0_10_1_context* ctx; + rustsecp256k1zkp_v0_10_1_scratch_space* scratch; + rustsecp256k1zkp_v0_10_1_scalar* scalars; + rustsecp256k1zkp_v0_10_1_ge* pubkeys; + rustsecp256k1zkp_v0_10_1_gej* pubkeys_gej; + rustsecp256k1zkp_v0_10_1_scalar* seckeys; + rustsecp256k1zkp_v0_10_1_gej* expected_output; + rustsecp256k1zkp_v0_10_1_ecmult_multi_func ecmult_multi; /* Changes per benchmark */ size_t count; @@ -54,7 +54,7 @@ typedef struct { size_t offset2; /* Benchmark output. 
*/ - rustsecp256k1zkp_v0_10_0_gej* output; + rustsecp256k1zkp_v0_10_1_gej* output; } bench_data; /* Hashes x into [0, POINTS) twice and store the result in offset1 and offset2. */ @@ -67,24 +67,24 @@ static void hash_into_offset(bench_data* data, size_t x) { * sum(outputs) ?= (sum(scalars_gen) + sum(seckeys)*sum(scalars))*G */ static void bench_ecmult_teardown_helper(bench_data* data, size_t* seckey_offset, size_t* scalar_offset, size_t* scalar_gen_offset, int iters) { int i; - rustsecp256k1zkp_v0_10_0_gej sum_output, tmp; - rustsecp256k1zkp_v0_10_0_scalar sum_scalars; + rustsecp256k1zkp_v0_10_1_gej sum_output, tmp; + rustsecp256k1zkp_v0_10_1_scalar sum_scalars; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&sum_output); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sum_scalars); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&sum_output); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sum_scalars); for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&sum_output, &sum_output, &data->output[i], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&sum_output, &sum_output, &data->output[i], NULL); if (scalar_gen_offset != NULL) { - rustsecp256k1zkp_v0_10_0_scalar_add(&sum_scalars, &sum_scalars, &data->scalars[(*scalar_gen_offset+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_scalar_add(&sum_scalars, &sum_scalars, &data->scalars[(*scalar_gen_offset+i) % POINTS]); } if (seckey_offset != NULL) { - rustsecp256k1zkp_v0_10_0_scalar s = data->seckeys[(*seckey_offset+i) % POINTS]; - rustsecp256k1zkp_v0_10_0_scalar_mul(&s, &s, &data->scalars[(*scalar_offset+i) % POINTS]); - rustsecp256k1zkp_v0_10_0_scalar_add(&sum_scalars, &sum_scalars, &s); + rustsecp256k1zkp_v0_10_1_scalar s = data->seckeys[(*seckey_offset+i) % POINTS]; + rustsecp256k1zkp_v0_10_1_scalar_mul(&s, &s, &data->scalars[(*scalar_offset+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_scalar_add(&sum_scalars, &sum_scalars, &s); } } - rustsecp256k1zkp_v0_10_0_ecmult_gen(&data->ctx->ecmult_gen_ctx, &tmp, &sum_scalars); - 
CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&tmp, &sum_output)); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&data->ctx->ecmult_gen_ctx, &tmp, &sum_scalars); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&tmp, &sum_output)); } static void bench_ecmult_setup(void* arg) { @@ -99,7 +99,7 @@ static void bench_ecmult_gen(void* arg, int iters) { int i; for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_ecmult_gen(&data->ctx->ecmult_gen_ctx, &data->output[i], &data->scalars[(data->offset1+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&data->ctx->ecmult_gen_ctx, &data->output[i], &data->scalars[(data->offset1+i) % POINTS]); } } @@ -113,7 +113,7 @@ static void bench_ecmult_const(void* arg, int iters) { int i; for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_ecmult_const(&data->output[i], &data->pubkeys[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_ecmult_const(&data->output[i], &data->pubkeys[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS]); } } @@ -127,7 +127,7 @@ static void bench_ecmult_1p(void* arg, int iters) { int i; for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_ecmult(&data->output[i], &data->pubkeys_gej[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS], NULL); + rustsecp256k1zkp_v0_10_1_ecmult(&data->output[i], &data->pubkeys_gej[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS], NULL); } } @@ -141,7 +141,7 @@ static void bench_ecmult_0p_g(void* arg, int iters) { int i; for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_ecmult(&data->output[i], NULL, &rustsecp256k1zkp_v0_10_0_scalar_zero, &data->scalars[(data->offset1+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_ecmult(&data->output[i], NULL, &rustsecp256k1zkp_v0_10_1_scalar_zero, &data->scalars[(data->offset1+i) % POINTS]); } } @@ -155,7 +155,7 @@ static void bench_ecmult_1p_g(void* arg, int iters) { int i; for (i = 0; i < iters/2; ++i) { - 
rustsecp256k1zkp_v0_10_0_ecmult(&data->output[i], &data->pubkeys_gej[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS], &data->scalars[(data->offset1+i) % POINTS]); + rustsecp256k1zkp_v0_10_1_ecmult(&data->output[i], &data->pubkeys_gej[(data->offset1+i) % POINTS], &data->scalars[(data->offset2+i) % POINTS], &data->scalars[(data->offset1+i) % POINTS]); } } @@ -181,12 +181,12 @@ static void run_ecmult_bench(bench_data* data, int iters) { run_benchmark(str, bench_ecmult_1p_g, bench_ecmult_setup, bench_ecmult_1p_g_teardown, data, 10, 2*iters); } -static int bench_ecmult_multi_callback(rustsecp256k1zkp_v0_10_0_scalar* sc, rustsecp256k1zkp_v0_10_0_ge* ge, size_t idx, void* arg) { +static int bench_ecmult_multi_callback(rustsecp256k1zkp_v0_10_1_scalar* sc, rustsecp256k1zkp_v0_10_1_ge* ge, size_t idx, void* arg) { bench_data* data = (bench_data*)arg; if (data->includes_g) ++idx; if (idx == 0) { *sc = data->scalars[data->offset1]; - *ge = rustsecp256k1zkp_v0_10_0_ge_const_g; + *ge = rustsecp256k1zkp_v0_10_1_ge_const_g; } else { *sc = data->scalars[(data->offset1 + idx) % POINTS]; *ge = data->pubkeys[(data->offset2 + idx - 1) % POINTS]; @@ -220,14 +220,14 @@ static void bench_ecmult_multi_teardown(void* arg, int iters) { iters = iters / data->count; /* Verify the results in teardown, to avoid doing comparisons while benchmarking. 
*/ for (iter = 0; iter < iters; ++iter) { - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_gej_add_var(&tmp, &data->output[iter], &data->expected_output[iter], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&tmp)); + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_gej_add_var(&tmp, &data->output[iter], &data->expected_output[iter], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&tmp)); } } -static void generate_scalar(uint32_t num, rustsecp256k1zkp_v0_10_0_scalar* scalar) { - rustsecp256k1zkp_v0_10_0_sha256 sha256; +static void generate_scalar(uint32_t num, rustsecp256k1zkp_v0_10_1_scalar* scalar) { + rustsecp256k1zkp_v0_10_1_sha256 sha256; unsigned char c[10] = {'e', 'c', 'm', 'u', 'l', 't', 0, 0, 0, 0}; unsigned char buf[32]; int overflow = 0; @@ -235,10 +235,10 @@ static void generate_scalar(uint32_t num, rustsecp256k1zkp_v0_10_0_scalar* scala c[7] = num >> 8; c[8] = num >> 16; c[9] = num >> 24; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, c, sizeof(c)); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256, buf); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(scalar, buf, &overflow); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, c, sizeof(c)); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256, buf); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(scalar, buf, &overflow); CHECK(!overflow); } @@ -253,15 +253,15 @@ static void run_ecmult_multi_bench(bench_data* data, size_t count, int includes_ /* Compute (the negation of) the expected results directly. 
*/ hash_into_offset(data, data->count); for (iter = 0; iter < iters; ++iter) { - rustsecp256k1zkp_v0_10_0_scalar tmp; - rustsecp256k1zkp_v0_10_0_scalar total = data->scalars[(data->offset1++) % POINTS]; + rustsecp256k1zkp_v0_10_1_scalar tmp; + rustsecp256k1zkp_v0_10_1_scalar total = data->scalars[(data->offset1++) % POINTS]; size_t i = 0; for (i = 0; i + 1 < count; ++i) { - rustsecp256k1zkp_v0_10_0_scalar_mul(&tmp, &data->seckeys[(data->offset2++) % POINTS], &data->scalars[(data->offset1++) % POINTS]); - rustsecp256k1zkp_v0_10_0_scalar_add(&total, &total, &tmp); + rustsecp256k1zkp_v0_10_1_scalar_mul(&tmp, &data->seckeys[(data->offset2++) % POINTS], &data->scalars[(data->offset1++) % POINTS]); + rustsecp256k1zkp_v0_10_1_scalar_add(&total, &total, &tmp); } - rustsecp256k1zkp_v0_10_0_scalar_negate(&total, &total); - rustsecp256k1zkp_v0_10_0_ecmult(&data->expected_output[iter], NULL, &rustsecp256k1zkp_v0_10_0_scalar_zero, &total); + rustsecp256k1zkp_v0_10_1_scalar_negate(&total, &total); + rustsecp256k1zkp_v0_10_1_ecmult(&data->expected_output[iter], NULL, &rustsecp256k1zkp_v0_10_1_scalar_zero, &total); } /* Run the benchmark. 
*/ @@ -280,7 +280,7 @@ int main(int argc, char **argv) { int iters = get_iters(10000); - data.ecmult_multi = rustsecp256k1zkp_v0_10_0_ecmult_multi_var; + data.ecmult_multi = rustsecp256k1zkp_v0_10_1_ecmult_multi_var; if (argc > 1) { if(have_flag(argc, argv, "-h") @@ -290,10 +290,10 @@ int main(int argc, char **argv) { return 0; } else if(have_flag(argc, argv, "pippenger_wnaf")) { printf("Using pippenger_wnaf:\n"); - data.ecmult_multi = rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch_single; + data.ecmult_multi = rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch_single; } else if(have_flag(argc, argv, "strauss_wnaf")) { printf("Using strauss_wnaf:\n"); - data.ecmult_multi = rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch_single; + data.ecmult_multi = rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch_single; } else if(have_flag(argc, argv, "simple")) { printf("Using simple algorithm:\n"); } else { @@ -303,33 +303,33 @@ int main(int argc, char **argv) { } } - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); - scratch_size = rustsecp256k1zkp_v0_10_0_strauss_scratch_size(POINTS) + STRAUSS_SCRATCH_OBJECTS*16; + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); + scratch_size = rustsecp256k1zkp_v0_10_1_strauss_scratch_size(POINTS) + STRAUSS_SCRATCH_OBJECTS*16; if (!have_flag(argc, argv, "simple")) { - data.scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(data.ctx, scratch_size); + data.scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(data.ctx, scratch_size); } else { data.scratch = NULL; } /* Allocate stuff */ - data.scalars = malloc(sizeof(rustsecp256k1zkp_v0_10_0_scalar) * POINTS); - data.seckeys = malloc(sizeof(rustsecp256k1zkp_v0_10_0_scalar) * POINTS); - data.pubkeys = malloc(sizeof(rustsecp256k1zkp_v0_10_0_ge) * POINTS); - data.pubkeys_gej = malloc(sizeof(rustsecp256k1zkp_v0_10_0_gej) * POINTS); - data.expected_output = malloc(sizeof(rustsecp256k1zkp_v0_10_0_gej) * (iters + 1)); - data.output = 
malloc(sizeof(rustsecp256k1zkp_v0_10_0_gej) * (iters + 1)); + data.scalars = malloc(sizeof(rustsecp256k1zkp_v0_10_1_scalar) * POINTS); + data.seckeys = malloc(sizeof(rustsecp256k1zkp_v0_10_1_scalar) * POINTS); + data.pubkeys = malloc(sizeof(rustsecp256k1zkp_v0_10_1_ge) * POINTS); + data.pubkeys_gej = malloc(sizeof(rustsecp256k1zkp_v0_10_1_gej) * POINTS); + data.expected_output = malloc(sizeof(rustsecp256k1zkp_v0_10_1_gej) * (iters + 1)); + data.output = malloc(sizeof(rustsecp256k1zkp_v0_10_1_gej) * (iters + 1)); /* Generate a set of scalars, and private/public keypairs. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&data.pubkeys_gej[0], &rustsecp256k1zkp_v0_10_0_ge_const_g); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&data.seckeys[0], 1); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&data.pubkeys_gej[0], &rustsecp256k1zkp_v0_10_1_ge_const_g); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&data.seckeys[0], 1); for (i = 0; i < POINTS; ++i) { generate_scalar(i, &data.scalars[i]); if (i) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&data.pubkeys_gej[i], &data.pubkeys_gej[i - 1], NULL); - rustsecp256k1zkp_v0_10_0_scalar_add(&data.seckeys[i], &data.seckeys[i - 1], &data.seckeys[i - 1]); + rustsecp256k1zkp_v0_10_1_gej_double_var(&data.pubkeys_gej[i], &data.pubkeys_gej[i - 1], NULL); + rustsecp256k1zkp_v0_10_1_scalar_add(&data.seckeys[i], &data.seckeys[i - 1], &data.seckeys[i - 1]); } } - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(data.pubkeys, data.pubkeys_gej, POINTS); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(data.pubkeys, data.pubkeys_gej, POINTS); print_output_table_header_row(); @@ -353,9 +353,9 @@ int main(int argc, char **argv) { } if (data.scratch != NULL) { - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(data.ctx, data.scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(data.ctx, data.scratch); } - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); free(data.scalars); free(data.pubkeys); free(data.pubkeys_gej); 
diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_generator.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_generator.c index b3f873db..d611babe 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_generator.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench_generator.c @@ -12,7 +12,7 @@ #include "bench.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context* ctx; + rustsecp256k1zkp_v0_10_1_context* ctx; unsigned char key[32]; unsigned char blind[32]; } bench_generator_t; @@ -28,8 +28,8 @@ static void bench_generator_generate(void* arg, int iters) { bench_generator_t *data = (bench_generator_t*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_generator gen; - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate(data->ctx, &gen, data->key)); + rustsecp256k1zkp_v0_10_1_generator gen; + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate(data->ctx, &gen, data->key)); data->key[i & 31]++; } } @@ -39,8 +39,8 @@ static void bench_generator_generate_blinded(void* arg, int iters) { bench_generator_t *data = (bench_generator_t*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_generator gen; - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(data->ctx, &gen, data->key, data->blind)); + rustsecp256k1zkp_v0_10_1_generator gen; + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(data->ctx, &gen, data->key, data->blind)); data->key[1 + (i & 30)]++; data->blind[1 + (i & 30)]++; } @@ -50,11 +50,11 @@ int main(void) { bench_generator_t data; int iters = get_iters(20000); - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); run_benchmark("generator_generate", bench_generator_generate, bench_generator_setup, NULL, &data, 10, iters); run_benchmark("generator_generate_blinded", bench_generator_generate_blinded, bench_generator_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + 
rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); return 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_internal.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_internal.c index 81ba113a..aae48f24 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_internal.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench_internal.c @@ -37,10 +37,10 @@ static void help(int default_iters) { } typedef struct { - rustsecp256k1zkp_v0_10_0_scalar scalar[2]; - rustsecp256k1zkp_v0_10_0_fe fe[4]; - rustsecp256k1zkp_v0_10_0_ge ge[2]; - rustsecp256k1zkp_v0_10_0_gej gej[2]; + rustsecp256k1zkp_v0_10_1_scalar scalar[2]; + rustsecp256k1zkp_v0_10_1_fe fe[4]; + rustsecp256k1zkp_v0_10_1_ge ge[2]; + rustsecp256k1zkp_v0_10_1_gej gej[2]; unsigned char data[64]; int wnaf[256]; } bench_inv; @@ -81,18 +81,18 @@ static void bench_setup(void* arg) { } }; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&data->scalar[0], init[0], NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&data->scalar[1], init[1], NULL); - rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&data->fe[0], init[0]); - rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&data->fe[1], init[1]); - rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&data->fe[2], init[2]); - rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&data->fe[3], init[3]); - CHECK(rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&data->ge[0], &data->fe[0], 0)); - CHECK(rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&data->ge[1], &data->fe[1], 1)); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&data->gej[0], &data->ge[0]); - rustsecp256k1zkp_v0_10_0_gej_rescale(&data->gej[0], &data->fe[2]); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&data->gej[1], &data->ge[1]); - rustsecp256k1zkp_v0_10_0_gej_rescale(&data->gej[1], &data->fe[3]); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&data->scalar[0], init[0], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&data->scalar[1], init[1], NULL); + rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&data->fe[0], init[0]); + rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&data->fe[1], init[1]); 
+ rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&data->fe[2], init[2]); + rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&data->fe[3], init[3]); + CHECK(rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&data->ge[0], &data->fe[0], 0)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&data->ge[1], &data->fe[1], 1)); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&data->gej[0], &data->ge[0]); + rustsecp256k1zkp_v0_10_1_gej_rescale(&data->gej[0], &data->fe[2]); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&data->gej[1], &data->ge[1]); + rustsecp256k1zkp_v0_10_1_gej_rescale(&data->gej[1], &data->fe[3]); memcpy(data->data, init[0], 32); memcpy(data->data + 32, init[1], 32); } @@ -102,7 +102,7 @@ static void bench_scalar_add(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - j += rustsecp256k1zkp_v0_10_0_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); + j += rustsecp256k1zkp_v0_10_1_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); } CHECK(j <= iters); } @@ -112,7 +112,7 @@ static void bench_scalar_negate(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&data->scalar[0], &data->scalar[0]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&data->scalar[0], &data->scalar[0]); } } @@ -121,17 +121,17 @@ static void bench_scalar_sqr(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_sqr(&data->scalar[0], &data->scalar[0]); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&data->scalar[0], &data->scalar[0]); } } static void bench_scalar_half(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_scalar s = data->scalar[0]; + rustsecp256k1zkp_v0_10_1_scalar s = data->scalar[0]; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_half(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_half(&s, &s); } data->scalar[0] = s; @@ -142,18 +142,18 @@ static void 
bench_scalar_mul(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_mul(&data->scalar[0], &data->scalar[0], &data->scalar[1]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&data->scalar[0], &data->scalar[0], &data->scalar[1]); } } static void bench_scalar_split(void* arg, int iters) { int i, j = 0; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_scalar tmp; + rustsecp256k1zkp_v0_10_1_scalar tmp; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_split_lambda(&tmp, &data->scalar[1], &data->scalar[0]); - j += rustsecp256k1zkp_v0_10_0_scalar_add(&data->scalar[0], &tmp, &data->scalar[1]); + rustsecp256k1zkp_v0_10_1_scalar_split_lambda(&tmp, &data->scalar[1], &data->scalar[0]); + j += rustsecp256k1zkp_v0_10_1_scalar_add(&data->scalar[0], &tmp, &data->scalar[1]); } CHECK(j <= iters); } @@ -163,8 +163,8 @@ static void bench_scalar_inverse(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_inverse(&data->scalar[0], &data->scalar[0]); - j += rustsecp256k1zkp_v0_10_0_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&data->scalar[0], &data->scalar[0]); + j += rustsecp256k1zkp_v0_10_1_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); } CHECK(j <= iters); } @@ -174,8 +174,8 @@ static void bench_scalar_inverse_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&data->scalar[0], &data->scalar[0]); - j += rustsecp256k1zkp_v0_10_0_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&data->scalar[0], &data->scalar[0]); + j += rustsecp256k1zkp_v0_10_1_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); } CHECK(j <= iters); } @@ -185,7 +185,7 @@ static void bench_field_half(void* arg, int 
iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_half(&data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_half(&data->fe[0]); } } @@ -194,7 +194,7 @@ static void bench_field_normalize(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_normalize(&data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_normalize(&data->fe[0]); } } @@ -203,7 +203,7 @@ static void bench_field_normalize_weak(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&data->fe[0]); } } @@ -212,7 +212,7 @@ static void bench_field_mul(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_mul(&data->fe[0], &data->fe[0], &data->fe[1]); + rustsecp256k1zkp_v0_10_1_fe_mul(&data->fe[0], &data->fe[0], &data->fe[1]); } } @@ -221,7 +221,7 @@ static void bench_field_sqr(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&data->fe[0], &data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_sqr(&data->fe[0], &data->fe[0]); } } @@ -230,8 +230,8 @@ static void bench_field_inverse(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_inv(&data->fe[0], &data->fe[0]); - rustsecp256k1zkp_v0_10_0_fe_add(&data->fe[0], &data->fe[1]); + rustsecp256k1zkp_v0_10_1_fe_inv(&data->fe[0], &data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_add(&data->fe[0], &data->fe[1]); } } @@ -240,20 +240,20 @@ static void bench_field_inverse_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_fe_inv_var(&data->fe[0], &data->fe[0]); - rustsecp256k1zkp_v0_10_0_fe_add(&data->fe[0], &data->fe[1]); + rustsecp256k1zkp_v0_10_1_fe_inv_var(&data->fe[0], 
&data->fe[0]); + rustsecp256k1zkp_v0_10_1_fe_add(&data->fe[0], &data->fe[1]); } } static void bench_field_sqrt(void* arg, int iters) { int i, j = 0; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_fe t; + rustsecp256k1zkp_v0_10_1_fe t; for (i = 0; i < iters; i++) { t = data->fe[0]; - j += rustsecp256k1zkp_v0_10_0_fe_sqrt(&data->fe[0], &t); - rustsecp256k1zkp_v0_10_0_fe_add(&data->fe[0], &data->fe[1]); + j += rustsecp256k1zkp_v0_10_1_fe_sqrt(&data->fe[0], &t); + rustsecp256k1zkp_v0_10_1_fe_add(&data->fe[0], &data->fe[1]); } CHECK(j <= iters); } @@ -261,12 +261,12 @@ static void bench_field_sqrt(void* arg, int iters) { static void bench_field_is_square_var(void* arg, int iters) { int i, j = 0; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_fe t = data->fe[0]; + rustsecp256k1zkp_v0_10_1_fe t = data->fe[0]; for (i = 0; i < iters; i++) { - j += rustsecp256k1zkp_v0_10_0_fe_is_square_var(&t); - rustsecp256k1zkp_v0_10_0_fe_add(&t, &data->fe[1]); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&t); + j += rustsecp256k1zkp_v0_10_1_fe_is_square_var(&t); + rustsecp256k1zkp_v0_10_1_fe_add(&t, &data->fe[1]); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&t); } CHECK(j <= iters); } @@ -276,7 +276,7 @@ static void bench_group_double_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&data->gej[0], &data->gej[0], NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&data->gej[0], &data->gej[0], NULL); } } @@ -285,7 +285,7 @@ static void bench_group_add_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&data->gej[0], &data->gej[0], &data->gej[1], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&data->gej[0], &data->gej[0], &data->gej[1], NULL); } } @@ -294,7 +294,7 @@ static void bench_group_add_affine(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - 
rustsecp256k1zkp_v0_10_0_gej_add_ge(&data->gej[0], &data->gej[0], &data->ge[1]); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&data->gej[0], &data->gej[0], &data->ge[1]); } } @@ -303,7 +303,7 @@ static void bench_group_add_affine_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&data->gej[0], &data->gej[0], &data->ge[1], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&data->gej[0], &data->gej[0], &data->ge[1], NULL); } } @@ -312,17 +312,17 @@ static void bench_group_jacobi_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - j += rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&data->gej[0]); + j += rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&data->gej[0]); /* Vary the Y and Z coordinates of the input (the X coordinate doesn't matter to - rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var). Note that the resulting coordinates will + rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var). Note that the resulting coordinates will generally not correspond to a point on the curve, but this is not a problem for the code being benchmarked here. Adding and normalizing have less overhead than EC operations (which could guarantee the point remains on the curve). 
*/ - rustsecp256k1zkp_v0_10_0_fe_add(&data->gej[0].y, &data->fe[1]); - rustsecp256k1zkp_v0_10_0_fe_add(&data->gej[0].z, &data->fe[2]); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&data->gej[0].y); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&data->gej[0].z); + rustsecp256k1zkp_v0_10_1_fe_add(&data->gej[0].y, &data->fe[1]); + rustsecp256k1zkp_v0_10_1_fe_add(&data->gej[0].z, &data->fe[2]); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&data->gej[0].y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&data->gej[0].z); } CHECK(j <= iters); } @@ -332,7 +332,7 @@ static void bench_group_add_zinv_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(&data->gej[0], &data->gej[0], &data->ge[1], &data->gej[0].y); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(&data->gej[0], &data->gej[0], &data->ge[1], &data->gej[0].y); } } @@ -341,16 +341,16 @@ static void bench_group_to_affine_var(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; ++i) { - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&data->ge[1], &data->gej[0]); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&data->ge[1], &data->gej[0]); /* Use the output affine X/Y coordinates to vary the input X/Y/Z coordinates. Similar to bench_group_jacobi_var, this approach does not result in coordinates of points on the curve. 
*/ - rustsecp256k1zkp_v0_10_0_fe_add(&data->gej[0].x, &data->ge[1].y); - rustsecp256k1zkp_v0_10_0_fe_add(&data->gej[0].y, &data->fe[2]); - rustsecp256k1zkp_v0_10_0_fe_add(&data->gej[0].z, &data->ge[1].x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&data->gej[0].x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&data->gej[0].y); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&data->gej[0].z); + rustsecp256k1zkp_v0_10_1_fe_add(&data->gej[0].x, &data->ge[1].y); + rustsecp256k1zkp_v0_10_1_fe_add(&data->gej[0].y, &data->fe[2]); + rustsecp256k1zkp_v0_10_1_fe_add(&data->gej[0].z, &data->ge[1].x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&data->gej[0].x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&data->gej[0].y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&data->gej[0].z); } } @@ -359,8 +359,8 @@ static void bench_ecmult_wnaf(void* arg, int iters) { bench_inv *data = (bench_inv*)arg; for (i = 0; i < iters; i++) { - bits += rustsecp256k1zkp_v0_10_0_ecmult_wnaf(data->wnaf, 256, &data->scalar[0], WINDOW_A); - overflow += rustsecp256k1zkp_v0_10_0_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); + bits += rustsecp256k1zkp_v0_10_1_ecmult_wnaf(data->wnaf, 256, &data->scalar[0], WINDOW_A); + overflow += rustsecp256k1zkp_v0_10_1_scalar_add(&data->scalar[0], &data->scalar[0], &data->scalar[1]); } CHECK(overflow >= 0); CHECK(bits <= 256*iters); @@ -369,35 +369,35 @@ static void bench_ecmult_wnaf(void* arg, int iters) { static void bench_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, data->data, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, data->data); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, data->data, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, data->data); } 
} static void bench_hmac_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_hmac_sha256 hmac; + rustsecp256k1zkp_v0_10_1_hmac_sha256 hmac; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, data->data, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, data->data, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, data->data); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, data->data, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, data->data, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, data->data); } } static void bench_rfc6979_hmac_sha256(void* arg, int iters) { int i; bench_inv *data = (bench_inv*)arg; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, data->data, 64); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, data->data, 32); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, data->data, 64); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, data->data, 32); } } @@ -405,7 +405,7 @@ static void bench_context(void* arg, int iters) { int i; (void)arg; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_context_destroy(rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE)); + rustsecp256k1zkp_v0_10_1_context_destroy(rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE)); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_rangeproof.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_rangeproof.c index a40e6b1b..56403cb4 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_rangeproof.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench_rangeproof.c @@ -11,8 +11,8 @@ #include "bench.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context* ctx; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; + 
rustsecp256k1zkp_v0_10_1_context* ctx; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; unsigned char proof[5134]; unsigned char blind[32]; size_t len; @@ -28,10 +28,10 @@ static void bench_rangeproof_setup(void* arg) { data->v = 0; for (i = 0; i < 32; i++) data->blind[i] = i + 1; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(data->ctx, &data->commit, data->blind, data->v, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(data->ctx, &data->commit, data->blind, data->v, rustsecp256k1zkp_v0_10_1_generator_h)); data->len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(data->ctx, data->proof, &data->len, 0, &data->commit, data->blind, (const unsigned char*)&data->commit, 0, data->min_bits, data->v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(data->ctx, &minv, &maxv, &data->commit, data->proof, data->len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(data->ctx, data->proof, &data->len, 0, &data->commit, data->blind, (const unsigned char*)&data->commit, 0, data->min_bits, data->v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(data->ctx, &minv, &maxv, &data->commit, data->proof, data->len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); } static void bench_rangeproof(void* arg, int iters) { @@ -42,7 +42,7 @@ static void bench_rangeproof(void* arg, int iters) { int j; uint64_t minv; uint64_t maxv; - j = rustsecp256k1zkp_v0_10_0_rangeproof_verify(data->ctx, &minv, &maxv, &data->commit, data->proof, data->len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h); + j = rustsecp256k1zkp_v0_10_1_rangeproof_verify(data->ctx, &minv, &maxv, &data->commit, data->proof, data->len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h); for (j = 0; j < 4; j++) { data->proof[j + 2 + 32 *((data->min_bits + 1) >> 1) - 4] = (i >> 8)&255; } @@ -53,13 +53,13 @@ int 
main(void) { bench_rangeproof_t data; int iters; - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); data.min_bits = 32; iters = data.min_bits*get_iters(32); run_benchmark("rangeproof_verify_bit", bench_rangeproof, bench_rangeproof_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); return 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/bench_whitelist.c b/secp256k1-zkp-sys/depend/secp256k1/src/bench_whitelist.c index b27839fe..f6d82b2b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/bench_whitelist.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/bench_whitelist.c @@ -18,14 +18,14 @@ #define MAX_N_KEYS 30 typedef struct { - rustsecp256k1zkp_v0_10_0_context* ctx; + rustsecp256k1zkp_v0_10_1_context* ctx; unsigned char online_seckey[MAX_N_KEYS][32]; unsigned char summed_seckey[MAX_N_KEYS][32]; - rustsecp256k1zkp_v0_10_0_pubkey online_pubkeys[MAX_N_KEYS]; - rustsecp256k1zkp_v0_10_0_pubkey offline_pubkeys[MAX_N_KEYS]; + rustsecp256k1zkp_v0_10_1_pubkey online_pubkeys[MAX_N_KEYS]; + rustsecp256k1zkp_v0_10_1_pubkey offline_pubkeys[MAX_N_KEYS]; unsigned char csub[32]; - rustsecp256k1zkp_v0_10_0_pubkey sub_pubkey; - rustsecp256k1zkp_v0_10_0_whitelist_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey sub_pubkey; + rustsecp256k1zkp_v0_10_1_whitelist_signature sig; size_t n_keys; } bench_data; @@ -33,14 +33,14 @@ static void bench_whitelist(void* arg, int iters) { bench_data* data = (bench_data*)arg; int i; for (i = 0; i < iters; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(data->ctx, &data->sig, data->online_pubkeys, data->offline_pubkeys, data->n_keys, &data->sub_pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(data->ctx, &data->sig, data->online_pubkeys, data->offline_pubkeys, data->n_keys, &data->sub_pubkey) == 1); } } static void 
bench_whitelist_setup(void* arg) { bench_data* data = (bench_data*)arg; int i = 0; - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_sign(data->ctx, &data->sig, data->online_pubkeys, data->offline_pubkeys, data->n_keys, &data->sub_pubkey, data->online_seckey[i], data->summed_seckey[i], i)); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_sign(data->ctx, &data->sig, data->online_pubkeys, data->offline_pubkeys, data->n_keys, &data->sub_pubkey, data->online_seckey[i], data->summed_seckey[i], i)); } static void run_test(bench_data* data, int iters) { @@ -49,13 +49,13 @@ static void run_test(bench_data* data, int iters) { run_benchmark(str, bench_whitelist, bench_whitelist_setup, NULL, data, 100, iters); } -static void random_scalar_order(rustsecp256k1zkp_v0_10_0_scalar *num) { +static void random_scalar_order(rustsecp256k1zkp_v0_10_1_scalar *num) { do { unsigned char b32[32]; int overflow = 0; - rustsecp256k1zkp_v0_10_0_testrand256(b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(num, b32, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(num)) { + rustsecp256k1zkp_v0_10_1_testrand256(b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(num, b32, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(num)) { continue; } break; @@ -66,35 +66,35 @@ int main(void) { bench_data data; size_t i; size_t n_keys = 30; - rustsecp256k1zkp_v0_10_0_scalar ssub; + rustsecp256k1zkp_v0_10_1_scalar ssub; int iters = get_iters(5); - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); /* Start with subkey */ random_scalar_order(&ssub); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(data.csub, &ssub); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(data.ctx, data.csub) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(data.ctx, &data.sub_pubkey, data.csub) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(data.csub, &ssub); + 
CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(data.ctx, data.csub) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(data.ctx, &data.sub_pubkey, data.csub) == 1); /* Then offline and online whitelist keys */ for (i = 0; i < n_keys; i++) { - rustsecp256k1zkp_v0_10_0_scalar son, soff; + rustsecp256k1zkp_v0_10_1_scalar son, soff; /* Create two keys */ random_scalar_order(&son); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(data.online_seckey[i], &son); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(data.ctx, data.online_seckey[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(data.ctx, &data.online_pubkeys[i], data.online_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(data.online_seckey[i], &son); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(data.ctx, data.online_seckey[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(data.ctx, &data.online_pubkeys[i], data.online_seckey[i]) == 1); random_scalar_order(&soff); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(data.summed_seckey[i], &soff); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(data.ctx, data.summed_seckey[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(data.ctx, &data.offline_pubkeys[i], data.summed_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(data.summed_seckey[i], &soff); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(data.ctx, data.summed_seckey[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(data.ctx, &data.offline_pubkeys[i], data.summed_seckey[i]) == 1); /* Make summed_seckey correspond to the sum of offline_pubkey and sub_pubkey */ - rustsecp256k1zkp_v0_10_0_scalar_add(&soff, &soff, &ssub); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(data.summed_seckey[i], &soff); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(data.ctx, data.summed_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_add(&soff, &soff, &ssub); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(data.summed_seckey[i], &soff); + 
CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(data.ctx, data.summed_seckey[i]) == 1); } /* Run test */ @@ -103,6 +103,6 @@ int main(void) { run_test(&data, iters); } - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); return(0); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ctime_tests.c b/secp256k1-zkp-sys/depend/secp256k1/src/ctime_tests.c index 0b9cccc4..270dc70a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ctime_tests.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ctime_tests.c @@ -31,6 +31,10 @@ #include "../include/secp256k1_schnorrsig.h" #endif +#ifdef ENABLE_MODULE_SCHNORR_ADAPTOR +#include "../include/secp256k1_schnorr_adaptor.h" +#endif + #ifdef ENABLE_MODULE_ELLSWIFT #include "../include/secp256k1_ellswift.h" #endif @@ -47,10 +51,10 @@ #include "../include/secp256k1_musig.h" #endif -static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key); +static void run_tests(rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *key); int main(void) { - rustsecp256k1zkp_v0_10_0_context* ctx; + rustsecp256k1zkp_v0_10_1_context* ctx; unsigned char key[32]; int ret, i; @@ -59,7 +63,7 @@ int main(void) { fprintf(stderr, "Usage: libtool --mode=execute valgrind ./ctime_tests\n"); return 1; } - ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_DECLASSIFY); + ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_DECLASSIFY); /** In theory, testing with a single secret input should be sufficient: * If control flow depended on secrets the tool would generate an error. */ @@ -72,17 +76,17 @@ int main(void) { /* Test context randomisation. Do this last because it leaves the context * tainted. 
*/ SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_context_randomize(ctx, key); + ret = rustsecp256k1zkp_v0_10_1_context_randomize(ctx, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret); - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); return 0; } -static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) { - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; +static void run_tests(rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *key) { + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; size_t siglen = 74; size_t outputlen = 33; int i; @@ -91,11 +95,11 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) unsigned char sig[74]; unsigned char spubkey[33]; #ifdef ENABLE_MODULE_RECOVERY - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature recoverable_signature; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature recoverable_signature; int recid; #endif #ifdef ENABLE_MODULE_EXTRAKEYS - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_keypair keypair; #endif #ifdef ENABLE_MODULE_ELLSWIFT unsigned char ellswift[64]; @@ -108,24 +112,24 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) /* Test keygen. 
*/ SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey, key); - SECP256K1_CHECKMEM_DEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey, key); + SECP256K1_CHECKMEM_DEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, spubkey, &outputlen, &pubkey, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, spubkey, &outputlen, &pubkey, SECP256K1_EC_COMPRESSED) == 1); /* Test signing. */ SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign(ctx, &signature, msg, key, NULL, NULL); - SECP256K1_CHECKMEM_DEFINE(&signature, sizeof(rustsecp256k1zkp_v0_10_0_ecdsa_signature)); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sign(ctx, &signature, msg, key, NULL, NULL); + SECP256K1_CHECKMEM_DEFINE(&signature, sizeof(rustsecp256k1zkp_v0_10_1_ecdsa_signature)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(ctx, sig, &siglen, &signature)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(ctx, sig, &siglen, &signature)); #ifdef ENABLE_MODULE_ECDH /* Test ECDH. */ SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdh(ctx, msg, &pubkey, key, NULL, NULL); + ret = rustsecp256k1zkp_v0_10_1_ecdh(ctx, msg, &pubkey, key, NULL, NULL); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); #endif @@ -133,87 +137,128 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) #ifdef ENABLE_MODULE_RECOVERY /* Test signing a recoverable signature. 
*/ SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(ctx, &recoverable_signature, msg, key, NULL, NULL); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(ctx, &recoverable_signature, msg, key, NULL, NULL); SECP256K1_CHECKMEM_DEFINE(&recoverable_signature, sizeof(recoverable_signature)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(ctx, sig, &recid, &recoverable_signature)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(ctx, sig, &recid, &recoverable_signature)); CHECK(recid >= 0 && recid <= 3); #endif SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ec_seckey_verify(ctx, key); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_verify(ctx, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ec_seckey_negate(ctx, key); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_negate(ctx, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(msg, 32); - ret = rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(ctx, key, msg); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, key, msg); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(msg, 32); - ret = rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(ctx, key, msg); + ret = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(ctx, key, msg); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); /* Test keypair_create and keypair_xonly_tweak_add. 
*/ #ifdef ENABLE_MODULE_EXTRAKEYS SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair, key); + ret = rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); /* The tweak is not treated as a secret in keypair_tweak_add */ SECP256K1_CHECKMEM_DEFINE(msg, 32); - ret = rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(ctx, &keypair, msg); + ret = rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(ctx, &keypair, msg); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(&keypair, sizeof(keypair)); - ret = rustsecp256k1zkp_v0_10_0_keypair_sec(ctx, key, &keypair); + ret = rustsecp256k1zkp_v0_10_1_keypair_sec(ctx, key, &keypair); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); #endif #ifdef ENABLE_MODULE_SCHNORRSIG SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair, key); + ret = rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); - ret = rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(ctx, sig, msg, &keypair, NULL); + ret = rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(ctx, sig, msg, &keypair, NULL); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); #endif +#ifdef ENABLE_MODULE_SCHNORR_ADAPTOR + { + unsigned char pre_sig[65]; + unsigned char bip340_sig[64]; + unsigned char sec_adaptor[32]; + unsigned char extracted_sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey adaptor_pk; + + for (i = 0; i < 32; i++) { + sec_adaptor[i] = i + 2; + } + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &adaptor_pk, sec_adaptor); + CHECK(ret == 1); + + SECP256K1_CHECKMEM_UNDEFINE(key, 32); + ret = rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, key); + SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); + CHECK(ret == 1); + ret = 
rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(ctx, pre_sig, msg, &keypair, &adaptor_pk, NULL); + SECP256K1_CHECKMEM_DEFINE(pre_sig, sizeof(pre_sig)); + SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); + CHECK(ret == 1); + + SECP256K1_CHECKMEM_UNDEFINE(sec_adaptor, sizeof(sec_adaptor)); + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(ctx, bip340_sig, pre_sig, sec_adaptor); + SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); + CHECK(ret == 1); + + SECP256K1_CHECKMEM_UNDEFINE(bip340_sig, sizeof(bip340_sig)); + ret = rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(ctx, extracted_sec_adaptor, pre_sig, bip340_sig); + SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); + CHECK(ret == 1); + + SECP256K1_CHECKMEM_DEFINE(sec_adaptor, sizeof(sec_adaptor)); + SECP256K1_CHECKMEM_DEFINE(extracted_sec_adaptor, sizeof(extracted_sec_adaptor)); + ret = rustsecp256k1zkp_v0_10_1_memcmp_var(sec_adaptor, extracted_sec_adaptor, sizeof(sec_adaptor)); + SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); + CHECK(ret == 0); + } +#endif + #ifdef ENABLE_MODULE_ELLSWIFT SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(ctx, ellswift, key, NULL); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(ctx, ellswift, key, NULL); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(ctx, ellswift, key, ellswift); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(ctx, ellswift, key, ellswift); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); for (i = 0; i < 2; i++) { SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_DEFINE(&ellswift, sizeof(ellswift)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(ctx, msg, ellswift, ellswift, key, i, rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324, NULL); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(ctx, msg, ellswift, ellswift, key, i, rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324, NULL); 
SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_DEFINE(&ellswift, sizeof(ellswift)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(ctx, msg, ellswift, ellswift, key, i, rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_prefix, (void *)prefix); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(ctx, msg, ellswift, ellswift, key, i, rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_prefix, (void *)prefix); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); } @@ -223,22 +268,22 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) { unsigned char s2c_data[32] = {0}; unsigned char s2c_data_comm[32] = {0}; - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(s2c_data, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(ctx, &signature, &s2c_opening, msg, key, s2c_data); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(ctx, &signature, &s2c_opening, msg, key, s2c_data); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(s2c_data, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(ctx, s2c_data_comm, s2c_data); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(ctx, s2c_data_comm, s2c_data); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(s2c_data, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(ctx, &s2c_opening, msg, key, s2c_data); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(ctx, &s2c_opening, msg, key, s2c_data); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); } @@ -249,34 +294,34 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) unsigned char adaptor_sig[162]; unsigned char deckey[32]; unsigned 
char expected_deckey[32]; - rustsecp256k1zkp_v0_10_0_pubkey enckey; + rustsecp256k1zkp_v0_10_1_pubkey enckey; for (i = 0; i < 32; i++) { deckey[i] = i + 2; } - ret = rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &enckey, deckey); + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &enckey, deckey); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(key, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(ctx, adaptor_sig, key, &enckey, msg, NULL, NULL); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(ctx, adaptor_sig, key, &enckey, msg, NULL, NULL); SECP256K1_CHECKMEM_DEFINE(adaptor_sig, sizeof(adaptor_sig)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(deckey, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(ctx, &signature, deckey, adaptor_sig); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(ctx, &signature, deckey, adaptor_sig); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_UNDEFINE(&signature, 32); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(ctx, expected_deckey, &signature, adaptor_sig, &enckey); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(ctx, expected_deckey, &signature, adaptor_sig, &enckey); SECP256K1_CHECKMEM_DEFINE(expected_deckey, sizeof(expected_deckey)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_DEFINE(deckey, sizeof(deckey)); - ret = rustsecp256k1zkp_v0_10_0_memcmp_var(deckey, expected_deckey, sizeof(expected_deckey)); + ret = rustsecp256k1zkp_v0_10_1_memcmp_var(deckey, expected_deckey, sizeof(expected_deckey)); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 0); } @@ -284,21 +329,21 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) #ifdef ENABLE_MODULE_MUSIG { - rustsecp256k1zkp_v0_10_0_pubkey pk; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[1]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk; + rustsecp256k1zkp_v0_10_1_pubkey pk; 
+ const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[1]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk; unsigned char session_id[32]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[1]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache cache; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_ptr[1]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[1]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache cache; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_ptr[1]; unsigned char extra_input[32]; unsigned char sec_adaptor[32]; - rustsecp256k1zkp_v0_10_0_pubkey adaptor; + rustsecp256k1zkp_v0_10_1_pubkey adaptor; unsigned char pre_sig[64]; int nonce_parity; @@ -313,38 +358,38 @@ static void run_tests(rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *key) sec_adaptor[0] = extra_input[0] + 3; partial_sig_ptr[0] = &partial_sig; - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair, key)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(ctx, &pk, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(ctx, NULL, &agg_pk, &cache, pk_ptr, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &adaptor, sec_adaptor)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, key)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(ctx, &pk, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(ctx, NULL, &agg_pk, &cache, pk_ptr, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, 
&adaptor, sec_adaptor)); SECP256K1_CHECKMEM_UNDEFINE(key, 32); SECP256K1_CHECKMEM_UNDEFINE(session_id, sizeof(session_id)); SECP256K1_CHECKMEM_UNDEFINE(extra_input, sizeof(extra_input)); SECP256K1_CHECKMEM_UNDEFINE(sec_adaptor, sizeof(sec_adaptor)); - ret = rustsecp256k1zkp_v0_10_0_musig_nonce_gen(ctx, &secnonce, &pubnonce, session_id, key, &pk, msg, &cache, extra_input); + ret = rustsecp256k1zkp_v0_10_1_musig_nonce_gen(ctx, &secnonce, &pubnonce, session_id, key, &pk, msg, &cache, extra_input); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(ctx, &aggnonce, pubnonce_ptr, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(ctx, &aggnonce, pubnonce_ptr, 1)); /* Make sure that previous tests don't undefine msg. It's not used as a secret here. */ SECP256K1_CHECKMEM_DEFINE(msg, sizeof(msg)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(ctx, &session, &aggnonce, msg, &cache, &adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(ctx, &session, &aggnonce, msg, &cache, &adaptor) == 1); - ret = rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair, key); + ret = rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair, key); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); - ret = rustsecp256k1zkp_v0_10_0_musig_partial_sign(ctx, &partial_sig, &secnonce, &keypair, &cache, &session); + ret = rustsecp256k1zkp_v0_10_1_musig_partial_sign(ctx, &partial_sig, &secnonce, &keypair, &cache, &session); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); SECP256K1_CHECKMEM_DEFINE(&partial_sig, sizeof(partial_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(ctx, pre_sig, &session, partial_sig_ptr, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(ctx, pre_sig, &session, partial_sig_ptr, 1)); SECP256K1_CHECKMEM_DEFINE(pre_sig, sizeof(pre_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_parity(ctx, &nonce_parity, &session)); - ret = 
rustsecp256k1zkp_v0_10_0_musig_adapt(ctx, sig, pre_sig, sec_adaptor, nonce_parity); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_parity(ctx, &nonce_parity, &session)); + ret = rustsecp256k1zkp_v0_10_1_musig_adapt(ctx, sig, pre_sig, sec_adaptor, nonce_parity); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); - ret = rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(ctx, sec_adaptor, sig, pre_sig, nonce_parity); + ret = rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(ctx, sec_adaptor, sig, pre_sig, nonce_parity); SECP256K1_CHECKMEM_DEFINE(&ret, sizeof(ret)); CHECK(ret == 1); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/eccommit.h b/secp256k1-zkp-sys/depend/secp256k1/src/eccommit.h index 332f0a9c..0f8d974e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/eccommit.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/eccommit.h @@ -8,21 +8,21 @@ #define SECP256K1_ECCOMMIT_H /** Helper function to add a 32-byte value to a scalar */ -static int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_0_scalar *sec, const unsigned char *tweak); +static int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_1_scalar *sec, const unsigned char *tweak); /** Helper function to add a 32-byte value, times G, to an EC point */ -static int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(const rustsecp256k1zkp_v0_10_0_ecmult_context* ecmult_ctx, rustsecp256k1zkp_v0_10_0_ge *p, const unsigned char *tweak); +static int rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(const rustsecp256k1zkp_v0_10_1_ecmult_context* ecmult_ctx, rustsecp256k1zkp_v0_10_1_ge *p, const unsigned char *tweak); /** Serializes elem as a 33 byte array. This is non-constant time with respect to * whether pubp is the point at infinity. Thus, you may need to declassify * pubp->infinity before calling this function. 
*/ -static int rustsecp256k1zkp_v0_10_0_ec_commit_pubkey_serialize_const(rustsecp256k1zkp_v0_10_0_ge *pubp, unsigned char *buf33); +static int rustsecp256k1zkp_v0_10_1_ec_commit_pubkey_serialize_const(rustsecp256k1zkp_v0_10_1_ge *pubp, unsigned char *buf33); /** Compute an ec commitment tweak as hash(pubkey, data). */ -static int rustsecp256k1zkp_v0_10_0_ec_commit_tweak(unsigned char *tweak32, rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size); +static int rustsecp256k1zkp_v0_10_1_ec_commit_tweak(unsigned char *tweak32, rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size); /** Compute an ec commitment as pubkey + hash(pubkey, data)*G. */ -static int rustsecp256k1zkp_v0_10_0_ec_commit(const rustsecp256k1zkp_v0_10_0_ecmult_context* ecmult_ctx, rustsecp256k1zkp_v0_10_0_ge* commitp, const rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size); +static int rustsecp256k1zkp_v0_10_1_ec_commit(const rustsecp256k1zkp_v0_10_1_ecmult_context* ecmult_ctx, rustsecp256k1zkp_v0_10_1_ge* commitp, const rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size); /** Compute a secret key commitment as seckey + hash(pubkey, data). 
*/ -static int rustsecp256k1zkp_v0_10_0_ec_commit_seckey(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ecmult_gen_ctx, rustsecp256k1zkp_v0_10_0_scalar* seckey, rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size); +static int rustsecp256k1zkp_v0_10_1_ec_commit_seckey(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ecmult_gen_ctx, rustsecp256k1zkp_v0_10_1_scalar* seckey, rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size); /** Verify an ec commitment as pubkey + hash(pubkey, data)*G ?= commitment. */ -static int rustsecp256k1zkp_v0_10_0_ec_commit_verify(const rustsecp256k1zkp_v0_10_0_ecmult_context* ecmult_ctx, const rustsecp256k1zkp_v0_10_0_ge* commitp, const rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size); +static int rustsecp256k1zkp_v0_10_1_ec_commit_verify(const rustsecp256k1zkp_v0_10_1_ecmult_context* ecmult_ctx, const rustsecp256k1zkp_v0_10_1_ge* commitp, const rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size); #endif /* SECP256K1_ECCOMMIT_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/eccommit_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/eccommit_impl.h index c9b5fc57..01ffb6ba 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/eccommit_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/eccommit_impl.h @@ -10,64 +10,64 @@ #include "hash.h" /* from secp256k1.c */ -static int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_0_scalar *sec, const unsigned char *tweak); -static int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(rustsecp256k1zkp_v0_10_0_ge *pubp, const unsigned char *tweak); +static int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_1_scalar *sec, const unsigned char *tweak); +static int 
rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(rustsecp256k1zkp_v0_10_1_ge *pubp, const unsigned char *tweak); -static int rustsecp256k1zkp_v0_10_0_ec_commit_pubkey_serialize_const(rustsecp256k1zkp_v0_10_0_ge *pubp, unsigned char *buf33) { - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(pubp)) { +static int rustsecp256k1zkp_v0_10_1_ec_commit_pubkey_serialize_const(rustsecp256k1zkp_v0_10_1_ge *pubp, unsigned char *buf33) { + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(pubp)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_normalize(&pubp->x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&pubp->y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&buf33[1], &pubp->x); - buf33[0] = rustsecp256k1zkp_v0_10_0_fe_is_odd(&pubp->y) ? SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN; + rustsecp256k1zkp_v0_10_1_fe_normalize(&pubp->x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&pubp->y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&buf33[1], &pubp->x); + buf33[0] = rustsecp256k1zkp_v0_10_1_fe_is_odd(&pubp->y) ? SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN; return 1; } /* Compute an ec commitment tweak as hash(pubp, data). 
*/ -static int rustsecp256k1zkp_v0_10_0_ec_commit_tweak(unsigned char *tweak32, rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size) +static int rustsecp256k1zkp_v0_10_1_ec_commit_tweak(unsigned char *tweak32, rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char rbuf[33]; - if (!rustsecp256k1zkp_v0_10_0_ec_commit_pubkey_serialize_const(pubp, rbuf)) { + if (!rustsecp256k1zkp_v0_10_1_ec_commit_pubkey_serialize_const(pubp, rbuf)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(sha, rbuf, sizeof(rbuf)); - rustsecp256k1zkp_v0_10_0_sha256_write(sha, data, data_size); - rustsecp256k1zkp_v0_10_0_sha256_finalize(sha, tweak32); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, rbuf, sizeof(rbuf)); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, data, data_size); + rustsecp256k1zkp_v0_10_1_sha256_finalize(sha, tweak32); return 1; } /* Compute an ec commitment as pubp + hash(pubp, data)*G. */ -static int rustsecp256k1zkp_v0_10_0_ec_commit(rustsecp256k1zkp_v0_10_0_ge* commitp, const rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size) { +static int rustsecp256k1zkp_v0_10_1_ec_commit(rustsecp256k1zkp_v0_10_1_ge* commitp, const rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char tweak[32]; *commitp = *pubp; - return rustsecp256k1zkp_v0_10_0_ec_commit_tweak(tweak, commitp, sha, data, data_size) - && rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(commitp, tweak); + return rustsecp256k1zkp_v0_10_1_ec_commit_tweak(tweak, commitp, sha, data, data_size) + && rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(commitp, tweak); } /* Compute the seckey of an ec commitment from the original secret key of the pubkey as seckey + * hash(pubp, data). 
*/ -static int rustsecp256k1zkp_v0_10_0_ec_commit_seckey(rustsecp256k1zkp_v0_10_0_scalar* seckey, rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size) { +static int rustsecp256k1zkp_v0_10_1_ec_commit_seckey(rustsecp256k1zkp_v0_10_1_scalar* seckey, rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size) { unsigned char tweak[32]; - return rustsecp256k1zkp_v0_10_0_ec_commit_tweak(tweak, pubp, sha, data, data_size) - && rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(seckey, tweak); + return rustsecp256k1zkp_v0_10_1_ec_commit_tweak(tweak, pubp, sha, data, data_size) + && rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(seckey, tweak); } /* Verify an ec commitment as pubp + hash(pubp, data)*G ?= commitment. */ -static int rustsecp256k1zkp_v0_10_0_ec_commit_verify(const rustsecp256k1zkp_v0_10_0_ge* commitp, const rustsecp256k1zkp_v0_10_0_ge* pubp, rustsecp256k1zkp_v0_10_0_sha256* sha, const unsigned char *data, size_t data_size) { - rustsecp256k1zkp_v0_10_0_gej pj; - rustsecp256k1zkp_v0_10_0_ge p; +static int rustsecp256k1zkp_v0_10_1_ec_commit_verify(const rustsecp256k1zkp_v0_10_1_ge* commitp, const rustsecp256k1zkp_v0_10_1_ge* pubp, rustsecp256k1zkp_v0_10_1_sha256* sha, const unsigned char *data, size_t data_size) { + rustsecp256k1zkp_v0_10_1_gej pj; + rustsecp256k1zkp_v0_10_1_ge p; - if (!rustsecp256k1zkp_v0_10_0_ec_commit(&p, pubp, sha, data, data_size)) { + if (!rustsecp256k1zkp_v0_10_1_ec_commit(&p, pubp, sha, data, data_size)) { return 0; } /* Return p == commitp */ - rustsecp256k1zkp_v0_10_0_ge_neg(&p, &p); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pj, &p); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&pj, &pj, commitp, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pj); + rustsecp256k1zkp_v0_10_1_ge_neg(&p, &p); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pj, &p); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&pj, &pj, commitp, NULL); 
+ return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pj); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa.h index f7376280..0ed0567c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa.h @@ -13,9 +13,9 @@ #include "group.h" #include "ecmult.h" -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_0_scalar *r, rustsecp256k1zkp_v0_10_0_scalar *s, const unsigned char *sig, size_t size); -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *s); -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(const rustsecp256k1zkp_v0_10_0_scalar* r, const rustsecp256k1zkp_v0_10_0_scalar* s, const rustsecp256k1zkp_v0_10_0_ge *pubkey, const rustsecp256k1zkp_v0_10_0_scalar *message); -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_0_scalar* r, rustsecp256k1zkp_v0_10_0_scalar* s, const rustsecp256k1zkp_v0_10_0_scalar *seckey, const rustsecp256k1zkp_v0_10_0_scalar *message, const rustsecp256k1zkp_v0_10_0_scalar *nonce, int *recid); +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_1_scalar *r, rustsecp256k1zkp_v0_10_1_scalar *s, const unsigned char *sig, size_t size); +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *s); +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(const rustsecp256k1zkp_v0_10_1_scalar* r, const rustsecp256k1zkp_v0_10_1_scalar* s, const rustsecp256k1zkp_v0_10_1_ge *pubkey, const rustsecp256k1zkp_v0_10_1_scalar *message); +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_1_scalar* r, 
rustsecp256k1zkp_v0_10_1_scalar* s, const rustsecp256k1zkp_v0_10_1_scalar *seckey, const rustsecp256k1zkp_v0_10_1_scalar *message, const rustsecp256k1zkp_v0_10_1_scalar *nonce, int *recid); #endif /* SECP256K1_ECDSA_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa_impl.h index 46366f3a..52e00545 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecdsa_impl.h @@ -16,24 +16,24 @@ #include "ecdsa.h" /** Group order for secp256k1 defined as 'n' in "Standards for Efficient Cryptography" (SEC2) 2.7.1 - * $ sage -c 'load("rustsecp256k1zkp_v0_10_0_params.sage"); print(hex(N))' + * $ sage -c 'load("rustsecp256k1zkp_v0_10_1_params.sage"); print(hex(N))' * 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ecdsa_const_order_as_fe = SECP256K1_FE_CONST( +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ecdsa_const_order_as_fe = SECP256K1_FE_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL, 0xBAAEDCE6UL, 0xAF48A03BUL, 0xBFD25E8CUL, 0xD0364141UL ); /** Difference between field and order, values 'p' and 'n' values defined in * "Standards for Efficient Cryptography" (SEC2) 2.7.1. 
- * $ sage -c 'load("rustsecp256k1zkp_v0_10_0_params.sage"); print(hex(P-N))' + * $ sage -c 'load("rustsecp256k1zkp_v0_10_1_params.sage"); print(hex(P-N))' * 0x14551231950b75fc4402da1722fc9baee */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ecdsa_const_p_minus_order = SECP256K1_FE_CONST( +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ecdsa_const_p_minus_order = SECP256K1_FE_CONST( 0, 0, 0, 1, 0x45512319UL, 0x50B75FC4UL, 0x402DA172UL, 0x2FC9BAEEUL ); -static int rustsecp256k1zkp_v0_10_0_der_read_len(size_t *len, const unsigned char **sigp, const unsigned char *sigend) { +static int rustsecp256k1zkp_v0_10_1_der_read_len(size_t *len, const unsigned char **sigp, const unsigned char *sigend) { size_t lenleft; unsigned char b1; VERIFY_CHECK(len != NULL); @@ -87,7 +87,7 @@ static int rustsecp256k1zkp_v0_10_0_der_read_len(size_t *len, const unsigned cha return 1; } -static int rustsecp256k1zkp_v0_10_0_der_parse_integer(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char **sig, const unsigned char *sigend) { +static int rustsecp256k1zkp_v0_10_1_der_parse_integer(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char **sig, const unsigned char *sigend) { int overflow = 0; unsigned char ra[32] = {0}; size_t rlen; @@ -97,7 +97,7 @@ static int rustsecp256k1zkp_v0_10_0_der_parse_integer(rustsecp256k1zkp_v0_10_0_s return 0; } (*sig)++; - if (rustsecp256k1zkp_v0_10_0_der_read_len(&rlen, sig, sigend) == 0) { + if (rustsecp256k1zkp_v0_10_1_der_read_len(&rlen, sig, sigend) == 0) { return 0; } if (rlen == 0 || rlen > (size_t)(sigend - *sig)) { @@ -129,23 +129,23 @@ static int rustsecp256k1zkp_v0_10_0_der_parse_integer(rustsecp256k1zkp_v0_10_0_s } if (!overflow) { if (rlen) memcpy(ra + 32 - rlen, *sig, rlen); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, ra, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, ra, &overflow); } if (overflow) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(r, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(r, 
0); } (*sig) += rlen; return 1; } -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_0_scalar *rr, rustsecp256k1zkp_v0_10_0_scalar *rs, const unsigned char *sig, size_t size) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_1_scalar *rr, rustsecp256k1zkp_v0_10_1_scalar *rs, const unsigned char *sig, size_t size) { const unsigned char *sigend = sig + size; size_t rlen; if (sig == sigend || *(sig++) != 0x30) { /* The encoding doesn't start with a constructed sequence (X.690-0207 8.9.1). */ return 0; } - if (rustsecp256k1zkp_v0_10_0_der_read_len(&rlen, &sig, sigend) == 0) { + if (rustsecp256k1zkp_v0_10_1_der_read_len(&rlen, &sig, sigend) == 0) { return 0; } if (rlen != (size_t)(sigend - sig)) { @@ -153,10 +153,10 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_0_sca return 0; } - if (!rustsecp256k1zkp_v0_10_0_der_parse_integer(rr, &sig, sigend)) { + if (!rustsecp256k1zkp_v0_10_1_der_parse_integer(rr, &sig, sigend)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_der_parse_integer(rs, &sig, sigend)) { + if (!rustsecp256k1zkp_v0_10_1_der_parse_integer(rs, &sig, sigend)) { return 0; } @@ -168,12 +168,12 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_parse(rustsecp256k1zkp_v0_10_0_sca return 1; } -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1zkp_v0_10_0_scalar* ar, const rustsecp256k1zkp_v0_10_0_scalar* as) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_serialize(unsigned char *sig, size_t *size, const rustsecp256k1zkp_v0_10_1_scalar* ar, const rustsecp256k1zkp_v0_10_1_scalar* as) { unsigned char r[33] = {0}, s[33] = {0}; unsigned char *rp = r, *sp = s; size_t lenR = 33, lenS = 33; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&r[1], ar); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&s[1], as); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&r[1], ar); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&s[1], as); while (lenR > 1 && rp[0] == 0 
&& rp[1] < 0x80) { lenR--; rp++; } while (lenS > 1 && sp[0] == 0 && sp[1] < 0x80) { lenS--; sp++; } if (*size < 6+lenS+lenR) { @@ -192,43 +192,43 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_serialize(unsigned char *sig, size return 1; } -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(const rustsecp256k1zkp_v0_10_0_scalar *sigr, const rustsecp256k1zkp_v0_10_0_scalar *sigs, const rustsecp256k1zkp_v0_10_0_ge *pubkey, const rustsecp256k1zkp_v0_10_0_scalar *message) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(const rustsecp256k1zkp_v0_10_1_scalar *sigr, const rustsecp256k1zkp_v0_10_1_scalar *sigs, const rustsecp256k1zkp_v0_10_1_ge *pubkey, const rustsecp256k1zkp_v0_10_1_scalar *message) { unsigned char c[32]; - rustsecp256k1zkp_v0_10_0_scalar sn, u1, u2; + rustsecp256k1zkp_v0_10_1_scalar sn, u1, u2; #if !defined(EXHAUSTIVE_TEST_ORDER) - rustsecp256k1zkp_v0_10_0_fe xr; + rustsecp256k1zkp_v0_10_1_fe xr; #endif - rustsecp256k1zkp_v0_10_0_gej pubkeyj; - rustsecp256k1zkp_v0_10_0_gej pr; + rustsecp256k1zkp_v0_10_1_gej pubkeyj; + rustsecp256k1zkp_v0_10_1_gej pr; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigr) || rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigs)) { + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigr) || rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigs)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&sn, sigs); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u1, &sn, message); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u2, &sn, sigr); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pubkeyj, pubkey); - rustsecp256k1zkp_v0_10_0_ecmult(&pr, &pubkeyj, &u2, &u1); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pr)) { + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&sn, sigs); + rustsecp256k1zkp_v0_10_1_scalar_mul(&u1, &sn, message); + rustsecp256k1zkp_v0_10_1_scalar_mul(&u2, &sn, sigr); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pubkeyj, pubkey); + rustsecp256k1zkp_v0_10_1_ecmult(&pr, &pubkeyj, &u2, &u1); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pr)) { 
return 0; } #if defined(EXHAUSTIVE_TEST_ORDER) { - rustsecp256k1zkp_v0_10_0_scalar computed_r; - rustsecp256k1zkp_v0_10_0_ge pr_ge; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pr_ge, &pr); - rustsecp256k1zkp_v0_10_0_fe_normalize(&pr_ge.x); + rustsecp256k1zkp_v0_10_1_scalar computed_r; + rustsecp256k1zkp_v0_10_1_ge pr_ge; + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pr_ge, &pr); + rustsecp256k1zkp_v0_10_1_fe_normalize(&pr_ge.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(c, &pr_ge.x); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&computed_r, c, NULL); - return rustsecp256k1zkp_v0_10_0_scalar_eq(sigr, &computed_r); + rustsecp256k1zkp_v0_10_1_fe_get_b32(c, &pr_ge.x); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&computed_r, c, NULL); + return rustsecp256k1zkp_v0_10_1_scalar_eq(sigr, &computed_r); } #else - rustsecp256k1zkp_v0_10_0_scalar_get_b32(c, sigr); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(c, sigr); /* we can ignore the fe_set_b32_limit return value, because we know the input is in range */ - (void)rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&xr, c); + (void)rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&xr, c); /** We now have the recomputed R point in pr, and its claimed x coordinate (modulo n) * in xr. Naively, we would extract the x coordinate from pr (requiring a inversion modulo p), @@ -244,18 +244,18 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(const rustsecp256k1zkp_v0_1 * <=> (xr * pr.z^2 mod p == pr.x) || (xr + n < p && (xr + n) * pr.z^2 mod p == pr.x) * * Thus, we can avoid the inversion, but we have to check both cases separately. - * rustsecp256k1zkp_v0_10_0_gej_eq_x implements the (xr * pr.z^2 mod p == pr.x) test. + * rustsecp256k1zkp_v0_10_1_gej_eq_x implements the (xr * pr.z^2 mod p == pr.x) test. */ - if (rustsecp256k1zkp_v0_10_0_gej_eq_x_var(&xr, &pr)) { + if (rustsecp256k1zkp_v0_10_1_gej_eq_x_var(&xr, &pr)) { /* xr * pr.z^2 mod p == pr.x, so the signature is valid. 
*/ return 1; } - if (rustsecp256k1zkp_v0_10_0_fe_cmp_var(&xr, &rustsecp256k1zkp_v0_10_0_ecdsa_const_p_minus_order) >= 0) { + if (rustsecp256k1zkp_v0_10_1_fe_cmp_var(&xr, &rustsecp256k1zkp_v0_10_1_ecdsa_const_p_minus_order) >= 0) { /* xr + n >= p, so we can skip testing the second case. */ return 0; } - rustsecp256k1zkp_v0_10_0_fe_add(&xr, &rustsecp256k1zkp_v0_10_0_ecdsa_const_order_as_fe); - if (rustsecp256k1zkp_v0_10_0_gej_eq_x_var(&xr, &pr)) { + rustsecp256k1zkp_v0_10_1_fe_add(&xr, &rustsecp256k1zkp_v0_10_1_ecdsa_const_order_as_fe); + if (rustsecp256k1zkp_v0_10_1_gej_eq_x_var(&xr, &pr)) { /* (xr + n) * pr.z^2 mod p == pr.x, so the signature is valid. */ return 1; } @@ -263,42 +263,42 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(const rustsecp256k1zkp_v0_1 #endif } -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_0_scalar *sigr, rustsecp256k1zkp_v0_10_0_scalar *sigs, const rustsecp256k1zkp_v0_10_0_scalar *seckey, const rustsecp256k1zkp_v0_10_0_scalar *message, const rustsecp256k1zkp_v0_10_0_scalar *nonce, int *recid) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_1_scalar *sigr, rustsecp256k1zkp_v0_10_1_scalar *sigs, const rustsecp256k1zkp_v0_10_1_scalar *seckey, const rustsecp256k1zkp_v0_10_1_scalar *message, const rustsecp256k1zkp_v0_10_1_scalar *nonce, int *recid) { unsigned char b[32]; - rustsecp256k1zkp_v0_10_0_gej rp; - rustsecp256k1zkp_v0_10_0_ge r; - rustsecp256k1zkp_v0_10_0_scalar n; + rustsecp256k1zkp_v0_10_1_gej rp; + rustsecp256k1zkp_v0_10_1_ge r; + rustsecp256k1zkp_v0_10_1_scalar n; int overflow = 0; int high; - rustsecp256k1zkp_v0_10_0_ecmult_gen(ctx, &rp, nonce); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r, &rp); - rustsecp256k1zkp_v0_10_0_fe_normalize(&r.x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&r.y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(b, &r.x); - 
rustsecp256k1zkp_v0_10_0_scalar_set_b32(sigr, b, &overflow); + rustsecp256k1zkp_v0_10_1_ecmult_gen(ctx, &rp, nonce); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rp); + rustsecp256k1zkp_v0_10_1_fe_normalize(&r.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&r.y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(b, &r.x); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(sigr, b, &overflow); if (recid) { /* The overflow condition is cryptographically unreachable as hitting it requires finding the discrete log * of some P where P.x >= order, and only 1 in about 2^127 points meet this criteria. */ - *recid = (overflow << 1) | rustsecp256k1zkp_v0_10_0_fe_is_odd(&r.y); - } - rustsecp256k1zkp_v0_10_0_scalar_mul(&n, sigr, seckey); - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, message); - rustsecp256k1zkp_v0_10_0_scalar_inverse(sigs, nonce); - rustsecp256k1zkp_v0_10_0_scalar_mul(sigs, sigs, &n); - rustsecp256k1zkp_v0_10_0_scalar_clear(&n); - rustsecp256k1zkp_v0_10_0_gej_clear(&rp); - rustsecp256k1zkp_v0_10_0_ge_clear(&r); - high = rustsecp256k1zkp_v0_10_0_scalar_is_high(sigs); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(sigs, high); + *recid = (overflow << 1) | rustsecp256k1zkp_v0_10_1_fe_is_odd(&r.y); + } + rustsecp256k1zkp_v0_10_1_scalar_mul(&n, sigr, seckey); + rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, message); + rustsecp256k1zkp_v0_10_1_scalar_inverse(sigs, nonce); + rustsecp256k1zkp_v0_10_1_scalar_mul(sigs, sigs, &n); + rustsecp256k1zkp_v0_10_1_scalar_clear(&n); + rustsecp256k1zkp_v0_10_1_gej_clear(&rp); + rustsecp256k1zkp_v0_10_1_ge_clear(&r); + high = rustsecp256k1zkp_v0_10_1_scalar_is_high(sigs); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(sigs, high); if (recid) { *recid ^= high; } /* P.x = order is on the curve, so technically sig->r could end up being zero, which would be an invalid signature. * This is cryptographically unreachable as hitting it requires finding the discrete log of P.x = N. 
*/ - return (int)(!rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigr)) & (int)(!rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigs)); + return (int)(!rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigr)) & (int)(!rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigs)); } #endif /* SECP256K1_ECDSA_IMPL_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/eckey.h b/secp256k1-zkp-sys/depend/secp256k1/src/eckey.h index 4bad71af..1426a2e6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/eckey.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/eckey.h @@ -14,12 +14,12 @@ #include "ecmult.h" #include "ecmult_gen.h" -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(rustsecp256k1zkp_v0_10_0_ge *elem, const unsigned char *pub, size_t size); -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(rustsecp256k1zkp_v0_10_0_ge *elem, unsigned char *pub, size_t *size, int compressed); +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(rustsecp256k1zkp_v0_10_1_ge *elem, const unsigned char *pub, size_t size); +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(rustsecp256k1zkp_v0_10_1_ge *elem, unsigned char *pub, size_t *size, int compressed); -static int rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_add(rustsecp256k1zkp_v0_10_0_scalar *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak); -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(rustsecp256k1zkp_v0_10_0_ge *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak); -static int rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_mul(rustsecp256k1zkp_v0_10_0_scalar *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak); -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_mul(rustsecp256k1zkp_v0_10_0_ge *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak); +static int rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_add(rustsecp256k1zkp_v0_10_1_scalar *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak); +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(rustsecp256k1zkp_v0_10_1_ge *key, const rustsecp256k1zkp_v0_10_1_scalar 
*tweak); +static int rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_mul(rustsecp256k1zkp_v0_10_1_scalar *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak); +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_mul(rustsecp256k1zkp_v0_10_1_ge *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak); #endif /* SECP256K1_ECKEY_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/eckey_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/eckey_impl.h index 04913ff8..06ecc372 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/eckey_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/eckey_impl.h @@ -14,78 +14,78 @@ #include "group.h" #include "ecmult_gen.h" -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(rustsecp256k1zkp_v0_10_0_ge *elem, const unsigned char *pub, size_t size) { +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(rustsecp256k1zkp_v0_10_1_ge *elem, const unsigned char *pub, size_t size) { if (size == 33 && (pub[0] == SECP256K1_TAG_PUBKEY_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_ODD)) { - rustsecp256k1zkp_v0_10_0_fe x; - return rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&x, pub+1) && rustsecp256k1zkp_v0_10_0_ge_set_xo_var(elem, &x, pub[0] == SECP256K1_TAG_PUBKEY_ODD); + rustsecp256k1zkp_v0_10_1_fe x; + return rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&x, pub+1) && rustsecp256k1zkp_v0_10_1_ge_set_xo_var(elem, &x, pub[0] == SECP256K1_TAG_PUBKEY_ODD); } else if (size == 65 && (pub[0] == SECP256K1_TAG_PUBKEY_UNCOMPRESSED || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) { - rustsecp256k1zkp_v0_10_0_fe x, y; - if (!rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&x, pub+1) || !rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&y, pub+33)) { + rustsecp256k1zkp_v0_10_1_fe x, y; + if (!rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&x, pub+1) || !rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&y, pub+33)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_xy(elem, &x, &y); + rustsecp256k1zkp_v0_10_1_ge_set_xy(elem, &x, &y); if ((pub[0] == 
SECP256K1_TAG_PUBKEY_HYBRID_EVEN || pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD) && - rustsecp256k1zkp_v0_10_0_fe_is_odd(&y) != (pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) { + rustsecp256k1zkp_v0_10_1_fe_is_odd(&y) != (pub[0] == SECP256K1_TAG_PUBKEY_HYBRID_ODD)) { return 0; } - return rustsecp256k1zkp_v0_10_0_ge_is_valid_var(elem); + return rustsecp256k1zkp_v0_10_1_ge_is_valid_var(elem); } else { return 0; } } -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(rustsecp256k1zkp_v0_10_0_ge *elem, unsigned char *pub, size_t *size, int compressed) { - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(elem)) { +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(rustsecp256k1zkp_v0_10_1_ge *elem, unsigned char *pub, size_t *size, int compressed) { + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(elem)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&elem->x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&elem->y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&pub[1], &elem->x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&elem->x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&elem->y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&pub[1], &elem->x); if (compressed) { *size = 33; - pub[0] = rustsecp256k1zkp_v0_10_0_fe_is_odd(&elem->y) ? SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN; + pub[0] = rustsecp256k1zkp_v0_10_1_fe_is_odd(&elem->y) ? 
SECP256K1_TAG_PUBKEY_ODD : SECP256K1_TAG_PUBKEY_EVEN; } else { *size = 65; pub[0] = SECP256K1_TAG_PUBKEY_UNCOMPRESSED; - rustsecp256k1zkp_v0_10_0_fe_get_b32(&pub[33], &elem->y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&pub[33], &elem->y); } return 1; } -static int rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_add(rustsecp256k1zkp_v0_10_0_scalar *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak) { - rustsecp256k1zkp_v0_10_0_scalar_add(key, key, tweak); - return !rustsecp256k1zkp_v0_10_0_scalar_is_zero(key); +static int rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_add(rustsecp256k1zkp_v0_10_1_scalar *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak) { + rustsecp256k1zkp_v0_10_1_scalar_add(key, key, tweak); + return !rustsecp256k1zkp_v0_10_1_scalar_is_zero(key); } -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(rustsecp256k1zkp_v0_10_0_ge *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak) { - rustsecp256k1zkp_v0_10_0_gej pt; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pt, key); - rustsecp256k1zkp_v0_10_0_ecmult(&pt, &pt, &rustsecp256k1zkp_v0_10_0_scalar_one, tweak); +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(rustsecp256k1zkp_v0_10_1_ge *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak) { + rustsecp256k1zkp_v0_10_1_gej pt; + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pt, key); + rustsecp256k1zkp_v0_10_1_ecmult(&pt, &pt, &rustsecp256k1zkp_v0_10_1_scalar_one, tweak); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pt)) { + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pt)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej(key, &pt); + rustsecp256k1zkp_v0_10_1_ge_set_gej(key, &pt); return 1; } -static int rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_mul(rustsecp256k1zkp_v0_10_0_scalar *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak) { +static int rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_mul(rustsecp256k1zkp_v0_10_1_scalar *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak) { int ret; - ret = !rustsecp256k1zkp_v0_10_0_scalar_is_zero(tweak); + 
ret = !rustsecp256k1zkp_v0_10_1_scalar_is_zero(tweak); - rustsecp256k1zkp_v0_10_0_scalar_mul(key, key, tweak); + rustsecp256k1zkp_v0_10_1_scalar_mul(key, key, tweak); return ret; } -static int rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_mul(rustsecp256k1zkp_v0_10_0_ge *key, const rustsecp256k1zkp_v0_10_0_scalar *tweak) { - rustsecp256k1zkp_v0_10_0_gej pt; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(tweak)) { +static int rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_mul(rustsecp256k1zkp_v0_10_1_ge *key, const rustsecp256k1zkp_v0_10_1_scalar *tweak) { + rustsecp256k1zkp_v0_10_1_gej pt; + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(tweak)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pt, key); - rustsecp256k1zkp_v0_10_0_ecmult(&pt, &pt, tweak, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ge_set_gej(key, &pt); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pt, key); + rustsecp256k1zkp_v0_10_1_ecmult(&pt, &pt, tweak, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ge_set_gej(key, &pt); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult.h index c2578eda..9a8ca3ac 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult.h @@ -41,9 +41,9 @@ #define ECMULT_TABLE_SIZE(w) (1L << ((w)-2)) /** Double multiply: R = na*A + ng*G */ -static void rustsecp256k1zkp_v0_10_0_ecmult(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_scalar *na, const rustsecp256k1zkp_v0_10_0_scalar *ng); +static void rustsecp256k1zkp_v0_10_1_ecmult(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_scalar *na, const rustsecp256k1zkp_v0_10_1_scalar *ng); -typedef int (rustsecp256k1zkp_v0_10_0_ecmult_multi_callback)(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *data); +typedef int 
(rustsecp256k1zkp_v0_10_1_ecmult_multi_callback)(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *data); /** * Multi-multiply: R = inp_g_sc * G + sum_i ni * Ai. @@ -56,6 +56,6 @@ typedef int (rustsecp256k1zkp_v0_10_0_ecmult_multi_callback)(rustsecp256k1zkp_v0 * 0 if there is not enough scratch space for a single point or * callback returns 0 */ -static int rustsecp256k1zkp_v0_10_0_ecmult_multi_var(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n); +static int rustsecp256k1zkp_v0_10_1_ecmult_multi_var(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n); #endif /* SECP256K1_ECMULT_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table.h index 278eff9e..5352673e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table.h @@ -8,9 +8,9 @@ #define SECP256K1_ECMULT_COMPUTE_TABLE_H /* Construct table of all odd multiples of gen in range 1..(2**(window_g-1)-1). */ -static void rustsecp256k1zkp_v0_10_0_ecmult_compute_table(rustsecp256k1zkp_v0_10_0_ge_storage* table, int window_g, const rustsecp256k1zkp_v0_10_0_gej* gen); +static void rustsecp256k1zkp_v0_10_1_ecmult_compute_table(rustsecp256k1zkp_v0_10_1_ge_storage* table, int window_g, const rustsecp256k1zkp_v0_10_1_gej* gen); -/* Like rustsecp256k1zkp_v0_10_0_ecmult_compute_table, but one for both gen and gen*2^128. 
*/ -static void rustsecp256k1zkp_v0_10_0_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_0_ge_storage* table, rustsecp256k1zkp_v0_10_0_ge_storage* table_128, int window_g, const rustsecp256k1zkp_v0_10_0_ge* gen); +/* Like rustsecp256k1zkp_v0_10_1_ecmult_compute_table, but one for both gen and gen*2^128. */ +static void rustsecp256k1zkp_v0_10_1_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_1_ge_storage* table, rustsecp256k1zkp_v0_10_1_ge_storage* table_128, int window_g, const rustsecp256k1zkp_v0_10_1_ge* gen); #endif /* SECP256K1_ECMULT_COMPUTE_TABLE_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table_impl.h index 2e096bfd..22f1b15d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_compute_table_impl.h @@ -13,37 +13,37 @@ #include "ecmult.h" #include "util.h" -static void rustsecp256k1zkp_v0_10_0_ecmult_compute_table(rustsecp256k1zkp_v0_10_0_ge_storage* table, int window_g, const rustsecp256k1zkp_v0_10_0_gej* gen) { - rustsecp256k1zkp_v0_10_0_gej gj; - rustsecp256k1zkp_v0_10_0_ge ge, dgen; +static void rustsecp256k1zkp_v0_10_1_ecmult_compute_table(rustsecp256k1zkp_v0_10_1_ge_storage* table, int window_g, const rustsecp256k1zkp_v0_10_1_gej* gen) { + rustsecp256k1zkp_v0_10_1_gej gj; + rustsecp256k1zkp_v0_10_1_ge ge, dgen; int j; gj = *gen; - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&ge, &gj); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&table[0], &ge); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&ge, &gj); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&table[0], &ge); - rustsecp256k1zkp_v0_10_0_gej_double_var(&gj, gen, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&dgen, &gj); + rustsecp256k1zkp_v0_10_1_gej_double_var(&gj, gen, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&dgen, &gj); for (j = 1; j < ECMULT_TABLE_SIZE(window_g); ++j) { - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gj, &ge); - 
rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&gj, &gj, &dgen, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&ge, &gj); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&table[j], &ge); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gj, &ge); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&gj, &gj, &dgen, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&ge, &gj); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&table[j], &ge); } } -/* Like rustsecp256k1zkp_v0_10_0_ecmult_compute_table, but one for both gen and gen*2^128. */ -static void rustsecp256k1zkp_v0_10_0_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_0_ge_storage* table, rustsecp256k1zkp_v0_10_0_ge_storage* table_128, int window_g, const rustsecp256k1zkp_v0_10_0_ge* gen) { - rustsecp256k1zkp_v0_10_0_gej gj; +/* Like rustsecp256k1zkp_v0_10_1_ecmult_compute_table, but one for both gen and gen*2^128. */ +static void rustsecp256k1zkp_v0_10_1_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_1_ge_storage* table, rustsecp256k1zkp_v0_10_1_ge_storage* table_128, int window_g, const rustsecp256k1zkp_v0_10_1_ge* gen) { + rustsecp256k1zkp_v0_10_1_gej gj; int i; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gj, gen); - rustsecp256k1zkp_v0_10_0_ecmult_compute_table(table, window_g, &gj); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gj, gen); + rustsecp256k1zkp_v0_10_1_ecmult_compute_table(table, window_g, &gj); for (i = 0; i < 128; ++i) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&gj, &gj, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&gj, &gj, NULL); } - rustsecp256k1zkp_v0_10_0_ecmult_compute_table(table_128, window_g, &gj); + rustsecp256k1zkp_v0_10_1_ecmult_compute_table(table_128, window_g, &gj); } #endif /* SECP256K1_ECMULT_COMPUTE_TABLE_IMPL_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const.h index 383ec56a..ec3fd037 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const.h @@ -13,10 +13,10 @@ /** * Multiply: R 
= q*A (in constant-time for q) */ -static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_scalar *q); +static void rustsecp256k1zkp_v0_10_1_ecmult_const(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_scalar *q); /** - * Same as rustsecp256k1zkp_v0_10_0_ecmult_const, but takes in an x coordinate of the base point + * Same as rustsecp256k1zkp_v0_10_1_ecmult_const, but takes in an x coordinate of the base point * only, specified as fraction n/d (numerator/denominator). Only the x coordinate of the result is * returned. * @@ -27,11 +27,11 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej * * * Constant time in the value of q, but not any other inputs. */ -static int rustsecp256k1zkp_v0_10_0_ecmult_const_xonly( - rustsecp256k1zkp_v0_10_0_fe *r, - const rustsecp256k1zkp_v0_10_0_fe *n, - const rustsecp256k1zkp_v0_10_0_fe *d, - const rustsecp256k1zkp_v0_10_0_scalar *q, +static int rustsecp256k1zkp_v0_10_1_ecmult_const_xonly( + rustsecp256k1zkp_v0_10_1_fe *r, + const rustsecp256k1zkp_v0_10_1_fe *n, + const rustsecp256k1zkp_v0_10_1_fe *d, + const rustsecp256k1zkp_v0_10_1_scalar *q, int known_on_curve ); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const_impl.h index bd57dbda..5e27f10b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_const_impl.h @@ -41,11 +41,11 @@ * * 'pre' must be an array of size ECMULT_CONST_TABLE_SIZE. 
*/ -static void rustsecp256k1zkp_v0_10_0_ecmult_const_odd_multiples_table_globalz(rustsecp256k1zkp_v0_10_0_ge *pre, rustsecp256k1zkp_v0_10_0_fe *globalz, const rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_fe zr[ECMULT_CONST_TABLE_SIZE]; +static void rustsecp256k1zkp_v0_10_1_ecmult_const_odd_multiples_table_globalz(rustsecp256k1zkp_v0_10_1_ge *pre, rustsecp256k1zkp_v0_10_1_fe *globalz, const rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_fe zr[ECMULT_CONST_TABLE_SIZE]; - rustsecp256k1zkp_v0_10_0_ecmult_odd_multiples_table(ECMULT_CONST_TABLE_SIZE, pre, zr, globalz, a); - rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(ECMULT_CONST_TABLE_SIZE, pre, zr); + rustsecp256k1zkp_v0_10_1_ecmult_odd_multiples_table(ECMULT_CONST_TABLE_SIZE, pre, zr, globalz, a); + rustsecp256k1zkp_v0_10_1_ge_table_set_globalz(ECMULT_CONST_TABLE_SIZE, pre, zr); } /* Given a table 'pre' with odd multiples of a point, put in r the signed-bit multiplication of n with that point. @@ -84,7 +84,7 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const_odd_multiples_table_globalz(ru * = sum((1 - n[i]) * 2^i, i=0..l-2) */ \ unsigned int index = ((unsigned int)(-negative) ^ n) & ((1U << (ECMULT_CONST_GROUP_SIZE - 1)) - 1U); \ - rustsecp256k1zkp_v0_10_0_fe neg_y; \ + rustsecp256k1zkp_v0_10_1_fe neg_y; \ VERIFY_CHECK((n) < (1U << ECMULT_CONST_GROUP_SIZE)); \ VERIFY_CHECK(index < (1U << (ECMULT_CONST_GROUP_SIZE - 1))); \ /* Unconditionally set r->x = (pre)[m].x. r->y = (pre)[m].y. because it's either the correct one @@ -94,34 +94,34 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const_odd_multiples_table_globalz(ru for (m = 1; m < ECMULT_CONST_TABLE_SIZE; m++) { \ /* This loop is used to avoid secret data in array indices. See * the comment in ecmult_gen_impl.h for rationale. 
*/ \ - rustsecp256k1zkp_v0_10_0_fe_cmov(&(r)->x, &(pre)[m].x, m == index); \ - rustsecp256k1zkp_v0_10_0_fe_cmov(&(r)->y, &(pre)[m].y, m == index); \ + rustsecp256k1zkp_v0_10_1_fe_cmov(&(r)->x, &(pre)[m].x, m == index); \ + rustsecp256k1zkp_v0_10_1_fe_cmov(&(r)->y, &(pre)[m].y, m == index); \ } \ (r)->infinity = 0; \ - rustsecp256k1zkp_v0_10_0_fe_negate(&neg_y, &(r)->y, 1); \ - rustsecp256k1zkp_v0_10_0_fe_cmov(&(r)->y, &neg_y, negative); \ + rustsecp256k1zkp_v0_10_1_fe_negate(&neg_y, &(r)->y, 1); \ + rustsecp256k1zkp_v0_10_1_fe_cmov(&(r)->y, &neg_y, negative); \ } while(0) -/* For K as defined in the comment of rustsecp256k1zkp_v0_10_0_ecmult_const, we have several precomputed +/* For K as defined in the comment of rustsecp256k1zkp_v0_10_1_ecmult_const, we have several precomputed * formulas/constants. * - in exhaustive test mode, we give an explicit expression to compute it at compile time: */ #ifdef EXHAUSTIVE_TEST_ORDER -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_ecmult_const_K = ((SECP256K1_SCALAR_CONST(0, 0, 0, (1U << (ECMULT_CONST_BITS - 128)) - 2U, 0, 0, 0, 0) + EXHAUSTIVE_TEST_ORDER - 1U) * (1U + EXHAUSTIVE_TEST_LAMBDA)) % EXHAUSTIVE_TEST_ORDER; +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_ecmult_const_K = ((SECP256K1_SCALAR_CONST(0, 0, 0, (1U << (ECMULT_CONST_BITS - 128)) - 2U, 0, 0, 0, 0) + EXHAUSTIVE_TEST_ORDER - 1U) * (1U + EXHAUSTIVE_TEST_LAMBDA)) % EXHAUSTIVE_TEST_ORDER; /* - for the real secp256k1 group we have constants for various ECMULT_CONST_BITS values. */ #elif ECMULT_CONST_BITS == 129 /* For GROUP_SIZE = 1,3. 
*/ -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_ecmult_const_K = SECP256K1_SCALAR_CONST(0xac9c52b3ul, 0x3fa3cf1ful, 0x5ad9e3fdul, 0x77ed9ba4ul, 0xa880b9fcul, 0x8ec739c2ul, 0xe0cfc810ul, 0xb51283ceul); +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_ecmult_const_K = SECP256K1_SCALAR_CONST(0xac9c52b3ul, 0x3fa3cf1ful, 0x5ad9e3fdul, 0x77ed9ba4ul, 0xa880b9fcul, 0x8ec739c2ul, 0xe0cfc810ul, 0xb51283ceul); #elif ECMULT_CONST_BITS == 130 /* For GROUP_SIZE = 2,5. */ -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_ecmult_const_K = SECP256K1_SCALAR_CONST(0xa4e88a7dul, 0xcb13034eul, 0xc2bdd6bful, 0x7c118d6bul, 0x589ae848ul, 0x26ba29e4ul, 0xb5c2c1dcul, 0xde9798d9ul); +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_ecmult_const_K = SECP256K1_SCALAR_CONST(0xa4e88a7dul, 0xcb13034eul, 0xc2bdd6bful, 0x7c118d6bul, 0x589ae848ul, 0x26ba29e4ul, 0xb5c2c1dcul, 0xde9798d9ul); #elif ECMULT_CONST_BITS == 132 /* For GROUP_SIZE = 4,6 */ -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_ecmult_const_K = SECP256K1_SCALAR_CONST(0x76b1d93dul, 0x0fae3c6bul, 0x3215874bul, 0x94e93813ul, 0x7937fe0dul, 0xb66bcaaful, 0xb3749ca5ul, 0xd7b6171bul); +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_ecmult_const_K = SECP256K1_SCALAR_CONST(0x76b1d93dul, 0x0fae3c6bul, 0x3215874bul, 0x94e93813ul, 0x7937fe0dul, 0xb66bcaaful, 0xb3749ca5ul, 0xd7b6171bul); #else # error "Unknown ECMULT_CONST_BITS" #endif -static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_scalar *q) { +static void rustsecp256k1zkp_v0_10_1_ecmult_const(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_scalar *q) { /* The approach below combines the signed-digit logic from Mike Hamburg's * "Fast and compact elliptic-curve cryptography" 
(https://eprint.iacr.org/2012/309) * Section 3.3, with the GLV endomorphism. @@ -142,7 +142,7 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej * * It is appealing to try to combine this with the GLV optimization: the idea that a scalar * s can be written as s1 + lambda*s2, where lambda is a curve-specific constant such that * lambda*A is easy to compute, and where s1 and s2 are small. In particular we have the - * rustsecp256k1zkp_v0_10_0_scalar_split_lambda function which performs such a split with the resulting s1 + * rustsecp256k1zkp_v0_10_1_scalar_split_lambda function which performs such a split with the resulting s1 * and s2 in range (-2^128, 2^128) mod n. This does work, but is uninteresting: * * To compute q*A: @@ -189,33 +189,33 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej * */ /* The offset to add to s1 and s2 to make them non-negative. Equal to 2^128. */ - static const rustsecp256k1zkp_v0_10_0_scalar S_OFFSET = SECP256K1_SCALAR_CONST(0, 0, 0, 1, 0, 0, 0, 0); - rustsecp256k1zkp_v0_10_0_scalar s, v1, v2; - rustsecp256k1zkp_v0_10_0_ge pre_a[ECMULT_CONST_TABLE_SIZE]; - rustsecp256k1zkp_v0_10_0_ge pre_a_lam[ECMULT_CONST_TABLE_SIZE]; - rustsecp256k1zkp_v0_10_0_fe global_z; + static const rustsecp256k1zkp_v0_10_1_scalar S_OFFSET = SECP256K1_SCALAR_CONST(0, 0, 0, 1, 0, 0, 0, 0); + rustsecp256k1zkp_v0_10_1_scalar s, v1, v2; + rustsecp256k1zkp_v0_10_1_ge pre_a[ECMULT_CONST_TABLE_SIZE]; + rustsecp256k1zkp_v0_10_1_ge pre_a_lam[ECMULT_CONST_TABLE_SIZE]; + rustsecp256k1zkp_v0_10_1_fe global_z; int group, i; /* We're allowed to be non-constant time in the point, and the code below (in particular, - * rustsecp256k1zkp_v0_10_0_ecmult_const_odd_multiples_table_globalz) cannot deal with infinity in a + * rustsecp256k1zkp_v0_10_1_ecmult_const_odd_multiples_table_globalz) cannot deal with infinity in a * constant-time manner anyway. 
*/ - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(a)) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(a)) { + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); return; } /* Compute v1 and v2. */ - rustsecp256k1zkp_v0_10_0_scalar_add(&s, q, &rustsecp256k1zkp_v0_10_0_ecmult_const_K); - rustsecp256k1zkp_v0_10_0_scalar_half(&s, &s); - rustsecp256k1zkp_v0_10_0_scalar_split_lambda(&v1, &v2, &s); - rustsecp256k1zkp_v0_10_0_scalar_add(&v1, &v1, &S_OFFSET); - rustsecp256k1zkp_v0_10_0_scalar_add(&v2, &v2, &S_OFFSET); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, q, &rustsecp256k1zkp_v0_10_1_ecmult_const_K); + rustsecp256k1zkp_v0_10_1_scalar_half(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_split_lambda(&v1, &v2, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&v1, &v1, &S_OFFSET); + rustsecp256k1zkp_v0_10_1_scalar_add(&v2, &v2, &S_OFFSET); #ifdef VERIFY /* Verify that v1 and v2 are in range [0, 2^129-1]. */ for (i = 129; i < 256; ++i) { - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_get_bits(&v1, i, 1) == 0); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_get_bits(&v2, i, 1) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_get_bits(&v1, i, 1) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_get_bits(&v2, i, 1) == 0); } #endif @@ -225,10 +225,10 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej * * that the Z coordinate was 1, use affine addition formulae, and correct * the Z coordinate of the result once at the end. 
*/ - rustsecp256k1zkp_v0_10_0_gej_set_ge(r, a); - rustsecp256k1zkp_v0_10_0_ecmult_const_odd_multiples_table_globalz(pre_a, &global_z, r); + rustsecp256k1zkp_v0_10_1_gej_set_ge(r, a); + rustsecp256k1zkp_v0_10_1_ecmult_const_odd_multiples_table_globalz(pre_a, &global_z, r); for (i = 0; i < ECMULT_CONST_TABLE_SIZE; i++) { - rustsecp256k1zkp_v0_10_0_ge_mul_lambda(&pre_a_lam[i], &pre_a[i]); + rustsecp256k1zkp_v0_10_1_ge_mul_lambda(&pre_a_lam[i], &pre_a[i]); } /* Next, we compute r = C_l(v1, A) + C_l(v2, lambda*A). @@ -243,31 +243,31 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_const(rustsecp256k1zkp_v0_10_0_gej * */ for (group = ECMULT_CONST_GROUPS - 1; group >= 0; --group) { /* Using the _var get_bits function is ok here, since it's only variable in offset and count, not in the scalar. */ - unsigned int bits1 = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(&v1, group * ECMULT_CONST_GROUP_SIZE, ECMULT_CONST_GROUP_SIZE); - unsigned int bits2 = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(&v2, group * ECMULT_CONST_GROUP_SIZE, ECMULT_CONST_GROUP_SIZE); - rustsecp256k1zkp_v0_10_0_ge t; + unsigned int bits1 = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(&v1, group * ECMULT_CONST_GROUP_SIZE, ECMULT_CONST_GROUP_SIZE); + unsigned int bits2 = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(&v2, group * ECMULT_CONST_GROUP_SIZE, ECMULT_CONST_GROUP_SIZE); + rustsecp256k1zkp_v0_10_1_ge t; int j; ECMULT_CONST_TABLE_GET_GE(&t, pre_a, bits1); if (group == ECMULT_CONST_GROUPS - 1) { /* Directly set r in the first iteration. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(r, &t); + rustsecp256k1zkp_v0_10_1_gej_set_ge(r, &t); } else { /* Shift the result so far up. 
*/ for (j = 0; j < ECMULT_CONST_GROUP_SIZE; ++j) { - rustsecp256k1zkp_v0_10_0_gej_double(r, r); + rustsecp256k1zkp_v0_10_1_gej_double(r, r); } - rustsecp256k1zkp_v0_10_0_gej_add_ge(r, r, &t); + rustsecp256k1zkp_v0_10_1_gej_add_ge(r, r, &t); } ECMULT_CONST_TABLE_GET_GE(&t, pre_a_lam, bits2); - rustsecp256k1zkp_v0_10_0_gej_add_ge(r, r, &t); + rustsecp256k1zkp_v0_10_1_gej_add_ge(r, r, &t); } /* Map the result back to the secp256k1 curve from the isomorphic curve. */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &r->z, &global_z); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &r->z, &global_z); } -static int rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(rustsecp256k1zkp_v0_10_0_fe* r, const rustsecp256k1zkp_v0_10_0_fe *n, const rustsecp256k1zkp_v0_10_0_fe *d, const rustsecp256k1zkp_v0_10_0_scalar *q, int known_on_curve) { +static int rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(rustsecp256k1zkp_v0_10_1_fe* r, const rustsecp256k1zkp_v0_10_1_fe *n, const rustsecp256k1zkp_v0_10_1_fe *d, const rustsecp256k1zkp_v0_10_1_scalar *q, int known_on_curve) { /* This algorithm is a generalization of Peter Dettman's technique for * avoiding the square root in a random-basepoint x-only multiplication @@ -338,21 +338,21 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(rustsecp256k1zkp_v0_10_0_ * is needed anywhere in this computation. */ - rustsecp256k1zkp_v0_10_0_fe g, i; - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_gej rj; + rustsecp256k1zkp_v0_10_1_fe g, i; + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_gej rj; /* Compute g = (n^3 + B*d^3). 
*/ - rustsecp256k1zkp_v0_10_0_fe_sqr(&g, n); - rustsecp256k1zkp_v0_10_0_fe_mul(&g, &g, n); + rustsecp256k1zkp_v0_10_1_fe_sqr(&g, n); + rustsecp256k1zkp_v0_10_1_fe_mul(&g, &g, n); if (d) { - rustsecp256k1zkp_v0_10_0_fe b; - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(d)); - rustsecp256k1zkp_v0_10_0_fe_sqr(&b, d); + rustsecp256k1zkp_v0_10_1_fe b; + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(d)); + rustsecp256k1zkp_v0_10_1_fe_sqr(&b, d); VERIFY_CHECK(SECP256K1_B <= 8); /* magnitude of b will be <= 8 after the next call */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&b, SECP256K1_B); - rustsecp256k1zkp_v0_10_0_fe_mul(&b, &b, d); - rustsecp256k1zkp_v0_10_0_fe_add(&g, &b); + rustsecp256k1zkp_v0_10_1_fe_mul_int(&b, SECP256K1_B); + rustsecp256k1zkp_v0_10_1_fe_mul(&b, &b, d); + rustsecp256k1zkp_v0_10_1_fe_add(&g, &b); if (!known_on_curve) { /* We need to determine whether (n/d)^3 + 7 is square. * @@ -361,37 +361,37 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(rustsecp256k1zkp_v0_10_0_ * <=> is_square((n^3 + 7*d^3) * d) * <=> is_square(g * d) */ - rustsecp256k1zkp_v0_10_0_fe c; - rustsecp256k1zkp_v0_10_0_fe_mul(&c, &g, d); - if (!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&c)) return 0; + rustsecp256k1zkp_v0_10_1_fe c; + rustsecp256k1zkp_v0_10_1_fe_mul(&c, &g, d); + if (!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&c)) return 0; } } else { - rustsecp256k1zkp_v0_10_0_fe_add_int(&g, SECP256K1_B); + rustsecp256k1zkp_v0_10_1_fe_add_int(&g, SECP256K1_B); if (!known_on_curve) { /* g at this point equals x^3 + 7. Test if it is square. */ - if (!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&g)) return 0; + if (!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&g)) return 0; } } /* Compute base point P = (n*g, g^2), the effective affine version of (n*g, g^2, v), which has * corresponding affine X coordinate n/d. 
*/ - rustsecp256k1zkp_v0_10_0_fe_mul(&p.x, &g, n); - rustsecp256k1zkp_v0_10_0_fe_sqr(&p.y, &g); + rustsecp256k1zkp_v0_10_1_fe_mul(&p.x, &g, n); + rustsecp256k1zkp_v0_10_1_fe_sqr(&p.y, &g); p.infinity = 0; /* Perform x-only EC multiplication of P with q. */ - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_scalar_is_zero(q)); - rustsecp256k1zkp_v0_10_0_ecmult_const(&rj, &p, q); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rj)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_scalar_is_zero(q)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&rj, &p, q); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rj)); /* The resulting (X, Y, Z) point on the effective-affine isomorphic curve corresponds to * (X, Y, Z*v) on the secp256k1 curve. The affine version of that has X coordinate * (X / (Z^2*d*g)). */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&i, &rj.z); - rustsecp256k1zkp_v0_10_0_fe_mul(&i, &i, &g); - if (d) rustsecp256k1zkp_v0_10_0_fe_mul(&i, &i, d); - rustsecp256k1zkp_v0_10_0_fe_inv(&i, &i); - rustsecp256k1zkp_v0_10_0_fe_mul(r, &rj.x, &i); + rustsecp256k1zkp_v0_10_1_fe_sqr(&i, &rj.z); + rustsecp256k1zkp_v0_10_1_fe_mul(&i, &i, &g); + if (d) rustsecp256k1zkp_v0_10_1_fe_mul(&i, &i, d); + rustsecp256k1zkp_v0_10_1_fe_inv(&i, &i); + rustsecp256k1zkp_v0_10_1_fe_mul(r, &rj.x, &i); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen.h index 830cad93..caabbc91 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen.h @@ -33,16 +33,16 @@ typedef struct { int built; /* Blinding values used when computing (n-b)G + bG. 
*/ - rustsecp256k1zkp_v0_10_0_scalar blind; /* -b */ - rustsecp256k1zkp_v0_10_0_gej initial; /* bG */ -} rustsecp256k1zkp_v0_10_0_ecmult_gen_context; + rustsecp256k1zkp_v0_10_1_scalar blind; /* -b */ + rustsecp256k1zkp_v0_10_1_gej initial; /* bG */ +} rustsecp256k1zkp_v0_10_1_ecmult_gen_context; -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_context_build(rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ctx); -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_context_clear(rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ctx); +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_context_build(rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ctx); +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_context_clear(rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ctx); /** Multiply with the generator: R = a*G */ -static void rustsecp256k1zkp_v0_10_0_ecmult_gen(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ctx, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_ecmult_gen(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ctx, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *a); -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx, const unsigned char *seed32); +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx, const unsigned char *seed32); #endif /* SECP256K1_ECMULT_GEN_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table.h index cffbaebb..6e863292 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table.h @@ -9,6 +9,6 @@ #include "ecmult_gen.h" -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_compute_table(rustsecp256k1zkp_v0_10_0_ge_storage* table, const rustsecp256k1zkp_v0_10_0_ge* gen, int bits); +static void 
rustsecp256k1zkp_v0_10_1_ecmult_gen_compute_table(rustsecp256k1zkp_v0_10_1_ge_storage* table, const rustsecp256k1zkp_v0_10_1_ge* gen, int bits); #endif /* SECP256K1_ECMULT_GEN_COMPUTE_TABLE_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table_impl.h index 13ca3109..0a5a88ab 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_compute_table_impl.h @@ -13,69 +13,69 @@ #include "ecmult_gen.h" #include "util.h" -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_compute_table(rustsecp256k1zkp_v0_10_0_ge_storage* table, const rustsecp256k1zkp_v0_10_0_ge* gen, int bits) { +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_compute_table(rustsecp256k1zkp_v0_10_1_ge_storage* table, const rustsecp256k1zkp_v0_10_1_ge* gen, int bits) { int g = ECMULT_GEN_PREC_G(bits); int n = ECMULT_GEN_PREC_N(bits); - rustsecp256k1zkp_v0_10_0_ge* prec = checked_malloc(&default_error_callback, n * g * sizeof(*prec)); - rustsecp256k1zkp_v0_10_0_gej gj; - rustsecp256k1zkp_v0_10_0_gej nums_gej; + rustsecp256k1zkp_v0_10_1_ge* prec = checked_malloc(&default_error_callback, n * g * sizeof(*prec)); + rustsecp256k1zkp_v0_10_1_gej gj; + rustsecp256k1zkp_v0_10_1_gej nums_gej; int i, j; VERIFY_CHECK(g > 0); VERIFY_CHECK(n > 0); /* get the generator */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gj, gen); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gj, gen); /* Construct a group element with no known corresponding scalar (nothing up my sleeve). 
*/ { static const unsigned char nums_b32[33] = "The scalar for this x is unknown"; - rustsecp256k1zkp_v0_10_0_fe nums_x; - rustsecp256k1zkp_v0_10_0_ge nums_ge; + rustsecp256k1zkp_v0_10_1_fe nums_x; + rustsecp256k1zkp_v0_10_1_ge nums_ge; int r; - r = rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&nums_x, nums_b32); + r = rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&nums_x, nums_b32); (void)r; VERIFY_CHECK(r); - r = rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&nums_ge, &nums_x, 0); + r = rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&nums_ge, &nums_x, 0); (void)r; VERIFY_CHECK(r); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&nums_gej, &nums_ge); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&nums_gej, &nums_ge); /* Add G to make the bits in x uniformly distributed. */ - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&nums_gej, &nums_gej, gen, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&nums_gej, &nums_gej, gen, NULL); } /* compute prec. */ { - rustsecp256k1zkp_v0_10_0_gej gbase; - rustsecp256k1zkp_v0_10_0_gej numsbase; - rustsecp256k1zkp_v0_10_0_gej* precj = checked_malloc(&default_error_callback, n * g * sizeof(*precj)); /* Jacobian versions of prec. */ + rustsecp256k1zkp_v0_10_1_gej gbase; + rustsecp256k1zkp_v0_10_1_gej numsbase; + rustsecp256k1zkp_v0_10_1_gej* precj = checked_malloc(&default_error_callback, n * g * sizeof(*precj)); /* Jacobian versions of prec. */ gbase = gj; /* PREC_G^j * G */ numsbase = nums_gej; /* 2^j * nums. */ for (j = 0; j < n; j++) { /* Set precj[j*PREC_G .. j*PREC_G+(PREC_G-1)] to (numsbase, numsbase + gbase, ..., numsbase + (PREC_G-1)*gbase). */ precj[j*g] = numsbase; for (i = 1; i < g; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&precj[j*g + i], &precj[j*g + i - 1], &gbase, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&precj[j*g + i], &precj[j*g + i - 1], &gbase, NULL); } /* Multiply gbase by PREC_G. 
*/ for (i = 0; i < bits; i++) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&gbase, &gbase, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&gbase, &gbase, NULL); } /* Multiply numbase by 2. */ - rustsecp256k1zkp_v0_10_0_gej_double_var(&numsbase, &numsbase, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&numsbase, &numsbase, NULL); if (j == n - 2) { /* In the last iteration, numsbase is (1 - 2^j) * nums instead. */ - rustsecp256k1zkp_v0_10_0_gej_neg(&numsbase, &numsbase); - rustsecp256k1zkp_v0_10_0_gej_add_var(&numsbase, &numsbase, &nums_gej, NULL); + rustsecp256k1zkp_v0_10_1_gej_neg(&numsbase, &numsbase); + rustsecp256k1zkp_v0_10_1_gej_add_var(&numsbase, &numsbase, &nums_gej, NULL); } } - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(prec, precj, n * g); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(prec, precj, n * g); free(precj); } for (j = 0; j < n; j++) { for (i = 0; i < g; i++) { - rustsecp256k1zkp_v0_10_0_ge_to_storage(&table[j*g + i], &prec[j*g + i]); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&table[j*g + i], &prec[j*g + i]); } } free(prec); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_impl.h index f013a773..6d5fa220 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_gen_impl.h @@ -14,19 +14,19 @@ #include "hash_impl.h" #include "precomputed_ecmult_gen.h" -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_context_build(rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx) { - rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(ctx, NULL); +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_context_build(rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx) { + rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(ctx, NULL); ctx->built = 1; } -static int rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ctx) { +static int rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(const 
rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ctx) { return ctx->built; } -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_context_clear(rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx) { +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_context_clear(rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx) { ctx->built = 0; - rustsecp256k1zkp_v0_10_0_scalar_clear(&ctx->blind); - rustsecp256k1zkp_v0_10_0_gej_clear(&ctx->initial); + rustsecp256k1zkp_v0_10_1_scalar_clear(&ctx->blind); + rustsecp256k1zkp_v0_10_1_gej_clear(&ctx->initial); } /* For accelerating the computation of a*G: @@ -40,25 +40,25 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_gen_context_clear(rustsecp256k1zkp_v * precomputed (call it prec(i, n_i)). The formula now becomes sum(prec(i, n_i), i=0 ... PREC_N-1). * None of the resulting prec group elements have a known scalar, and neither do any of * the intermediate sums while computing a*G. - * The prec values are stored in rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[i][n_i] = n_i * (PREC_G)^i * G + U_i. + * The prec values are stored in rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[i][n_i] = n_i * (PREC_G)^i * G + U_i. */ -static void rustsecp256k1zkp_v0_10_0_ecmult_gen(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *gn) { +static void rustsecp256k1zkp_v0_10_1_ecmult_gen(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *gn) { int bits = ECMULT_GEN_PREC_BITS; int g = ECMULT_GEN_PREC_G(bits); int n = ECMULT_GEN_PREC_N(bits); - rustsecp256k1zkp_v0_10_0_ge add; - rustsecp256k1zkp_v0_10_0_ge_storage adds; - rustsecp256k1zkp_v0_10_0_scalar gnb; + rustsecp256k1zkp_v0_10_1_ge add; + rustsecp256k1zkp_v0_10_1_ge_storage adds; + rustsecp256k1zkp_v0_10_1_scalar gnb; int i, j, n_i; memset(&adds, 0, sizeof(adds)); *r = ctx->initial; /* Blind scalar/point multiplication by computing (n-b)G + bG instead of nG. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_add(&gnb, gn, &ctx->blind); + rustsecp256k1zkp_v0_10_1_scalar_add(&gnb, gn, &ctx->blind); add.infinity = 0; for (i = 0; i < n; i++) { - n_i = rustsecp256k1zkp_v0_10_0_scalar_get_bits(&gnb, i * bits, bits); + n_i = rustsecp256k1zkp_v0_10_1_scalar_get_bits(&gnb, i * bits, bits); for (j = 0; j < g; j++) { /** This uses a conditional move to avoid any secret data in array indexes. * _Any_ use of secret indexes has been demonstrated to result in timing @@ -70,61 +70,61 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_gen(const rustsecp256k1zkp_v0_10_0_e * by Dag Arne Osvik, Adi Shamir, and Eran Tromer * (https://www.tau.ac.il/~tromer/papers/cache.pdf) */ - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&adds, &rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[i][j], j == n_i); + rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&adds, &rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[i][j], j == n_i); } - rustsecp256k1zkp_v0_10_0_ge_from_storage(&add, &adds); - rustsecp256k1zkp_v0_10_0_gej_add_ge(r, r, &add); + rustsecp256k1zkp_v0_10_1_ge_from_storage(&add, &adds); + rustsecp256k1zkp_v0_10_1_gej_add_ge(r, r, &add); } n_i = 0; - rustsecp256k1zkp_v0_10_0_ge_clear(&add); - rustsecp256k1zkp_v0_10_0_scalar_clear(&gnb); + rustsecp256k1zkp_v0_10_1_ge_clear(&add); + rustsecp256k1zkp_v0_10_1_scalar_clear(&gnb); } -/* Setup blinding values for rustsecp256k1zkp_v0_10_0_ecmult_gen. */ -static void rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ctx, const unsigned char *seed32) { - rustsecp256k1zkp_v0_10_0_scalar b; - rustsecp256k1zkp_v0_10_0_gej gb; - rustsecp256k1zkp_v0_10_0_fe s; +/* Setup blinding values for rustsecp256k1zkp_v0_10_1_ecmult_gen. 
*/ +static void rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ctx, const unsigned char *seed32) { + rustsecp256k1zkp_v0_10_1_scalar b; + rustsecp256k1zkp_v0_10_1_gej gb; + rustsecp256k1zkp_v0_10_1_fe s; unsigned char nonce32[32]; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; unsigned char keydata[64]; if (seed32 == NULL) { /* When seed is NULL, reset the initial point and blinding value. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ctx->initial, &rustsecp256k1zkp_v0_10_0_ge_const_g); - rustsecp256k1zkp_v0_10_0_gej_neg(&ctx->initial, &ctx->initial); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ctx->blind, 1); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ctx->initial, &rustsecp256k1zkp_v0_10_1_ge_const_g); + rustsecp256k1zkp_v0_10_1_gej_neg(&ctx->initial, &ctx->initial); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ctx->blind, 1); return; } /* The prior blinding value (if not reset) is chained forward by including it in the hash. */ - rustsecp256k1zkp_v0_10_0_scalar_get_b32(keydata, &ctx->blind); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(keydata, &ctx->blind); /** Using a CSPRNG allows a failure free interface, avoids needing large amounts of random data, * and guards against weak or adversarial seeds. This is a simpler and safer interface than * asking the caller for blinding values directly and expecting them to retry on failure. 
*/ VERIFY_CHECK(seed32 != NULL); memcpy(keydata + 32, seed32, 32); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, keydata, 64); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, keydata, 64); memset(keydata, 0, sizeof(keydata)); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&s, nonce32); - rustsecp256k1zkp_v0_10_0_fe_cmov(&s, &rustsecp256k1zkp_v0_10_0_fe_one, rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&s)); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&s, nonce32); + rustsecp256k1zkp_v0_10_1_fe_cmov(&s, &rustsecp256k1zkp_v0_10_1_fe_one, rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&s)); /* Randomize the projection to defend against multiplier sidechannels. - Do this before our own call to rustsecp256k1zkp_v0_10_0_ecmult_gen below. */ - rustsecp256k1zkp_v0_10_0_gej_rescale(&ctx->initial, &s); - rustsecp256k1zkp_v0_10_0_fe_clear(&s); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&b, nonce32, NULL); + Do this before our own call to rustsecp256k1zkp_v0_10_1_ecmult_gen below. */ + rustsecp256k1zkp_v0_10_1_gej_rescale(&ctx->initial, &s); + rustsecp256k1zkp_v0_10_1_fe_clear(&s); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&b, nonce32, NULL); /* A blinding value of 0 works, but would undermine the projection hardening. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_cmov(&b, &rustsecp256k1zkp_v0_10_0_scalar_one, rustsecp256k1zkp_v0_10_0_scalar_is_zero(&b)); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&b, &rustsecp256k1zkp_v0_10_1_scalar_one, rustsecp256k1zkp_v0_10_1_scalar_is_zero(&b)); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); memset(nonce32, 0, 32); /* The random projection in ctx->initial ensures that gb will have a random projection. */ - rustsecp256k1zkp_v0_10_0_ecmult_gen(ctx, &gb, &b); - rustsecp256k1zkp_v0_10_0_scalar_negate(&b, &b); + rustsecp256k1zkp_v0_10_1_ecmult_gen(ctx, &gb, &b); + rustsecp256k1zkp_v0_10_1_scalar_negate(&b, &b); ctx->blind = b; ctx->initial = gb; - rustsecp256k1zkp_v0_10_0_scalar_clear(&b); - rustsecp256k1zkp_v0_10_0_gej_clear(&gb); + rustsecp256k1zkp_v0_10_1_scalar_clear(&b); + rustsecp256k1zkp_v0_10_1_gej_clear(&gb); } #endif /* SECP256K1_ECMULT_GEN_IMPL_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_impl.h index d65a2c8a..ac456cd9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/ecmult_impl.h @@ -33,8 +33,8 @@ /** Larger values for ECMULT_WINDOW_SIZE result in possibly better * performance at the cost of an exponentially larger precomputed * table. The exact table size is - * (1 << (WINDOW_G - 2)) * sizeof(rustsecp256k1zkp_v0_10_0_ge_storage) bytes, - * where sizeof(rustsecp256k1zkp_v0_10_0_ge_storage) is typically 64 bytes but can + * (1 << (WINDOW_G - 2)) * sizeof(rustsecp256k1zkp_v0_10_1_ge_storage) bytes, + * where sizeof(rustsecp256k1zkp_v0_10_1_ge_storage) is typically 64 bytes but can * be larger due to platform-specific padding and alignment. * Two tables of this size are used (due to the endomorphism * optimization). 
@@ -70,14 +70,14 @@ * Lastly the zr[0] value, which isn't used above, is set so that: * - a.z = z(pre_a[0]) / zr[0] */ -static void rustsecp256k1zkp_v0_10_0_ecmult_odd_multiples_table(int n, rustsecp256k1zkp_v0_10_0_ge *pre_a, rustsecp256k1zkp_v0_10_0_fe *zr, rustsecp256k1zkp_v0_10_0_fe *z, const rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_gej d, ai; - rustsecp256k1zkp_v0_10_0_ge d_ge; +static void rustsecp256k1zkp_v0_10_1_ecmult_odd_multiples_table(int n, rustsecp256k1zkp_v0_10_1_ge *pre_a, rustsecp256k1zkp_v0_10_1_fe *zr, rustsecp256k1zkp_v0_10_1_fe *z, const rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_gej d, ai; + rustsecp256k1zkp_v0_10_1_ge d_ge; int i; VERIFY_CHECK(!a->infinity); - rustsecp256k1zkp_v0_10_0_gej_double_var(&d, a, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&d, a, NULL); /* * Perform the additions using an isomorphic curve Y^2 = X^3 + 7*C^6 where C := d.z. @@ -90,11 +90,11 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_odd_multiples_table(int n, rustsecp2 * * The group addition functions work correctly on these isomorphic curves. * In particular phi(d) is easy to represent in affine coordinates under this isomorphism. - * This lets us use the faster rustsecp256k1zkp_v0_10_0_gej_add_ge_var group addition function that we wouldn't be able to use otherwise. + * This lets us use the faster rustsecp256k1zkp_v0_10_1_gej_add_ge_var group addition function that we wouldn't be able to use otherwise. */ - rustsecp256k1zkp_v0_10_0_ge_set_xy(&d_ge, &d.x, &d.y); - rustsecp256k1zkp_v0_10_0_ge_set_gej_zinv(&pre_a[0], a, &d.z); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ai, &pre_a[0]); + rustsecp256k1zkp_v0_10_1_ge_set_xy(&d_ge, &d.x, &d.y); + rustsecp256k1zkp_v0_10_1_ge_set_gej_zinv(&pre_a[0], a, &d.z); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ai, &pre_a[0]); ai.z = a->z; /* pre_a[0] is the point (a.x*C^2, a.y*C^3, a.z*C) which is equivalent to a. 
@@ -103,18 +103,18 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_odd_multiples_table(int n, rustsecp2 zr[0] = d.z; for (i = 1; i < n; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&ai, &ai, &d_ge, &zr[i]); - rustsecp256k1zkp_v0_10_0_ge_set_xy(&pre_a[i], &ai.x, &ai.y); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&ai, &ai, &d_ge, &zr[i]); + rustsecp256k1zkp_v0_10_1_ge_set_xy(&pre_a[i], &ai.x, &ai.y); } /* Multiply the last z-coordinate by C to undo the isomorphism. * Since the z-coordinates of the pre_a values are implied by the zr array of z-coordinate ratios, * undoing the isomorphism here undoes the isomorphism for all pre_a values. */ - rustsecp256k1zkp_v0_10_0_fe_mul(z, &ai.z, &d.z); + rustsecp256k1zkp_v0_10_1_fe_mul(z, &ai.z, &d.z); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_verify(int n, int w) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_ecmult_table_verify(int n, int w) { (void)n; (void)w; VERIFY_CHECK(((n) & 1) == 1); @@ -122,33 +122,33 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_verify(int n, VERIFY_CHECK((n) <= ((1 << ((w)-1)) - 1)); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *pre, int n, int w) { - rustsecp256k1zkp_v0_10_0_ecmult_table_verify(n,w); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *pre, int n, int w) { + rustsecp256k1zkp_v0_10_1_ecmult_table_verify(n,w); if (n > 0) { *r = pre[(n-1)/2]; } else { *r = pre[(-n-1)/2]; - rustsecp256k1zkp_v0_10_0_fe_negate(&(r->y), &(r->y), 1); + rustsecp256k1zkp_v0_10_1_fe_negate(&(r->y), &(r->y), 1); } } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_lambda(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *pre, const rustsecp256k1zkp_v0_10_0_fe *x, int n, int w) { - 
rustsecp256k1zkp_v0_10_0_ecmult_table_verify(n,w); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge_lambda(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *pre, const rustsecp256k1zkp_v0_10_1_fe *x, int n, int w) { + rustsecp256k1zkp_v0_10_1_ecmult_table_verify(n,w); if (n > 0) { - rustsecp256k1zkp_v0_10_0_ge_set_xy(r, &x[(n-1)/2], &pre[(n-1)/2].y); + rustsecp256k1zkp_v0_10_1_ge_set_xy(r, &x[(n-1)/2], &pre[(n-1)/2].y); } else { - rustsecp256k1zkp_v0_10_0_ge_set_xy(r, &x[(-n-1)/2], &pre[(-n-1)/2].y); - rustsecp256k1zkp_v0_10_0_fe_negate(&(r->y), &(r->y), 1); + rustsecp256k1zkp_v0_10_1_ge_set_xy(r, &x[(-n-1)/2], &pre[(-n-1)/2].y); + rustsecp256k1zkp_v0_10_1_fe_negate(&(r->y), &(r->y), 1); } } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_storage(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge_storage *pre, int n, int w) { - rustsecp256k1zkp_v0_10_0_ecmult_table_verify(n,w); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge_storage(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge_storage *pre, int n, int w) { + rustsecp256k1zkp_v0_10_1_ecmult_table_verify(n,w); if (n > 0) { - rustsecp256k1zkp_v0_10_0_ge_from_storage(r, &pre[(n-1)/2]); + rustsecp256k1zkp_v0_10_1_ge_from_storage(r, &pre[(n-1)/2]); } else { - rustsecp256k1zkp_v0_10_0_ge_from_storage(r, &pre[(-n-1)/2]); - rustsecp256k1zkp_v0_10_0_fe_negate(&(r->y), &(r->y), 1); + rustsecp256k1zkp_v0_10_1_ge_from_storage(r, &pre[(-n-1)/2]); + rustsecp256k1zkp_v0_10_1_fe_negate(&(r->y), &(r->y), 1); } } @@ -159,8 +159,8 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_storag * - the number of set values in wnaf is returned. This number is at most 256, and at most one more * than the number of bits in the (absolute value) of the input. 
*/ -static int rustsecp256k1zkp_v0_10_0_ecmult_wnaf(int *wnaf, int len, const rustsecp256k1zkp_v0_10_0_scalar *a, int w) { - rustsecp256k1zkp_v0_10_0_scalar s; +static int rustsecp256k1zkp_v0_10_1_ecmult_wnaf(int *wnaf, int len, const rustsecp256k1zkp_v0_10_1_scalar *a, int w) { + rustsecp256k1zkp_v0_10_1_scalar s; int last_set_bit = -1; int bit = 0; int sign = 1; @@ -174,15 +174,15 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_wnaf(int *wnaf, int len, const rustse memset(wnaf, 0, len * sizeof(wnaf[0])); s = *a; - if (rustsecp256k1zkp_v0_10_0_scalar_get_bits(&s, 255, 1)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); + if (rustsecp256k1zkp_v0_10_1_scalar_get_bits(&s, 255, 1)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); sign = -1; } while (bit < len) { int now; int word; - if (rustsecp256k1zkp_v0_10_0_scalar_get_bits(&s, bit, 1) == (unsigned int)carry) { + if (rustsecp256k1zkp_v0_10_1_scalar_get_bits(&s, bit, 1) == (unsigned int)carry) { bit++; continue; } @@ -192,7 +192,7 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_wnaf(int *wnaf, int len, const rustse now = len - bit; } - word = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(&s, bit, now) + carry; + word = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(&s, bit, now) + carry; carry = (word >> (w-1)) & 1; word -= carry << w; @@ -209,7 +209,7 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_wnaf(int *wnaf, int len, const rustse VERIFY_CHECK(carry == 0); while (verify_bit < 256) { - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_get_bits(&s, verify_bit, 1) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_get_bits(&s, verify_bit, 1) == 0); verify_bit++; } } @@ -217,25 +217,25 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_wnaf(int *wnaf, int len, const rustse return last_set_bit + 1; } -struct rustsecp256k1zkp_v0_10_0_strauss_point_state { +struct rustsecp256k1zkp_v0_10_1_strauss_point_state { int wnaf_na_1[129]; int wnaf_na_lam[129]; int bits_na_1; int bits_na_lam; }; -struct 
rustsecp256k1zkp_v0_10_0_strauss_state { +struct rustsecp256k1zkp_v0_10_1_strauss_state { /* aux is used to hold z-ratios, and then used to hold pre_a[i].x * BETA values. */ - rustsecp256k1zkp_v0_10_0_fe* aux; - rustsecp256k1zkp_v0_10_0_ge* pre_a; - struct rustsecp256k1zkp_v0_10_0_strauss_point_state* ps; + rustsecp256k1zkp_v0_10_1_fe* aux; + rustsecp256k1zkp_v0_10_1_ge* pre_a; + struct rustsecp256k1zkp_v0_10_1_strauss_point_state* ps; }; -static void rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(const struct rustsecp256k1zkp_v0_10_0_strauss_state *state, rustsecp256k1zkp_v0_10_0_gej *r, size_t num, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_scalar *na, const rustsecp256k1zkp_v0_10_0_scalar *ng) { - rustsecp256k1zkp_v0_10_0_ge tmpa; - rustsecp256k1zkp_v0_10_0_fe Z; +static void rustsecp256k1zkp_v0_10_1_ecmult_strauss_wnaf(const struct rustsecp256k1zkp_v0_10_1_strauss_state *state, rustsecp256k1zkp_v0_10_1_gej *r, size_t num, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_scalar *na, const rustsecp256k1zkp_v0_10_1_scalar *ng) { + rustsecp256k1zkp_v0_10_1_ge tmpa; + rustsecp256k1zkp_v0_10_1_fe Z; /* Split G factors. 
*/ - rustsecp256k1zkp_v0_10_0_scalar ng_1, ng_128; + rustsecp256k1zkp_v0_10_1_scalar ng_1, ng_128; int wnaf_ng_1[129]; int bits_ng_1 = 0; int wnaf_ng_128[129]; @@ -245,19 +245,19 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(const struct rustsecp25 size_t np; size_t no = 0; - rustsecp256k1zkp_v0_10_0_fe_set_int(&Z, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(&Z, 1); for (np = 0; np < num; ++np) { - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_scalar na_1, na_lam; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(&na[np]) || rustsecp256k1zkp_v0_10_0_gej_is_infinity(&a[np])) { + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_scalar na_1, na_lam; + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(&na[np]) || rustsecp256k1zkp_v0_10_1_gej_is_infinity(&a[np])) { continue; } /* split na into na_1 and na_lam (where na = na_1 + na_lam*lambda, and na_1 and na_lam are ~128 bit) */ - rustsecp256k1zkp_v0_10_0_scalar_split_lambda(&na_1, &na_lam, &na[np]); + rustsecp256k1zkp_v0_10_1_scalar_split_lambda(&na_1, &na_lam, &na[np]); /* build wnaf representation for na_1 and na_lam. */ - state->ps[no].bits_na_1 = rustsecp256k1zkp_v0_10_0_ecmult_wnaf(state->ps[no].wnaf_na_1, 129, &na_1, WINDOW_A); - state->ps[no].bits_na_lam = rustsecp256k1zkp_v0_10_0_ecmult_wnaf(state->ps[no].wnaf_na_lam, 129, &na_lam, WINDOW_A); + state->ps[no].bits_na_1 = rustsecp256k1zkp_v0_10_1_ecmult_wnaf(state->ps[no].wnaf_na_1, 129, &na_1, WINDOW_A); + state->ps[no].bits_na_lam = rustsecp256k1zkp_v0_10_1_ecmult_wnaf(state->ps[no].wnaf_na_lam, 129, &na_lam, WINDOW_A); VERIFY_CHECK(state->ps[no].bits_na_1 <= 129); VERIFY_CHECK(state->ps[no].bits_na_lam <= 129); if (state->ps[no].bits_na_1 > bits) { @@ -274,37 +274,37 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(const struct rustsecp25 * the Z coordinate of the result once at the end. * The exception is the precomputed G table points, which are actually * affine. 
Compared to the base used for other points, they have a Z ratio - * of 1/Z, so we can use rustsecp256k1zkp_v0_10_0_gej_add_zinv_var, which uses the same + * of 1/Z, so we can use rustsecp256k1zkp_v0_10_1_gej_add_zinv_var, which uses the same * isomorphism to efficiently add with a known Z inverse. */ tmp = a[np]; if (no) { - rustsecp256k1zkp_v0_10_0_gej_rescale(&tmp, &Z); + rustsecp256k1zkp_v0_10_1_gej_rescale(&tmp, &Z); } - rustsecp256k1zkp_v0_10_0_ecmult_odd_multiples_table(ECMULT_TABLE_SIZE(WINDOW_A), state->pre_a + no * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), &Z, &tmp); - if (no) rustsecp256k1zkp_v0_10_0_fe_mul(state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), &(a[np].z)); + rustsecp256k1zkp_v0_10_1_ecmult_odd_multiples_table(ECMULT_TABLE_SIZE(WINDOW_A), state->pre_a + no * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), &Z, &tmp); + if (no) rustsecp256k1zkp_v0_10_1_fe_mul(state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + no * ECMULT_TABLE_SIZE(WINDOW_A), &(a[np].z)); ++no; } /* Bring them to the same Z denominator. 
*/ if (no) { - rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(ECMULT_TABLE_SIZE(WINDOW_A) * no, state->pre_a, state->aux); + rustsecp256k1zkp_v0_10_1_ge_table_set_globalz(ECMULT_TABLE_SIZE(WINDOW_A) * no, state->pre_a, state->aux); } for (np = 0; np < no; ++np) { for (i = 0; i < ECMULT_TABLE_SIZE(WINDOW_A); i++) { - rustsecp256k1zkp_v0_10_0_fe_mul(&state->aux[np * ECMULT_TABLE_SIZE(WINDOW_A) + i], &state->pre_a[np * ECMULT_TABLE_SIZE(WINDOW_A) + i].x, &rustsecp256k1zkp_v0_10_0_const_beta); + rustsecp256k1zkp_v0_10_1_fe_mul(&state->aux[np * ECMULT_TABLE_SIZE(WINDOW_A) + i], &state->pre_a[np * ECMULT_TABLE_SIZE(WINDOW_A) + i].x, &rustsecp256k1zkp_v0_10_1_const_beta); } } if (ng) { /* split ng into ng_1 and ng_128 (where gn = gn_1 + gn_128*2^128, and gn_1 and gn_128 are ~128 bit) */ - rustsecp256k1zkp_v0_10_0_scalar_split_128(&ng_1, &ng_128, ng); + rustsecp256k1zkp_v0_10_1_scalar_split_128(&ng_1, &ng_128, ng); /* Build wnaf representation for ng_1 and ng_128 */ - bits_ng_1 = rustsecp256k1zkp_v0_10_0_ecmult_wnaf(wnaf_ng_1, 129, &ng_1, WINDOW_G); - bits_ng_128 = rustsecp256k1zkp_v0_10_0_ecmult_wnaf(wnaf_ng_128, 129, &ng_128, WINDOW_G); + bits_ng_1 = rustsecp256k1zkp_v0_10_1_ecmult_wnaf(wnaf_ng_1, 129, &ng_1, WINDOW_G); + bits_ng_128 = rustsecp256k1zkp_v0_10_1_ecmult_wnaf(wnaf_ng_128, 129, &ng_128, WINDOW_G); if (bits_ng_1 > bits) { bits = bits_ng_1; } @@ -313,61 +313,61 @@ static void rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(const struct rustsecp25 } } - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); for (i = bits - 1; i >= 0; i--) { int n; - rustsecp256k1zkp_v0_10_0_gej_double_var(r, r, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(r, r, NULL); for (np = 0; np < no; ++np) { if (i < state->ps[np].bits_na_1 && (n = state->ps[np].wnaf_na_1[i])) { - rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge(&tmpa, state->pre_a + np * ECMULT_TABLE_SIZE(WINDOW_A), n, WINDOW_A); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(r, r, &tmpa, 
NULL); + rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge(&tmpa, state->pre_a + np * ECMULT_TABLE_SIZE(WINDOW_A), n, WINDOW_A); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(r, r, &tmpa, NULL); } if (i < state->ps[np].bits_na_lam && (n = state->ps[np].wnaf_na_lam[i])) { - rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_lambda(&tmpa, state->pre_a + np * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + np * ECMULT_TABLE_SIZE(WINDOW_A), n, WINDOW_A); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(r, r, &tmpa, NULL); + rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge_lambda(&tmpa, state->pre_a + np * ECMULT_TABLE_SIZE(WINDOW_A), state->aux + np * ECMULT_TABLE_SIZE(WINDOW_A), n, WINDOW_A); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(r, r, &tmpa, NULL); } } if (i < bits_ng_1 && (n = wnaf_ng_1[i])) { - rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_storage(&tmpa, rustsecp256k1zkp_v0_10_0_pre_g, n, WINDOW_G); - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(r, r, &tmpa, &Z); + rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge_storage(&tmpa, rustsecp256k1zkp_v0_10_1_pre_g, n, WINDOW_G); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(r, r, &tmpa, &Z); } if (i < bits_ng_128 && (n = wnaf_ng_128[i])) { - rustsecp256k1zkp_v0_10_0_ecmult_table_get_ge_storage(&tmpa, rustsecp256k1zkp_v0_10_0_pre_g_128, n, WINDOW_G); - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(r, r, &tmpa, &Z); + rustsecp256k1zkp_v0_10_1_ecmult_table_get_ge_storage(&tmpa, rustsecp256k1zkp_v0_10_1_pre_g_128, n, WINDOW_G); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(r, r, &tmpa, &Z); } } if (!r->infinity) { - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &r->z, &Z); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &r->z, &Z); } } -static void rustsecp256k1zkp_v0_10_0_ecmult(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_scalar *na, const rustsecp256k1zkp_v0_10_0_scalar *ng) { - rustsecp256k1zkp_v0_10_0_fe aux[ECMULT_TABLE_SIZE(WINDOW_A)]; - rustsecp256k1zkp_v0_10_0_ge pre_a[ECMULT_TABLE_SIZE(WINDOW_A)]; - struct 
rustsecp256k1zkp_v0_10_0_strauss_point_state ps[1]; - struct rustsecp256k1zkp_v0_10_0_strauss_state state; +static void rustsecp256k1zkp_v0_10_1_ecmult(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_scalar *na, const rustsecp256k1zkp_v0_10_1_scalar *ng) { + rustsecp256k1zkp_v0_10_1_fe aux[ECMULT_TABLE_SIZE(WINDOW_A)]; + rustsecp256k1zkp_v0_10_1_ge pre_a[ECMULT_TABLE_SIZE(WINDOW_A)]; + struct rustsecp256k1zkp_v0_10_1_strauss_point_state ps[1]; + struct rustsecp256k1zkp_v0_10_1_strauss_state state; state.aux = aux; state.pre_a = pre_a; state.ps = ps; - rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(&state, r, 1, a, na, ng); + rustsecp256k1zkp_v0_10_1_ecmult_strauss_wnaf(&state, r, 1, a, na, ng); } -static size_t rustsecp256k1zkp_v0_10_0_strauss_scratch_size(size_t n_points) { - static const size_t point_size = (sizeof(rustsecp256k1zkp_v0_10_0_ge) + sizeof(rustsecp256k1zkp_v0_10_0_fe)) * ECMULT_TABLE_SIZE(WINDOW_A) + sizeof(struct rustsecp256k1zkp_v0_10_0_strauss_point_state) + sizeof(rustsecp256k1zkp_v0_10_0_gej) + sizeof(rustsecp256k1zkp_v0_10_0_scalar); +static size_t rustsecp256k1zkp_v0_10_1_strauss_scratch_size(size_t n_points) { + static const size_t point_size = (sizeof(rustsecp256k1zkp_v0_10_1_ge) + sizeof(rustsecp256k1zkp_v0_10_1_fe)) * ECMULT_TABLE_SIZE(WINDOW_A) + sizeof(struct rustsecp256k1zkp_v0_10_1_strauss_point_state) + sizeof(rustsecp256k1zkp_v0_10_1_gej) + sizeof(rustsecp256k1zkp_v0_10_1_scalar); return n_points*point_size; } -static int rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n_points, size_t cb_offset) { - rustsecp256k1zkp_v0_10_0_gej* points; - rustsecp256k1zkp_v0_10_0_scalar* scalars; - struct rustsecp256k1zkp_v0_10_0_strauss_state state; 
+static int rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n_points, size_t cb_offset) { + rustsecp256k1zkp_v0_10_1_gej* points; + rustsecp256k1zkp_v0_10_1_scalar* scalars; + struct rustsecp256k1zkp_v0_10_1_strauss_state state; size_t i; - const size_t scratch_checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(error_callback, scratch); + const size_t scratch_checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(error_callback, scratch); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); if (inp_g_sc == NULL && n_points == 0) { return 1; } @@ -375,37 +375,37 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch(const rustsecp256k1zkp_ /* We allocate STRAUSS_SCRATCH_OBJECTS objects on the scratch space. If these * allocations change, make sure to update the STRAUSS_SCRATCH_OBJECTS * constant and strauss_scratch_size accordingly. 
*/ - points = (rustsecp256k1zkp_v0_10_0_gej*)rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, n_points * sizeof(rustsecp256k1zkp_v0_10_0_gej)); - scalars = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, n_points * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - state.aux = (rustsecp256k1zkp_v0_10_0_fe*)rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, n_points * ECMULT_TABLE_SIZE(WINDOW_A) * sizeof(rustsecp256k1zkp_v0_10_0_fe)); - state.pre_a = (rustsecp256k1zkp_v0_10_0_ge*)rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, n_points * ECMULT_TABLE_SIZE(WINDOW_A) * sizeof(rustsecp256k1zkp_v0_10_0_ge)); - state.ps = (struct rustsecp256k1zkp_v0_10_0_strauss_point_state*)rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, n_points * sizeof(struct rustsecp256k1zkp_v0_10_0_strauss_point_state)); + points = (rustsecp256k1zkp_v0_10_1_gej*)rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, n_points * sizeof(rustsecp256k1zkp_v0_10_1_gej)); + scalars = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, n_points * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + state.aux = (rustsecp256k1zkp_v0_10_1_fe*)rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, n_points * ECMULT_TABLE_SIZE(WINDOW_A) * sizeof(rustsecp256k1zkp_v0_10_1_fe)); + state.pre_a = (rustsecp256k1zkp_v0_10_1_ge*)rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, n_points * ECMULT_TABLE_SIZE(WINDOW_A) * sizeof(rustsecp256k1zkp_v0_10_1_ge)); + state.ps = (struct rustsecp256k1zkp_v0_10_1_strauss_point_state*)rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, n_points * sizeof(struct rustsecp256k1zkp_v0_10_1_strauss_point_state)); if (points == NULL || scalars == NULL || state.aux == NULL || state.pre_a == NULL || state.ps == NULL) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, 
scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 0; } for (i = 0; i < n_points; i++) { - rustsecp256k1zkp_v0_10_0_ge point; + rustsecp256k1zkp_v0_10_1_ge point; if (!cb(&scalars[i], &point, i+cb_offset, cbdata)) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 0; } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&points[i], &point); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&points[i], &point); } - rustsecp256k1zkp_v0_10_0_ecmult_strauss_wnaf(&state, r, n_points, points, scalars, inp_g_sc); - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_ecmult_strauss_wnaf(&state, r, n_points, points, scalars, inp_g_sc); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 1; } -/* Wrapper for rustsecp256k1zkp_v0_10_0_ecmult_multi_func interface */ -static int rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch_single(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n) { - return rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch(error_callback, scratch, r, inp_g_sc, cb, cbdata, n, 0); +/* Wrapper for rustsecp256k1zkp_v0_10_1_ecmult_multi_func interface */ +static int rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch_single(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n) { + return rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch(error_callback, 
scratch, r, inp_g_sc, cb, cbdata, n, 0); } -static size_t rustsecp256k1zkp_v0_10_0_strauss_max_points(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch) { - return rustsecp256k1zkp_v0_10_0_scratch_max_allocation(error_callback, scratch, STRAUSS_SCRATCH_OBJECTS) / rustsecp256k1zkp_v0_10_0_strauss_scratch_size(1); +static size_t rustsecp256k1zkp_v0_10_1_strauss_max_points(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch) { + return rustsecp256k1zkp_v0_10_1_scratch_max_allocation(error_callback, scratch, STRAUSS_SCRATCH_OBJECTS) / rustsecp256k1zkp_v0_10_1_strauss_scratch_size(1); } /** Convert a number to WNAF notation. @@ -415,25 +415,25 @@ static size_t rustsecp256k1zkp_v0_10_0_strauss_max_points(const rustsecp256k1zkp * - the number of words set is always WNAF_SIZE(w) * - the returned skew is 0 or 1 */ -static int rustsecp256k1zkp_v0_10_0_wnaf_fixed(int *wnaf, const rustsecp256k1zkp_v0_10_0_scalar *s, int w) { +static int rustsecp256k1zkp_v0_10_1_wnaf_fixed(int *wnaf, const rustsecp256k1zkp_v0_10_1_scalar *s, int w) { int skew = 0; int pos; int max_pos; int last_w; - const rustsecp256k1zkp_v0_10_0_scalar *work = s; + const rustsecp256k1zkp_v0_10_1_scalar *work = s; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(s)) { + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(s)) { for (pos = 0; pos < WNAF_SIZE(w); pos++) { wnaf[pos] = 0; } return 0; } - if (rustsecp256k1zkp_v0_10_0_scalar_is_even(s)) { + if (rustsecp256k1zkp_v0_10_1_scalar_is_even(s)) { skew = 1; } - wnaf[0] = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(work, 0, w) + skew; + wnaf[0] = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(work, 0, w) + skew; /* Compute last window size. 
Relevant when window size doesn't divide the * number of bits in the scalar */ last_w = WNAF_BITS - (WNAF_SIZE(w) - 1) * w; @@ -441,7 +441,7 @@ static int rustsecp256k1zkp_v0_10_0_wnaf_fixed(int *wnaf, const rustsecp256k1zkp /* Store the position of the first nonzero word in max_pos to allow * skipping leading zeros when calculating the wnaf. */ for (pos = WNAF_SIZE(w) - 1; pos > 0; pos--) { - int val = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(work, pos * w, pos == WNAF_SIZE(w)-1 ? last_w : w); + int val = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(work, pos * w, pos == WNAF_SIZE(w)-1 ? last_w : w); if(val != 0) { break; } @@ -451,7 +451,7 @@ static int rustsecp256k1zkp_v0_10_0_wnaf_fixed(int *wnaf, const rustsecp256k1zkp pos = 1; while (pos <= max_pos) { - int val = rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(work, pos * w, pos == WNAF_SIZE(w)-1 ? last_w : w); + int val = rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(work, pos * w, pos == WNAF_SIZE(w)-1 ? last_w : w); if ((val & 1) == 0) { wnaf[pos - 1] -= (1 << w); wnaf[pos] = (val + 1); @@ -477,14 +477,14 @@ static int rustsecp256k1zkp_v0_10_0_wnaf_fixed(int *wnaf, const rustsecp256k1zkp return skew; } -struct rustsecp256k1zkp_v0_10_0_pippenger_point_state { +struct rustsecp256k1zkp_v0_10_1_pippenger_point_state { int skew_na; size_t input_pos; }; -struct rustsecp256k1zkp_v0_10_0_pippenger_state { +struct rustsecp256k1zkp_v0_10_1_pippenger_state { int *wnaf_na; - struct rustsecp256k1zkp_v0_10_0_pippenger_point_state* ps; + struct rustsecp256k1zkp_v0_10_1_pippenger_point_state* ps; }; /* @@ -494,7 +494,7 @@ struct rustsecp256k1zkp_v0_10_0_pippenger_state { * to the point's wnaf[i]. Second, the buckets are added together such that * r += 1*bucket[0] + 3*bucket[1] + 5*bucket[2] + ... 
*/ -static int rustsecp256k1zkp_v0_10_0_ecmult_pippenger_wnaf(rustsecp256k1zkp_v0_10_0_gej *buckets, int bucket_window, struct rustsecp256k1zkp_v0_10_0_pippenger_state *state, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *sc, const rustsecp256k1zkp_v0_10_0_ge *pt, size_t num) { +static int rustsecp256k1zkp_v0_10_1_ecmult_pippenger_wnaf(rustsecp256k1zkp_v0_10_1_gej *buckets, int bucket_window, struct rustsecp256k1zkp_v0_10_1_pippenger_state *state, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *sc, const rustsecp256k1zkp_v0_10_1_ge *pt, size_t num) { size_t n_wnaf = WNAF_SIZE(bucket_window+1); size_t np; size_t no = 0; @@ -502,55 +502,55 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_pippenger_wnaf(rustsecp256k1zkp_v0_10 int j; for (np = 0; np < num; ++np) { - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sc[np]) || rustsecp256k1zkp_v0_10_0_ge_is_infinity(&pt[np])) { + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sc[np]) || rustsecp256k1zkp_v0_10_1_ge_is_infinity(&pt[np])) { continue; } state->ps[no].input_pos = np; - state->ps[no].skew_na = rustsecp256k1zkp_v0_10_0_wnaf_fixed(&state->wnaf_na[no*n_wnaf], &sc[np], bucket_window+1); + state->ps[no].skew_na = rustsecp256k1zkp_v0_10_1_wnaf_fixed(&state->wnaf_na[no*n_wnaf], &sc[np], bucket_window+1); no++; } - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); if (no == 0) { return 1; } for (i = n_wnaf - 1; i >= 0; i--) { - rustsecp256k1zkp_v0_10_0_gej running_sum; + rustsecp256k1zkp_v0_10_1_gej running_sum; for(j = 0; j < ECMULT_TABLE_SIZE(bucket_window+2); j++) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&buckets[j]); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&buckets[j]); } for (np = 0; np < no; ++np) { int n = state->wnaf_na[np*n_wnaf + i]; - struct rustsecp256k1zkp_v0_10_0_pippenger_point_state point_state = state->ps[np]; - rustsecp256k1zkp_v0_10_0_ge tmp; + struct rustsecp256k1zkp_v0_10_1_pippenger_point_state 
point_state = state->ps[np]; + rustsecp256k1zkp_v0_10_1_ge tmp; int idx; if (i == 0) { /* correct for wnaf skew */ int skew = point_state.skew_na; if (skew) { - rustsecp256k1zkp_v0_10_0_ge_neg(&tmp, &pt[point_state.input_pos]); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&buckets[0], &buckets[0], &tmp, NULL); + rustsecp256k1zkp_v0_10_1_ge_neg(&tmp, &pt[point_state.input_pos]); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&buckets[0], &buckets[0], &tmp, NULL); } } if (n > 0) { idx = (n - 1)/2; - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&buckets[idx], &buckets[idx], &pt[point_state.input_pos], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&buckets[idx], &buckets[idx], &pt[point_state.input_pos], NULL); } else if (n < 0) { idx = -(n + 1)/2; - rustsecp256k1zkp_v0_10_0_ge_neg(&tmp, &pt[point_state.input_pos]); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&buckets[idx], &buckets[idx], &tmp, NULL); + rustsecp256k1zkp_v0_10_1_ge_neg(&tmp, &pt[point_state.input_pos]); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&buckets[idx], &buckets[idx], &tmp, NULL); } } for(j = 0; j < bucket_window; j++) { - rustsecp256k1zkp_v0_10_0_gej_double_var(r, r, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(r, r, NULL); } - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&running_sum); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&running_sum); /* Accumulate the sum: bucket[0] + 3*bucket[1] + 5*bucket[2] + 7*bucket[3] + ... * = bucket[0] + bucket[1] + bucket[2] + bucket[3] + ... * + 2 * (bucket[1] + 2*bucket[2] + 3*bucket[3] + ...) @@ -560,13 +560,13 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_pippenger_wnaf(rustsecp256k1zkp_v0_10 * The doubling is done implicitly by deferring the final window doubling (of 'r'). 
*/ for(j = ECMULT_TABLE_SIZE(bucket_window+2) - 1; j > 0; j--) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&running_sum, &running_sum, &buckets[j], NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(r, r, &running_sum, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&running_sum, &running_sum, &buckets[j], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(r, r, &running_sum, NULL); } - rustsecp256k1zkp_v0_10_0_gej_add_var(&running_sum, &running_sum, &buckets[0], NULL); - rustsecp256k1zkp_v0_10_0_gej_double_var(r, r, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(r, r, &running_sum, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&running_sum, &running_sum, &buckets[0], NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(r, r, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(r, r, &running_sum, NULL); } return 1; } @@ -575,7 +575,7 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_pippenger_wnaf(rustsecp256k1zkp_v0_10 * Returns optimal bucket_window (number of bits of a scalar represented by a * set of buckets) for a given number of points. */ -static int rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(size_t n) { +static int rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(size_t n) { if (n <= 1) { return 1; } else if (n <= 4) { @@ -604,7 +604,7 @@ static int rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(size_t n) { /** * Returns the maximum optimal number of points for a bucket_window. 
*/ -static size_t rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(int bucket_window) { +static size_t rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(int bucket_window) { switch(bucket_window) { case 1: return 1; case 2: return 4; @@ -623,18 +623,18 @@ static size_t rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(int bucket_wi } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_endo_split(rustsecp256k1zkp_v0_10_0_scalar *s1, rustsecp256k1zkp_v0_10_0_scalar *s2, rustsecp256k1zkp_v0_10_0_ge *p1, rustsecp256k1zkp_v0_10_0_ge *p2) { - rustsecp256k1zkp_v0_10_0_scalar tmp = *s1; - rustsecp256k1zkp_v0_10_0_scalar_split_lambda(s1, s2, &tmp); - rustsecp256k1zkp_v0_10_0_ge_mul_lambda(p2, p1); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_ecmult_endo_split(rustsecp256k1zkp_v0_10_1_scalar *s1, rustsecp256k1zkp_v0_10_1_scalar *s2, rustsecp256k1zkp_v0_10_1_ge *p1, rustsecp256k1zkp_v0_10_1_ge *p2) { + rustsecp256k1zkp_v0_10_1_scalar tmp = *s1; + rustsecp256k1zkp_v0_10_1_scalar_split_lambda(s1, s2, &tmp); + rustsecp256k1zkp_v0_10_1_ge_mul_lambda(p2, p1); - if (rustsecp256k1zkp_v0_10_0_scalar_is_high(s1)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(s1, s1); - rustsecp256k1zkp_v0_10_0_ge_neg(p1, p1); + if (rustsecp256k1zkp_v0_10_1_scalar_is_high(s1)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(s1, s1); + rustsecp256k1zkp_v0_10_1_ge_neg(p1, p1); } - if (rustsecp256k1zkp_v0_10_0_scalar_is_high(s2)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(s2, s2); - rustsecp256k1zkp_v0_10_0_ge_neg(p2, p2); + if (rustsecp256k1zkp_v0_10_1_scalar_is_high(s2)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(s2, s2); + rustsecp256k1zkp_v0_10_1_ge_neg(p2, p2); } } @@ -642,91 +642,91 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_ecmult_endo_split(rustsecp * Returns the scratch size required for a given number of points (excluding * base point G) without considering alignment. 
*/ -static size_t rustsecp256k1zkp_v0_10_0_pippenger_scratch_size(size_t n_points, int bucket_window) { +static size_t rustsecp256k1zkp_v0_10_1_pippenger_scratch_size(size_t n_points, int bucket_window) { size_t entries = 2*n_points + 2; - size_t entry_size = sizeof(rustsecp256k1zkp_v0_10_0_ge) + sizeof(rustsecp256k1zkp_v0_10_0_scalar) + sizeof(struct rustsecp256k1zkp_v0_10_0_pippenger_point_state) + (WNAF_SIZE(bucket_window+1)+1)*sizeof(int); - return (sizeof(rustsecp256k1zkp_v0_10_0_gej) << bucket_window) + sizeof(struct rustsecp256k1zkp_v0_10_0_pippenger_state) + entries * entry_size; + size_t entry_size = sizeof(rustsecp256k1zkp_v0_10_1_ge) + sizeof(rustsecp256k1zkp_v0_10_1_scalar) + sizeof(struct rustsecp256k1zkp_v0_10_1_pippenger_point_state) + (WNAF_SIZE(bucket_window+1)+1)*sizeof(int); + return (sizeof(rustsecp256k1zkp_v0_10_1_gej) << bucket_window) + sizeof(struct rustsecp256k1zkp_v0_10_1_pippenger_state) + entries * entry_size; } -static int rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n_points, size_t cb_offset) { - const size_t scratch_checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(error_callback, scratch); +static int rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n_points, size_t cb_offset) { + const size_t scratch_checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(error_callback, scratch); /* Use 2(n+1) with the endomorphism, when calculating batch * sizes. 
The reason for +1 is that we add the G scalar to the list of * other scalars. */ size_t entries = 2*n_points + 2; - rustsecp256k1zkp_v0_10_0_ge *points; - rustsecp256k1zkp_v0_10_0_scalar *scalars; - rustsecp256k1zkp_v0_10_0_gej *buckets; - struct rustsecp256k1zkp_v0_10_0_pippenger_state *state_space; + rustsecp256k1zkp_v0_10_1_ge *points; + rustsecp256k1zkp_v0_10_1_scalar *scalars; + rustsecp256k1zkp_v0_10_1_gej *buckets; + struct rustsecp256k1zkp_v0_10_1_pippenger_state *state_space; size_t idx = 0; size_t point_idx = 0; int i, j; int bucket_window; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); if (inp_g_sc == NULL && n_points == 0) { return 1; } - bucket_window = rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(n_points); + bucket_window = rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(n_points); /* We allocate PIPPENGER_SCRATCH_OBJECTS objects on the scratch space. If * these allocations change, make sure to update the * PIPPENGER_SCRATCH_OBJECTS constant and pippenger_scratch_size * accordingly. 
*/ - points = (rustsecp256k1zkp_v0_10_0_ge *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, entries * sizeof(*points)); - scalars = (rustsecp256k1zkp_v0_10_0_scalar *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, entries * sizeof(*scalars)); - state_space = (struct rustsecp256k1zkp_v0_10_0_pippenger_state *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, sizeof(*state_space)); + points = (rustsecp256k1zkp_v0_10_1_ge *) rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, entries * sizeof(*points)); + scalars = (rustsecp256k1zkp_v0_10_1_scalar *) rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, entries * sizeof(*scalars)); + state_space = (struct rustsecp256k1zkp_v0_10_1_pippenger_state *) rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, sizeof(*state_space)); if (points == NULL || scalars == NULL || state_space == NULL) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 0; } - state_space->ps = (struct rustsecp256k1zkp_v0_10_0_pippenger_point_state *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, entries * sizeof(*state_space->ps)); - state_space->wnaf_na = (int *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, entries*(WNAF_SIZE(bucket_window+1)) * sizeof(int)); - buckets = (rustsecp256k1zkp_v0_10_0_gej *) rustsecp256k1zkp_v0_10_0_scratch_alloc(error_callback, scratch, ((size_t)1 << bucket_window) * sizeof(*buckets)); + state_space->ps = (struct rustsecp256k1zkp_v0_10_1_pippenger_point_state *) rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, entries * sizeof(*state_space->ps)); + state_space->wnaf_na = (int *) rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, entries*(WNAF_SIZE(bucket_window+1)) * sizeof(int)); + buckets = (rustsecp256k1zkp_v0_10_1_gej *) 
rustsecp256k1zkp_v0_10_1_scratch_alloc(error_callback, scratch, ((size_t)1 << bucket_window) * sizeof(*buckets)); if (state_space->ps == NULL || state_space->wnaf_na == NULL || buckets == NULL) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 0; } if (inp_g_sc != NULL) { scalars[0] = *inp_g_sc; - points[0] = rustsecp256k1zkp_v0_10_0_ge_const_g; + points[0] = rustsecp256k1zkp_v0_10_1_ge_const_g; idx++; - rustsecp256k1zkp_v0_10_0_ecmult_endo_split(&scalars[0], &scalars[1], &points[0], &points[1]); + rustsecp256k1zkp_v0_10_1_ecmult_endo_split(&scalars[0], &scalars[1], &points[0], &points[1]); idx++; } while (point_idx < n_points) { if (!cb(&scalars[idx], &points[idx], point_idx + cb_offset, cbdata)) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(error_callback, scratch, scratch_checkpoint); return 0; } idx++; - rustsecp256k1zkp_v0_10_0_ecmult_endo_split(&scalars[idx - 1], &scalars[idx], &points[idx - 1], &points[idx]); + rustsecp256k1zkp_v0_10_1_ecmult_endo_split(&scalars[idx - 1], &scalars[idx], &points[idx - 1], &points[idx]); idx++; point_idx++; } - rustsecp256k1zkp_v0_10_0_ecmult_pippenger_wnaf(buckets, bucket_window, state_space, r, scalars, points, idx); + rustsecp256k1zkp_v0_10_1_ecmult_pippenger_wnaf(buckets, bucket_window, state_space, r, scalars, points, idx); /* Clear data */ for(i = 0; (size_t)i < idx; i++) { - rustsecp256k1zkp_v0_10_0_scalar_clear(&scalars[i]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&scalars[i]); state_space->ps[i].skew_na = 0; for(j = 0; j < WNAF_SIZE(bucket_window+1); j++) { state_space->wnaf_na[i * WNAF_SIZE(bucket_window+1) + j] = 0; } } for(i = 0; i < 1< max_alloc) { break; } @@ -770,32 +770,32 @@ static size_t rustsecp256k1zkp_v0_10_0_pippenger_max_points(const rustsecp256k1z 
/* Computes ecmult_multi by simply multiplying and adding each point. Does not * require a scratch space */ -static int rustsecp256k1zkp_v0_10_0_ecmult_multi_simple_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n_points) { +static int rustsecp256k1zkp_v0_10_1_ecmult_multi_simple_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n_points) { size_t point_idx; - rustsecp256k1zkp_v0_10_0_gej tmpj; + rustsecp256k1zkp_v0_10_1_gej tmpj; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&tmpj); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&tmpj); /* r = inp_g_sc*G */ - rustsecp256k1zkp_v0_10_0_ecmult(r, &tmpj, &rustsecp256k1zkp_v0_10_0_scalar_zero, inp_g_sc); + rustsecp256k1zkp_v0_10_1_ecmult(r, &tmpj, &rustsecp256k1zkp_v0_10_1_scalar_zero, inp_g_sc); for (point_idx = 0; point_idx < n_points; point_idx++) { - rustsecp256k1zkp_v0_10_0_ge point; - rustsecp256k1zkp_v0_10_0_gej pointj; - rustsecp256k1zkp_v0_10_0_scalar scalar; + rustsecp256k1zkp_v0_10_1_ge point; + rustsecp256k1zkp_v0_10_1_gej pointj; + rustsecp256k1zkp_v0_10_1_scalar scalar; if (!cb(&scalar, &point, point_idx, cbdata)) { return 0; } /* r += scalar*point */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pointj, &point); - rustsecp256k1zkp_v0_10_0_ecmult(&tmpj, &pointj, &scalar, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(r, r, &tmpj, NULL); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pointj, &point); + rustsecp256k1zkp_v0_10_1_ecmult(&tmpj, &pointj, &scalar, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(r, r, &tmpj, NULL); } return 1; } /* Compute the number of batches and the batch size given the maximum batch size and the * total number of points */ -static int 
rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(size_t *n_batches, size_t *n_batch_points, size_t max_n_batch_points, size_t n) { +static int rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(size_t *n_batches, size_t *n_batch_points, size_t max_n_batch_points, size_t n) { if (max_n_batch_points == 0) { return 0; } @@ -813,48 +813,48 @@ static int rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(size_t *n_bat return 1; } -typedef int (*rustsecp256k1zkp_v0_10_0_ecmult_multi_func)(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch*, rustsecp256k1zkp_v0_10_0_gej*, const rustsecp256k1zkp_v0_10_0_scalar*, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void*, size_t); -static int rustsecp256k1zkp_v0_10_0_ecmult_multi_var(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void *cbdata, size_t n) { +typedef int (*rustsecp256k1zkp_v0_10_1_ecmult_multi_func)(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch*, rustsecp256k1zkp_v0_10_1_gej*, const rustsecp256k1zkp_v0_10_1_scalar*, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void*, size_t); +static int rustsecp256k1zkp_v0_10_1_ecmult_multi_var(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_scalar *inp_g_sc, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void *cbdata, size_t n) { size_t i; - int (*f)(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch*, rustsecp256k1zkp_v0_10_0_gej*, const rustsecp256k1zkp_v0_10_0_scalar*, rustsecp256k1zkp_v0_10_0_ecmult_multi_callback cb, void*, size_t, size_t); + int (*f)(const rustsecp256k1zkp_v0_10_1_callback* error_callback, 
rustsecp256k1zkp_v0_10_1_scratch*, rustsecp256k1zkp_v0_10_1_gej*, const rustsecp256k1zkp_v0_10_1_scalar*, rustsecp256k1zkp_v0_10_1_ecmult_multi_callback cb, void*, size_t, size_t); size_t n_batches; size_t n_batch_points; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); if (inp_g_sc == NULL && n == 0) { return 1; } else if (n == 0) { - rustsecp256k1zkp_v0_10_0_ecmult(r, r, &rustsecp256k1zkp_v0_10_0_scalar_zero, inp_g_sc); + rustsecp256k1zkp_v0_10_1_ecmult(r, r, &rustsecp256k1zkp_v0_10_1_scalar_zero, inp_g_sc); return 1; } if (scratch == NULL) { - return rustsecp256k1zkp_v0_10_0_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); + return rustsecp256k1zkp_v0_10_1_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); } /* Compute the batch sizes for Pippenger's algorithm given a scratch space. If it's greater than * a threshold use Pippenger's algorithm. Otherwise use Strauss' algorithm. * As a first step check if there's enough space for Pippenger's algo (which requires less space * than Strauss' algo) and if not, use the simple algorithm. 
*/ - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, rustsecp256k1zkp_v0_10_0_pippenger_max_points(error_callback, scratch), n)) { - return rustsecp256k1zkp_v0_10_0_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, rustsecp256k1zkp_v0_10_1_pippenger_max_points(error_callback, scratch), n)) { + return rustsecp256k1zkp_v0_10_1_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); } if (n_batch_points >= ECMULT_PIPPENGER_THRESHOLD) { - f = rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch; + f = rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch; } else { - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, rustsecp256k1zkp_v0_10_0_strauss_max_points(error_callback, scratch), n)) { - return rustsecp256k1zkp_v0_10_0_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, rustsecp256k1zkp_v0_10_1_strauss_max_points(error_callback, scratch), n)) { + return rustsecp256k1zkp_v0_10_1_ecmult_multi_simple_var(r, inp_g_sc, cb, cbdata, n); } - f = rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch; + f = rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch; } for(i = 0; i < n_batches; i++) { size_t nbp = n < n_batch_points ? n : n_batch_points; size_t offset = n_batch_points*i; - rustsecp256k1zkp_v0_10_0_gej tmp; + rustsecp256k1zkp_v0_10_1_gej tmp; if (!f(error_callback, scratch, &tmp, i == 0 ? 
inp_g_sc : NULL, cb, cbdata, nbp, offset)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_add_var(r, r, &tmp, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(r, r, &tmp, NULL); n -= nbp; } return 1; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field.h b/secp256k1-zkp-sys/depend/secp256k1/src/field.h index 914684e8..88b81ec9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field.h @@ -9,15 +9,15 @@ #include "util.h" -/* This file defines the generic interface for working with rustsecp256k1zkp_v0_10_0_fe +/* This file defines the generic interface for working with rustsecp256k1zkp_v0_10_1_fe * objects, which represent field elements (integers modulo 2^256 - 2^32 - 977). * - * The actual definition of the rustsecp256k1zkp_v0_10_0_fe type depends on the chosen field + * The actual definition of the rustsecp256k1zkp_v0_10_1_fe type depends on the chosen field * implementation; see the field_5x52.h and field_10x26.h files for details. * - * All rustsecp256k1zkp_v0_10_0_fe objects have implicit properties that determine what + * All rustsecp256k1zkp_v0_10_1_fe objects have implicit properties that determine what * operations are permitted on it. These are purely a function of what - * rustsecp256k1zkp_v0_10_0_fe_ operations are applied on it, generally (implicitly) fixed at + * rustsecp256k1zkp_v0_10_1_fe_ operations are applied on it, generally (implicitly) fixed at * compile time, and do not depend on the chosen field implementation. Despite * that, what these properties actually entail for the field representation * values depends on the chosen field implementation. These properties are: @@ -26,7 +26,7 @@ * * In VERIFY mode, they are materialized explicitly as fields in the struct, * allowing run-time verification of these properties. 
In that case, the field - * implementation also provides a rustsecp256k1zkp_v0_10_0_fe_verify routine to verify that + * implementation also provides a rustsecp256k1zkp_v0_10_1_fe_verify routine to verify that * these fields match the run-time value and perform internal consistency * checks. */ #ifdef VERIFY @@ -56,7 +56,7 @@ #define SECP256K1_FE_VERIFY_CONST(d7, d6, d5, d4, d3, d2, d1, d0) #endif -/** This expands to an initializer for a rustsecp256k1zkp_v0_10_0_fe valued sum((i*32) * d_i, i=0..7) mod p. +/** This expands to an initializer for a rustsecp256k1zkp_v0_10_1_fe valued sum((i*32) * d_i, i=0..7) mod p. * * It has magnitude 1, unless d_i are all 0, in which case the magnitude is 0. * It is normalized, unless sum(2^(i*32) * d_i, i=0..7) >= p. @@ -65,8 +65,8 @@ */ #define SECP256K1_FE_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {SECP256K1_FE_CONST_INNER((d7), (d6), (d5), (d4), (d3), (d2), (d1), (d0)) SECP256K1_FE_VERIFY_CONST((d7), (d6), (d5), (d4), (d3), (d2), (d1), (d0)) } -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_fe_one = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1); -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_const_beta = SECP256K1_FE_CONST( +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_fe_one = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1); +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_const_beta = SECP256K1_FE_CONST( 0x7ae96a2bul, 0x657c0710ul, 0x6e64479eul, 0xac3434e9ul, 0x9cf04975ul, 0x12f58995ul, 0xc1396c28ul, 0x719501eeul ); @@ -75,33 +75,33 @@ static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_const_beta = S /* In non-VERIFY mode, we #define the fe operations to be identical to their * internal field implementation, to avoid the potential overhead of a * function call (even though presumably inlinable). 
*/ -# define rustsecp256k1zkp_v0_10_0_fe_normalize rustsecp256k1zkp_v0_10_0_fe_impl_normalize -# define rustsecp256k1zkp_v0_10_0_fe_normalize_weak rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak -# define rustsecp256k1zkp_v0_10_0_fe_normalize_var rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var -# define rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero -# define rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var -# define rustsecp256k1zkp_v0_10_0_fe_set_int rustsecp256k1zkp_v0_10_0_fe_impl_set_int -# define rustsecp256k1zkp_v0_10_0_fe_clear rustsecp256k1zkp_v0_10_0_fe_impl_clear -# define rustsecp256k1zkp_v0_10_0_fe_is_zero rustsecp256k1zkp_v0_10_0_fe_impl_is_zero -# define rustsecp256k1zkp_v0_10_0_fe_is_odd rustsecp256k1zkp_v0_10_0_fe_impl_is_odd -# define rustsecp256k1zkp_v0_10_0_fe_cmp_var rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var -# define rustsecp256k1zkp_v0_10_0_fe_set_b32_mod rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod -# define rustsecp256k1zkp_v0_10_0_fe_set_b32_limit rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_limit -# define rustsecp256k1zkp_v0_10_0_fe_get_b32 rustsecp256k1zkp_v0_10_0_fe_impl_get_b32 -# define rustsecp256k1zkp_v0_10_0_fe_negate_unchecked rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked -# define rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked -# define rustsecp256k1zkp_v0_10_0_fe_add rustsecp256k1zkp_v0_10_0_fe_impl_add -# define rustsecp256k1zkp_v0_10_0_fe_mul rustsecp256k1zkp_v0_10_0_fe_impl_mul -# define rustsecp256k1zkp_v0_10_0_fe_sqr rustsecp256k1zkp_v0_10_0_fe_impl_sqr -# define rustsecp256k1zkp_v0_10_0_fe_cmov rustsecp256k1zkp_v0_10_0_fe_impl_cmov -# define rustsecp256k1zkp_v0_10_0_fe_to_storage rustsecp256k1zkp_v0_10_0_fe_impl_to_storage -# define rustsecp256k1zkp_v0_10_0_fe_from_storage rustsecp256k1zkp_v0_10_0_fe_impl_from_storage -# define rustsecp256k1zkp_v0_10_0_fe_inv 
rustsecp256k1zkp_v0_10_0_fe_impl_inv -# define rustsecp256k1zkp_v0_10_0_fe_inv_var rustsecp256k1zkp_v0_10_0_fe_impl_inv_var -# define rustsecp256k1zkp_v0_10_0_fe_get_bounds rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds -# define rustsecp256k1zkp_v0_10_0_fe_half rustsecp256k1zkp_v0_10_0_fe_impl_half -# define rustsecp256k1zkp_v0_10_0_fe_add_int rustsecp256k1zkp_v0_10_0_fe_impl_add_int -# define rustsecp256k1zkp_v0_10_0_fe_is_square_var rustsecp256k1zkp_v0_10_0_fe_impl_is_square_var +# define rustsecp256k1zkp_v0_10_1_fe_normalize rustsecp256k1zkp_v0_10_1_fe_impl_normalize +# define rustsecp256k1zkp_v0_10_1_fe_normalize_weak rustsecp256k1zkp_v0_10_1_fe_impl_normalize_weak +# define rustsecp256k1zkp_v0_10_1_fe_normalize_var rustsecp256k1zkp_v0_10_1_fe_impl_normalize_var +# define rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero +# define rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero_var +# define rustsecp256k1zkp_v0_10_1_fe_set_int rustsecp256k1zkp_v0_10_1_fe_impl_set_int +# define rustsecp256k1zkp_v0_10_1_fe_clear rustsecp256k1zkp_v0_10_1_fe_impl_clear +# define rustsecp256k1zkp_v0_10_1_fe_is_zero rustsecp256k1zkp_v0_10_1_fe_impl_is_zero +# define rustsecp256k1zkp_v0_10_1_fe_is_odd rustsecp256k1zkp_v0_10_1_fe_impl_is_odd +# define rustsecp256k1zkp_v0_10_1_fe_cmp_var rustsecp256k1zkp_v0_10_1_fe_impl_cmp_var +# define rustsecp256k1zkp_v0_10_1_fe_set_b32_mod rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod +# define rustsecp256k1zkp_v0_10_1_fe_set_b32_limit rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_limit +# define rustsecp256k1zkp_v0_10_1_fe_get_b32 rustsecp256k1zkp_v0_10_1_fe_impl_get_b32 +# define rustsecp256k1zkp_v0_10_1_fe_negate_unchecked rustsecp256k1zkp_v0_10_1_fe_impl_negate_unchecked +# define rustsecp256k1zkp_v0_10_1_fe_mul_int_unchecked rustsecp256k1zkp_v0_10_1_fe_impl_mul_int_unchecked +# define rustsecp256k1zkp_v0_10_1_fe_add 
rustsecp256k1zkp_v0_10_1_fe_impl_add +# define rustsecp256k1zkp_v0_10_1_fe_mul rustsecp256k1zkp_v0_10_1_fe_impl_mul +# define rustsecp256k1zkp_v0_10_1_fe_sqr rustsecp256k1zkp_v0_10_1_fe_impl_sqr +# define rustsecp256k1zkp_v0_10_1_fe_cmov rustsecp256k1zkp_v0_10_1_fe_impl_cmov +# define rustsecp256k1zkp_v0_10_1_fe_to_storage rustsecp256k1zkp_v0_10_1_fe_impl_to_storage +# define rustsecp256k1zkp_v0_10_1_fe_from_storage rustsecp256k1zkp_v0_10_1_fe_impl_from_storage +# define rustsecp256k1zkp_v0_10_1_fe_inv rustsecp256k1zkp_v0_10_1_fe_impl_inv +# define rustsecp256k1zkp_v0_10_1_fe_inv_var rustsecp256k1zkp_v0_10_1_fe_impl_inv_var +# define rustsecp256k1zkp_v0_10_1_fe_get_bounds rustsecp256k1zkp_v0_10_1_fe_impl_get_bounds +# define rustsecp256k1zkp_v0_10_1_fe_half rustsecp256k1zkp_v0_10_1_fe_impl_half +# define rustsecp256k1zkp_v0_10_1_fe_add_int rustsecp256k1zkp_v0_10_1_fe_impl_add_int +# define rustsecp256k1zkp_v0_10_1_fe_is_square_var rustsecp256k1zkp_v0_10_1_fe_impl_is_square_var #endif /* !defined(VERIFY) */ /** Normalize a field element. @@ -109,64 +109,64 @@ static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_const_beta = S * On input, r must be a valid field element. * On output, r represents the same value but has normalized=1 and magnitude=1. */ -static void rustsecp256k1zkp_v0_10_0_fe_normalize(rustsecp256k1zkp_v0_10_0_fe *r); +static void rustsecp256k1zkp_v0_10_1_fe_normalize(rustsecp256k1zkp_v0_10_1_fe *r); /** Give a field element magnitude 1. * * On input, r must be a valid field element. * On output, r represents the same value but has magnitude=1. Normalized is unchanged. */ -static void rustsecp256k1zkp_v0_10_0_fe_normalize_weak(rustsecp256k1zkp_v0_10_0_fe *r); +static void rustsecp256k1zkp_v0_10_1_fe_normalize_weak(rustsecp256k1zkp_v0_10_1_fe *r); /** Normalize a field element, without constant-time guarantee. * - * Identical in behavior to rustsecp256k1zkp_v0_10_0_fe_normalize, but not constant time in r. 
+ * Identical in behavior to rustsecp256k1zkp_v0_10_1_fe_normalize, but not constant time in r. */ -static void rustsecp256k1zkp_v0_10_0_fe_normalize_var(rustsecp256k1zkp_v0_10_0_fe *r); +static void rustsecp256k1zkp_v0_10_1_fe_normalize_var(rustsecp256k1zkp_v0_10_1_fe *r); /** Determine whether r represents field element 0. * * On input, r must be a valid field element. * Returns whether r = 0 (mod p). */ -static int rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(const rustsecp256k1zkp_v0_10_0_fe *r); +static int rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(const rustsecp256k1zkp_v0_10_1_fe *r); /** Determine whether r represents field element 0, without constant-time guarantee. * - * Identical in behavior to rustsecp256k1zkp_v0_10_0_normalizes_to_zero, but not constant time in r. + * Identical in behavior to rustsecp256k1zkp_v0_10_1_normalizes_to_zero, but not constant time in r. */ -static int rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_0_fe *r); +static int rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_1_fe *r); /** Set a field element to an integer in range [0,0x7FFF]. * * On input, r does not need to be initialized, a must be in [0,0x7FFF]. * On output, r represents value a, is normalized and has magnitude (a!=0). */ -static void rustsecp256k1zkp_v0_10_0_fe_set_int(rustsecp256k1zkp_v0_10_0_fe *r, int a); +static void rustsecp256k1zkp_v0_10_1_fe_set_int(rustsecp256k1zkp_v0_10_1_fe *r, int a); /** Set a field element to 0. * * On input, a does not need to be initialized. * On output, a represents 0, is normalized and has magnitude 0. */ -static void rustsecp256k1zkp_v0_10_0_fe_clear(rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_clear(rustsecp256k1zkp_v0_10_1_fe *a); /** Determine whether a represents field element 0. * * On input, a must be a valid normalized field element. * Returns whether a = 0 (mod p). 
* - * This behaves identical to rustsecp256k1zkp_v0_10_0_normalizes_to_zero{,_var}, but requires + * This behaves identical to rustsecp256k1zkp_v0_10_1_normalizes_to_zero{,_var}, but requires * normalized input (and is much faster). */ -static int rustsecp256k1zkp_v0_10_0_fe_is_zero(const rustsecp256k1zkp_v0_10_0_fe *a); +static int rustsecp256k1zkp_v0_10_1_fe_is_zero(const rustsecp256k1zkp_v0_10_1_fe *a); /** Determine whether a (mod p) is odd. * * On input, a must be a valid normalized field element. * Returns (int(a) mod p) & 1. */ -static int rustsecp256k1zkp_v0_10_0_fe_is_odd(const rustsecp256k1zkp_v0_10_0_fe *a); +static int rustsecp256k1zkp_v0_10_1_fe_is_odd(const rustsecp256k1zkp_v0_10_1_fe *a); /** Determine whether two field elements are equal. * @@ -174,7 +174,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_is_odd(const rustsecp256k1zkp_v0_10_0_fe * 1 and 31, respectively. * Returns a = b (mod p). */ -static int rustsecp256k1zkp_v0_10_0_fe_equal(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b); +static int rustsecp256k1zkp_v0_10_1_fe_equal(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b); /** Compare the values represented by 2 field elements, without constant-time guarantee. * @@ -182,7 +182,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_equal(const rustsecp256k1zkp_v0_10_0_fe * * Returns 1 if a > b, -1 if a < b, and 0 if a = b (comparisons are done as integers * in range 0..p-1). */ -static int rustsecp256k1zkp_v0_10_0_fe_cmp_var(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b); +static int rustsecp256k1zkp_v0_10_1_fe_cmp_var(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b); /** Set a field element equal to the element represented by a provided 32-byte big endian value * interpreted modulo p. @@ -190,7 +190,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_cmp_var(const rustsecp256k1zkp_v0_10_0_fe * On input, r does not need to be initialized. 
a must be a pointer to an initialized 32-byte array. * On output, r = a (mod p). It will have magnitude 1, and not be normalized. */ -static void rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a); +static void rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a); /** Set a field element equal to a provided 32-byte big endian value, checking for overflow. * @@ -198,13 +198,13 @@ static void rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe * On output, r = a if (a < p), it will be normalized with magnitude 1, and 1 is returned. * If a >= p, 0 is returned, and r will be made invalid (and must not be used without overwriting). */ -static int rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a); +static int rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a); /** Convert a field element to 32-byte big endian byte array. * On input, a must be a valid normalized field element, and r a pointer to a 32-byte array. * On output, r = a (mod p). */ -static void rustsecp256k1zkp_v0_10_0_fe_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_1_fe *a); /** Negate a field element. * @@ -213,20 +213,20 @@ static void rustsecp256k1zkp_v0_10_0_fe_get_b32(unsigned char *r, const rustsecp * Performs {r = -a}. * On output, r will not be normalized, and will have magnitude m+1. */ -#define rustsecp256k1zkp_v0_10_0_fe_negate(r, a, m) ASSERT_INT_CONST_AND_DO(m, rustsecp256k1zkp_v0_10_0_fe_negate_unchecked(r, a, m)) +#define rustsecp256k1zkp_v0_10_1_fe_negate(r, a, m) ASSERT_INT_CONST_AND_DO(m, rustsecp256k1zkp_v0_10_1_fe_negate_unchecked(r, a, m)) -/** Like rustsecp256k1zkp_v0_10_0_fe_negate_unchecked but m is not checked to be an integer constant expression. 
+/** Like rustsecp256k1zkp_v0_10_1_fe_negate_unchecked but m is not checked to be an integer constant expression. * * Should not be called directly outside of tests. */ -static void rustsecp256k1zkp_v0_10_0_fe_negate_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int m); +static void rustsecp256k1zkp_v0_10_1_fe_negate_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int m); /** Add a small integer to a field element. * * Performs {r += a}. The magnitude of r increases by 1, and normalized is cleared. * a must be in range [0,0x7FFF]. */ -static void rustsecp256k1zkp_v0_10_0_fe_add_int(rustsecp256k1zkp_v0_10_0_fe *r, int a); +static void rustsecp256k1zkp_v0_10_1_fe_add_int(rustsecp256k1zkp_v0_10_1_fe *r, int a); /** Multiply a field element with a small integer. * @@ -235,13 +235,13 @@ static void rustsecp256k1zkp_v0_10_0_fe_add_int(rustsecp256k1zkp_v0_10_0_fe *r, * Performs {r *= a}. * On output, r's magnitude is multiplied by a, and r will not be normalized. */ -#define rustsecp256k1zkp_v0_10_0_fe_mul_int(r, a) ASSERT_INT_CONST_AND_DO(a, rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked(r, a)) +#define rustsecp256k1zkp_v0_10_1_fe_mul_int(r, a) ASSERT_INT_CONST_AND_DO(a, rustsecp256k1zkp_v0_10_1_fe_mul_int_unchecked(r, a)) -/** Like rustsecp256k1zkp_v0_10_0_fe_mul_int but a is not checked to be an integer constant expression. +/** Like rustsecp256k1zkp_v0_10_1_fe_mul_int but a is not checked to be an integer constant expression. * * Should not be called directly outside of tests. */ -static void rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, int a); +static void rustsecp256k1zkp_v0_10_1_fe_mul_int_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, int a); /** Increment a field element by another. * @@ -250,7 +250,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked(rustsecp256k1zkp_v0_10 * Performs {r += a}. 
* On output, r will not be normalized, and will have magnitude incremented by a's. */ -static void rustsecp256k1zkp_v0_10_0_fe_add(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_add(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); /** Multiply two field elements. * @@ -260,7 +260,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_add(rustsecp256k1zkp_v0_10_0_fe *r, cons * Performs {r = a * b} * On output, r will have magnitude 1, but won't be normalized. */ -static void rustsecp256k1zkp_v0_10_0_fe_mul(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT b); +static void rustsecp256k1zkp_v0_10_1_fe_mul(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT b); /** Square a field element. * @@ -269,7 +269,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_mul(rustsecp256k1zkp_v0_10_0_fe *r, cons * Performs {r = a**2} * On output, r will have magnitude 1, but won't be normalized. */ -static void rustsecp256k1zkp_v0_10_0_fe_sqr(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_sqr(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); /** Compute a square root of a field element. * @@ -280,7 +280,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_sqr(rustsecp256k1zkp_v0_10_0_fe *r, cons * Variables r and a must not point to the same object. * On output, r will have magnitude 1 but will not be normalized. */ -static int rustsecp256k1zkp_v0_10_0_fe_sqrt(rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT r, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT a); +static int rustsecp256k1zkp_v0_10_1_fe_sqrt(rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT r, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT a); /** Compute the modular inverse of a field element. 
* @@ -289,31 +289,31 @@ static int rustsecp256k1zkp_v0_10_0_fe_sqrt(rustsecp256k1zkp_v0_10_0_fe * SECP25 * inverse). * On output, r will have magnitude (a.magnitude != 0) and be normalized. */ -static void rustsecp256k1zkp_v0_10_0_fe_inv(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_inv(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); /** Compute the modular inverse of a field element, without constant-time guarantee. * - * Behaves identically to rustsecp256k1zkp_v0_10_0_fe_inv, but is not constant-time in a. + * Behaves identically to rustsecp256k1zkp_v0_10_1_fe_inv, but is not constant-time in a. */ -static void rustsecp256k1zkp_v0_10_0_fe_inv_var(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_inv_var(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); -/** Convert a field element to rustsecp256k1zkp_v0_10_0_fe_storage. +/** Convert a field element to rustsecp256k1zkp_v0_10_1_fe_storage. * * On input, a must be a valid normalized field element. * Performs {r = a}. */ -static void rustsecp256k1zkp_v0_10_0_fe_to_storage(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe *a); +static void rustsecp256k1zkp_v0_10_1_fe_to_storage(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe *a); -/** Convert a field element back from rustsecp256k1zkp_v0_10_0_fe_storage. +/** Convert a field element back from rustsecp256k1zkp_v0_10_1_fe_storage. * * On input, r need not be initialized. * Performs {r = a}. * On output, r will be normalized and will have magnitude 1. 
*/ -static void rustsecp256k1zkp_v0_10_0_fe_from_storage(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a); +static void rustsecp256k1zkp_v0_10_1_fe_from_storage(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a); /** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. Both *r and *a must be initialized.*/ -static void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a, int flag); +static void rustsecp256k1zkp_v0_10_1_fe_storage_cmov(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a, int flag); /** Conditionally move a field element in constant time. * @@ -323,7 +323,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp256k1zkp_v0_10_0_fe * On output, r's magnitude will be the maximum of both input magnitudes. * It will be normalized if and only if both inputs were normalized. */ -static void rustsecp256k1zkp_v0_10_0_fe_cmov(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int flag); +static void rustsecp256k1zkp_v0_10_1_fe_cmov(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int flag); /** Halve the value of a field element modulo the field prime in constant-time. * @@ -331,25 +331,25 @@ static void rustsecp256k1zkp_v0_10_0_fe_cmov(rustsecp256k1zkp_v0_10_0_fe *r, con * On output, r will be normalized and have magnitude floor(m/2) + 1 where m is * the magnitude of r on input. */ -static void rustsecp256k1zkp_v0_10_0_fe_half(rustsecp256k1zkp_v0_10_0_fe *r); +static void rustsecp256k1zkp_v0_10_1_fe_half(rustsecp256k1zkp_v0_10_1_fe *r); /** Sets r to a field element with magnitude m, normalized if (and only if) m==0. * The value is chosen so that it is likely to trigger edge cases related to * internal overflows. 
*/ -static void rustsecp256k1zkp_v0_10_0_fe_get_bounds(rustsecp256k1zkp_v0_10_0_fe *r, int m); +static void rustsecp256k1zkp_v0_10_1_fe_get_bounds(rustsecp256k1zkp_v0_10_1_fe *r, int m); /** Determine whether a is a square (modulo p). * * On input, a must be a valid field element. */ -static int rustsecp256k1zkp_v0_10_0_fe_is_square_var(const rustsecp256k1zkp_v0_10_0_fe *a); +static int rustsecp256k1zkp_v0_10_1_fe_is_square_var(const rustsecp256k1zkp_v0_10_1_fe *a); /** Check invariants on a field element (no-op unless VERIFY is enabled). */ -static void rustsecp256k1zkp_v0_10_0_fe_verify(const rustsecp256k1zkp_v0_10_0_fe *a); -#define SECP256K1_FE_VERIFY(a) rustsecp256k1zkp_v0_10_0_fe_verify(a) +static void rustsecp256k1zkp_v0_10_1_fe_verify(const rustsecp256k1zkp_v0_10_1_fe *a); +#define SECP256K1_FE_VERIFY(a) rustsecp256k1zkp_v0_10_1_fe_verify(a) /** Check that magnitude of a is at most m (no-op unless VERIFY is enabled). */ -static void rustsecp256k1zkp_v0_10_0_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_0_fe *a, int m); -#define SECP256K1_FE_VERIFY_MAGNITUDE(a, m) rustsecp256k1zkp_v0_10_0_fe_verify_magnitude(a, m) +static void rustsecp256k1zkp_v0_10_1_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_1_fe *a, int m); +#define SECP256K1_FE_VERIFY_MAGNITUDE(a, m) rustsecp256k1zkp_v0_10_1_fe_verify_magnitude(a, m) #endif /* SECP256K1_FIELD_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26.h index 31456d7b..947afe36 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26.h @@ -31,7 +31,7 @@ typedef struct { * (together these imply n[9] <= 2^22 - 1) */ SECP256K1_FE_VERIFY_FIELDS -} rustsecp256k1zkp_v0_10_0_fe; +} rustsecp256k1zkp_v0_10_1_fe; /* Unpacks a constant into a overlapping multi-limbed FE element. 
*/ #define SECP256K1_FE_CONST_INNER(d7, d6, d5, d4, d3, d2, d1, d0) { \ @@ -49,7 +49,7 @@ typedef struct { typedef struct { uint32_t n[8]; -} rustsecp256k1zkp_v0_10_0_fe_storage; +} rustsecp256k1zkp_v0_10_1_fe_storage; #define SECP256K1_FE_STORAGE_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{ (d0), (d1), (d2), (d3), (d4), (d5), (d6), (d7) }} #define SECP256K1_FE_STORAGE_CONST_GET(d) d.n[7], d.n[6], d.n[5], d.n[4],d.n[3], d.n[2], d.n[1], d.n[0] diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26_impl.h index eb56ed1f..42db68a9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_10x26_impl.h @@ -13,7 +13,7 @@ #include "modinv32_impl.h" #ifdef VERIFY -static void rustsecp256k1zkp_v0_10_0_fe_impl_verify(const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_verify(const rustsecp256k1zkp_v0_10_1_fe *a) { const uint32_t *d = a->n; int m = a->normalized ? 
1 : 2 * a->magnitude; VERIFY_CHECK(d[0] <= 0x3FFFFFFUL * m); @@ -37,7 +37,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_verify(const rustsecp256k1zkp_v0_10 } #endif -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_0_fe *r, int m) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_1_fe *r, int m) { r->n[0] = 0x3FFFFFFUL * 2 * m; r->n[1] = 0x3FFFFFFUL * 2 * m; r->n[2] = 0x3FFFFFFUL * 2 * m; @@ -50,7 +50,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_0 r->n[9] = 0x03FFFFFUL * 2 * m; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize(rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4], t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9]; @@ -99,7 +99,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize(rustsecp256k1zkp_v0_10_0_ r->n[5] = t5; r->n[6] = t6; r->n[7] = t7; r->n[8] = t8; r->n[9] = t9; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4], t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9]; @@ -125,7 +125,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(rustsecp256k1zkp_v0_ r->n[5] = t5; r->n[6] = t6; r->n[7] = t7; r->n[8] = t8; r->n[9] = t9; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4], t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9]; @@ -175,7 +175,7 @@ static void 
rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(rustsecp256k1zkp_v0_1 r->n[5] = t5; r->n[6] = t6; r->n[7] = t7; r->n[8] = t8; r->n[9] = t9; } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4], t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9]; @@ -204,7 +204,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(const rustsecp256 return (z0 == 0) | (z1 == 0x3FFFFFFUL); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0, t1, t2, t3, t4, t5, t6, t7, t8, t9; uint32_t z0, z1; uint32_t x; @@ -256,28 +256,28 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(const rustsec return (z0 == 0) | (z1 == 0x3FFFFFFUL); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_set_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_set_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] = a; r->n[1] = r->n[2] = r->n[3] = r->n[4] = r->n[5] = r->n[6] = r->n[7] = r->n[8] = r->n[9] = 0; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_1_fe *a) { const uint32_t *t = a->n; return (t[0] | t[1] | t[2] | t[3] | t[4] | t[5] | t[6] | t[7] | t[8] | t[9]) == 0; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_1_fe *a) { return a->n[0] & 1; } 
-SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_clear(rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_clear(rustsecp256k1zkp_v0_10_1_fe *a) { int i; for (i=0; i<10; i++) { a->n[i] = 0; } } -static int rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { int i; for (i = 9; i >= 0; i--) { if (a->n[i] > b->n[i]) { @@ -290,7 +290,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10 return 0; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { r->n[0] = (uint32_t)a[31] | ((uint32_t)a[30] << 8) | ((uint32_t)a[29] << 16) | ((uint32_t)(a[28] & 0x3) << 24); r->n[1] = (uint32_t)((a[28] >> 2) & 0x3f) | ((uint32_t)a[27] << 6) | ((uint32_t)a[26] << 14) | ((uint32_t)(a[25] & 0xf) << 22); r->n[2] = (uint32_t)((a[25] >> 4) & 0xf) | ((uint32_t)a[24] << 4) | ((uint32_t)a[23] << 12) | ((uint32_t)(a[22] & 0x3f) << 20); @@ -303,13 +303,13 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_ r->n[9] = (uint32_t)((a[2] >> 2) & 0x3f) | ((uint32_t)a[1] << 6) | ((uint32_t)a[0] << 14); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { - rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(r, a); +static int rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { + rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(r, a); return !((r->n[9] == 0x3FFFFFUL) & ((r->n[8] & r->n[7] & r->n[6] & r->n[5] & r->n[4] & r->n[3] & r->n[2]) == 0x3FFFFFFUL) & ((r->n[1] + 0x40UL + ((r->n[0] + 0x3D1UL) >> 26)) 
> 0x3FFFFFFUL)); } /** Convert a field element to a 32-byte big endian value. Requires the input to be normalized */ -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r[0] = (a->n[9] >> 14) & 0xff; r[1] = (a->n[9] >> 6) & 0xff; r[2] = ((a->n[9] & 0x3F) << 2) | ((a->n[8] >> 24) & 0x3); @@ -344,7 +344,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(unsigned char *r, const rus r[31] = a->n[0] & 0xff; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int m) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int m) { /* For all legal values of m (0..31), the following properties hold: */ VERIFY_CHECK(0x3FFFC2FUL * 2 * (m + 1) >= 0x3FFFFFFUL * 2 * m); VERIFY_CHECK(0x3FFFFBFUL * 2 * (m + 1) >= 0x3FFFFFFUL * 2 * m); @@ -365,7 +365,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(r r->n[9] = 0x03FFFFFUL * 2 * (m + 1) - a->n[9]; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] *= a; r->n[1] *= a; r->n[2] *= a; @@ -378,7 +378,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked( r->n[9] *= a; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_add(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r->n[0] += a->n[0]; r->n[1] += a->n[1]; r->n[2] += a->n[2]; @@ 
-391,21 +391,21 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add(rustsecp256k1z r->n[9] += a->n[9]; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_add_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] += a; } #if defined(USE_EXTERNAL_ASM) /* External assembler implementation */ -void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b); -void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint32_t *r, const uint32_t *a); +void rustsecp256k1zkp_v0_10_1_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b); +void rustsecp256k1zkp_v0_10_1_fe_sqr_inner(uint32_t *r, const uint32_t *a); #else #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0) -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_mul_inner(uint32_t *r, const uint32_t *a, const uint32_t * SECP256K1_RESTRICT b) { uint64_t c, d; uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8; uint32_t t9, t1, t0, t2, t3, t4, t5, t6, t7; @@ -735,7 +735,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint32_t *r, /* [r9 r8 r7 r6 r5 r4 r3 r2 r1 r0] = [p18 p17 p16 p15 p14 p13 p12 p11 p10 p9 p8 p7 p6 p5 p4 p3 p2 p1 p0] */ } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint32_t *r, const uint32_t *a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_sqr_inner(uint32_t *r, const uint32_t *a) { uint64_t c, d; uint64_t u0, u1, u2, u3, u4, u5, u6, u7, u8; uint32_t t9, t0, t1, t2, t3, t4, t5, t6, t7; @@ -1010,15 +1010,15 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint32_t *r, } #endif -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul(rustsecp256k1zkp_v0_10_0_fe *r, 
const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT b) { - rustsecp256k1zkp_v0_10_0_fe_mul_inner(r->n, a->n, b->n); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_mul(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT b) { + rustsecp256k1zkp_v0_10_1_fe_mul_inner(r->n, a->n, b->n); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_sqr(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { - rustsecp256k1zkp_v0_10_0_fe_sqr_inner(r->n, a->n); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_sqr(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { + rustsecp256k1zkp_v0_10_1_fe_sqr_inner(r->n, a->n); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_cmov(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int flag) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_cmov(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int flag) { uint32_t mask0, mask1; volatile int vflag = flag; SECP256K1_CHECKMEM_CHECK_VERIFY(r->n, sizeof(r->n)); @@ -1036,7 +1036,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_cmov(rustsecp256k1 r->n[9] = (r->n[9] & mask0) | (a->n[9] & mask1); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_half(rustsecp256k1zkp_v0_10_0_fe *r) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_impl_half(rustsecp256k1zkp_v0_10_1_fe *r) { uint32_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4], t5 = r->n[5], t6 = r->n[6], t7 = r->n[7], t8 = r->n[8], t9 = r->n[9]; uint32_t one = (uint32_t)1; @@ -1101,7 +1101,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_half(rustsecp256k1 */ } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe_storage 
*a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_storage_cmov(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a, int flag) { uint32_t mask0, mask1; volatile int vflag = flag; SECP256K1_CHECKMEM_CHECK_VERIFY(r->n, sizeof(r->n)); @@ -1117,7 +1117,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp25 r->n[7] = (r->n[7] & mask0) | (a->n[7] & mask1); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_to_storage(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_to_storage(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r->n[0] = a->n[0] | a->n[1] << 26; r->n[1] = a->n[1] >> 6 | a->n[2] << 20; r->n[2] = a->n[2] >> 12 | a->n[3] << 14; @@ -1128,7 +1128,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_to_storage(rustsecp256k1zkp_v0_10_0 r->n[7] = a->n[8] >> 16 | a->n[9] << 10; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_impl_from_storage(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a) { r->n[0] = a->n[0] & 0x3FFFFFFUL; r->n[1] = a->n[0] >> 26 | ((a->n[1] << 6) & 0x3FFFFFFUL); r->n[2] = a->n[1] >> 20 | ((a->n[2] << 12) & 0x3FFFFFFUL); @@ -1141,12 +1141,12 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(rusts r->n[9] = a->n[7] >> 10; } -static void rustsecp256k1zkp_v0_10_0_fe_from_signed30(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *a) { +static void rustsecp256k1zkp_v0_10_1_fe_from_signed30(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *a) { const uint32_t M26 = UINT32_MAX >> 6; const uint32_t a0 = a->v[0], a1 = a->v[1], a2 = a->v[2], a3 = a->v[3], a4 = a->v[4], a5 = a->v[5], a6 = 
a->v[6], a7 = a->v[7], a8 = a->v[8]; - /* The output from rustsecp256k1zkp_v0_10_0_modinv32{_var} should be normalized to range [0,modulus), and + /* The output from rustsecp256k1zkp_v0_10_1_modinv32{_var} should be normalized to range [0,modulus), and * have limbs in [0,2^30). The modulus is < 2^256, so the top limb must be below 2^(256-30*8). */ VERIFY_CHECK(a0 >> 30 == 0); @@ -1171,7 +1171,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_from_signed30(rustsecp256k1zkp_v0_10_0_f r->n[9] = (a7 >> 24 | a8 << 6); } -static void rustsecp256k1zkp_v0_10_0_fe_to_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_to_signed30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_1_fe *a) { const uint32_t M30 = UINT32_MAX >> 2; const uint64_t a0 = a->n[0], a1 = a->n[1], a2 = a->n[2], a3 = a->n[3], a4 = a->n[4], a5 = a->n[5], a6 = a->n[6], a7 = a->n[7], a8 = a->n[8], a9 = a->n[9]; @@ -1188,48 +1188,48 @@ static void rustsecp256k1zkp_v0_10_0_fe_to_signed30(rustsecp256k1zkp_v0_10_0_mod r->v[8] = a9 >> 6; } -static const rustsecp256k1zkp_v0_10_0_modinv32_modinfo rustsecp256k1zkp_v0_10_0_const_modinfo_fe = { +static const rustsecp256k1zkp_v0_10_1_modinv32_modinfo rustsecp256k1zkp_v0_10_1_const_modinfo_fe = { {{-0x3D1, -4, 0, 0, 0, 0, 0, 0, 65536}}, 0x2DDACACFL }; -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp = *x; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 s; +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp = *x; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 s; - rustsecp256k1zkp_v0_10_0_fe_normalize(&tmp); - rustsecp256k1zkp_v0_10_0_fe_to_signed30(&s, &tmp); - rustsecp256k1zkp_v0_10_0_modinv32(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); - 
rustsecp256k1zkp_v0_10_0_fe_from_signed30(r, &s); + rustsecp256k1zkp_v0_10_1_fe_normalize(&tmp); + rustsecp256k1zkp_v0_10_1_fe_to_signed30(&s, &tmp); + rustsecp256k1zkp_v0_10_1_modinv32(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_from_signed30(r, &s); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv_var(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp = *x; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 s; +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv_var(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp = *x; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 s; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&tmp); - rustsecp256k1zkp_v0_10_0_fe_to_signed30(&s, &tmp); - rustsecp256k1zkp_v0_10_0_modinv32_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); - rustsecp256k1zkp_v0_10_0_fe_from_signed30(r, &s); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&tmp); + rustsecp256k1zkp_v0_10_1_fe_to_signed30(&s, &tmp); + rustsecp256k1zkp_v0_10_1_modinv32_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_from_signed30(r, &s); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 s; +static int rustsecp256k1zkp_v0_10_1_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 s; int jac, ret; tmp = *x; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&tmp); - /* rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var cannot deal with input 0. 
*/ - if (rustsecp256k1zkp_v0_10_0_fe_is_zero(&tmp)) return 1; - rustsecp256k1zkp_v0_10_0_fe_to_signed30(&s, &tmp); - jac = rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&tmp); + /* rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var cannot deal with input 0. */ + if (rustsecp256k1zkp_v0_10_1_fe_is_zero(&tmp)) return 1; + rustsecp256k1zkp_v0_10_1_fe_to_signed30(&s, &tmp); + jac = rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); if (jac == 0) { - /* rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var failed to compute the Jacobi symbol. Fall back + /* rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var failed to compute the Jacobi symbol. Fall back * to computing a square root. This should be extremely rare with random * input (except in VERIFY mode, where a lower iteration count is used). */ - rustsecp256k1zkp_v0_10_0_fe dummy; - ret = rustsecp256k1zkp_v0_10_0_fe_sqrt(&dummy, &tmp); + rustsecp256k1zkp_v0_10_1_fe dummy; + ret = rustsecp256k1zkp_v0_10_1_fe_sqrt(&dummy, &tmp); } else { ret = jac >= 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52.h index 12ba05e5..f7d1eef6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52.h @@ -31,7 +31,7 @@ typedef struct { * (together these imply n[4] <= 2^48 - 1) */ SECP256K1_FE_VERIFY_FIELDS -} rustsecp256k1zkp_v0_10_0_fe; +} rustsecp256k1zkp_v0_10_1_fe; /* Unpacks a constant into a overlapping multi-limbed FE element. 
*/ #define SECP256K1_FE_CONST_INNER(d7, d6, d5, d4, d3, d2, d1, d0) { \ @@ -44,7 +44,7 @@ typedef struct { typedef struct { uint64_t n[4]; -} rustsecp256k1zkp_v0_10_0_fe_storage; +} rustsecp256k1zkp_v0_10_1_fe_storage; #define SECP256K1_FE_STORAGE_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{ \ (d0) | (((uint64_t)(d1)) << 32), \ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_impl.h index 6fca4b7d..f25e2d12 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_impl.h @@ -15,7 +15,7 @@ #include "field_5x52_int128_impl.h" #ifdef VERIFY -static void rustsecp256k1zkp_v0_10_0_fe_impl_verify(const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_verify(const rustsecp256k1zkp_v0_10_1_fe *a) { const uint64_t *d = a->n; int m = a->normalized ? 1 : 2 * a->magnitude; /* secp256k1 'p' value defined in "Standards for Efficient Cryptography" (SEC2) 2.7.1. 
*/ @@ -32,7 +32,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_verify(const rustsecp256k1zkp_v0_10 } #endif -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_0_fe *r, int m) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_1_fe *r, int m) { r->n[0] = 0xFFFFFFFFFFFFFULL * 2 * m; r->n[1] = 0xFFFFFFFFFFFFFULL * 2 * m; r->n[2] = 0xFFFFFFFFFFFFFULL * 2 * m; @@ -40,7 +40,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_0 r->n[4] = 0x0FFFFFFFFFFFFULL * 2 * m; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize(rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4]; /* Reduce t4 at the start so there will be at most a single carry from the first pass */ @@ -77,7 +77,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize(rustsecp256k1zkp_v0_10_0_ r->n[0] = t0; r->n[1] = t1; r->n[2] = t2; r->n[3] = t3; r->n[4] = t4; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4]; /* Reduce t4 at the start so there will be at most a single carry from the first pass */ @@ -96,7 +96,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(rustsecp256k1zkp_v0_ r->n[0] = t0; r->n[1] = t1; r->n[2] = t2; r->n[3] = t3; r->n[4] = t4; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4]; /* Reduce t4 at the start so there will be at most a single carry from the first pass */ @@ -134,7 +134,7 @@ static void 
rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(rustsecp256k1zkp_v0_1 r->n[0] = t0; r->n[1] = t1; r->n[2] = t2; r->n[3] = t3; r->n[4] = t4; } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0 = r->n[0], t1 = r->n[1], t2 = r->n[2], t3 = r->n[3], t4 = r->n[4]; /* z0 tracks a possible raw value of 0, z1 tracks a possible raw value of P */ @@ -157,7 +157,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(const rustsecp256 return (z0 == 0) | (z1 == 0xFFFFFFFFFFFFFULL); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0, t1, t2, t3, t4; uint64_t z0, z1; uint64_t x; @@ -198,28 +198,28 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(const rustsec return (z0 == 0) | (z1 == 0xFFFFFFFFFFFFFULL); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_set_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_set_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] = a; r->n[1] = r->n[2] = r->n[3] = r->n[4] = 0; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_1_fe *a) { const uint64_t *t = a->n; return (t[0] | t[1] | t[2] | t[3] | t[4]) == 0; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_1_fe *a) { return a->n[0] & 1; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_clear(rustsecp256k1zkp_v0_10_0_fe 
*a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_clear(rustsecp256k1zkp_v0_10_1_fe *a) { int i; for (i=0; i<5; i++) { a->n[i] = 0; } } -static int rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { int i; for (i = 4; i >= 0; i--) { if (a->n[i] > b->n[i]) { @@ -232,7 +232,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10 return 0; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { r->n[0] = (uint64_t)a[31] | ((uint64_t)a[30] << 8) | ((uint64_t)a[29] << 16) @@ -269,13 +269,13 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_ | ((uint64_t)a[0] << 40); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { - rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(r, a); +static int rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { + rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(r, a); return !((r->n[4] == 0x0FFFFFFFFFFFFULL) & ((r->n[3] & r->n[2] & r->n[1]) == 0xFFFFFFFFFFFFFULL) & (r->n[0] >= 0xFFFFEFFFFFC2FULL)); } /** Convert a field element to a 32-byte big endian value. 
Requires the input to be normalized */ -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r[0] = (a->n[4] >> 40) & 0xFF; r[1] = (a->n[4] >> 32) & 0xFF; r[2] = (a->n[4] >> 24) & 0xFF; @@ -310,7 +310,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(unsigned char *r, const rus r[31] = a->n[0] & 0xFF; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int m) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int m) { /* For all legal values of m (0..31), the following properties hold: */ VERIFY_CHECK(0xFFFFEFFFFFC2FULL * 2 * (m + 1) >= 0xFFFFFFFFFFFFFULL * 2 * m); VERIFY_CHECK(0xFFFFFFFFFFFFFULL * 2 * (m + 1) >= 0xFFFFFFFFFFFFFULL * 2 * m); @@ -325,7 +325,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(r r->n[4] = 0x0FFFFFFFFFFFFULL * 2 * (m + 1) - a->n[4]; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] *= a; r->n[1] *= a; r->n[2] *= a; @@ -333,11 +333,11 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked( r->n[4] *= a; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_add_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { r->n[0] += a; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +SECP256K1_INLINE static void 
rustsecp256k1zkp_v0_10_1_fe_impl_add(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r->n[0] += a->n[0]; r->n[1] += a->n[1]; r->n[2] += a->n[2]; @@ -345,15 +345,15 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_add(rustsecp256k1z r->n[4] += a->n[4]; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_mul(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT b) { - rustsecp256k1zkp_v0_10_0_fe_mul_inner(r->n, a->n, b->n); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_mul(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT b) { + rustsecp256k1zkp_v0_10_1_fe_mul_inner(r->n, a->n, b->n); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_sqr(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { - rustsecp256k1zkp_v0_10_0_fe_sqr_inner(r->n, a->n); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_sqr(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { + rustsecp256k1zkp_v0_10_1_fe_sqr_inner(r->n, a->n); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_cmov(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int flag) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_impl_cmov(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int flag) { uint64_t mask0, mask1; volatile int vflag = flag; SECP256K1_CHECKMEM_CHECK_VERIFY(r->n, sizeof(r->n)); @@ -366,7 +366,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_impl_cmov(rustsecp256k1 r->n[4] = (r->n[4] & mask0) | (a->n[4] & mask1); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_half(rustsecp256k1zkp_v0_10_0_fe *r) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_impl_half(rustsecp256k1zkp_v0_10_1_fe *r) { uint64_t t0 = r->n[0], t1 = r->n[1], t2 = 
r->n[2], t3 = r->n[3], t4 = r->n[4]; uint64_t one = (uint64_t)1; uint64_t mask = -(t0 & one) >> 12; @@ -420,7 +420,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_half(rustsecp256k1 */ } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_storage_cmov(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a, int flag) { uint64_t mask0, mask1; volatile int vflag = flag; SECP256K1_CHECKMEM_CHECK_VERIFY(r->n, sizeof(r->n)); @@ -432,14 +432,14 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_storage_cmov(rustsecp25 r->n[3] = (r->n[3] & mask0) | (a->n[3] & mask1); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_to_storage(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_to_storage(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe *a) { r->n[0] = a->n[0] | a->n[1] << 52; r->n[1] = a->n[1] >> 12 | a->n[2] << 40; r->n[2] = a->n[2] >> 24 | a->n[3] << 28; r->n[3] = a->n[3] >> 36 | a->n[4] << 16; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_fe_impl_from_storage(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a) { r->n[0] = a->n[0] & 0xFFFFFFFFFFFFFULL; r->n[1] = a->n[0] >> 52 | ((a->n[1] << 12) & 0xFFFFFFFFFFFFFULL); r->n[2] = a->n[1] >> 40 | ((a->n[2] << 24) & 0xFFFFFFFFFFFFFULL); @@ -447,11 +447,11 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(rusts r->n[4] = a->n[3] >> 16; } -static void rustsecp256k1zkp_v0_10_0_fe_from_signed62(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *a) { +static 
void rustsecp256k1zkp_v0_10_1_fe_from_signed62(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *a) { const uint64_t M52 = UINT64_MAX >> 12; const uint64_t a0 = a->v[0], a1 = a->v[1], a2 = a->v[2], a3 = a->v[3], a4 = a->v[4]; - /* The output from rustsecp256k1zkp_v0_10_0_modinv64{_var} should be normalized to range [0,modulus), and + /* The output from rustsecp256k1zkp_v0_10_1_modinv64{_var} should be normalized to range [0,modulus), and * have limbs in [0,2^62). The modulus is < 2^256, so the top limb must be below 2^(256-62*4). */ VERIFY_CHECK(a0 >> 62 == 0); @@ -467,7 +467,7 @@ static void rustsecp256k1zkp_v0_10_0_fe_from_signed62(rustsecp256k1zkp_v0_10_0_f r->n[4] = (a3 >> 22 | a4 << 40); } -static void rustsecp256k1zkp_v0_10_0_fe_to_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_to_signed62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_1_fe *a) { const uint64_t M62 = UINT64_MAX >> 2; const uint64_t a0 = a->n[0], a1 = a->n[1], a2 = a->n[2], a3 = a->n[3], a4 = a->n[4]; @@ -478,48 +478,48 @@ static void rustsecp256k1zkp_v0_10_0_fe_to_signed62(rustsecp256k1zkp_v0_10_0_mod r->v[4] = a4 >> 40; } -static const rustsecp256k1zkp_v0_10_0_modinv64_modinfo rustsecp256k1zkp_v0_10_0_const_modinfo_fe = { +static const rustsecp256k1zkp_v0_10_1_modinv64_modinfo rustsecp256k1zkp_v0_10_1_const_modinfo_fe = { {{-0x1000003D1LL, 0, 0, 0, 256}}, 0x27C7F6E22DDACACFLL }; -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp = *x; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 s; +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp = *x; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 s; - rustsecp256k1zkp_v0_10_0_fe_normalize(&tmp); - 
rustsecp256k1zkp_v0_10_0_fe_to_signed62(&s, &tmp); - rustsecp256k1zkp_v0_10_0_modinv64(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); - rustsecp256k1zkp_v0_10_0_fe_from_signed62(r, &s); + rustsecp256k1zkp_v0_10_1_fe_normalize(&tmp); + rustsecp256k1zkp_v0_10_1_fe_to_signed62(&s, &tmp); + rustsecp256k1zkp_v0_10_1_modinv64(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_from_signed62(r, &s); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv_var(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp = *x; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 s; +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv_var(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp = *x; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 s; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&tmp); - rustsecp256k1zkp_v0_10_0_fe_to_signed62(&s, &tmp); - rustsecp256k1zkp_v0_10_0_modinv64_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); - rustsecp256k1zkp_v0_10_0_fe_from_signed62(r, &s); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&tmp); + rustsecp256k1zkp_v0_10_1_fe_to_signed62(&s, &tmp); + rustsecp256k1zkp_v0_10_1_modinv64_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_from_signed62(r, &s); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe tmp; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 s; +static int rustsecp256k1zkp_v0_10_1_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe tmp; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 s; int jac, ret; tmp = *x; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&tmp); - /* rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var cannot deal with input 0. 
*/ - if (rustsecp256k1zkp_v0_10_0_fe_is_zero(&tmp)) return 1; - rustsecp256k1zkp_v0_10_0_fe_to_signed62(&s, &tmp); - jac = rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_fe); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&tmp); + /* rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var cannot deal with input 0. */ + if (rustsecp256k1zkp_v0_10_1_fe_is_zero(&tmp)) return 1; + rustsecp256k1zkp_v0_10_1_fe_to_signed62(&s, &tmp); + jac = rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_fe); if (jac == 0) { - /* rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var failed to compute the Jacobi symbol. Fall back + /* rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var failed to compute the Jacobi symbol. Fall back * to computing a square root. This should be extremely rare with random * input (except in VERIFY mode, where a lower iteration count is used). */ - rustsecp256k1zkp_v0_10_0_fe dummy; - ret = rustsecp256k1zkp_v0_10_0_fe_sqrt(&dummy, &tmp); + rustsecp256k1zkp_v0_10_1_fe dummy; + ret = rustsecp256k1zkp_v0_10_1_fe_sqrt(&dummy, &tmp); } else { ret = jac >= 0; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_int128_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_int128_impl.h index eb600862..0c47128d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_int128_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_5x52_int128_impl.h @@ -13,10 +13,10 @@ #include "util.h" #define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0) -#define VERIFY_BITS_128(x, n) VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_u128_check_bits((x), (n))) +#define VERIFY_BITS_128(x, n) VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_u128_check_bits((x), (n))) -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) { - rustsecp256k1zkp_v0_10_0_uint128 c, d; +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_mul_inner(uint64_t *r, const 
uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) { + rustsecp256k1zkp_v0_10_1_uint128 c, d; uint64_t t3, t4, tx, u0; uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4]; const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL; @@ -40,35 +40,35 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint64_t *r, * Note that [x 0 0 0 0 0] = [x*R]. */ - rustsecp256k1zkp_v0_10_0_u128_mul(&d, a0, b[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1, b[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, b[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, b[0]); + rustsecp256k1zkp_v0_10_1_u128_mul(&d, a0, b[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1, b[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, b[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, b[0]); VERIFY_BITS_128(&d, 114); /* [d 0 0 0] = [p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_mul(&c, a4, b[4]); + rustsecp256k1zkp_v0_10_1_u128_mul(&c, a4, b[4]); VERIFY_BITS_128(&c, 112); /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, R, rustsecp256k1zkp_v0_10_0_u128_to_u64(&c)); rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, R, rustsecp256k1zkp_v0_10_1_u128_to_u64(&c)); rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 64); VERIFY_BITS_128(&d, 115); VERIFY_BITS_128(&c, 48); /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ - t3 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + t3 = rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(t3, 52); VERIFY_BITS_128(&d, 63); /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a0, b[4]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1, b[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, b[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, b[1]); - 
rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a4, b[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a0, b[4]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1, b[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, b[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, b[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a4, b[0]); VERIFY_BITS_128(&d, 115); /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, R << 12, rustsecp256k1zkp_v0_10_0_u128_to_u64(&c)); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, R << 12, rustsecp256k1zkp_v0_10_1_u128_to_u64(&c)); VERIFY_BITS_128(&d, 116); /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - t4 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + t4 = rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(t4, 52); VERIFY_BITS_128(&d, 64); /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ @@ -77,16 +77,16 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint64_t *r, VERIFY_BITS(t4, 48); /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_mul(&c, a0, b[0]); + rustsecp256k1zkp_v0_10_1_u128_mul(&c, a0, b[0]); VERIFY_BITS_128(&c, 112); /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1, b[4]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, b[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, b[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a4, b[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1, b[4]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, b[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, b[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a4, b[1]); VERIFY_BITS_128(&d, 114); /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - u0 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + u0 = 
rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(u0, 52); VERIFY_BITS_128(&d, 62); /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ @@ -94,65 +94,65 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_inner(uint64_t *r, u0 = (u0 << 4) | tx; VERIFY_BITS(u0, 56); /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, u0, R >> 4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, u0, R >> 4); VERIFY_BITS_128(&c, 113); /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - r[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[0], 52); VERIFY_BITS_128(&c, 61); /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a0, b[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a1, b[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a0, b[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a1, b[0]); VERIFY_BITS_128(&c, 114); /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, b[4]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, b[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a4, b[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, b[4]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, b[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a4, b[2]); VERIFY_BITS_128(&d, 114); /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M, R); rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M, R); rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS_128(&c, 115); VERIFY_BITS_128(&d, 62); /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 
p4 p3 0 p1 p0] */ - r[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[1], 52); VERIFY_BITS_128(&c, 63); /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a0, b[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a1, b[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a2, b[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a0, b[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a1, b[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a2, b[0]); VERIFY_BITS_128(&c, 114); /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, b[4]); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a4, b[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, b[4]); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a4, b[3]); VERIFY_BITS_128(&d, 114); /* [d 0 0 t4 t3 c t1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, R, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d)); rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, R, rustsecp256k1zkp_v0_10_1_u128_to_u64(&d)); rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 64); VERIFY_BITS_128(&c, 115); VERIFY_BITS_128(&d, 50); /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[2], 52); VERIFY_BITS_128(&c, 63); /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, R << 12, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d)); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c, t3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, R << 12, 
rustsecp256k1zkp_v0_10_1_u128_to_u64(&d)); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c, t3); VERIFY_BITS_128(&c, 100); /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[3], 52); VERIFY_BITS_128(&c, 48); /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[4] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) + t4; + r[4] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) + t4; VERIFY_BITS(r[4], 49); /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint64_t *r, const uint64_t *a) { - rustsecp256k1zkp_v0_10_0_uint128 c, d; +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_sqr_inner(uint64_t *r, const uint64_t *a) { + rustsecp256k1zkp_v0_10_1_uint128 c, d; uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4]; uint64_t t3, t4, tx, u0; const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL; @@ -168,32 +168,32 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint64_t *r, * Note that [x 0 0 0 0 0] = [x*R]. 
*/ - rustsecp256k1zkp_v0_10_0_u128_mul(&d, a0*2, a3); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1*2, a2); + rustsecp256k1zkp_v0_10_1_u128_mul(&d, a0*2, a3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1*2, a2); VERIFY_BITS_128(&d, 114); /* [d 0 0 0] = [p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_mul(&c, a4, a4); + rustsecp256k1zkp_v0_10_1_u128_mul(&c, a4, a4); VERIFY_BITS_128(&c, 112); /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, R, rustsecp256k1zkp_v0_10_0_u128_to_u64(&c)); rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, R, rustsecp256k1zkp_v0_10_1_u128_to_u64(&c)); rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 64); VERIFY_BITS_128(&d, 115); VERIFY_BITS_128(&c, 48); /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ - t3 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + t3 = rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(t3, 52); VERIFY_BITS_128(&d, 63); /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */ a4 *= 2; - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a0, a4); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1*2, a3); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, a2); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a0, a4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1*2, a3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, a2); VERIFY_BITS_128(&d, 115); /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, R << 12, rustsecp256k1zkp_v0_10_0_u128_to_u64(&c)); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, R << 12, rustsecp256k1zkp_v0_10_1_u128_to_u64(&c)); VERIFY_BITS_128(&d, 116); /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - t4 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + t4 = rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; 
rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(t4, 52); VERIFY_BITS_128(&d, 64); /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ @@ -202,14 +202,14 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint64_t *r, VERIFY_BITS(t4, 48); /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */ - rustsecp256k1zkp_v0_10_0_u128_mul(&c, a0, a0); + rustsecp256k1zkp_v0_10_1_u128_mul(&c, a0, a0); VERIFY_BITS_128(&c, 112); /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a1, a4); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2*2, a3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a1, a4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2*2, a3); VERIFY_BITS_128(&d, 114); /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - u0 = rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + u0 = rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS(u0, 52); VERIFY_BITS_128(&d, 62); /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ @@ -217,56 +217,56 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr_inner(uint64_t *r, u0 = (u0 << 4) | tx; VERIFY_BITS(u0, 56); /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, u0, R >> 4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, u0, R >> 4); VERIFY_BITS_128(&c, 113); /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */ - r[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[0], 52); VERIFY_BITS_128(&c, 61); /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */ a0 *= 2; - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a0, a1); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a0, a1); VERIFY_BITS_128(&c, 114); /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 
0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a2, a4); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, a3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a2, a4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, a3); VERIFY_BITS_128(&d, 114); /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d) & M, R); rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 52); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, rustsecp256k1zkp_v0_10_1_u128_to_u64(&d) & M, R); rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 52); VERIFY_BITS_128(&c, 115); VERIFY_BITS_128(&d, 62); /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */ - r[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[1], 52); VERIFY_BITS_128(&c, 63); /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a0, a2); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, a1, a1); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a0, a2); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, a1, a1); VERIFY_BITS_128(&c, 114); /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&d, a3, a4); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&d, a3, a4); VERIFY_BITS_128(&d, 114); /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, R, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d)); rustsecp256k1zkp_v0_10_0_u128_rshift(&d, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, R, rustsecp256k1zkp_v0_10_1_u128_to_u64(&d)); rustsecp256k1zkp_v0_10_1_u128_rshift(&d, 64); VERIFY_BITS_128(&c, 115); VERIFY_BITS_128(&d, 50); /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 
52); + r[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[2], 52); VERIFY_BITS_128(&c, 63); /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c, R << 12, rustsecp256k1zkp_v0_10_0_u128_to_u64(&d)); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c, t3); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c, R << 12, rustsecp256k1zkp_v0_10_1_u128_to_u64(&d)); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c, t3); VERIFY_BITS_128(&c, 100); /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_0_u128_rshift(&c, 52); + r[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) & M; rustsecp256k1zkp_v0_10_1_u128_rshift(&c, 52); VERIFY_BITS(r[3], 52); VERIFY_BITS_128(&c, 48); /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ - r[4] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c) + t4; + r[4] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c) + t4; VERIFY_BITS(r[4], 49); /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */ } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/field_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/field_impl.h index d621eeb0..c0e25b4b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/field_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/field_impl.h @@ -18,19 +18,19 @@ #error "Please select wide multiplication implementation" #endif -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_equal(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { - rustsecp256k1zkp_v0_10_0_fe na; +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_equal(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { + rustsecp256k1zkp_v0_10_1_fe na; SECP256K1_FE_VERIFY(a); SECP256K1_FE_VERIFY(b); SECP256K1_FE_VERIFY_MAGNITUDE(a, 1); SECP256K1_FE_VERIFY_MAGNITUDE(b, 31); - rustsecp256k1zkp_v0_10_0_fe_negate(&na, a, 1); - 
rustsecp256k1zkp_v0_10_0_fe_add(&na, b); - return rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&na); + rustsecp256k1zkp_v0_10_1_fe_negate(&na, a, 1); + rustsecp256k1zkp_v0_10_1_fe_add(&na, b); + return rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&na); } -static int rustsecp256k1zkp_v0_10_0_fe_sqrt(rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT r, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT a) { +static int rustsecp256k1zkp_v0_10_1_fe_sqrt(rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT r, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT a) { /** Given that p is congruent to 3 mod 4, we can compute the square root of * a mod p as the (p+1)/4'th power of a. * @@ -40,7 +40,7 @@ static int rustsecp256k1zkp_v0_10_0_fe_sqrt(rustsecp256k1zkp_v0_10_0_fe * SECP25 * Also because (p+1)/4 is an even number, the computed square root is * itself always a square (a ** ((p+1)/4) is the square of a ** ((p+1)/8)). */ - rustsecp256k1zkp_v0_10_0_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1; + rustsecp256k1zkp_v0_10_1_fe x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223, t1; int j, ret; VERIFY_CHECK(r != a); @@ -52,101 +52,101 @@ static int rustsecp256k1zkp_v0_10_0_fe_sqrt(rustsecp256k1zkp_v0_10_0_fe * SECP25 * 1, [2], 3, 6, 9, 11, [22], 44, 88, 176, 220, [223] */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&x2, a); - rustsecp256k1zkp_v0_10_0_fe_mul(&x2, &x2, a); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x2, a); + rustsecp256k1zkp_v0_10_1_fe_mul(&x2, &x2, a); - rustsecp256k1zkp_v0_10_0_fe_sqr(&x3, &x2); - rustsecp256k1zkp_v0_10_0_fe_mul(&x3, &x3, a); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x3, &x2); + rustsecp256k1zkp_v0_10_1_fe_mul(&x3, &x3, a); x6 = x3; for (j=0; j<3; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x6, &x6); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x6, &x6); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x6, &x6, &x3); + rustsecp256k1zkp_v0_10_1_fe_mul(&x6, &x6, &x3); x9 = x6; for (j=0; j<3; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x9, &x9); + 
rustsecp256k1zkp_v0_10_1_fe_sqr(&x9, &x9); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x9, &x9, &x3); + rustsecp256k1zkp_v0_10_1_fe_mul(&x9, &x9, &x3); x11 = x9; for (j=0; j<2; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x11, &x11); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x11, &x11); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x11, &x11, &x2); + rustsecp256k1zkp_v0_10_1_fe_mul(&x11, &x11, &x2); x22 = x11; for (j=0; j<11; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x22, &x22); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x22, &x22); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x22, &x22, &x11); + rustsecp256k1zkp_v0_10_1_fe_mul(&x22, &x22, &x11); x44 = x22; for (j=0; j<22; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x44, &x44); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x44, &x44); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x44, &x44, &x22); + rustsecp256k1zkp_v0_10_1_fe_mul(&x44, &x44, &x22); x88 = x44; for (j=0; j<44; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x88, &x88); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x88, &x88); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x88, &x88, &x44); + rustsecp256k1zkp_v0_10_1_fe_mul(&x88, &x88, &x44); x176 = x88; for (j=0; j<88; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x176, &x176); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x176, &x176); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x176, &x176, &x88); + rustsecp256k1zkp_v0_10_1_fe_mul(&x176, &x176, &x88); x220 = x176; for (j=0; j<44; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x220, &x220); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x220, &x220); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x220, &x220, &x44); + rustsecp256k1zkp_v0_10_1_fe_mul(&x220, &x220, &x44); x223 = x220; for (j=0; j<3; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&x223, &x223); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x223, &x223); } - rustsecp256k1zkp_v0_10_0_fe_mul(&x223, &x223, &x3); + rustsecp256k1zkp_v0_10_1_fe_mul(&x223, &x223, &x3); /* The final result is then assembled using a sliding window over the blocks. 
*/ t1 = x223; for (j=0; j<23; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&t1, &t1); + rustsecp256k1zkp_v0_10_1_fe_sqr(&t1, &t1); } - rustsecp256k1zkp_v0_10_0_fe_mul(&t1, &t1, &x22); + rustsecp256k1zkp_v0_10_1_fe_mul(&t1, &t1, &x22); for (j=0; j<6; j++) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&t1, &t1); + rustsecp256k1zkp_v0_10_1_fe_sqr(&t1, &t1); } - rustsecp256k1zkp_v0_10_0_fe_mul(&t1, &t1, &x2); - rustsecp256k1zkp_v0_10_0_fe_sqr(&t1, &t1); - rustsecp256k1zkp_v0_10_0_fe_sqr(r, &t1); + rustsecp256k1zkp_v0_10_1_fe_mul(&t1, &t1, &x2); + rustsecp256k1zkp_v0_10_1_fe_sqr(&t1, &t1); + rustsecp256k1zkp_v0_10_1_fe_sqr(r, &t1); /* Check that a square root was actually calculated */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&t1, r); - ret = rustsecp256k1zkp_v0_10_0_fe_equal(&t1, a); + rustsecp256k1zkp_v0_10_1_fe_sqr(&t1, r); + ret = rustsecp256k1zkp_v0_10_1_fe_equal(&t1, a); #ifdef VERIFY if (!ret) { - rustsecp256k1zkp_v0_10_0_fe_negate(&t1, &t1, 1); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&t1); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&t1, a)); + rustsecp256k1zkp_v0_10_1_fe_negate(&t1, &t1, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&t1); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&t1, a)); } #endif return ret; } #ifndef VERIFY -static void rustsecp256k1zkp_v0_10_0_fe_verify(const rustsecp256k1zkp_v0_10_0_fe *a) { (void)a; } -static void rustsecp256k1zkp_v0_10_0_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_0_fe *a, int m) { (void)a; (void)m; } +static void rustsecp256k1zkp_v0_10_1_fe_verify(const rustsecp256k1zkp_v0_10_1_fe *a) { (void)a; } +static void rustsecp256k1zkp_v0_10_1_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_1_fe *a, int m) { (void)a; (void)m; } #else -static void rustsecp256k1zkp_v0_10_0_fe_impl_verify(const rustsecp256k1zkp_v0_10_0_fe *a); -static void rustsecp256k1zkp_v0_10_0_fe_verify(const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_verify(const rustsecp256k1zkp_v0_10_1_fe *a); +static void 
rustsecp256k1zkp_v0_10_1_fe_verify(const rustsecp256k1zkp_v0_10_1_fe *a) { /* Magnitude between 0 and 32. */ SECP256K1_FE_VERIFY_MAGNITUDE(a, 32); /* Normalized is 0 or 1. */ @@ -154,131 +154,131 @@ static void rustsecp256k1zkp_v0_10_0_fe_verify(const rustsecp256k1zkp_v0_10_0_fe /* If normalized, magnitude must be 0 or 1. */ if (a->normalized) SECP256K1_FE_VERIFY_MAGNITUDE(a, 1); /* Invoke implementation-specific checks. */ - rustsecp256k1zkp_v0_10_0_fe_impl_verify(a); + rustsecp256k1zkp_v0_10_1_fe_impl_verify(a); } -static void rustsecp256k1zkp_v0_10_0_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_0_fe *a, int m) { +static void rustsecp256k1zkp_v0_10_1_fe_verify_magnitude(const rustsecp256k1zkp_v0_10_1_fe *a, int m) { VERIFY_CHECK(m >= 0); VERIFY_CHECK(m <= 32); VERIFY_CHECK(a->magnitude <= m); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize(rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_normalize(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize(rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_normalize(rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); - rustsecp256k1zkp_v0_10_0_fe_impl_normalize(r); + rustsecp256k1zkp_v0_10_1_fe_impl_normalize(r); r->magnitude = 1; r->normalized = 1; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_normalize_weak(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_weak(rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_normalize_weak(rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); - rustsecp256k1zkp_v0_10_0_fe_impl_normalize_weak(r); + rustsecp256k1zkp_v0_10_1_fe_impl_normalize_weak(r); r->magnitude = 1; SECP256K1_FE_VERIFY(r); } -static void 
rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_normalize_var(rustsecp256k1zkp_v0_10_0_fe *r) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_normalize_var(rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_normalize_var(rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); - rustsecp256k1zkp_v0_10_0_fe_impl_normalize_var(r); + rustsecp256k1zkp_v0_10_1_fe_impl_normalize_var(r); r->magnitude = 1; r->normalized = 1; SECP256K1_FE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero(const rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(const rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); - return rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero(r); + return rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero(r); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_0_fe *r) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(const rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); - return rustsecp256k1zkp_v0_10_0_fe_impl_normalizes_to_zero_var(r); + return rustsecp256k1zkp_v0_10_1_fe_impl_normalizes_to_zero_var(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_set_int(rustsecp256k1zkp_v0_10_0_fe *r, int a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_set_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +static 
void rustsecp256k1zkp_v0_10_1_fe_impl_set_int(rustsecp256k1zkp_v0_10_1_fe *r, int a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_set_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { VERIFY_CHECK(0 <= a && a <= 0x7FFF); - rustsecp256k1zkp_v0_10_0_fe_impl_set_int(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_set_int(r, a); r->magnitude = (a != 0); r->normalized = 1; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_add_int(rustsecp256k1zkp_v0_10_0_fe *r, int a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_add_int(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_add_int(rustsecp256k1zkp_v0_10_1_fe *r, int a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_add_int(rustsecp256k1zkp_v0_10_1_fe *r, int a) { VERIFY_CHECK(0 <= a && a <= 0x7FFF); SECP256K1_FE_VERIFY(r); - rustsecp256k1zkp_v0_10_0_fe_impl_add_int(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_add_int(r, a); r->magnitude += 1; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_clear(rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_clear(rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_clear(rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_clear(rustsecp256k1zkp_v0_10_1_fe *a) { a->magnitude = 0; a->normalized = 1; - rustsecp256k1zkp_v0_10_0_fe_impl_clear(a); + rustsecp256k1zkp_v0_10_1_fe_impl_clear(a); SECP256K1_FE_VERIFY(a); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_is_zero(const rustsecp256k1zkp_v0_10_0_fe *a) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_is_zero(const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_is_zero(const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(a); VERIFY_CHECK(a->normalized); - 
return rustsecp256k1zkp_v0_10_0_fe_impl_is_zero(a); + return rustsecp256k1zkp_v0_10_1_fe_impl_is_zero(a); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_is_odd(const rustsecp256k1zkp_v0_10_0_fe *a) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_is_odd(const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_is_odd(const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(a); VERIFY_CHECK(a->normalized); - return rustsecp256k1zkp_v0_10_0_fe_impl_is_odd(a); + return rustsecp256k1zkp_v0_10_1_fe_impl_is_odd(a); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_cmp_var(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_cmp_var(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_cmp_var(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { SECP256K1_FE_VERIFY(a); SECP256K1_FE_VERIFY(b); VERIFY_CHECK(a->normalized); VERIFY_CHECK(b->normalized); - return rustsecp256k1zkp_v0_10_0_fe_impl_cmp_var(a, b); + return rustsecp256k1zkp_v0_10_1_fe_impl_cmp_var(a, b); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { - rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_mod(r, a); +static void rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { + 
rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_mod(r, a); r->magnitude = 1; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(rustsecp256k1zkp_v0_10_0_fe *r, const unsigned char *a) { - if (rustsecp256k1zkp_v0_10_0_fe_impl_set_b32_limit(r, a)) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_limit(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(rustsecp256k1zkp_v0_10_1_fe *r, const unsigned char *a) { + if (rustsecp256k1zkp_v0_10_1_fe_impl_set_b32_limit(r, a)) { r->magnitude = 1; r->normalized = 1; SECP256K1_FE_VERIFY(r); @@ -290,55 +290,55 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(rustsecp25 } } -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_get_b32(unsigned char *r, const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(a); VERIFY_CHECK(a->normalized); - rustsecp256k1zkp_v0_10_0_fe_impl_get_b32(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_get_b32(r, a); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int m); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_negate_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int m) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_negate_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int m); +SECP256K1_INLINE static void 
rustsecp256k1zkp_v0_10_1_fe_negate_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int m) { SECP256K1_FE_VERIFY(a); VERIFY_CHECK(m >= 0 && m <= 31); SECP256K1_FE_VERIFY_MAGNITUDE(a, m); - rustsecp256k1zkp_v0_10_0_fe_impl_negate_unchecked(r, a, m); + rustsecp256k1zkp_v0_10_1_fe_impl_negate_unchecked(r, a, m); r->magnitude = m + 1; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, int a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked(rustsecp256k1zkp_v0_10_0_fe *r, int a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_mul_int_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, int a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_mul_int_unchecked(rustsecp256k1zkp_v0_10_1_fe *r, int a) { SECP256K1_FE_VERIFY(r); VERIFY_CHECK(a >= 0 && a <= 32); VERIFY_CHECK(a*r->magnitude <= 32); - rustsecp256k1zkp_v0_10_0_fe_impl_mul_int_unchecked(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_mul_int_unchecked(r, a); r->magnitude *= a; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_add(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_add(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_add(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_add(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(r); SECP256K1_FE_VERIFY(a); VERIFY_CHECK(r->magnitude + a->magnitude <= 32); - rustsecp256k1zkp_v0_10_0_fe_impl_add(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_add(r, a); r->magnitude += a->magnitude; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_mul(rustsecp256k1zkp_v0_10_0_fe *r, const 
rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT b); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe * SECP256K1_RESTRICT b) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_mul(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT b); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_mul(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe * SECP256K1_RESTRICT b) { SECP256K1_FE_VERIFY(a); SECP256K1_FE_VERIFY(b); SECP256K1_FE_VERIFY_MAGNITUDE(a, 8); @@ -346,111 +346,111 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_mul(rustsecp256k1zkp_v0 VERIFY_CHECK(r != b); VERIFY_CHECK(a != b); - rustsecp256k1zkp_v0_10_0_fe_impl_mul(r, a, b); + rustsecp256k1zkp_v0_10_1_fe_impl_mul(r, a, b); r->magnitude = 1; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_sqr(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_sqr(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_sqr(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_sqr(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(a); SECP256K1_FE_VERIFY_MAGNITUDE(a, 8); - rustsecp256k1zkp_v0_10_0_fe_impl_sqr(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_sqr(r, a); r->magnitude = 1; r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_cmov(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *a, int flag); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_cmov(rustsecp256k1zkp_v0_10_0_fe 
*r, const rustsecp256k1zkp_v0_10_0_fe *a, int flag) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_cmov(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int flag); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_cmov(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *a, int flag) { VERIFY_CHECK(flag == 0 || flag == 1); SECP256K1_FE_VERIFY(a); SECP256K1_FE_VERIFY(r); - rustsecp256k1zkp_v0_10_0_fe_impl_cmov(r, a, flag); + rustsecp256k1zkp_v0_10_1_fe_impl_cmov(r, a, flag); if (a->magnitude > r->magnitude) r->magnitude = a->magnitude; if (!a->normalized) r->normalized = 0; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_to_storage(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_to_storage(rustsecp256k1zkp_v0_10_0_fe_storage *r, const rustsecp256k1zkp_v0_10_0_fe *a) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_to_storage(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_to_storage(rustsecp256k1zkp_v0_10_1_fe_storage *r, const rustsecp256k1zkp_v0_10_1_fe *a) { SECP256K1_FE_VERIFY(a); VERIFY_CHECK(a->normalized); - rustsecp256k1zkp_v0_10_0_fe_impl_to_storage(r, a); + rustsecp256k1zkp_v0_10_1_fe_impl_to_storage(r, a); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_from_storage(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe_storage *a) { - rustsecp256k1zkp_v0_10_0_fe_impl_from_storage(r, a); +static void rustsecp256k1zkp_v0_10_1_fe_impl_from_storage(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe_storage *a); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_from_storage(rustsecp256k1zkp_v0_10_1_fe *r, const 
rustsecp256k1zkp_v0_10_1_fe_storage *a) { + rustsecp256k1zkp_v0_10_1_fe_impl_from_storage(r, a); r->magnitude = 1; r->normalized = 1; SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_inv(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - int input_is_zero = rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(x); +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_inv(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + int input_is_zero = rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(x); SECP256K1_FE_VERIFY(x); - rustsecp256k1zkp_v0_10_0_fe_impl_inv(r, x); + rustsecp256k1zkp_v0_10_1_fe_impl_inv(r, x); r->magnitude = x->magnitude > 0; r->normalized = 1; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(r) == input_is_zero); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(r) == input_is_zero); SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_inv_var(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_inv_var(rustsecp256k1zkp_v0_10_0_fe *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - int input_is_zero = rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(x); +static void rustsecp256k1zkp_v0_10_1_fe_impl_inv_var(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_inv_var(rustsecp256k1zkp_v0_10_1_fe *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + int input_is_zero = rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(x); SECP256K1_FE_VERIFY(x); - rustsecp256k1zkp_v0_10_0_fe_impl_inv_var(r, x); + rustsecp256k1zkp_v0_10_1_fe_impl_inv_var(r, x); r->magnitude = 
x->magnitude > 0; r->normalized = 1; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(r) == input_is_zero); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(r) == input_is_zero); SECP256K1_FE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_0_fe *x); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_fe_is_square_var(const rustsecp256k1zkp_v0_10_0_fe *x) { +static int rustsecp256k1zkp_v0_10_1_fe_impl_is_square_var(const rustsecp256k1zkp_v0_10_1_fe *x); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_fe_is_square_var(const rustsecp256k1zkp_v0_10_1_fe *x) { int ret; - rustsecp256k1zkp_v0_10_0_fe tmp = *x, sqrt; + rustsecp256k1zkp_v0_10_1_fe tmp = *x, sqrt; SECP256K1_FE_VERIFY(x); - ret = rustsecp256k1zkp_v0_10_0_fe_impl_is_square_var(x); - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&tmp); - VERIFY_CHECK(ret == rustsecp256k1zkp_v0_10_0_fe_sqrt(&sqrt, &tmp)); + ret = rustsecp256k1zkp_v0_10_1_fe_impl_is_square_var(x); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&tmp); + VERIFY_CHECK(ret == rustsecp256k1zkp_v0_10_1_fe_sqrt(&sqrt, &tmp)); return ret; } -static void rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_0_fe* r, int m); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_get_bounds(rustsecp256k1zkp_v0_10_0_fe* r, int m) { +static void rustsecp256k1zkp_v0_10_1_fe_impl_get_bounds(rustsecp256k1zkp_v0_10_1_fe* r, int m); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_get_bounds(rustsecp256k1zkp_v0_10_1_fe* r, int m) { VERIFY_CHECK(m >= 0); VERIFY_CHECK(m <= 32); - rustsecp256k1zkp_v0_10_0_fe_impl_get_bounds(r, m); + rustsecp256k1zkp_v0_10_1_fe_impl_get_bounds(r, m); r->magnitude = m; r->normalized = (m == 0); SECP256K1_FE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_fe_impl_half(rustsecp256k1zkp_v0_10_0_fe *r); -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_fe_half(rustsecp256k1zkp_v0_10_0_fe *r) { +static void 
rustsecp256k1zkp_v0_10_1_fe_impl_half(rustsecp256k1zkp_v0_10_1_fe *r); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_fe_half(rustsecp256k1zkp_v0_10_1_fe *r) { SECP256K1_FE_VERIFY(r); SECP256K1_FE_VERIFY_MAGNITUDE(r, 31); - rustsecp256k1zkp_v0_10_0_fe_impl_half(r); + rustsecp256k1zkp_v0_10_1_fe_impl_half(r); r->magnitude = (r->magnitude >> 1) + 1; r->normalized = 0; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/group.h b/secp256k1-zkp-sys/depend/secp256k1/src/group.h index 8d3b9f0f..8353c0bf 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/group.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/group.h @@ -14,10 +14,10 @@ * Note: For exhaustive test mode, secp256k1 is replaced by a small subgroup of a different curve. */ typedef struct { - rustsecp256k1zkp_v0_10_0_fe x; - rustsecp256k1zkp_v0_10_0_fe y; + rustsecp256k1zkp_v0_10_1_fe x; + rustsecp256k1zkp_v0_10_1_fe y; int infinity; /* whether this represents the point at infinity */ -} rustsecp256k1zkp_v0_10_0_ge; +} rustsecp256k1zkp_v0_10_1_ge; #define SECP256K1_GE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), 0} #define SECP256K1_GE_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1} @@ -26,19 +26,19 @@ typedef struct { * Note: For exhastive test mode, secp256k1 is replaced by a small subgroup of a different curve. 
*/ typedef struct { - rustsecp256k1zkp_v0_10_0_fe x; /* actual X: x/z^2 */ - rustsecp256k1zkp_v0_10_0_fe y; /* actual Y: y/z^3 */ - rustsecp256k1zkp_v0_10_0_fe z; + rustsecp256k1zkp_v0_10_1_fe x; /* actual X: x/z^2 */ + rustsecp256k1zkp_v0_10_1_fe y; /* actual Y: y/z^3 */ + rustsecp256k1zkp_v0_10_1_fe z; int infinity; /* whether this represents the point at infinity */ -} rustsecp256k1zkp_v0_10_0_gej; +} rustsecp256k1zkp_v0_10_1_gej; #define SECP256K1_GEJ_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_CONST((i),(j),(k),(l),(m),(n),(o),(p)), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1), 0} #define SECP256K1_GEJ_CONST_INFINITY {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), 1} typedef struct { - rustsecp256k1zkp_v0_10_0_fe_storage x; - rustsecp256k1zkp_v0_10_0_fe_storage y; -} rustsecp256k1zkp_v0_10_0_ge_storage; + rustsecp256k1zkp_v0_10_1_fe_storage x; + rustsecp256k1zkp_v0_10_1_fe_storage y; +} rustsecp256k1zkp_v0_10_1_ge_storage; #define SECP256K1_GE_STORAGE_CONST(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) {SECP256K1_FE_STORAGE_CONST((a),(b),(c),(d),(e),(f),(g),(h)), SECP256K1_FE_STORAGE_CONST((i),(j),(k),(l),(m),(n),(o),(p))} @@ -53,41 +53,41 @@ typedef struct { #define SECP256K1_GEJ_Z_MAGNITUDE_MAX 1 /** Set a group element equal to the point with given X and Y coordinates */ -static void rustsecp256k1zkp_v0_10_0_ge_set_xy(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_fe *y); +static void rustsecp256k1zkp_v0_10_1_ge_set_xy(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_fe *y); /** Set a group element (affine) equal to the point with the given X coordinate * and a Y coordinate that is a quadratic residue modulo p. The return value * is true iff a coordinate with the given X coordinate exists. 
*/ -static int rustsecp256k1zkp_v0_10_0_ge_set_xquad(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x); +static int rustsecp256k1zkp_v0_10_1_ge_set_xquad(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x); /** Set a group element (affine) equal to the point with the given X coordinate, and given oddness * for Y. Return value indicates whether the result is valid. */ -static int rustsecp256k1zkp_v0_10_0_ge_set_xo_var(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x, int odd); +static int rustsecp256k1zkp_v0_10_1_ge_set_xo_var(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x, int odd); /** Determine whether x is a valid X coordinate on the curve. */ -static int rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(const rustsecp256k1zkp_v0_10_0_fe *x); +static int rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(const rustsecp256k1zkp_v0_10_1_fe *x); /** Determine whether fraction xn/xd is a valid X coordinate on the curve (xd != 0). */ -static int rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(const rustsecp256k1zkp_v0_10_0_fe *xn, const rustsecp256k1zkp_v0_10_0_fe *xd); +static int rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(const rustsecp256k1zkp_v0_10_1_fe *xn, const rustsecp256k1zkp_v0_10_1_fe *xd); /** Check whether a group element is the point at infinity. */ -static int rustsecp256k1zkp_v0_10_0_ge_is_infinity(const rustsecp256k1zkp_v0_10_0_ge *a); +static int rustsecp256k1zkp_v0_10_1_ge_is_infinity(const rustsecp256k1zkp_v0_10_1_ge *a); /** Check whether a group element is valid (i.e., on the curve). 
*/ -static int rustsecp256k1zkp_v0_10_0_ge_is_valid_var(const rustsecp256k1zkp_v0_10_0_ge *a); +static int rustsecp256k1zkp_v0_10_1_ge_is_valid_var(const rustsecp256k1zkp_v0_10_1_ge *a); /** Set r equal to the inverse of a (i.e., mirrored around the X axis) */ -static void rustsecp256k1zkp_v0_10_0_ge_neg(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *a); +static void rustsecp256k1zkp_v0_10_1_ge_neg(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *a); /** Set a group element equal to another which is given in jacobian coordinates. Constant time. */ -static void rustsecp256k1zkp_v0_10_0_ge_set_gej(rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_gej *a); +static void rustsecp256k1zkp_v0_10_1_ge_set_gej(rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_gej *a); /** Set a group element equal to another which is given in jacobian coordinates. */ -static void rustsecp256k1zkp_v0_10_0_ge_set_gej_var(rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_gej *a); +static void rustsecp256k1zkp_v0_10_1_ge_set_gej_var(rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_gej *a); /** Set a batch of group elements equal to the inputs given in jacobian coordinates */ -static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_gej *a, size_t len); +static void rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_gej *a, size_t len); /** Bring a batch of inputs to the same global z "denominator", based on ratios between * (omitted) z coordinates of adjacent elements. @@ -106,90 +106,90 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0 * * The coordinates of the final element a[len-1] are not changed. 
*/ -static void rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(size_t len, rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_fe *zr); +static void rustsecp256k1zkp_v0_10_1_ge_table_set_globalz(size_t len, rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_fe *zr); /** Check two group elements (affine) for equality in variable time. */ -static int rustsecp256k1zkp_v0_10_0_ge_eq_var(const rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_ge *b); +static int rustsecp256k1zkp_v0_10_1_ge_eq_var(const rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_ge *b); /** Set a group element (affine) equal to the point at infinity. */ -static void rustsecp256k1zkp_v0_10_0_ge_set_infinity(rustsecp256k1zkp_v0_10_0_ge *r); +static void rustsecp256k1zkp_v0_10_1_ge_set_infinity(rustsecp256k1zkp_v0_10_1_ge *r); /** Set a group element (jacobian) equal to the point at infinity. */ -static void rustsecp256k1zkp_v0_10_0_gej_set_infinity(rustsecp256k1zkp_v0_10_0_gej *r); +static void rustsecp256k1zkp_v0_10_1_gej_set_infinity(rustsecp256k1zkp_v0_10_1_gej *r); /** Set a group element (jacobian) equal to another which is given in affine coordinates. */ -static void rustsecp256k1zkp_v0_10_0_gej_set_ge(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_ge *a); +static void rustsecp256k1zkp_v0_10_1_gej_set_ge(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_ge *a); /** Check two group elements (jacobian) for equality in variable time. */ -static int rustsecp256k1zkp_v0_10_0_gej_eq_var(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b); +static int rustsecp256k1zkp_v0_10_1_gej_eq_var(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b); /** Check two group elements (jacobian and affine) for equality in variable time. 
*/ -static int rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b); +static int rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b); /** Compare the X coordinate of a group element (jacobian). * The magnitude of the group element's X coordinate must not exceed 31. */ -static int rustsecp256k1zkp_v0_10_0_gej_eq_x_var(const rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_gej *a); +static int rustsecp256k1zkp_v0_10_1_gej_eq_x_var(const rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_gej *a); /** Set r equal to the inverse of a (i.e., mirrored around the X axis) */ -static void rustsecp256k1zkp_v0_10_0_gej_neg(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a); +static void rustsecp256k1zkp_v0_10_1_gej_neg(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a); /** Check whether a group element is the point at infinity. */ -static int rustsecp256k1zkp_v0_10_0_gej_is_infinity(const rustsecp256k1zkp_v0_10_0_gej *a); +static int rustsecp256k1zkp_v0_10_1_gej_is_infinity(const rustsecp256k1zkp_v0_10_1_gej *a); /** Check whether a group element's y coordinate is a quadratic residue. */ -static int rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(const rustsecp256k1zkp_v0_10_0_gej *a); +static int rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(const rustsecp256k1zkp_v0_10_1_gej *a); /** Set r equal to the double of a. Constant time. */ -static void rustsecp256k1zkp_v0_10_0_gej_double(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a); +static void rustsecp256k1zkp_v0_10_1_gej_double(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a); /** Set r equal to the double of a. If rzr is not-NULL this sets *rzr such that r->z == a->z * *rzr (where infinity means an implicit z = 0). 
*/ -static void rustsecp256k1zkp_v0_10_0_gej_double_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, rustsecp256k1zkp_v0_10_0_fe *rzr); +static void rustsecp256k1zkp_v0_10_1_gej_double_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, rustsecp256k1zkp_v0_10_1_fe *rzr); /** Set r equal to the sum of a and b. If rzr is non-NULL this sets *rzr such that r->z == a->z * *rzr (a cannot be infinity in that case). */ -static void rustsecp256k1zkp_v0_10_0_gej_add_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b, rustsecp256k1zkp_v0_10_0_fe *rzr); +static void rustsecp256k1zkp_v0_10_1_gej_add_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b, rustsecp256k1zkp_v0_10_1_fe *rzr); /** Set r equal to the sum of a and b (with b given in affine coordinates, and not infinity). */ -static void rustsecp256k1zkp_v0_10_0_gej_add_ge(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b); +static void rustsecp256k1zkp_v0_10_1_gej_add_ge(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b); /** Set r equal to the sum of a and b (with b given in affine coordinates). This is more efficient - than rustsecp256k1zkp_v0_10_0_gej_add_var. It is identical to rustsecp256k1zkp_v0_10_0_gej_add_ge but without constant-time + than rustsecp256k1zkp_v0_10_1_gej_add_var. It is identical to rustsecp256k1zkp_v0_10_1_gej_add_ge but without constant-time guarantee, and b is allowed to be infinity. If rzr is non-NULL this sets *rzr such that r->z == a->z * *rzr (a cannot be infinity in that case). 
*/ -static void rustsecp256k1zkp_v0_10_0_gej_add_ge_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b, rustsecp256k1zkp_v0_10_0_fe *rzr); +static void rustsecp256k1zkp_v0_10_1_gej_add_ge_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b, rustsecp256k1zkp_v0_10_1_fe *rzr); /** Set r equal to the sum of a and b (with the inverse of b's Z coordinate passed as bzinv). */ -static void rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b, const rustsecp256k1zkp_v0_10_0_fe *bzinv); +static void rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b, const rustsecp256k1zkp_v0_10_1_fe *bzinv); /** Set r to be equal to lambda times a, where lambda is chosen in a way such that this is very fast. */ -static void rustsecp256k1zkp_v0_10_0_ge_mul_lambda(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *a); +static void rustsecp256k1zkp_v0_10_1_ge_mul_lambda(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *a); -/** Clear a rustsecp256k1zkp_v0_10_0_gej to prevent leaking sensitive information. */ -static void rustsecp256k1zkp_v0_10_0_gej_clear(rustsecp256k1zkp_v0_10_0_gej *r); +/** Clear a rustsecp256k1zkp_v0_10_1_gej to prevent leaking sensitive information. */ +static void rustsecp256k1zkp_v0_10_1_gej_clear(rustsecp256k1zkp_v0_10_1_gej *r); -/** Clear a rustsecp256k1zkp_v0_10_0_ge to prevent leaking sensitive information. */ -static void rustsecp256k1zkp_v0_10_0_ge_clear(rustsecp256k1zkp_v0_10_0_ge *r); +/** Clear a rustsecp256k1zkp_v0_10_1_ge to prevent leaking sensitive information. */ +static void rustsecp256k1zkp_v0_10_1_ge_clear(rustsecp256k1zkp_v0_10_1_ge *r); /** Convert a group element to the storage type. 
*/ -static void rustsecp256k1zkp_v0_10_0_ge_to_storage(rustsecp256k1zkp_v0_10_0_ge_storage *r, const rustsecp256k1zkp_v0_10_0_ge *a); +static void rustsecp256k1zkp_v0_10_1_ge_to_storage(rustsecp256k1zkp_v0_10_1_ge_storage *r, const rustsecp256k1zkp_v0_10_1_ge *a); /** Convert a group element back from the storage type. */ -static void rustsecp256k1zkp_v0_10_0_ge_from_storage(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge_storage *a); +static void rustsecp256k1zkp_v0_10_1_ge_from_storage(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge_storage *a); /** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. Both *r and *a must be initialized.*/ -static void rustsecp256k1zkp_v0_10_0_gej_cmov(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, int flag); +static void rustsecp256k1zkp_v0_10_1_gej_cmov(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, int flag); /** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. Both *r and *a must be initialized.*/ -static void rustsecp256k1zkp_v0_10_0_ge_storage_cmov(rustsecp256k1zkp_v0_10_0_ge_storage *r, const rustsecp256k1zkp_v0_10_0_ge_storage *a, int flag); +static void rustsecp256k1zkp_v0_10_1_ge_storage_cmov(rustsecp256k1zkp_v0_10_1_ge_storage *r, const rustsecp256k1zkp_v0_10_1_ge_storage *a, int flag); /** Rescale a jacobian point by b which must be non-zero. Constant-time. */ -static void rustsecp256k1zkp_v0_10_0_gej_rescale(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_fe *b); +static void rustsecp256k1zkp_v0_10_1_gej_rescale(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_fe *b); /** Convert a group element that is not infinity to a 64-byte array. The output * array is platform-dependent. 
*/ -static void rustsecp256k1zkp_v0_10_0_ge_to_bytes(unsigned char *buf, rustsecp256k1zkp_v0_10_0_ge *a); +static void rustsecp256k1zkp_v0_10_1_ge_to_bytes(unsigned char *buf, rustsecp256k1zkp_v0_10_1_ge *a); /** Convert a 64-byte array into group element. This function assumes that the * provided buffer correctly encodes a group element. */ -static void rustsecp256k1zkp_v0_10_0_ge_from_bytes(rustsecp256k1zkp_v0_10_0_ge *r, const unsigned char *buf); +static void rustsecp256k1zkp_v0_10_1_ge_from_bytes(rustsecp256k1zkp_v0_10_1_ge *r, const unsigned char *buf); /** Determine if a point (which is assumed to be on the curve) is in the correct (sub)group of the curve. * @@ -200,14 +200,14 @@ static void rustsecp256k1zkp_v0_10_0_ge_from_bytes(rustsecp256k1zkp_v0_10_0_ge * * (very) small subgroup, and that subgroup is what is used for all cryptographic operations. In that mode, this * function checks whether a point that is on the curve is in fact also in that subgroup. */ -static int rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(const rustsecp256k1zkp_v0_10_0_ge* ge); +static int rustsecp256k1zkp_v0_10_1_ge_is_in_correct_subgroup(const rustsecp256k1zkp_v0_10_1_ge* ge); /** Check invariants on an affine group element (no-op unless VERIFY is enabled). */ -static void rustsecp256k1zkp_v0_10_0_ge_verify(const rustsecp256k1zkp_v0_10_0_ge *a); -#define SECP256K1_GE_VERIFY(a) rustsecp256k1zkp_v0_10_0_ge_verify(a) +static void rustsecp256k1zkp_v0_10_1_ge_verify(const rustsecp256k1zkp_v0_10_1_ge *a); +#define SECP256K1_GE_VERIFY(a) rustsecp256k1zkp_v0_10_1_ge_verify(a) /** Check invariants on a Jacobian group element (no-op unless VERIFY is enabled). 
*/ -static void rustsecp256k1zkp_v0_10_0_gej_verify(const rustsecp256k1zkp_v0_10_0_gej *a); -#define SECP256K1_GEJ_VERIFY(a) rustsecp256k1zkp_v0_10_0_gej_verify(a) +static void rustsecp256k1zkp_v0_10_1_gej_verify(const rustsecp256k1zkp_v0_10_1_gej *a); +#define SECP256K1_GEJ_VERIFY(a) rustsecp256k1zkp_v0_10_1_gej_verify(a) #endif /* SECP256K1_GROUP_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/group_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/group_impl.h index 1b3681fc..007df1c8 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/group_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/group_impl.h @@ -51,17 +51,17 @@ #if defined(EXHAUSTIVE_TEST_ORDER) # if EXHAUSTIVE_TEST_ORDER == 7 -static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G_ORDER_7; +static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G_ORDER_7; #define SECP256K1_B 6 # elif EXHAUSTIVE_TEST_ORDER == 13 -static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G_ORDER_13; +static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G_ORDER_13; #define SECP256K1_B 2 # elif EXHAUSTIVE_TEST_ORDER == 199 -static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G_ORDER_199; +static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G_ORDER_199; #define SECP256K1_B 4 # else @@ -69,13 +69,13 @@ static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = S # endif #else -static const rustsecp256k1zkp_v0_10_0_ge rustsecp256k1zkp_v0_10_0_ge_const_g = SECP256K1_G; +static const rustsecp256k1zkp_v0_10_1_ge rustsecp256k1zkp_v0_10_1_ge_const_g = SECP256K1_G; #define SECP256K1_B 7 #endif /* End of section generated by sage/gen_exhaustive_groups.sage. 
*/ -static void rustsecp256k1zkp_v0_10_0_ge_verify(const rustsecp256k1zkp_v0_10_0_ge *a) { +static void rustsecp256k1zkp_v0_10_1_ge_verify(const rustsecp256k1zkp_v0_10_1_ge *a) { SECP256K1_FE_VERIFY(&a->x); SECP256K1_FE_VERIFY(&a->y); SECP256K1_FE_VERIFY_MAGNITUDE(&a->x, SECP256K1_GE_X_MAGNITUDE_MAX); @@ -84,7 +84,7 @@ static void rustsecp256k1zkp_v0_10_0_ge_verify(const rustsecp256k1zkp_v0_10_0_ge (void)a; } -static void rustsecp256k1zkp_v0_10_0_gej_verify(const rustsecp256k1zkp_v0_10_0_gej *a) { +static void rustsecp256k1zkp_v0_10_1_gej_verify(const rustsecp256k1zkp_v0_10_1_gej *a) { SECP256K1_FE_VERIFY(&a->x); SECP256K1_FE_VERIFY(&a->y); SECP256K1_FE_VERIFY(&a->z); @@ -96,40 +96,40 @@ static void rustsecp256k1zkp_v0_10_0_gej_verify(const rustsecp256k1zkp_v0_10_0_g } /* Set r to the affine coordinates of Jacobian point (a.x, a.y, 1/zi). */ -static void rustsecp256k1zkp_v0_10_0_ge_set_gej_zinv(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_fe *zi) { - rustsecp256k1zkp_v0_10_0_fe zi2; - rustsecp256k1zkp_v0_10_0_fe zi3; +static void rustsecp256k1zkp_v0_10_1_ge_set_gej_zinv(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_fe *zi) { + rustsecp256k1zkp_v0_10_1_fe zi2; + rustsecp256k1zkp_v0_10_1_fe zi3; SECP256K1_GEJ_VERIFY(a); SECP256K1_FE_VERIFY(zi); VERIFY_CHECK(!a->infinity); - rustsecp256k1zkp_v0_10_0_fe_sqr(&zi2, zi); - rustsecp256k1zkp_v0_10_0_fe_mul(&zi3, &zi2, zi); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->x, &a->x, &zi2); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &a->y, &zi3); + rustsecp256k1zkp_v0_10_1_fe_sqr(&zi2, zi); + rustsecp256k1zkp_v0_10_1_fe_mul(&zi3, &zi2, zi); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->x, &a->x, &zi2); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &a->y, &zi3); r->infinity = a->infinity; SECP256K1_GE_VERIFY(r); } /* Set r to the affine coordinates of Jacobian point (a.x, a.y, 1/zi). 
*/ -static void rustsecp256k1zkp_v0_10_0_ge_set_ge_zinv(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_fe *zi) { - rustsecp256k1zkp_v0_10_0_fe zi2; - rustsecp256k1zkp_v0_10_0_fe zi3; +static void rustsecp256k1zkp_v0_10_1_ge_set_ge_zinv(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_fe *zi) { + rustsecp256k1zkp_v0_10_1_fe zi2; + rustsecp256k1zkp_v0_10_1_fe zi3; SECP256K1_GE_VERIFY(a); SECP256K1_FE_VERIFY(zi); VERIFY_CHECK(!a->infinity); - rustsecp256k1zkp_v0_10_0_fe_sqr(&zi2, zi); - rustsecp256k1zkp_v0_10_0_fe_mul(&zi3, &zi2, zi); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->x, &a->x, &zi2); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &a->y, &zi3); + rustsecp256k1zkp_v0_10_1_fe_sqr(&zi2, zi); + rustsecp256k1zkp_v0_10_1_fe_mul(&zi3, &zi2, zi); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->x, &a->x, &zi2); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &a->y, &zi3); r->infinity = a->infinity; SECP256K1_GE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_set_xy(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_fe *y) { +static void rustsecp256k1zkp_v0_10_1_ge_set_xy(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_fe *y) { SECP256K1_FE_VERIFY(x); SECP256K1_FE_VERIFY(y); @@ -140,33 +140,33 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_xy(rustsecp256k1zkp_v0_10_0_ge *r, c SECP256K1_GE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_ge_is_infinity(const rustsecp256k1zkp_v0_10_0_ge *a) { +static int rustsecp256k1zkp_v0_10_1_ge_is_infinity(const rustsecp256k1zkp_v0_10_1_ge *a) { SECP256K1_GE_VERIFY(a); return a->infinity; } -static void rustsecp256k1zkp_v0_10_0_ge_neg(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *a) { +static void rustsecp256k1zkp_v0_10_1_ge_neg(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *a) { SECP256K1_GE_VERIFY(a); *r = 
*a; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&r->y); - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &r->y, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&r->y); + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &r->y, 1); SECP256K1_GE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_set_gej(rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_fe z2, z3; +static void rustsecp256k1zkp_v0_10_1_ge_set_gej(rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_fe z2, z3; SECP256K1_GEJ_VERIFY(a); r->infinity = a->infinity; - rustsecp256k1zkp_v0_10_0_fe_inv(&a->z, &a->z); - rustsecp256k1zkp_v0_10_0_fe_sqr(&z2, &a->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&z3, &a->z, &z2); - rustsecp256k1zkp_v0_10_0_fe_mul(&a->x, &a->x, &z2); - rustsecp256k1zkp_v0_10_0_fe_mul(&a->y, &a->y, &z3); - rustsecp256k1zkp_v0_10_0_fe_set_int(&a->z, 1); + rustsecp256k1zkp_v0_10_1_fe_inv(&a->z, &a->z); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z2, &a->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&z3, &a->z, &z2); + rustsecp256k1zkp_v0_10_1_fe_mul(&a->x, &a->x, &z2); + rustsecp256k1zkp_v0_10_1_fe_mul(&a->y, &a->y, &z3); + rustsecp256k1zkp_v0_10_1_fe_set_int(&a->z, 1); r->x = a->x; r->y = a->y; @@ -174,29 +174,29 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_gej(rustsecp256k1zkp_v0_10_0_ge *r, SECP256K1_GE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_set_gej_var(rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_fe z2, z3; +static void rustsecp256k1zkp_v0_10_1_ge_set_gej_var(rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_fe z2, z3; SECP256K1_GEJ_VERIFY(a); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(a)) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(r); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(a)) { + rustsecp256k1zkp_v0_10_1_ge_set_infinity(r); return; } r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_inv_var(&a->z, &a->z); - 
rustsecp256k1zkp_v0_10_0_fe_sqr(&z2, &a->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&z3, &a->z, &z2); - rustsecp256k1zkp_v0_10_0_fe_mul(&a->x, &a->x, &z2); - rustsecp256k1zkp_v0_10_0_fe_mul(&a->y, &a->y, &z3); - rustsecp256k1zkp_v0_10_0_fe_set_int(&a->z, 1); - rustsecp256k1zkp_v0_10_0_ge_set_xy(r, &a->x, &a->y); + rustsecp256k1zkp_v0_10_1_fe_inv_var(&a->z, &a->z); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z2, &a->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&z3, &a->z, &z2); + rustsecp256k1zkp_v0_10_1_fe_mul(&a->x, &a->x, &z2); + rustsecp256k1zkp_v0_10_1_fe_mul(&a->y, &a->y, &z3); + rustsecp256k1zkp_v0_10_1_fe_set_int(&a->z, 1); + rustsecp256k1zkp_v0_10_1_ge_set_xy(r, &a->x, &a->y); SECP256K1_GEJ_VERIFY(a); SECP256K1_GE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_gej *a, size_t len) { - rustsecp256k1zkp_v0_10_0_fe u; +static void rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_gej *a, size_t len) { + rustsecp256k1zkp_v0_10_1_fe u; size_t i; size_t last_i = SIZE_MAX; #ifdef VERIFY @@ -207,13 +207,13 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0 for (i = 0; i < len; i++) { if (a[i].infinity) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&r[i]); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&r[i]); } else { /* Use destination's x coordinates as scratch space */ if (last_i == SIZE_MAX) { r[i].x = a[i].z; } else { - rustsecp256k1zkp_v0_10_0_fe_mul(&r[i].x, &r[last_i].x, &a[i].z); + rustsecp256k1zkp_v0_10_1_fe_mul(&r[i].x, &r[last_i].x, &a[i].z); } last_i = i; } @@ -221,14 +221,14 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0 if (last_i == SIZE_MAX) { return; } - rustsecp256k1zkp_v0_10_0_fe_inv_var(&u, &r[last_i].x); + rustsecp256k1zkp_v0_10_1_fe_inv_var(&u, &r[last_i].x); i = last_i; while (i > 0) { i--; if (!a[i].infinity) { - rustsecp256k1zkp_v0_10_0_fe_mul(&r[last_i].x, 
&r[i].x, &u); - rustsecp256k1zkp_v0_10_0_fe_mul(&u, &u, &a[last_i].z); + rustsecp256k1zkp_v0_10_1_fe_mul(&r[last_i].x, &r[i].x, &u); + rustsecp256k1zkp_v0_10_1_fe_mul(&u, &u, &a[last_i].z); last_i = i; } } @@ -237,7 +237,7 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0 for (i = 0; i < len; i++) { if (!a[i].infinity) { - rustsecp256k1zkp_v0_10_0_ge_set_gej_zinv(&r[i], &a[i], &r[i].x); + rustsecp256k1zkp_v0_10_1_ge_set_gej_zinv(&r[i], &a[i], &r[i].x); } } @@ -248,9 +248,9 @@ static void rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(rustsecp256k1zkp_v0_10_0 #endif } -static void rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(size_t len, rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_fe *zr) { +static void rustsecp256k1zkp_v0_10_1_ge_table_set_globalz(size_t len, rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_fe *zr) { size_t i; - rustsecp256k1zkp_v0_10_0_fe zs; + rustsecp256k1zkp_v0_10_1_fe zs; #ifdef VERIFY for (i = 0; i < len; i++) { SECP256K1_GE_VERIFY(&a[i]); @@ -261,16 +261,16 @@ static void rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(size_t len, rustsecp25 if (len > 0) { i = len - 1; /* Ensure all y values are in weak normal form for fast negation of points */ - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&a[i].y); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&a[i].y); zs = zr[i]; /* Work our way backwards, using the z-ratios to scale the x/y values. 
*/ while (i > 0) { if (i != len - 1) { - rustsecp256k1zkp_v0_10_0_fe_mul(&zs, &zs, &zr[i]); + rustsecp256k1zkp_v0_10_1_fe_mul(&zs, &zs, &zr[i]); } i--; - rustsecp256k1zkp_v0_10_0_ge_set_ge_zinv(&a[i], &a[i], &zs); + rustsecp256k1zkp_v0_10_1_ge_set_ge_zinv(&a[i], &a[i], &zs); } } @@ -281,101 +281,101 @@ static void rustsecp256k1zkp_v0_10_0_ge_table_set_globalz(size_t len, rustsecp25 #endif } -static void rustsecp256k1zkp_v0_10_0_gej_set_infinity(rustsecp256k1zkp_v0_10_0_gej *r) { +static void rustsecp256k1zkp_v0_10_1_gej_set_infinity(rustsecp256k1zkp_v0_10_1_gej *r) { r->infinity = 1; - rustsecp256k1zkp_v0_10_0_fe_clear(&r->x); - rustsecp256k1zkp_v0_10_0_fe_clear(&r->y); - rustsecp256k1zkp_v0_10_0_fe_clear(&r->z); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->x); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->y); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->z); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_set_infinity(rustsecp256k1zkp_v0_10_0_ge *r) { +static void rustsecp256k1zkp_v0_10_1_ge_set_infinity(rustsecp256k1zkp_v0_10_1_ge *r) { r->infinity = 1; - rustsecp256k1zkp_v0_10_0_fe_clear(&r->x); - rustsecp256k1zkp_v0_10_0_fe_clear(&r->y); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->x); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->y); SECP256K1_GE_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_clear(rustsecp256k1zkp_v0_10_0_gej *r) { +static void rustsecp256k1zkp_v0_10_1_gej_clear(rustsecp256k1zkp_v0_10_1_gej *r) { r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_clear(&r->x); - rustsecp256k1zkp_v0_10_0_fe_clear(&r->y); - rustsecp256k1zkp_v0_10_0_fe_clear(&r->z); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->x); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->y); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->z); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_clear(rustsecp256k1zkp_v0_10_0_ge *r) { +static void rustsecp256k1zkp_v0_10_1_ge_clear(rustsecp256k1zkp_v0_10_1_ge *r) { r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_clear(&r->x); - 
rustsecp256k1zkp_v0_10_0_fe_clear(&r->y); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->x); + rustsecp256k1zkp_v0_10_1_fe_clear(&r->y); SECP256K1_GE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_ge_set_xquad(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe x2, x3; +static int rustsecp256k1zkp_v0_10_1_ge_set_xquad(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe x2, x3; int ret; SECP256K1_FE_VERIFY(x); r->x = *x; - rustsecp256k1zkp_v0_10_0_fe_sqr(&x2, x); - rustsecp256k1zkp_v0_10_0_fe_mul(&x3, x, &x2); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x2, x); + rustsecp256k1zkp_v0_10_1_fe_mul(&x3, x, &x2); r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_add_int(&x3, SECP256K1_B); - ret = rustsecp256k1zkp_v0_10_0_fe_sqrt(&r->y, &x3); + rustsecp256k1zkp_v0_10_1_fe_add_int(&x3, SECP256K1_B); + ret = rustsecp256k1zkp_v0_10_1_fe_sqrt(&r->y, &x3); SECP256K1_GE_VERIFY(r); return ret; } -static int rustsecp256k1zkp_v0_10_0_ge_set_xo_var(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_fe *x, int odd) { +static int rustsecp256k1zkp_v0_10_1_ge_set_xo_var(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_fe *x, int odd) { int ret; - ret = rustsecp256k1zkp_v0_10_0_ge_set_xquad(r, x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&r->y); - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&r->y) != odd) { - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &r->y, 1); + ret = rustsecp256k1zkp_v0_10_1_ge_set_xquad(r, x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&r->y); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&r->y) != odd) { + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &r->y, 1); } SECP256K1_GE_VERIFY(r); return ret; } -static void rustsecp256k1zkp_v0_10_0_gej_set_ge(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_ge *a) { +static void rustsecp256k1zkp_v0_10_1_gej_set_ge(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_ge *a) { SECP256K1_GE_VERIFY(a); 
r->infinity = a->infinity; r->x = a->x; r->y = a->y; - rustsecp256k1zkp_v0_10_0_fe_set_int(&r->z, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(&r->z, 1); SECP256K1_GEJ_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_gej_eq_var(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b) { - rustsecp256k1zkp_v0_10_0_gej tmp; +static int rustsecp256k1zkp_v0_10_1_gej_eq_var(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b) { + rustsecp256k1zkp_v0_10_1_gej tmp; SECP256K1_GEJ_VERIFY(b); SECP256K1_GEJ_VERIFY(a); - rustsecp256k1zkp_v0_10_0_gej_neg(&tmp, a); - rustsecp256k1zkp_v0_10_0_gej_add_var(&tmp, &tmp, b, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&tmp); + rustsecp256k1zkp_v0_10_1_gej_neg(&tmp, a); + rustsecp256k1zkp_v0_10_1_gej_add_var(&tmp, &tmp, b, NULL); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&tmp); } -static int rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b) { - rustsecp256k1zkp_v0_10_0_gej tmp; +static int rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b) { + rustsecp256k1zkp_v0_10_1_gej tmp; SECP256K1_GEJ_VERIFY(a); SECP256K1_GE_VERIFY(b); - rustsecp256k1zkp_v0_10_0_gej_neg(&tmp, a); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&tmp, &tmp, b, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&tmp); + rustsecp256k1zkp_v0_10_1_gej_neg(&tmp, a); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&tmp, &tmp, b, NULL); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&tmp); } -static int rustsecp256k1zkp_v0_10_0_ge_eq_var(const rustsecp256k1zkp_v0_10_0_ge *a, const rustsecp256k1zkp_v0_10_0_ge *b) { - rustsecp256k1zkp_v0_10_0_fe tmp; +static int rustsecp256k1zkp_v0_10_1_ge_eq_var(const rustsecp256k1zkp_v0_10_1_ge *a, const rustsecp256k1zkp_v0_10_1_ge *b) { + rustsecp256k1zkp_v0_10_1_fe tmp; SECP256K1_GE_VERIFY(a); SECP256K1_GE_VERIFY(b); @@ -383,62 +383,62 @@ static int 
rustsecp256k1zkp_v0_10_0_ge_eq_var(const rustsecp256k1zkp_v0_10_0_ge if (a->infinity) return 1; tmp = a->x; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&tmp); - if (!rustsecp256k1zkp_v0_10_0_fe_equal(&tmp, &b->x)) return 0; + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&tmp); + if (!rustsecp256k1zkp_v0_10_1_fe_equal(&tmp, &b->x)) return 0; tmp = a->y; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&tmp); - if (!rustsecp256k1zkp_v0_10_0_fe_equal(&tmp, &b->y)) return 0; + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&tmp); + if (!rustsecp256k1zkp_v0_10_1_fe_equal(&tmp, &b->y)) return 0; return 1; } -static int rustsecp256k1zkp_v0_10_0_gej_eq_x_var(const rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_fe r; +static int rustsecp256k1zkp_v0_10_1_gej_eq_x_var(const rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_fe r; SECP256K1_FE_VERIFY(x); SECP256K1_GEJ_VERIFY(a); VERIFY_CHECK(!a->infinity); - rustsecp256k1zkp_v0_10_0_fe_sqr(&r, &a->z); rustsecp256k1zkp_v0_10_0_fe_mul(&r, &r, x); - return rustsecp256k1zkp_v0_10_0_fe_equal(&r, &a->x); + rustsecp256k1zkp_v0_10_1_fe_sqr(&r, &a->z); rustsecp256k1zkp_v0_10_1_fe_mul(&r, &r, x); + return rustsecp256k1zkp_v0_10_1_fe_equal(&r, &a->x); } -static void rustsecp256k1zkp_v0_10_0_gej_neg(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a) { +static void rustsecp256k1zkp_v0_10_1_gej_neg(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a) { SECP256K1_GEJ_VERIFY(a); r->infinity = a->infinity; r->x = a->x; r->y = a->y; r->z = a->z; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&r->y); - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &r->y, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&r->y); + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &r->y, 1); SECP256K1_GEJ_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_gej_is_infinity(const rustsecp256k1zkp_v0_10_0_gej *a) { +static int 
rustsecp256k1zkp_v0_10_1_gej_is_infinity(const rustsecp256k1zkp_v0_10_1_gej *a) { SECP256K1_GEJ_VERIFY(a); return a->infinity; } -static int rustsecp256k1zkp_v0_10_0_ge_is_valid_var(const rustsecp256k1zkp_v0_10_0_ge *a) { - rustsecp256k1zkp_v0_10_0_fe y2, x3; +static int rustsecp256k1zkp_v0_10_1_ge_is_valid_var(const rustsecp256k1zkp_v0_10_1_ge *a) { + rustsecp256k1zkp_v0_10_1_fe y2, x3; SECP256K1_GE_VERIFY(a); if (a->infinity) { return 0; } /* y^2 = x^3 + 7 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&y2, &a->y); - rustsecp256k1zkp_v0_10_0_fe_sqr(&x3, &a->x); rustsecp256k1zkp_v0_10_0_fe_mul(&x3, &x3, &a->x); - rustsecp256k1zkp_v0_10_0_fe_add_int(&x3, SECP256K1_B); - return rustsecp256k1zkp_v0_10_0_fe_equal(&y2, &x3); + rustsecp256k1zkp_v0_10_1_fe_sqr(&y2, &a->y); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x3, &a->x); rustsecp256k1zkp_v0_10_1_fe_mul(&x3, &x3, &a->x); + rustsecp256k1zkp_v0_10_1_fe_add_int(&x3, SECP256K1_B); + return rustsecp256k1zkp_v0_10_1_fe_equal(&y2, &x3); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_gej_double(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_gej_double(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a) { /* Operations: 3 mul, 4 sqr, 8 add/half/mul_int/negate */ - rustsecp256k1zkp_v0_10_0_fe l, s, t; + rustsecp256k1zkp_v0_10_1_fe l, s, t; SECP256K1_GEJ_VERIFY(a); r->infinity = a->infinity; @@ -452,26 +452,26 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_gej_double(rustsecp256k1zk * Z3 = Y1*Z1 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &a->z, &a->y); /* Z3 = Y1*Z1 (1) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &a->y); /* S = Y1^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&l, &a->x); /* L = X1^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&l, 3); /* L = 3*X1^2 (3) */ - rustsecp256k1zkp_v0_10_0_fe_half(&l); /* L = 3/2*X1^2 (2) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&t, &s, 1); /* T = -S (2) */ - 
rustsecp256k1zkp_v0_10_0_fe_mul(&t, &t, &a->x); /* T = -X1*S (1) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&r->x, &l); /* X3 = L^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); /* X3 = L^2 + T (2) */ - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); /* X3 = L^2 + 2*T (3) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &s); /* S' = S^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&t, &r->x); /* T' = X3 + T (4) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &t, &l); /* Y3 = L*(X3 + T) (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&r->y, &s); /* Y3 = L*(X3 + T) + S^2 (2) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &r->y, 2); /* Y3 = -(L*(X3 + T) + S^2) (3) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &a->z, &a->y); /* Z3 = Y1*Z1 (1) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &a->y); /* S = Y1^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&l, &a->x); /* L = X1^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&l, 3); /* L = 3*X1^2 (3) */ + rustsecp256k1zkp_v0_10_1_fe_half(&l); /* L = 3/2*X1^2 (2) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&t, &s, 1); /* T = -S (2) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &t, &a->x); /* T = -X1*S (1) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&r->x, &l); /* X3 = L^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); /* X3 = L^2 + T (2) */ + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); /* X3 = L^2 + 2*T (3) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &s); /* S' = S^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&t, &r->x); /* T' = X3 + T (4) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &t, &l); /* Y3 = L*(X3 + T) (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&r->y, &s); /* Y3 = L*(X3 + T) + S^2 (2) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &r->y, 2); /* Y3 = -(L*(X3 + T) + S^2) (3) */ SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_double_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, rustsecp256k1zkp_v0_10_0_fe *rzr) { +static void rustsecp256k1zkp_v0_10_1_gej_double_var(rustsecp256k1zkp_v0_10_1_gej *r, const 
rustsecp256k1zkp_v0_10_1_gej *a, rustsecp256k1zkp_v0_10_1_fe *rzr) { SECP256K1_GEJ_VERIFY(a); /** For secp256k1, 2Q is infinity if and only if Q is infinity. This is because if 2Q = infinity, @@ -485,26 +485,26 @@ static void rustsecp256k1zkp_v0_10_0_gej_double_var(rustsecp256k1zkp_v0_10_0_gej * point will be gibberish (z = 0 but infinity = 0). */ if (a->infinity) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); if (rzr != NULL) { - rustsecp256k1zkp_v0_10_0_fe_set_int(rzr, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(rzr, 1); } return; } if (rzr != NULL) { *rzr = a->y; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(rzr); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(rzr); } - rustsecp256k1zkp_v0_10_0_gej_double(r, a); + rustsecp256k1zkp_v0_10_1_gej_double(r, a); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_add_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b, rustsecp256k1zkp_v0_10_0_fe *rzr) { +static void rustsecp256k1zkp_v0_10_1_gej_add_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b, rustsecp256k1zkp_v0_10_1_fe *rzr) { /* 12 mul, 4 sqr, 11 add/negate/normalizes_to_zero (ignoring special cases) */ - rustsecp256k1zkp_v0_10_0_fe z22, z12, u1, u2, s1, s2, h, i, h2, h3, t; + rustsecp256k1zkp_v0_10_1_fe z22, z12, u1, u2, s1, s2, h, i, h2, h3, t; SECP256K1_GEJ_VERIFY(a); SECP256K1_GEJ_VERIFY(b); @@ -515,91 +515,91 @@ static void rustsecp256k1zkp_v0_10_0_gej_add_var(rustsecp256k1zkp_v0_10_0_gej *r } if (b->infinity) { if (rzr != NULL) { - rustsecp256k1zkp_v0_10_0_fe_set_int(rzr, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(rzr, 1); } *r = *a; return; } - rustsecp256k1zkp_v0_10_0_fe_sqr(&z22, &b->z); - rustsecp256k1zkp_v0_10_0_fe_sqr(&z12, &a->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&u1, &a->x, &z22); - rustsecp256k1zkp_v0_10_0_fe_mul(&u2, &b->x, &z12); - 
rustsecp256k1zkp_v0_10_0_fe_mul(&s1, &a->y, &z22); rustsecp256k1zkp_v0_10_0_fe_mul(&s1, &s1, &b->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &s2, &a->z); - rustsecp256k1zkp_v0_10_0_fe_negate(&h, &u1, 1); rustsecp256k1zkp_v0_10_0_fe_add(&h, &u2); - rustsecp256k1zkp_v0_10_0_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_0_fe_add(&i, &s1); - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&h)) { - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&i)) { - rustsecp256k1zkp_v0_10_0_gej_double_var(r, a, rzr); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z22, &b->z); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z12, &a->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&u1, &a->x, &z22); + rustsecp256k1zkp_v0_10_1_fe_mul(&u2, &b->x, &z12); + rustsecp256k1zkp_v0_10_1_fe_mul(&s1, &a->y, &z22); rustsecp256k1zkp_v0_10_1_fe_mul(&s1, &s1, &b->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &s2, &a->z); + rustsecp256k1zkp_v0_10_1_fe_negate(&h, &u1, 1); rustsecp256k1zkp_v0_10_1_fe_add(&h, &u2); + rustsecp256k1zkp_v0_10_1_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_1_fe_add(&i, &s1); + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&h)) { + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&i)) { + rustsecp256k1zkp_v0_10_1_gej_double_var(r, a, rzr); } else { if (rzr != NULL) { - rustsecp256k1zkp_v0_10_0_fe_set_int(rzr, 0); + rustsecp256k1zkp_v0_10_1_fe_set_int(rzr, 0); } - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); } return; } r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_mul(&t, &h, &b->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &h, &b->z); if (rzr != NULL) { *rzr = t; } - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &a->z, &t); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &a->z, &t); - rustsecp256k1zkp_v0_10_0_fe_sqr(&h2, &h); - rustsecp256k1zkp_v0_10_0_fe_negate(&h2, &h2, 1); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h2, &h); - 
rustsecp256k1zkp_v0_10_0_fe_mul(&t, &u1, &h2); + rustsecp256k1zkp_v0_10_1_fe_sqr(&h2, &h); + rustsecp256k1zkp_v0_10_1_fe_negate(&h2, &h2, 1); + rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h2, &h); + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &u1, &h2); - rustsecp256k1zkp_v0_10_0_fe_sqr(&r->x, &i); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &h3); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_sqr(&r->x, &i); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&t, &r->x); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &t, &i); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h3, &s1); - rustsecp256k1zkp_v0_10_0_fe_add(&r->y, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&t, &r->x); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &t, &i); + rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h3, &s1); + rustsecp256k1zkp_v0_10_1_fe_add(&r->y, &h3); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_add_ge_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b, rustsecp256k1zkp_v0_10_0_fe *rzr) { +static void rustsecp256k1zkp_v0_10_1_gej_add_ge_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b, rustsecp256k1zkp_v0_10_1_fe *rzr) { /* Operations: 8 mul, 3 sqr, 11 add/negate/normalizes_to_zero (ignoring special cases) */ - rustsecp256k1zkp_v0_10_0_fe z12, u1, u2, s1, s2, h, i, h2, h3, t; + rustsecp256k1zkp_v0_10_1_fe z12, u1, u2, s1, s2, h, i, h2, h3, t; SECP256K1_GEJ_VERIFY(a); SECP256K1_GE_VERIFY(b); if (a->infinity) { VERIFY_CHECK(rzr == NULL); - rustsecp256k1zkp_v0_10_0_gej_set_ge(r, b); + rustsecp256k1zkp_v0_10_1_gej_set_ge(r, b); return; } if (b->infinity) { if (rzr != NULL) { - rustsecp256k1zkp_v0_10_0_fe_set_int(rzr, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(rzr, 1); } *r = *a; return; } - 
rustsecp256k1zkp_v0_10_0_fe_sqr(&z12, &a->z); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z12, &a->z); u1 = a->x; - rustsecp256k1zkp_v0_10_0_fe_mul(&u2, &b->x, &z12); + rustsecp256k1zkp_v0_10_1_fe_mul(&u2, &b->x, &z12); s1 = a->y; - rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &s2, &a->z); - rustsecp256k1zkp_v0_10_0_fe_negate(&h, &u1, SECP256K1_GEJ_X_MAGNITUDE_MAX); rustsecp256k1zkp_v0_10_0_fe_add(&h, &u2); - rustsecp256k1zkp_v0_10_0_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_0_fe_add(&i, &s1); - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&h)) { - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&i)) { - rustsecp256k1zkp_v0_10_0_gej_double_var(r, a, rzr); + rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &s2, &a->z); + rustsecp256k1zkp_v0_10_1_fe_negate(&h, &u1, SECP256K1_GEJ_X_MAGNITUDE_MAX); rustsecp256k1zkp_v0_10_1_fe_add(&h, &u2); + rustsecp256k1zkp_v0_10_1_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_1_fe_add(&i, &s1); + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&h)) { + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&i)) { + rustsecp256k1zkp_v0_10_1_gej_double_var(r, a, rzr); } else { if (rzr != NULL) { - rustsecp256k1zkp_v0_10_0_fe_set_int(rzr, 0); + rustsecp256k1zkp_v0_10_1_fe_set_int(rzr, 0); } - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); } return; } @@ -608,42 +608,42 @@ static void rustsecp256k1zkp_v0_10_0_gej_add_ge_var(rustsecp256k1zkp_v0_10_0_gej if (rzr != NULL) { *rzr = h; } - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &a->z, &h); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &a->z, &h); - rustsecp256k1zkp_v0_10_0_fe_sqr(&h2, &h); - rustsecp256k1zkp_v0_10_0_fe_negate(&h2, &h2, 1); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h2, &h); - rustsecp256k1zkp_v0_10_0_fe_mul(&t, &u1, &h2); + rustsecp256k1zkp_v0_10_1_fe_sqr(&h2, &h); + rustsecp256k1zkp_v0_10_1_fe_negate(&h2, &h2, 1); + 
rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h2, &h); + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &u1, &h2); - rustsecp256k1zkp_v0_10_0_fe_sqr(&r->x, &i); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &h3); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_sqr(&r->x, &i); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&t, &r->x); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &t, &i); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h3, &s1); - rustsecp256k1zkp_v0_10_0_fe_add(&r->y, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&t, &r->x); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &t, &i); + rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h3, &s1); + rustsecp256k1zkp_v0_10_1_fe_add(&r->y, &h3); SECP256K1_GEJ_VERIFY(r); if (rzr != NULL) SECP256K1_FE_VERIFY(rzr); } -static void rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b, const rustsecp256k1zkp_v0_10_0_fe *bzinv) { +static void rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b, const rustsecp256k1zkp_v0_10_1_fe *bzinv) { /* Operations: 9 mul, 3 sqr, 11 add/negate/normalizes_to_zero (ignoring special cases) */ - rustsecp256k1zkp_v0_10_0_fe az, z12, u1, u2, s1, s2, h, i, h2, h3, t; + rustsecp256k1zkp_v0_10_1_fe az, z12, u1, u2, s1, s2, h, i, h2, h3, t; SECP256K1_GEJ_VERIFY(a); SECP256K1_GE_VERIFY(b); SECP256K1_FE_VERIFY(bzinv); if (a->infinity) { - rustsecp256k1zkp_v0_10_0_fe bzinv2, bzinv3; + rustsecp256k1zkp_v0_10_1_fe bzinv2, bzinv3; r->infinity = b->infinity; - rustsecp256k1zkp_v0_10_0_fe_sqr(&bzinv2, bzinv); - rustsecp256k1zkp_v0_10_0_fe_mul(&bzinv3, &bzinv2, bzinv); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->x, &b->x, &bzinv2); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, 
&b->y, &bzinv3); - rustsecp256k1zkp_v0_10_0_fe_set_int(&r->z, 1); + rustsecp256k1zkp_v0_10_1_fe_sqr(&bzinv2, bzinv); + rustsecp256k1zkp_v0_10_1_fe_mul(&bzinv3, &bzinv2, bzinv); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->x, &b->x, &bzinv2); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &b->y, &bzinv3); + rustsecp256k1zkp_v0_10_1_fe_set_int(&r->z, 1); SECP256K1_GEJ_VERIFY(r); return; } @@ -660,50 +660,50 @@ static void rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(rustsecp256k1zkp_v0_10_0_g * The variable az below holds the modified Z coordinate for a, which is used * for the computation of rx and ry, but not for rz. */ - rustsecp256k1zkp_v0_10_0_fe_mul(&az, &a->z, bzinv); + rustsecp256k1zkp_v0_10_1_fe_mul(&az, &a->z, bzinv); - rustsecp256k1zkp_v0_10_0_fe_sqr(&z12, &az); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z12, &az); u1 = a->x; - rustsecp256k1zkp_v0_10_0_fe_mul(&u2, &b->x, &z12); + rustsecp256k1zkp_v0_10_1_fe_mul(&u2, &b->x, &z12); s1 = a->y; - rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &s2, &az); - rustsecp256k1zkp_v0_10_0_fe_negate(&h, &u1, SECP256K1_GEJ_X_MAGNITUDE_MAX); rustsecp256k1zkp_v0_10_0_fe_add(&h, &u2); - rustsecp256k1zkp_v0_10_0_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_0_fe_add(&i, &s1); - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&h)) { - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&i)) { - rustsecp256k1zkp_v0_10_0_gej_double_var(r, a, NULL); + rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &b->y, &z12); rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &s2, &az); + rustsecp256k1zkp_v0_10_1_fe_negate(&h, &u1, SECP256K1_GEJ_X_MAGNITUDE_MAX); rustsecp256k1zkp_v0_10_1_fe_add(&h, &u2); + rustsecp256k1zkp_v0_10_1_fe_negate(&i, &s2, 1); rustsecp256k1zkp_v0_10_1_fe_add(&i, &s1); + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&h)) { + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&i)) { + rustsecp256k1zkp_v0_10_1_gej_double_var(r, a, NULL); } else { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(r); + 
rustsecp256k1zkp_v0_10_1_gej_set_infinity(r); } return; } r->infinity = 0; - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &a->z, &h); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &a->z, &h); - rustsecp256k1zkp_v0_10_0_fe_sqr(&h2, &h); - rustsecp256k1zkp_v0_10_0_fe_negate(&h2, &h2, 1); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h2, &h); - rustsecp256k1zkp_v0_10_0_fe_mul(&t, &u1, &h2); + rustsecp256k1zkp_v0_10_1_fe_sqr(&h2, &h); + rustsecp256k1zkp_v0_10_1_fe_negate(&h2, &h2, 1); + rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h2, &h); + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &u1, &h2); - rustsecp256k1zkp_v0_10_0_fe_sqr(&r->x, &i); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &h3); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_sqr(&r->x, &i); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); + rustsecp256k1zkp_v0_10_1_fe_add(&r->x, &t); - rustsecp256k1zkp_v0_10_0_fe_add(&t, &r->x); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &t, &i); - rustsecp256k1zkp_v0_10_0_fe_mul(&h3, &h3, &s1); - rustsecp256k1zkp_v0_10_0_fe_add(&r->y, &h3); + rustsecp256k1zkp_v0_10_1_fe_add(&t, &r->x); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &t, &i); + rustsecp256k1zkp_v0_10_1_fe_mul(&h3, &h3, &s1); + rustsecp256k1zkp_v0_10_1_fe_add(&r->y, &h3); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_add_ge(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_ge *b) { +static void rustsecp256k1zkp_v0_10_1_gej_add_ge(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_ge *b) { /* Operations: 7 mul, 5 sqr, 21 add/cmov/half/mul_int/negate/normalizes_to_zero */ - rustsecp256k1zkp_v0_10_0_fe zz, u1, u2, s1, s2, t, tt, m, n, q, rr; - rustsecp256k1zkp_v0_10_0_fe m_alt, rr_alt; + rustsecp256k1zkp_v0_10_1_fe zz, u1, u2, s1, s2, t, tt, m, n, q, rr; + rustsecp256k1zkp_v0_10_1_fe m_alt, rr_alt; int 
degenerate; SECP256K1_GEJ_VERIFY(a); SECP256K1_GE_VERIFY(b); @@ -759,62 +759,62 @@ static void rustsecp256k1zkp_v0_10_0_gej_add_ge(rustsecp256k1zkp_v0_10_0_gej *r, * so this covers everything. */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&zz, &a->z); /* z = Z1^2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&zz, &a->z); /* z = Z1^2 */ u1 = a->x; /* u1 = U1 = X1*Z2^2 (GEJ_X_M) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&u2, &b->x, &zz); /* u2 = U2 = X2*Z1^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&u2, &b->x, &zz); /* u2 = U2 = X2*Z1^2 (1) */ s1 = a->y; /* s1 = S1 = Y1*Z2^3 (GEJ_Y_M) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &b->y, &zz); /* s2 = Y2*Z1^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&s2, &s2, &a->z); /* s2 = S2 = Y2*Z1^3 (1) */ - t = u1; rustsecp256k1zkp_v0_10_0_fe_add(&t, &u2); /* t = T = U1+U2 (GEJ_X_M+1) */ - m = s1; rustsecp256k1zkp_v0_10_0_fe_add(&m, &s2); /* m = M = S1+S2 (GEJ_Y_M+1) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&rr, &t); /* rr = T^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&m_alt, &u2, 1); /* Malt = -X2*Z1^2 (2) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&tt, &u1, &m_alt); /* tt = -U1*U2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&rr, &tt); /* rr = R = T^2-U1*U2 (2) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &b->y, &zz); /* s2 = Y2*Z1^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&s2, &s2, &a->z); /* s2 = S2 = Y2*Z1^3 (1) */ + t = u1; rustsecp256k1zkp_v0_10_1_fe_add(&t, &u2); /* t = T = U1+U2 (GEJ_X_M+1) */ + m = s1; rustsecp256k1zkp_v0_10_1_fe_add(&m, &s2); /* m = M = S1+S2 (GEJ_Y_M+1) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&rr, &t); /* rr = T^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&m_alt, &u2, 1); /* Malt = -X2*Z1^2 (2) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&tt, &u1, &m_alt); /* tt = -U1*U2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&rr, &tt); /* rr = R = T^2-U1*U2 (2) */ /* If lambda = R/M = R/0 we have a problem (except in the "trivial" * case that Z = z1z2 = 0, and this is special-cased later on). 
*/ - degenerate = rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&m); + degenerate = rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&m); /* This only occurs when y1 == -y2 and x1^3 == x2^3, but x1 != x2. * This means either x1 == beta*x2 or beta*x1 == x2, where beta is * a nontrivial cube root of one. In either case, an alternate * non-indeterminate expression for lambda is (y1 - y2)/(x1 - x2), * so we set R/M equal to this. */ rr_alt = s1; - rustsecp256k1zkp_v0_10_0_fe_mul_int(&rr_alt, 2); /* rr_alt = Y1*Z2^3 - Y2*Z1^3 (GEJ_Y_M*2) */ - rustsecp256k1zkp_v0_10_0_fe_add(&m_alt, &u1); /* Malt = X1*Z2^2 - X2*Z1^2 (GEJ_X_M+2) */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&rr_alt, 2); /* rr_alt = Y1*Z2^3 - Y2*Z1^3 (GEJ_Y_M*2) */ + rustsecp256k1zkp_v0_10_1_fe_add(&m_alt, &u1); /* Malt = X1*Z2^2 - X2*Z1^2 (GEJ_X_M+2) */ - rustsecp256k1zkp_v0_10_0_fe_cmov(&rr_alt, &rr, !degenerate); /* rr_alt (GEJ_Y_M*2) */ - rustsecp256k1zkp_v0_10_0_fe_cmov(&m_alt, &m, !degenerate); /* m_alt (GEJ_X_M+2) */ + rustsecp256k1zkp_v0_10_1_fe_cmov(&rr_alt, &rr, !degenerate); /* rr_alt (GEJ_Y_M*2) */ + rustsecp256k1zkp_v0_10_1_fe_cmov(&m_alt, &m, !degenerate); /* m_alt (GEJ_X_M+2) */ /* Now Ralt / Malt = lambda and is guaranteed not to be Ralt / 0. * From here on out Ralt and Malt represent the numerator * and denominator of lambda; R and M represent the explicit * expressions x1^2 + x2^2 + x1x2 and y1 + y2. 
*/ - rustsecp256k1zkp_v0_10_0_fe_sqr(&n, &m_alt); /* n = Malt^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&q, &t, + rustsecp256k1zkp_v0_10_1_fe_sqr(&n, &m_alt); /* n = Malt^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&q, &t, SECP256K1_GEJ_X_MAGNITUDE_MAX + 1); /* q = -T (GEJ_X_M+2) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&q, &q, &n); /* q = Q = -T*Malt^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&q, &q, &n); /* q = Q = -T*Malt^2 (1) */ /* These two lines use the observation that either M == Malt or M == 0, * so M^3 * Malt is either Malt^4 (which is computed by squaring), or * zero (which is "computed" by cmov). So the cost is one squaring * versus two multiplications. */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&n, &n); /* n = Malt^4 (1) */ - rustsecp256k1zkp_v0_10_0_fe_cmov(&n, &m, degenerate); /* n = M^3 * Malt (GEJ_Y_M+1) */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&t, &rr_alt); /* t = Ralt^2 (1) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &a->z, &m_alt); /* r->z = Z3 = Malt*Z (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&t, &q); /* t = Ralt^2 + Q (2) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&n, &n); /* n = Malt^4 (1) */ + rustsecp256k1zkp_v0_10_1_fe_cmov(&n, &m, degenerate); /* n = M^3 * Malt (GEJ_Y_M+1) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&t, &rr_alt); /* t = Ralt^2 (1) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &a->z, &m_alt); /* r->z = Z3 = Malt*Z (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&t, &q); /* t = Ralt^2 + Q (2) */ r->x = t; /* r->x = X3 = Ralt^2 + Q (2) */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&t, 2); /* t = 2*X3 (4) */ - rustsecp256k1zkp_v0_10_0_fe_add(&t, &q); /* t = 2*X3 + Q (5) */ - rustsecp256k1zkp_v0_10_0_fe_mul(&t, &t, &rr_alt); /* t = Ralt*(2*X3 + Q) (1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&t, &n); /* t = Ralt*(2*X3 + Q) + M^3*Malt (GEJ_Y_M+2) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &t, + rustsecp256k1zkp_v0_10_1_fe_mul_int(&t, 2); /* t = 2*X3 (4) */ + rustsecp256k1zkp_v0_10_1_fe_add(&t, &q); /* t = 2*X3 + Q (5) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&t, 
&t, &rr_alt); /* t = Ralt*(2*X3 + Q) (1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&t, &n); /* t = Ralt*(2*X3 + Q) + M^3*Malt (GEJ_Y_M+2) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &t, SECP256K1_GEJ_Y_MAGNITUDE_MAX + 2); /* r->y = -(Ralt*(2*X3 + Q) + M^3*Malt) (GEJ_Y_M+3) */ - rustsecp256k1zkp_v0_10_0_fe_half(&r->y); /* r->y = Y3 = -(Ralt*(2*X3 + Q) + M^3*Malt)/2 ((GEJ_Y_M+3)/2 + 1) */ + rustsecp256k1zkp_v0_10_1_fe_half(&r->y); /* r->y = Y3 = -(Ralt*(2*X3 + Q) + M^3*Malt)/2 ((GEJ_Y_M+3)/2 + 1) */ /* In case a->infinity == 1, replace r with (b->x, b->y, 1). */ - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->x, &b->x, a->infinity); - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->y, &b->y, a->infinity); - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->z, &rustsecp256k1zkp_v0_10_0_fe_one, a->infinity); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->x, &b->x, a->infinity); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->y, &b->y, a->infinity); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->z, &rustsecp256k1zkp_v0_10_1_fe_one, a->infinity); /* Set r->infinity if r->z is 0. * @@ -832,76 +832,76 @@ static void rustsecp256k1zkp_v0_10_0_gej_add_ge(rustsecp256k1zkp_v0_10_0_gej *r, * In this case, we can't have a = -b. * We have degenerate = false, r->z = (y1 + y2) * Z. * Then r->infinity = ((y1 + y2)Z == 0) = (y1 == -y2) = false. 
*/ - r->infinity = rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&r->z); + r->infinity = rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&r->z); SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_gej_rescale(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_fe *s) { +static void rustsecp256k1zkp_v0_10_1_gej_rescale(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_fe *s) { /* Operations: 4 mul, 1 sqr */ - rustsecp256k1zkp_v0_10_0_fe zz; + rustsecp256k1zkp_v0_10_1_fe zz; SECP256K1_GEJ_VERIFY(r); SECP256K1_FE_VERIFY(s); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(s)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(s)); - rustsecp256k1zkp_v0_10_0_fe_sqr(&zz, s); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->x, &r->x, &zz); /* r->x *= s^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &r->y, &zz); - rustsecp256k1zkp_v0_10_0_fe_mul(&r->y, &r->y, s); /* r->y *= s^3 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r->z, &r->z, s); /* r->z *= s */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&zz, s); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->x, &r->x, &zz); /* r->x *= s^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &r->y, &zz); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->y, &r->y, s); /* r->y *= s^3 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r->z, &r->z, s); /* r->z *= s */ SECP256K1_GEJ_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_ge_to_storage(rustsecp256k1zkp_v0_10_0_ge_storage *r, const rustsecp256k1zkp_v0_10_0_ge *a) { - rustsecp256k1zkp_v0_10_0_fe x, y; +static void rustsecp256k1zkp_v0_10_1_ge_to_storage(rustsecp256k1zkp_v0_10_1_ge_storage *r, const rustsecp256k1zkp_v0_10_1_ge *a) { + rustsecp256k1zkp_v0_10_1_fe x, y; SECP256K1_GE_VERIFY(a); VERIFY_CHECK(!a->infinity); x = a->x; - rustsecp256k1zkp_v0_10_0_fe_normalize(&x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&x); y = a->y; - rustsecp256k1zkp_v0_10_0_fe_normalize(&y); - rustsecp256k1zkp_v0_10_0_fe_to_storage(&r->x, &x); - rustsecp256k1zkp_v0_10_0_fe_to_storage(&r->y, &y); 
+ rustsecp256k1zkp_v0_10_1_fe_normalize(&y); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&r->x, &x); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&r->y, &y); } -static void rustsecp256k1zkp_v0_10_0_ge_from_storage(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge_storage *a) { - rustsecp256k1zkp_v0_10_0_fe_from_storage(&r->x, &a->x); - rustsecp256k1zkp_v0_10_0_fe_from_storage(&r->y, &a->y); +static void rustsecp256k1zkp_v0_10_1_ge_from_storage(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge_storage *a) { + rustsecp256k1zkp_v0_10_1_fe_from_storage(&r->x, &a->x); + rustsecp256k1zkp_v0_10_1_fe_from_storage(&r->y, &a->y); r->infinity = 0; SECP256K1_GE_VERIFY(r); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_gej_cmov(rustsecp256k1zkp_v0_10_0_gej *r, const rustsecp256k1zkp_v0_10_0_gej *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_gej_cmov(rustsecp256k1zkp_v0_10_1_gej *r, const rustsecp256k1zkp_v0_10_1_gej *a, int flag) { SECP256K1_GEJ_VERIFY(r); SECP256K1_GEJ_VERIFY(a); - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->x, &a->x, flag); - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->y, &a->y, flag); - rustsecp256k1zkp_v0_10_0_fe_cmov(&r->z, &a->z, flag); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->x, &a->x, flag); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->y, &a->y, flag); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r->z, &a->z, flag); r->infinity ^= (r->infinity ^ a->infinity) & flag; SECP256K1_GEJ_VERIFY(r); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_ge_storage_cmov(rustsecp256k1zkp_v0_10_0_ge_storage *r, const rustsecp256k1zkp_v0_10_0_ge_storage *a, int flag) { - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r->x, &a->x, flag); - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r->y, &a->y, flag); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_ge_storage_cmov(rustsecp256k1zkp_v0_10_1_ge_storage *r, const rustsecp256k1zkp_v0_10_1_ge_storage *a, int flag) { + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r->x, &a->x, flag); + 
rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r->y, &a->y, flag); } -static void rustsecp256k1zkp_v0_10_0_ge_mul_lambda(rustsecp256k1zkp_v0_10_0_ge *r, const rustsecp256k1zkp_v0_10_0_ge *a) { +static void rustsecp256k1zkp_v0_10_1_ge_mul_lambda(rustsecp256k1zkp_v0_10_1_ge *r, const rustsecp256k1zkp_v0_10_1_ge *a) { SECP256K1_GE_VERIFY(a); *r = *a; - rustsecp256k1zkp_v0_10_0_fe_mul(&r->x, &r->x, &rustsecp256k1zkp_v0_10_0_const_beta); + rustsecp256k1zkp_v0_10_1_fe_mul(&r->x, &r->x, &rustsecp256k1zkp_v0_10_1_const_beta); SECP256K1_GE_VERIFY(r); } -static int rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(const rustsecp256k1zkp_v0_10_0_gej *a) { - rustsecp256k1zkp_v0_10_0_fe yz; +static int rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(const rustsecp256k1zkp_v0_10_1_gej *a) { + rustsecp256k1zkp_v0_10_1_fe yz; if (a->infinity) { return 0; @@ -910,25 +910,25 @@ static int rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(const rustsecp256k1zkp_v0 /* We rely on the fact that the Jacobi symbol of 1 / a->z^3 is the same as * that of a->z. Thus a->y / a->z^3 is a quadratic residue iff a->y * a->z is */ - rustsecp256k1zkp_v0_10_0_fe_mul(&yz, &a->y, &a->z); - return rustsecp256k1zkp_v0_10_0_fe_is_square_var(&yz); + rustsecp256k1zkp_v0_10_1_fe_mul(&yz, &a->y, &a->z); + return rustsecp256k1zkp_v0_10_1_fe_is_square_var(&yz); } -static int rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(const rustsecp256k1zkp_v0_10_0_ge* ge) { +static int rustsecp256k1zkp_v0_10_1_ge_is_in_correct_subgroup(const rustsecp256k1zkp_v0_10_1_ge* ge) { #ifdef EXHAUSTIVE_TEST_ORDER - rustsecp256k1zkp_v0_10_0_gej out; + rustsecp256k1zkp_v0_10_1_gej out; int i; SECP256K1_GE_VERIFY(ge); /* A very simple EC multiplication ladder that avoids a dependency on ecmult. 
*/ - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&out); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&out); for (i = 0; i < 32; ++i) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&out, &out, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&out, &out, NULL); if ((((uint32_t)EXHAUSTIVE_TEST_ORDER) >> (31 - i)) & 1) { - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&out, &out, ge, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&out, &out, ge, NULL); } } - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&out); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&out); #else SECP256K1_GE_VERIFY(ge); @@ -938,51 +938,51 @@ static int rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(const rustsecp256k #endif } -static int rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(const rustsecp256k1zkp_v0_10_0_fe *x) { - rustsecp256k1zkp_v0_10_0_fe c; - rustsecp256k1zkp_v0_10_0_fe_sqr(&c, x); - rustsecp256k1zkp_v0_10_0_fe_mul(&c, &c, x); - rustsecp256k1zkp_v0_10_0_fe_add_int(&c, SECP256K1_B); - return rustsecp256k1zkp_v0_10_0_fe_is_square_var(&c); +static int rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(const rustsecp256k1zkp_v0_10_1_fe *x) { + rustsecp256k1zkp_v0_10_1_fe c; + rustsecp256k1zkp_v0_10_1_fe_sqr(&c, x); + rustsecp256k1zkp_v0_10_1_fe_mul(&c, &c, x); + rustsecp256k1zkp_v0_10_1_fe_add_int(&c, SECP256K1_B); + return rustsecp256k1zkp_v0_10_1_fe_is_square_var(&c); } -static int rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(const rustsecp256k1zkp_v0_10_0_fe *xn, const rustsecp256k1zkp_v0_10_0_fe *xd) { +static int rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(const rustsecp256k1zkp_v0_10_1_fe *xn, const rustsecp256k1zkp_v0_10_1_fe *xd) { /* We want to determine whether (xn/xd) is on the curve. * * (xn/xd)^3 + 7 is square <=> xd*xn^3 + 7*xd^4 is square (multiplying by xd^4, a square). 
*/ - rustsecp256k1zkp_v0_10_0_fe r, t; - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(xd)); - - rustsecp256k1zkp_v0_10_0_fe_mul(&r, xd, xn); /* r = xd*xn */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&t, xn); /* t = xn^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&r, &r, &t); /* r = xd*xn^3 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&t, xd); /* t = xd^2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&t, &t); /* t = xd^4 */ + rustsecp256k1zkp_v0_10_1_fe r, t; + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(xd)); + + rustsecp256k1zkp_v0_10_1_fe_mul(&r, xd, xn); /* r = xd*xn */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&t, xn); /* t = xn^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&r, &r, &t); /* r = xd*xn^3 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&t, xd); /* t = xd^2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&t, &t); /* t = xd^4 */ VERIFY_CHECK(SECP256K1_B <= 31); - rustsecp256k1zkp_v0_10_0_fe_mul_int(&t, SECP256K1_B); /* t = 7*xd^4 */ - rustsecp256k1zkp_v0_10_0_fe_add(&r, &t); /* r = xd*xn^3 + 7*xd^4 */ - return rustsecp256k1zkp_v0_10_0_fe_is_square_var(&r); + rustsecp256k1zkp_v0_10_1_fe_mul_int(&t, SECP256K1_B); /* t = 7*xd^4 */ + rustsecp256k1zkp_v0_10_1_fe_add(&r, &t); /* r = xd*xn^3 + 7*xd^4 */ + return rustsecp256k1zkp_v0_10_1_fe_is_square_var(&r); } -static void rustsecp256k1zkp_v0_10_0_ge_to_bytes(unsigned char *buf, rustsecp256k1zkp_v0_10_0_ge *a) { - rustsecp256k1zkp_v0_10_0_ge_storage s; +static void rustsecp256k1zkp_v0_10_1_ge_to_bytes(unsigned char *buf, rustsecp256k1zkp_v0_10_1_ge *a) { + rustsecp256k1zkp_v0_10_1_ge_storage s; - /* We require that the rustsecp256k1zkp_v0_10_0_ge_storage type is exactly 64 bytes. + /* We require that the rustsecp256k1zkp_v0_10_1_ge_storage type is exactly 64 bytes. * This is formally not guaranteed by the C standard, but should hold on any * sane compiler in the real world. 
*/ - STATIC_ASSERT(sizeof(rustsecp256k1zkp_v0_10_0_ge_storage) == 64); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(a)); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&s, a); + STATIC_ASSERT(sizeof(rustsecp256k1zkp_v0_10_1_ge_storage) == 64); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(a)); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&s, a); memcpy(buf, &s, 64); } -static void rustsecp256k1zkp_v0_10_0_ge_from_bytes(rustsecp256k1zkp_v0_10_0_ge *r, const unsigned char *buf) { - rustsecp256k1zkp_v0_10_0_ge_storage s; +static void rustsecp256k1zkp_v0_10_1_ge_from_bytes(rustsecp256k1zkp_v0_10_1_ge *r, const unsigned char *buf) { + rustsecp256k1zkp_v0_10_1_ge_storage s; - STATIC_ASSERT(sizeof(rustsecp256k1zkp_v0_10_0_ge_storage) == 64); + STATIC_ASSERT(sizeof(rustsecp256k1zkp_v0_10_1_ge_storage) == 64); memcpy(&s, buf, 64); - rustsecp256k1zkp_v0_10_0_ge_from_storage(r, &s); + rustsecp256k1zkp_v0_10_1_ge_from_storage(r, &s); } #endif /* SECP256K1_GROUP_IMPL_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/hash.h b/secp256k1-zkp-sys/depend/secp256k1/src/hash.h index 2511f224..596b7978 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/hash.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/hash.h @@ -14,28 +14,28 @@ typedef struct { uint32_t s[8]; unsigned char buf[64]; uint64_t bytes; -} rustsecp256k1zkp_v0_10_0_sha256; +} rustsecp256k1zkp_v0_10_1_sha256; -static void rustsecp256k1zkp_v0_10_0_sha256_initialize(rustsecp256k1zkp_v0_10_0_sha256 *hash); -static void rustsecp256k1zkp_v0_10_0_sha256_write(rustsecp256k1zkp_v0_10_0_sha256 *hash, const unsigned char *data, size_t size); -static void rustsecp256k1zkp_v0_10_0_sha256_finalize(rustsecp256k1zkp_v0_10_0_sha256 *hash, unsigned char *out32); +static void rustsecp256k1zkp_v0_10_1_sha256_initialize(rustsecp256k1zkp_v0_10_1_sha256 *hash); +static void rustsecp256k1zkp_v0_10_1_sha256_write(rustsecp256k1zkp_v0_10_1_sha256 *hash, const unsigned char *data, size_t size); +static void 
rustsecp256k1zkp_v0_10_1_sha256_finalize(rustsecp256k1zkp_v0_10_1_sha256 *hash, unsigned char *out32); typedef struct { - rustsecp256k1zkp_v0_10_0_sha256 inner, outer; -} rustsecp256k1zkp_v0_10_0_hmac_sha256; + rustsecp256k1zkp_v0_10_1_sha256 inner, outer; +} rustsecp256k1zkp_v0_10_1_hmac_sha256; -static void rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, const unsigned char *key, size_t size); -static void rustsecp256k1zkp_v0_10_0_hmac_sha256_write(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, const unsigned char *data, size_t size); -static void rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, unsigned char *out32); +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, const unsigned char *key, size_t size); +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_write(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, const unsigned char *data, size_t size); +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, unsigned char *out32); typedef struct { unsigned char v[32]; unsigned char k[32]; int retry; -} rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256; +} rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256; -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen); -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen); -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng); +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen); +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 
*rng, unsigned char *out, size_t outlen); +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 *rng); #endif /* SECP256K1_HASH_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/hash_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/hash_impl.h index 1a0a8e89..13844d25 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/hash_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/hash_impl.h @@ -28,7 +28,7 @@ (h) = t1 + t2; \ } while(0) -static void rustsecp256k1zkp_v0_10_0_sha256_initialize(rustsecp256k1zkp_v0_10_0_sha256 *hash) { +static void rustsecp256k1zkp_v0_10_1_sha256_initialize(rustsecp256k1zkp_v0_10_1_sha256 *hash) { hash->s[0] = 0x6a09e667ul; hash->s[1] = 0xbb67ae85ul; hash->s[2] = 0x3c6ef372ul; @@ -41,26 +41,26 @@ static void rustsecp256k1zkp_v0_10_0_sha256_initialize(rustsecp256k1zkp_v0_10_0_ } /** Perform one SHA-256 transformation, processing 16 big endian 32-bit words. */ -static void rustsecp256k1zkp_v0_10_0_sha256_transform(uint32_t* s, const unsigned char* buf) { +static void rustsecp256k1zkp_v0_10_1_sha256_transform(uint32_t* s, const unsigned char* buf) { uint32_t a = s[0], b = s[1], c = s[2], d = s[3], e = s[4], f = s[5], g = s[6], h = s[7]; uint32_t w0, w1, w2, w3, w4, w5, w6, w7, w8, w9, w10, w11, w12, w13, w14, w15; - Round(a, b, c, d, e, f, g, h, 0x428a2f98, w0 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[0])); - Round(h, a, b, c, d, e, f, g, 0x71374491, w1 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[4])); - Round(g, h, a, b, c, d, e, f, 0xb5c0fbcf, w2 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[8])); - Round(f, g, h, a, b, c, d, e, 0xe9b5dba5, w3 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[12])); - Round(e, f, g, h, a, b, c, d, 0x3956c25b, w4 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[16])); - Round(d, e, f, g, h, a, b, c, 0x59f111f1, w5 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[20])); - Round(c, d, e, f, g, h, a, b, 0x923f82a4, w6 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[24])); - Round(b, 
c, d, e, f, g, h, a, 0xab1c5ed5, w7 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[28])); - Round(a, b, c, d, e, f, g, h, 0xd807aa98, w8 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[32])); - Round(h, a, b, c, d, e, f, g, 0x12835b01, w9 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[36])); - Round(g, h, a, b, c, d, e, f, 0x243185be, w10 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[40])); - Round(f, g, h, a, b, c, d, e, 0x550c7dc3, w11 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[44])); - Round(e, f, g, h, a, b, c, d, 0x72be5d74, w12 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[48])); - Round(d, e, f, g, h, a, b, c, 0x80deb1fe, w13 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[52])); - Round(c, d, e, f, g, h, a, b, 0x9bdc06a7, w14 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[56])); - Round(b, c, d, e, f, g, h, a, 0xc19bf174, w15 = rustsecp256k1zkp_v0_10_0_read_be32(&buf[60])); + Round(a, b, c, d, e, f, g, h, 0x428a2f98, w0 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[0])); + Round(h, a, b, c, d, e, f, g, 0x71374491, w1 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[4])); + Round(g, h, a, b, c, d, e, f, 0xb5c0fbcf, w2 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[8])); + Round(f, g, h, a, b, c, d, e, 0xe9b5dba5, w3 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[12])); + Round(e, f, g, h, a, b, c, d, 0x3956c25b, w4 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[16])); + Round(d, e, f, g, h, a, b, c, 0x59f111f1, w5 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[20])); + Round(c, d, e, f, g, h, a, b, 0x923f82a4, w6 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[24])); + Round(b, c, d, e, f, g, h, a, 0xab1c5ed5, w7 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[28])); + Round(a, b, c, d, e, f, g, h, 0xd807aa98, w8 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[32])); + Round(h, a, b, c, d, e, f, g, 0x12835b01, w9 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[36])); + Round(g, h, a, b, c, d, e, f, 0x243185be, w10 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[40])); + Round(f, g, h, a, b, c, d, e, 0x550c7dc3, w11 = 
rustsecp256k1zkp_v0_10_1_read_be32(&buf[44])); + Round(e, f, g, h, a, b, c, d, 0x72be5d74, w12 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[48])); + Round(d, e, f, g, h, a, b, c, 0x80deb1fe, w13 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[52])); + Round(c, d, e, f, g, h, a, b, 0x9bdc06a7, w14 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[56])); + Round(b, c, d, e, f, g, h, a, 0xc19bf174, w15 = rustsecp256k1zkp_v0_10_1_read_be32(&buf[60])); Round(a, b, c, d, e, f, g, h, 0xe49b69c1, w0 += sigma1(w14) + w9 + sigma0(w1)); Round(h, a, b, c, d, e, f, g, 0xefbe4786, w1 += sigma1(w15) + w10 + sigma0(w2)); @@ -123,7 +123,7 @@ static void rustsecp256k1zkp_v0_10_0_sha256_transform(uint32_t* s, const unsigne s[7] += h; } -static void rustsecp256k1zkp_v0_10_0_sha256_write(rustsecp256k1zkp_v0_10_0_sha256 *hash, const unsigned char *data, size_t len) { +static void rustsecp256k1zkp_v0_10_1_sha256_write(rustsecp256k1zkp_v0_10_1_sha256 *hash, const unsigned char *data, size_t len) { size_t bufsize = hash->bytes & 0x3F; hash->bytes += len; VERIFY_CHECK(hash->bytes >= len); @@ -133,7 +133,7 @@ static void rustsecp256k1zkp_v0_10_0_sha256_write(rustsecp256k1zkp_v0_10_0_sha25 memcpy(hash->buf + bufsize, data, chunk_len); data += chunk_len; len -= chunk_len; - rustsecp256k1zkp_v0_10_0_sha256_transform(hash->s, hash->buf); + rustsecp256k1zkp_v0_10_1_sha256_transform(hash->s, hash->buf); bufsize = 0; } if (len) { @@ -142,78 +142,78 @@ static void rustsecp256k1zkp_v0_10_0_sha256_write(rustsecp256k1zkp_v0_10_0_sha25 } } -static void rustsecp256k1zkp_v0_10_0_sha256_finalize(rustsecp256k1zkp_v0_10_0_sha256 *hash, unsigned char *out32) { +static void rustsecp256k1zkp_v0_10_1_sha256_finalize(rustsecp256k1zkp_v0_10_1_sha256 *hash, unsigned char *out32) { static const unsigned char pad[64] = {0x80}; unsigned char sizedesc[8]; int i; /* The maximum message size of SHA256 is 2^64-1 bits. 
*/ VERIFY_CHECK(hash->bytes < ((uint64_t)1 << 61)); - rustsecp256k1zkp_v0_10_0_write_be32(&sizedesc[0], hash->bytes >> 29); - rustsecp256k1zkp_v0_10_0_write_be32(&sizedesc[4], hash->bytes << 3); - rustsecp256k1zkp_v0_10_0_sha256_write(hash, pad, 1 + ((119 - (hash->bytes % 64)) % 64)); - rustsecp256k1zkp_v0_10_0_sha256_write(hash, sizedesc, 8); + rustsecp256k1zkp_v0_10_1_write_be32(&sizedesc[0], hash->bytes >> 29); + rustsecp256k1zkp_v0_10_1_write_be32(&sizedesc[4], hash->bytes << 3); + rustsecp256k1zkp_v0_10_1_sha256_write(hash, pad, 1 + ((119 - (hash->bytes % 64)) % 64)); + rustsecp256k1zkp_v0_10_1_sha256_write(hash, sizedesc, 8); for (i = 0; i < 8; i++) { - rustsecp256k1zkp_v0_10_0_write_be32(&out32[4*i], hash->s[i]); + rustsecp256k1zkp_v0_10_1_write_be32(&out32[4*i], hash->s[i]); hash->s[i] = 0; } } /* Initializes a sha256 struct and writes the 64 byte string * SHA256(tag)||SHA256(tag) into it. */ -static void rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(rustsecp256k1zkp_v0_10_0_sha256 *hash, const unsigned char *tag, size_t taglen) { +static void rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(rustsecp256k1zkp_v0_10_1_sha256 *hash, const unsigned char *tag, size_t taglen) { unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_sha256_initialize(hash); - rustsecp256k1zkp_v0_10_0_sha256_write(hash, tag, taglen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(hash, buf); + rustsecp256k1zkp_v0_10_1_sha256_initialize(hash); + rustsecp256k1zkp_v0_10_1_sha256_write(hash, tag, taglen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(hash, buf); - rustsecp256k1zkp_v0_10_0_sha256_initialize(hash); - rustsecp256k1zkp_v0_10_0_sha256_write(hash, buf, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(hash, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_initialize(hash); + rustsecp256k1zkp_v0_10_1_sha256_write(hash, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(hash, buf, 32); } -static void rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, 
const unsigned char *key, size_t keylen) { +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, const unsigned char *key, size_t keylen) { size_t n; unsigned char rkey[64]; if (keylen <= sizeof(rkey)) { memcpy(rkey, key, keylen); memset(rkey + keylen, 0, sizeof(rkey) - keylen); } else { - rustsecp256k1zkp_v0_10_0_sha256 sha256; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, key, keylen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256, rkey); + rustsecp256k1zkp_v0_10_1_sha256 sha256; + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, key, keylen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256, rkey); memset(rkey + 32, 0, 32); } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hash->outer); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hash->outer); for (n = 0; n < sizeof(rkey); n++) { rkey[n] ^= 0x5c; } - rustsecp256k1zkp_v0_10_0_sha256_write(&hash->outer, rkey, sizeof(rkey)); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash->outer, rkey, sizeof(rkey)); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hash->inner); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hash->inner); for (n = 0; n < sizeof(rkey); n++) { rkey[n] ^= 0x5c ^ 0x36; } - rustsecp256k1zkp_v0_10_0_sha256_write(&hash->inner, rkey, sizeof(rkey)); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash->inner, rkey, sizeof(rkey)); memset(rkey, 0, sizeof(rkey)); } -static void rustsecp256k1zkp_v0_10_0_hmac_sha256_write(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, const unsigned char *data, size_t size) { - rustsecp256k1zkp_v0_10_0_sha256_write(&hash->inner, data, size); +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_write(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, const unsigned char *data, size_t size) { + rustsecp256k1zkp_v0_10_1_sha256_write(&hash->inner, data, size); } -static void 
rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_0_hmac_sha256 *hash, unsigned char *out32) { +static void rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_1_hmac_sha256 *hash, unsigned char *out32) { unsigned char temp[32]; - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hash->inner, temp); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash->outer, temp, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hash->inner, temp); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash->outer, temp, 32); memset(temp, 0, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hash->outer, out32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hash->outer, out32); } -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen) { - rustsecp256k1zkp_v0_10_0_hmac_sha256 hmac; +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 *rng, const unsigned char *key, size_t keylen) { + rustsecp256k1zkp_v0_10_1_hmac_sha256 hmac; static const unsigned char zero[1] = {0x00}; static const unsigned char one[1] = {0x01}; @@ -221,47 +221,47 @@ static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(rustsecp256k memset(rng->k, 0x00, 32); /* RFC6979 3.2.c. */ /* RFC6979 3.2.d. 
*/ - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, zero, 1); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, key, keylen); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->k); - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->v); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, zero, 1); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, key, keylen); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->k); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->v); /* RFC6979 3.2.f. 
*/ - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, one, 1); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, key, keylen); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->k); - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->v); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, one, 1); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, key, keylen); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->k); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->v); rng->retry = 0; } -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen) { +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 *rng, unsigned char *out, size_t outlen) { /* RFC6979 3.2.h. 
*/ static const unsigned char zero[1] = {0x00}; if (rng->retry) { - rustsecp256k1zkp_v0_10_0_hmac_sha256 hmac; - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, zero, 1); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->k); - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->v); + rustsecp256k1zkp_v0_10_1_hmac_sha256 hmac; + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, zero, 1); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->k); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->v); } while (outlen > 0) { - rustsecp256k1zkp_v0_10_0_hmac_sha256 hmac; + rustsecp256k1zkp_v0_10_1_hmac_sha256 hmac; int now = outlen; - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hmac, rng->k, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hmac, rng->v, 32); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hmac, rng->v); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hmac, rng->k, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hmac, rng->v, 32); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hmac, rng->v); if (now > 32) { now = 32; } @@ -273,7 +273,7 @@ static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(rustsecp256k1z rng->retry = 1; } -static void rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 *rng) { +static void rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 *rng) { 
memset(rng->k, 0, 32); memset(rng->v, 0, 32); rng->retry = 0; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/int128.h b/secp256k1-zkp-sys/depend/secp256k1/src/int128.h index a72fec12..df2833d0 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/int128.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/int128.h @@ -13,77 +13,77 @@ # endif /* Construct an unsigned 128-bit value from a high and a low 64-bit value. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_load(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t hi, uint64_t lo); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_load(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t hi, uint64_t lo); /* Multiply two unsigned 64-bit values a and b and write the result to r. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a, uint64_t b); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b); /* Multiply two unsigned 64-bit values a and b and add the result to r. * The final result is taken modulo 2^128. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a, uint64_t b); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b); /* Add an unsigned 64-bit value a to r. * The final result is taken modulo 2^128. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a); /* Unsigned (logical) right shift. * Non-constant time in n. 
*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_rshift(rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_rshift(rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n); /* Return the low 64-bits of a 128-bit value as an unsigned 64-bit value. */ -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_to_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a); +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_to_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a); /* Return the high 64-bits of a 128-bit value as an unsigned 64-bit value. */ -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_hi_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a); +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_hi_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a); /* Write an unsigned 64-bit value to r. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_from_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_from_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a); /* Tests if r is strictly less than to 2^n. * n must be strictly less than 128. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_u128_check_bits(const rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n); +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_u128_check_bits(const rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n); /* Construct an signed 128-bit value from a high and a low 64-bit value. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_load(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t hi, uint64_t lo); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_load(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t hi, uint64_t lo); /* Multiply two signed 64-bit values a and b and write the result to r. 
*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b); /* Multiply two signed 64-bit values a and b and add the result to r. * Overflow or underflow from the addition is undefined behaviour. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_accum_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_accum_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b); /* Compute a*d - b*c from signed 64-bit values and write the result to r. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_det(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_det(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d); /* Signed (arithmetic) right shift. * Non-constant time in b. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_rshift(rustsecp256k1zkp_v0_10_0_int128 *r, unsigned int b); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_rshift(rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int b); /* Return the input value modulo 2^64. */ -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_i128_to_u64(const rustsecp256k1zkp_v0_10_0_int128 *a); +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_i128_to_u64(const rustsecp256k1zkp_v0_10_1_int128 *a); /* Return the value as a signed 64-bit value. * Requires the input to be between INT64_MIN and INT64_MAX. */ -static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_i128_to_i64(const rustsecp256k1zkp_v0_10_0_int128 *a); +static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_1_i128_to_i64(const rustsecp256k1zkp_v0_10_1_int128 *a); /* Write a signed 64-bit value to r. 
*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_from_i64(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_from_i64(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a); /* Compare two 128-bit values for equality. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_eq_var(const rustsecp256k1zkp_v0_10_0_int128 *a, const rustsecp256k1zkp_v0_10_0_int128 *b); +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_eq_var(const rustsecp256k1zkp_v0_10_1_int128 *a, const rustsecp256k1zkp_v0_10_1_int128 *b); /* Tests if r is equal to sign*2^n (sign must be 1 or -1). * n must be strictly less than 127. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_check_pow2(const rustsecp256k1zkp_v0_10_0_int128 *r, unsigned int n, int sign); +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_check_pow2(const rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int n, int sign); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/int128_native.h b/secp256k1-zkp-sys/depend/secp256k1/src/int128_native.h index be182728..7acea700 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/int128_native.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/int128_native.h @@ -13,7 +13,7 @@ SECP256K1_GNUC_EXT typedef __int128 int128_t; /* No (U)INT128_C macros because compilers providing __int128 do not support 128-bit literals. 
*/ #endif -typedef uint128_t rustsecp256k1zkp_v0_10_0_uint128; -typedef int128_t rustsecp256k1zkp_v0_10_0_int128; +typedef uint128_t rustsecp256k1zkp_v0_10_1_uint128; +typedef int128_t rustsecp256k1zkp_v0_10_1_int128; #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/int128_native_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/int128_native_impl.h index c93ac7bc..579b9597 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/int128_native_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/int128_native_impl.h @@ -4,88 +4,88 @@ #include "int128.h" #include "util.h" -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_load(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t hi, uint64_t lo) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_load(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t hi, uint64_t lo) { *r = (((uint128_t)hi) << 64) + lo; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a, uint64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b) { *r = (uint128_t)a * b; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a, uint64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b) { *r += (uint128_t)a * b; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a) { *r += a; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_rshift(rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_rshift(rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n) { VERIFY_CHECK(n < 128); *r >>= n; } -static 
SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_to_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_to_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a) { return (uint64_t)(*a); } -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_hi_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_hi_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a) { return (uint64_t)(*a >> 64); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_from_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_from_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a) { *r = a; } -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_u128_check_bits(const rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_u128_check_bits(const rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n) { VERIFY_CHECK(n < 128); return (*r >> n == 0); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_load(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t hi, uint64_t lo) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_load(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t hi, uint64_t lo) { *r = (((uint128_t)(uint64_t)hi) << 64) + lo; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b) { *r = (int128_t)a * b; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_accum_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_accum_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b) { int128_t ab = (int128_t)a * b; VERIFY_CHECK(0 <= ab ? 
*r <= INT128_MAX - ab : INT128_MIN - ab <= *r); *r += ab; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_det(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_det(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d) { int128_t ad = (int128_t)a * d; int128_t bc = (int128_t)b * c; VERIFY_CHECK(0 <= bc ? INT128_MIN + bc <= ad : ad <= INT128_MAX + bc); *r = ad - bc; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_rshift(rustsecp256k1zkp_v0_10_0_int128 *r, unsigned int n) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_rshift(rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int n) { VERIFY_CHECK(n < 128); *r >>= n; } -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_i128_to_u64(const rustsecp256k1zkp_v0_10_0_int128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_i128_to_u64(const rustsecp256k1zkp_v0_10_1_int128 *a) { return (uint64_t)*a; } -static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_i128_to_i64(const rustsecp256k1zkp_v0_10_0_int128 *a) { +static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_1_i128_to_i64(const rustsecp256k1zkp_v0_10_1_int128 *a) { VERIFY_CHECK(INT64_MIN <= *a && *a <= INT64_MAX); return *a; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_from_i64(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_from_i64(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a) { *r = a; } -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_eq_var(const rustsecp256k1zkp_v0_10_0_int128 *a, const rustsecp256k1zkp_v0_10_0_int128 *b) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_eq_var(const rustsecp256k1zkp_v0_10_1_int128 *a, const rustsecp256k1zkp_v0_10_1_int128 *b) { return *a == *b; } -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_check_pow2(const rustsecp256k1zkp_v0_10_0_int128 
*r, unsigned int n, int sign) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_check_pow2(const rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int n, int sign) { VERIFY_CHECK(n < 127); VERIFY_CHECK(sign == 1 || sign == -1); return (*r == (int128_t)((uint128_t)sign << n)); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct.h b/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct.h index 8b1ddbb3..a4ad3b48 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct.h @@ -7,8 +7,8 @@ typedef struct { uint64_t lo; uint64_t hi; -} rustsecp256k1zkp_v0_10_0_uint128; +} rustsecp256k1zkp_v0_10_1_uint128; -typedef rustsecp256k1zkp_v0_10_0_uint128 rustsecp256k1zkp_v0_10_0_int128; +typedef rustsecp256k1zkp_v0_10_1_uint128 rustsecp256k1zkp_v0_10_1_int128; #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct_impl.h index 92edf006..d7c264e2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/int128_struct_impl.h @@ -13,23 +13,23 @@ # if defined(SECP256K1_MSVC_MULH_TEST_OVERRIDE) # pragma message(__FILE__ ": SECP256K1_MSVC_MULH_TEST_OVERRIDE is defined, forcing use of __(u)mulh.") # endif -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_umul128(uint64_t a, uint64_t b, uint64_t* hi) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_umul128(uint64_t a, uint64_t b, uint64_t* hi) { *hi = __umulh(a, b); return a * b; } -static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_mul128(int64_t a, int64_t b, int64_t* hi) { +static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_1_mul128(int64_t a, int64_t b, int64_t* hi) { *hi = __mulh(a, b); return (uint64_t)a * (uint64_t)b; } # else /* On x84_64 MSVC, use native _(u)mul128 for 64x64->128 multiplications. 
*/ -# define rustsecp256k1zkp_v0_10_0_umul128 _umul128 -# define rustsecp256k1zkp_v0_10_0_mul128 _mul128 +# define rustsecp256k1zkp_v0_10_1_umul128 _umul128 +# define rustsecp256k1zkp_v0_10_1_mul128 _mul128 # endif #else /* On other systems, emulate 64x64->128 multiplications using 32x32->64 multiplications. */ -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_umul128(uint64_t a, uint64_t b, uint64_t* hi) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_umul128(uint64_t a, uint64_t b, uint64_t* hi) { uint64_t ll = (uint64_t)(uint32_t)a * (uint32_t)b; uint64_t lh = (uint32_t)a * (b >> 32); uint64_t hl = (a >> 32) * (uint32_t)b; @@ -39,7 +39,7 @@ static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_umul128(uint64_t a, ui return (mid34 << 32) + (uint32_t)ll; } -static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_mul128(int64_t a, int64_t b, int64_t* hi) { +static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_1_mul128(int64_t a, int64_t b, int64_t* hi) { uint64_t ll = (uint64_t)(uint32_t)a * (uint32_t)b; int64_t lh = (uint32_t)a * (b >> 32); int64_t hl = (a >> 32) * (uint32_t)b; @@ -50,23 +50,23 @@ static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_mul128(int64_t a, int64 } #endif -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_load(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t hi, uint64_t lo) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_load(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t hi, uint64_t lo) { r->hi = hi; r->lo = lo; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a, uint64_t b) { - r->lo = rustsecp256k1zkp_v0_10_0_umul128(a, b, &r->hi); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b) { + r->lo = rustsecp256k1zkp_v0_10_1_umul128(a, b, &r->hi); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_mul(rustsecp256k1zkp_v0_10_0_uint128 *r, 
uint64_t a, uint64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_mul(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a, uint64_t b) { uint64_t lo, hi; - lo = rustsecp256k1zkp_v0_10_0_umul128(a, b, &hi); + lo = rustsecp256k1zkp_v0_10_1_umul128(a, b, &hi); r->lo += lo; r->hi += hi + (r->lo < lo); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_accum_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a) { r->lo += a; r->hi += r->lo < a; } @@ -74,7 +74,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_accum_u64(rustsecp256 /* Unsigned (logical) right shift. * Non-constant time in n. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_rshift(rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_rshift(rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n) { VERIFY_CHECK(n < 128); if (n >= 64) { r->lo = r->hi >> (n-64); @@ -90,39 +90,39 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_rshift(rustsecp256k1z } } -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_to_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_to_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a) { return a->lo; } -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_u128_hi_u64(const rustsecp256k1zkp_v0_10_0_uint128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_u128_hi_u64(const rustsecp256k1zkp_v0_10_1_uint128 *a) { return a->hi; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_u128_from_u64(rustsecp256k1zkp_v0_10_0_uint128 *r, uint64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_u128_from_u64(rustsecp256k1zkp_v0_10_1_uint128 *r, uint64_t a) { r->hi = 0; r->lo = a; } -static SECP256K1_INLINE int 
rustsecp256k1zkp_v0_10_0_u128_check_bits(const rustsecp256k1zkp_v0_10_0_uint128 *r, unsigned int n) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_u128_check_bits(const rustsecp256k1zkp_v0_10_1_uint128 *r, unsigned int n) { VERIFY_CHECK(n < 128); return n >= 64 ? r->hi >> (n - 64) == 0 : r->hi == 0 && r->lo >> n == 0; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_load(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t hi, uint64_t lo) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_load(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t hi, uint64_t lo) { r->hi = hi; r->lo = lo; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b) { int64_t hi; - r->lo = (uint64_t)rustsecp256k1zkp_v0_10_0_mul128(a, b, &hi); + r->lo = (uint64_t)rustsecp256k1zkp_v0_10_1_mul128(a, b, &hi); r->hi = (uint64_t)hi; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_accum_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_accum_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b) { int64_t hi; - uint64_t lo = (uint64_t)rustsecp256k1zkp_v0_10_0_mul128(a, b, &hi); + uint64_t lo = (uint64_t)rustsecp256k1zkp_v0_10_1_mul128(a, b, &hi); r->lo += lo; hi += r->lo < lo; /* Verify no overflow. 
@@ -139,9 +139,9 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_accum_mul(rustsecp256 r->hi += hi; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_dissip_mul(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_dissip_mul(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b) { int64_t hi; - uint64_t lo = (uint64_t)rustsecp256k1zkp_v0_10_0_mul128(a, b, &hi); + uint64_t lo = (uint64_t)rustsecp256k1zkp_v0_10_1_mul128(a, b, &hi); hi += r->lo < lo; /* Verify no overflow. * If r represents a positive value (the sign bit is not set) and the value we are subtracting is a negative value (the sign bit is set), @@ -157,15 +157,15 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_dissip_mul(rustsecp25 r->lo -= lo; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_det(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d) { - rustsecp256k1zkp_v0_10_0_i128_mul(r, a, d); - rustsecp256k1zkp_v0_10_0_i128_dissip_mul(r, b, c); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_det(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a, int64_t b, int64_t c, int64_t d) { + rustsecp256k1zkp_v0_10_1_i128_mul(r, a, d); + rustsecp256k1zkp_v0_10_1_i128_dissip_mul(r, b, c); } /* Signed (arithmetic) right shift. * Non-constant time in n. 
*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_rshift(rustsecp256k1zkp_v0_10_0_int128 *r, unsigned int n) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_rshift(rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int n) { VERIFY_CHECK(n < 128); if (n >= 64) { r->lo = (uint64_t)((int64_t)(r->hi) >> (n-64)); @@ -176,26 +176,26 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_rshift(rustsecp256k1z } } -static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_0_i128_to_u64(const rustsecp256k1zkp_v0_10_0_int128 *a) { +static SECP256K1_INLINE uint64_t rustsecp256k1zkp_v0_10_1_i128_to_u64(const rustsecp256k1zkp_v0_10_1_int128 *a) { return a->lo; } -static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_0_i128_to_i64(const rustsecp256k1zkp_v0_10_0_int128 *a) { +static SECP256K1_INLINE int64_t rustsecp256k1zkp_v0_10_1_i128_to_i64(const rustsecp256k1zkp_v0_10_1_int128 *a) { /* Verify that a represents a 64 bit signed value by checking that the high bits are a sign extension of the low bits. 
*/ VERIFY_CHECK(a->hi == -(a->lo >> 63)); - return (int64_t)rustsecp256k1zkp_v0_10_0_i128_to_u64(a); + return (int64_t)rustsecp256k1zkp_v0_10_1_i128_to_u64(a); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_i128_from_i64(rustsecp256k1zkp_v0_10_0_int128 *r, int64_t a) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_i128_from_i64(rustsecp256k1zkp_v0_10_1_int128 *r, int64_t a) { r->hi = (uint64_t)(a >> 63); r->lo = (uint64_t)a; } -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_eq_var(const rustsecp256k1zkp_v0_10_0_int128 *a, const rustsecp256k1zkp_v0_10_0_int128 *b) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_eq_var(const rustsecp256k1zkp_v0_10_1_int128 *a, const rustsecp256k1zkp_v0_10_1_int128 *b) { return a->hi == b->hi && a->lo == b->lo; } -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_i128_check_pow2(const rustsecp256k1zkp_v0_10_0_int128 *r, unsigned int n, int sign) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_i128_check_pow2(const rustsecp256k1zkp_v0_10_1_int128 *r, unsigned int n, int sign) { VERIFY_CHECK(n < 127); VERIFY_CHECK(sign == 1 || sign == -1); return n >= 64 ? r->hi == (uint64_t)sign << (n - 64) && r->lo == 0 diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modinv32.h b/secp256k1-zkp-sys/depend/secp256k1/src/modinv32.h index 169a3027..daee2e7d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modinv32.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modinv32.h @@ -14,15 +14,15 @@ * Its value is sum(v[i] * 2^(30*i), i=0..8). */ typedef struct { int32_t v[9]; -} rustsecp256k1zkp_v0_10_0_modinv32_signed30; +} rustsecp256k1zkp_v0_10_1_modinv32_signed30; typedef struct { /* The modulus in signed30 notation, must be odd and in [3, 2^256]. 
*/ - rustsecp256k1zkp_v0_10_0_modinv32_signed30 modulus; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 modulus; /* modulus^{-1} mod 2^30 */ uint32_t modulus_inv30; -} rustsecp256k1zkp_v0_10_0_modinv32_modinfo; +} rustsecp256k1zkp_v0_10_1_modinv32_modinfo; /* Replace x with its modular inverse mod modinfo->modulus. x must be in range [0, modulus). * If x is zero, the result will be zero as well. If not, the inverse must exist (i.e., the gcd of @@ -30,14 +30,14 @@ typedef struct { * * On output, all of x's limbs will be in [0, 2^30). */ -static void rustsecp256k1zkp_v0_10_0_modinv32_var(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo); +static void rustsecp256k1zkp_v0_10_1_modinv32_var(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo); -/* Same as rustsecp256k1zkp_v0_10_0_modinv32_var, but constant time in x (not in the modulus). */ -static void rustsecp256k1zkp_v0_10_0_modinv32(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo); +/* Same as rustsecp256k1zkp_v0_10_1_modinv32_var, but constant time in x (not in the modulus). */ +static void rustsecp256k1zkp_v0_10_1_modinv32(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo); /* Compute the Jacobi symbol for (x | modinfo->modulus). x must be coprime with modulus (and thus * cannot be 0, as modulus >= 3). All limbs of x must be non-negative. Returns 0 if the result * cannot be computed. 
*/ -static int rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo); +static int rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var(const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo); #endif /* SECP256K1_MODINV32_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modinv32_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modinv32_impl.h index f5578bc3..6e57f149 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modinv32_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modinv32_impl.h @@ -21,10 +21,10 @@ */ #ifdef VERIFY -static const rustsecp256k1zkp_v0_10_0_modinv32_signed30 SECP256K1_SIGNED30_ONE = {{1}}; +static const rustsecp256k1zkp_v0_10_1_modinv32_signed30 SECP256K1_SIGNED30_ONE = {{1}}; /* Compute a*factor and put it in r. All but the top limb in r will be in range [0,2^30). */ -static void rustsecp256k1zkp_v0_10_0_modinv32_mul_30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *a, int alen, int32_t factor) { +static void rustsecp256k1zkp_v0_10_1_modinv32_mul_30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *a, int alen, int32_t factor) { const int32_t M30 = (int32_t)(UINT32_MAX >> 2); int64_t c = 0; int i; @@ -38,11 +38,11 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_mul_30(rustsecp256k1zkp_v0_10_0_mo } /* Return -1 for a<b*factor, 0 for a==b*factor, 1 for a>b*factor. A consists of alen limbs; b has 9.
*/ -static int rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *a, int alen, const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *b, int32_t factor) { +static int rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *a, int alen, const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *b, int32_t factor) { int i; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 am, bm; - rustsecp256k1zkp_v0_10_0_modinv32_mul_30(&am, a, alen, 1); /* Normalize all but the top limb of a. */ - rustsecp256k1zkp_v0_10_0_modinv32_mul_30(&bm, b, 9, factor); + rustsecp256k1zkp_v0_10_1_modinv32_signed30 am, bm; + rustsecp256k1zkp_v0_10_1_modinv32_mul_30(&am, a, alen, 1); /* Normalize all but the top limb of a. */ + rustsecp256k1zkp_v0_10_1_modinv32_mul_30(&bm, b, 9, factor); for (i = 0; i < 8; ++i) { /* Verify that all but the top limb of a and b are normalized. */ VERIFY_CHECK(am.v[i] >> 30 == 0); @@ -60,7 +60,7 @@ static int rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(const rustsecp256k1zkp_v * to it to bring it to range [0,modulus). If sign < 0, the input will also be negated in the * process. The input must have limbs in range (-2^30,2^30). The output will have limbs in range * [0,2^30). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *r, int32_t sign, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv32_normalize_30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *r, int32_t sign, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo) { const int32_t M30 = (int32_t)(UINT32_MAX >> 2); int32_t r0 = r->v[0], r1 = r->v[1], r2 = r->v[2], r3 = r->v[3], r4 = r->v[4], r5 = r->v[5], r6 = r->v[6], r7 = r->v[7], r8 = r->v[8]; @@ -73,8 +73,8 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(rustsecp256k1zkp_v0_1 VERIFY_CHECK(r->v[i] >= -M30); VERIFY_CHECK(r->v[i] <= M30); } - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, -2) > 0); /* r > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 1) < 0); /* r < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, -2) > 0); /* r > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 1) < 0); /* r < modulus */ #endif /* In a first step, add the modulus if the input is negative, and then negate if requested. @@ -153,8 +153,8 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(rustsecp256k1zkp_v0_1 VERIFY_CHECK(r6 >> 30 == 0); VERIFY_CHECK(r7 >> 30 == 0); VERIFY_CHECK(r8 >> 30 == 0); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 0) >= 0); /* r >= 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 1) < 0); /* r < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 0) >= 0); /* r >= 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(r, 9, &modinfo->modulus, 1) < 0); /* r < modulus */ } /* Data type for transition matrices (see section 3 of explanation). 
@@ -164,7 +164,7 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(rustsecp256k1zkp_v0_1 */ typedef struct { int32_t u, v, q, r; -} rustsecp256k1zkp_v0_10_0_modinv32_trans2x2; +} rustsecp256k1zkp_v0_10_1_modinv32_trans2x2; /* Compute the transition matrix and zeta for 30 divsteps. * @@ -176,7 +176,7 @@ typedef struct { * * Implements the divsteps_n_matrix function from the explanation. */ -static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30(int32_t zeta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t) { +static int32_t rustsecp256k1zkp_v0_10_1_modinv32_divsteps_30(int32_t zeta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t) { /* u,v,q,r are the elements of the transformation matrix being built up, * starting with the identity matrix. Semantically they are signed integers * in range [-2^30,2^30], but here represented as unsigned mod 2^32. This @@ -233,8 +233,8 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30(int32_t zeta, uint3 return zeta; } -/* rustsecp256k1zkp_v0_10_0_modinv32_inv256[i] = -(2*i+1)^-1 (mod 256) */ -static const uint8_t rustsecp256k1zkp_v0_10_0_modinv32_inv256[128] = { +/* rustsecp256k1zkp_v0_10_1_modinv32_inv256[i] = -(2*i+1)^-1 (mod 256) */ +static const uint8_t rustsecp256k1zkp_v0_10_1_modinv32_inv256[128] = { 0xFF, 0x55, 0x33, 0x49, 0xC7, 0x5D, 0x3B, 0x11, 0x0F, 0xE5, 0xC3, 0x59, 0xD7, 0xED, 0xCB, 0x21, 0x1F, 0x75, 0x53, 0x69, 0xE7, 0x7D, 0x5B, 0x31, 0x2F, 0x05, 0xE3, 0x79, 0xF7, 0x0D, 0xEB, 0x41, 0x3F, 0x95, 0x73, 0x89, @@ -258,8 +258,8 @@ static const uint8_t rustsecp256k1zkp_v0_10_0_modinv32_inv256[128] = { * * Implements the divsteps_n_matrix_var function from the explanation. */ -static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30_var(int32_t eta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t) { - /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30. 
*/ +static int32_t rustsecp256k1zkp_v0_10_1_modinv32_divsteps_30_var(int32_t eta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t) { + /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_1_modinv32_divsteps_30. */ uint32_t u = 1, v = 0, q = 0, r = 1; uint32_t f = f0, g = g0, m; uint16_t w; @@ -267,7 +267,7 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30_var(int32_t eta, ui for (;;) { /* Use a sentinel bit to count zeros only up to i. */ - zeros = rustsecp256k1zkp_v0_10_0_ctz32_var(g | (UINT32_MAX << i)); + zeros = rustsecp256k1zkp_v0_10_1_ctz32_var(g | (UINT32_MAX << i)); /* Perform zeros divsteps at once; they all just divide g by two. */ g >>= zeros; u <<= zeros; @@ -298,7 +298,7 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30_var(int32_t eta, ui VERIFY_CHECK(limit > 0 && limit <= 30); m = (UINT32_MAX >> (32 - limit)) & 255U; /* Find what multiple of f must be added to g to cancel its bottom min(limit, 8) bits. */ - w = (g * rustsecp256k1zkp_v0_10_0_modinv32_inv256[(f >> 1) & 127]) & m; + w = (g * rustsecp256k1zkp_v0_10_1_modinv32_inv256[(f >> 1) & 127]) & m; /* Do so. */ g += f * w; q += u * w; @@ -331,7 +331,7 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30_var(int32_t eta, ui * change, but are meaningless. * Return: final eta */ -static int32_t rustsecp256k1zkp_v0_10_0_modinv32_posdivsteps_30_var(int32_t eta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t, int *jacp) { +static int32_t rustsecp256k1zkp_v0_10_1_modinv32_posdivsteps_30_var(int32_t eta, uint32_t f0, uint32_t g0, rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t, int *jacp) { /* Transformation matrix. */ uint32_t u = 1, v = 0, q = 0, r = 1; uint32_t f = f0, g = g0, m; @@ -341,7 +341,7 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_posdivsteps_30_var(int32_t eta, for (;;) { /* Use a sentinel bit to count zeros only up to i. 
*/ - zeros = rustsecp256k1zkp_v0_10_0_ctz32_var(g | (UINT32_MAX << i)); + zeros = rustsecp256k1zkp_v0_10_1_ctz32_var(g | (UINT32_MAX << i)); /* Perform zeros divsteps at once; they all just divide g by two. */ g >>= zeros; u <<= zeros; @@ -376,7 +376,7 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_posdivsteps_30_var(int32_t eta, VERIFY_CHECK(limit > 0 && limit <= 30); m = (UINT32_MAX >> (32 - limit)) & 255U; /* Find what multiple of f must be added to g to cancel its bottom min(limit, 8) bits. */ - w = (g * rustsecp256k1zkp_v0_10_0_modinv32_inv256[(f >> 1) & 127]) & m; + w = (g * rustsecp256k1zkp_v0_10_1_modinv32_inv256[(f >> 1) & 127]) & m; /* Do so. */ g += f * w; q += u * w; @@ -405,16 +405,16 @@ static int32_t rustsecp256k1zkp_v0_10_0_modinv32_posdivsteps_30_var(int32_t eta, * * This implements the update_de function from the explanation. */ -static void rustsecp256k1zkp_v0_10_0_modinv32_update_de_30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *d, rustsecp256k1zkp_v0_10_0_modinv32_signed30 *e, const rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo* modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv32_update_de_30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *d, rustsecp256k1zkp_v0_10_1_modinv32_signed30 *e, const rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo* modinfo) { const int32_t M30 = (int32_t)(UINT32_MAX >> 2); const int32_t u = t->u, v = t->v, q = t->q, r = t->r; int32_t di, ei, md, me, sd, se; int64_t cd, ce; int i; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, 1) < 0); /* d < modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, 1) < 0); /* e < 
modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, 1) < 0); /* d < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, 1) < 0); /* e < modulus */ VERIFY_CHECK(labs(u) <= (M30 + 1 - labs(v))); /* |u|+|v| <= 2^30 */ VERIFY_CHECK(labs(q) <= (M30 + 1 - labs(r))); /* |q|+|r| <= 2^30 */ @@ -453,17 +453,17 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_update_de_30(rustsecp256k1zkp_v0_1 d->v[8] = (int32_t)cd; e->v[8] = (int32_t)ce; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, 1) < 0); /* d < modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, 1) < 0); /* e < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(d, 9, &modinfo->modulus, 1) < 0); /* d < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(e, 9, &modinfo->modulus, 1) < 0); /* e < modulus */ } /* Compute (t/2^30) * [f, g], where t is a transition matrix for 30 divsteps. * * This implements the update_fg function from the explanation. 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *f, rustsecp256k1zkp_v0_10_0_modinv32_signed30 *g, const rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t) { +static void rustsecp256k1zkp_v0_10_1_modinv32_update_fg_30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *f, rustsecp256k1zkp_v0_10_1_modinv32_signed30 *g, const rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t) { const int32_t M30 = (int32_t)(UINT32_MAX >> 2); const int32_t u = t->u, v = t->v, q = t->q, r = t->r; int32_t fi, gi; @@ -498,7 +498,7 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30(rustsecp256k1zkp_v0_1 * * This implements the update_fg function from the explanation in modinv64_impl.h. */ -static void rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30_var(int len, rustsecp256k1zkp_v0_10_0_modinv32_signed30 *f, rustsecp256k1zkp_v0_10_0_modinv32_signed30 *g, const rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 *t) { +static void rustsecp256k1zkp_v0_10_1_modinv32_update_fg_30_var(int len, rustsecp256k1zkp_v0_10_1_modinv32_signed30 *f, rustsecp256k1zkp_v0_10_1_modinv32_signed30 *g, const rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 *t) { const int32_t M30 = (int32_t)(UINT32_MAX >> 2); const int32_t u = t->u, v = t->v, q = t->q, r = t->r; int32_t fi, gi; @@ -529,34 +529,34 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30_var(int len, rustsecp } /* Compute the inverse of x modulo modinfo->modulus, and replace x with it (constant time in x). */ -static void rustsecp256k1zkp_v0_10_0_modinv32(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv32(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo) { /* Start with d=0, e=1, f=modulus, g=x, zeta=-1. 
*/ - rustsecp256k1zkp_v0_10_0_modinv32_signed30 d = {{0}}; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 e = {{1}}; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 g = *x; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 d = {{0}}; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 e = {{1}}; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 g = *x; int i; int32_t zeta = -1; /* zeta = -(delta+1/2); delta is initially 1/2. */ /* Do 20 iterations of 30 divsteps each = 600 divsteps. 590 suffices for 256-bit inputs. */ for (i = 0; i < 20; ++i) { /* Compute transition matrix and new zeta after 30 divsteps. */ - rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 t; - zeta = rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30(zeta, f.v[0], g.v[0], &t); + rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 t; + zeta = rustsecp256k1zkp_v0_10_1_modinv32_divsteps_30(zeta, f.v[0], g.v[0], &t); /* Update d,e using that transition matrix. */ - rustsecp256k1zkp_v0_10_0_modinv32_update_de_30(&d, &e, &t, modinfo); + rustsecp256k1zkp_v0_10_1_modinv32_update_de_30(&d, &e, &t, modinfo); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30(&f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv32_update_fg_30(&f, &g, &t); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, 9, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* At this point sufficient iterations 
have been performed that g must have reached 0 @@ -564,27 +564,27 @@ static void rustsecp256k1zkp_v0_10_0_modinv32(rustsecp256k1zkp_v0_10_0_modinv32_ * values i.e. +/- 1, and d now contains +/- the modular inverse. */ /* g == 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, 9, &SECP256K1_SIGNED30_ONE, 0) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, 9, &SECP256K1_SIGNED30_ONE, 0) == 0); /* |f| == 1, or (x == 0 and d == 0 and |f|=modulus) */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &SECP256K1_SIGNED30_ONE, -1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &SECP256K1_SIGNED30_ONE, 1) == 0 || - (rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(x, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&d, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && - (rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) == 0))); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &SECP256K1_SIGNED30_ONE, -1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &SECP256K1_SIGNED30_ONE, 1) == 0 || + (rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(x, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&d, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && + (rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, 1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, 9, &modinfo->modulus, -1) == 0))); /* Optionally negate d, normalize to [0,modulus), and return it. */ - rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(&d, f.v[8], modinfo); + rustsecp256k1zkp_v0_10_1_modinv32_normalize_30(&d, f.v[8], modinfo); *x = d; } /* Compute the inverse of x modulo modinfo->modulus, and replace x with it (variable time). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv32_var(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv32_var(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo) { /* Start with d=0, e=1, f=modulus, g=x, eta=-1. */ - rustsecp256k1zkp_v0_10_0_modinv32_signed30 d = {{0, 0, 0, 0, 0, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 e = {{1, 0, 0, 0, 0, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 g = *x; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 d = {{0, 0, 0, 0, 0, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 e = {{1, 0, 0, 0, 0, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 g = *x; #ifdef VERIFY int i = 0; #endif @@ -595,18 +595,18 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_var(rustsecp256k1zkp_v0_10_0_modin /* Do iterations of 30 divsteps each until g=0. */ while (1) { /* Compute transition matrix and new eta after 30 divsteps. */ - rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 t; - eta = rustsecp256k1zkp_v0_10_0_modinv32_divsteps_30_var(eta, f.v[0], g.v[0], &t); + rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 t; + eta = rustsecp256k1zkp_v0_10_1_modinv32_divsteps_30_var(eta, f.v[0], g.v[0], &t); /* Update d,e using that transition matrix. */ - rustsecp256k1zkp_v0_10_0_modinv32_update_de_30(&d, &e, &t, modinfo); + rustsecp256k1zkp_v0_10_1_modinv32_update_de_30(&d, &e, &t, modinfo); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30_var(len, &f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv32_update_fg_30_var(len, &f, &g, &t); /* If the bottom limb of g is 0, there is a chance g=0. 
*/ if (g.v[0] == 0) { cond = 0; @@ -632,27 +632,27 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_var(rustsecp256k1zkp_v0_10_0_modin } VERIFY_CHECK(++i < 25); /* We should never need more than 25*30 = 750 divsteps */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* At this point g is 0 and (if g was not originally 0) f must now equal +/- GCD of * the initial f, g values i.e. +/- 1, and d now contains +/- the modular inverse. 
*/ /* g == 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &SECP256K1_SIGNED30_ONE, 0) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &SECP256K1_SIGNED30_ONE, 0) == 0); /* |f| == 1, or (x == 0 and d == 0 and |f|=modulus) */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &SECP256K1_SIGNED30_ONE, -1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &SECP256K1_SIGNED30_ONE, 1) == 0 || - (rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(x, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&d, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && - (rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) == 0))); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &SECP256K1_SIGNED30_ONE, -1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &SECP256K1_SIGNED30_ONE, 1) == 0 || + (rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(x, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&d, 9, &SECP256K1_SIGNED30_ONE, 0) == 0 && + (rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, -1) == 0))); /* Optionally negate d, normalize to [0,modulus), and return it. */ - rustsecp256k1zkp_v0_10_0_modinv32_normalize_30(&d, f.v[len - 1], modinfo); + rustsecp256k1zkp_v0_10_1_modinv32_normalize_30(&d, f.v[len - 1], modinfo); *x = d; } @@ -665,10 +665,10 @@ static void rustsecp256k1zkp_v0_10_0_modinv32_var(rustsecp256k1zkp_v0_10_0_modin #endif /* Compute the Jacobi symbol of x modulo modinfo->modulus (variable time). gcd(x,modulus) must be 1. 
*/ -static int rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_0_modinv32_modinfo *modinfo) { +static int rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var(const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *x, const rustsecp256k1zkp_v0_10_1_modinv32_modinfo *modinfo) { /* Start with f=modulus, g=x, eta=-1. */ - rustsecp256k1zkp_v0_10_0_modinv32_signed30 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 g = *x; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 g = *x; int j, len = 9; int32_t eta = -1; /* eta = -delta; delta is initially 1 */ int32_t cond, fn, gn; @@ -685,15 +685,15 @@ static int rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(const rustsecp256k1zkp_v0 for (count = 0; count < JACOBI32_ITERATIONS; ++count) { /* Compute transition matrix and new eta after 30 posdivsteps. */ - rustsecp256k1zkp_v0_10_0_modinv32_trans2x2 t; - eta = rustsecp256k1zkp_v0_10_0_modinv32_posdivsteps_30_var(eta, f.v[0] | ((uint32_t)f.v[1] << 30), g.v[0] | ((uint32_t)g.v[1] << 30), &t, &jac); + rustsecp256k1zkp_v0_10_1_modinv32_trans2x2 t; + eta = rustsecp256k1zkp_v0_10_1_modinv32_posdivsteps_30_var(eta, f.v[0] | ((uint32_t)f.v[1] << 30), g.v[0] | ((uint32_t)g.v[1] << 30), &t, &jac); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv32_update_fg_30_var(len, &f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv32_update_fg_30_var(len, &f, &g, &t); /* If the bottom limb of f is 1, there is a chance that f=1. */ if (f.v[0] == 1) { cond = 0; @@ -714,10 +714,10 @@ static int rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(const rustsecp256k1zkp_v0 /* If so, reduce length. 
*/ if (cond == 0) --len; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv32_mul_cmp_30(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* The loop failed to converge to f=g after 1500 iterations. Return 0, indicating unknown result. */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modinv64.h b/secp256k1-zkp-sys/depend/secp256k1/src/modinv64.h index 4a91bca6..aae3ab79 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modinv64.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modinv64.h @@ -18,15 +18,15 @@ * Its value is sum(v[i] * 2^(62*i), i=0..4). */ typedef struct { int64_t v[5]; -} rustsecp256k1zkp_v0_10_0_modinv64_signed62; +} rustsecp256k1zkp_v0_10_1_modinv64_signed62; typedef struct { /* The modulus in signed62 notation, must be odd and in [3, 2^256]. */ - rustsecp256k1zkp_v0_10_0_modinv64_signed62 modulus; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 modulus; /* modulus^{-1} mod 2^62 */ uint64_t modulus_inv62; -} rustsecp256k1zkp_v0_10_0_modinv64_modinfo; +} rustsecp256k1zkp_v0_10_1_modinv64_modinfo; /* Replace x with its modular inverse mod modinfo->modulus. x must be in range [0, modulus). * If x is zero, the result will be zero as well. 
If not, the inverse must exist (i.e., the gcd of @@ -34,14 +34,14 @@ typedef struct { * * On output, all of x's limbs will be in [0, 2^62). */ -static void rustsecp256k1zkp_v0_10_0_modinv64_var(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo); +static void rustsecp256k1zkp_v0_10_1_modinv64_var(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo); -/* Same as rustsecp256k1zkp_v0_10_0_modinv64_var, but constant time in x (not in the modulus). */ -static void rustsecp256k1zkp_v0_10_0_modinv64(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo); +/* Same as rustsecp256k1zkp_v0_10_1_modinv64_var, but constant time in x (not in the modulus). */ +static void rustsecp256k1zkp_v0_10_1_modinv64(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo); /* Compute the Jacobi symbol for (x | modinfo->modulus). x must be coprime with modulus (and thus * cannot be 0, as modulus >= 3). All limbs of x must be non-negative. Returns 0 if the result * cannot be computed. 
*/ -static int rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo); +static int rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var(const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo); #endif /* SECP256K1_MODINV64_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modinv64_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modinv64_impl.h index 1e764c3d..5c69f15e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modinv64_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modinv64_impl.h @@ -24,41 +24,41 @@ */ typedef struct { int64_t u, v, q, r; -} rustsecp256k1zkp_v0_10_0_modinv64_trans2x2; +} rustsecp256k1zkp_v0_10_1_modinv64_trans2x2; #ifdef VERIFY /* Helper function to compute the absolute value of an int64_t. * (we don't use abs/labs/llabs as it depends on the int sizes). */ -static int64_t rustsecp256k1zkp_v0_10_0_modinv64_abs(int64_t v) { +static int64_t rustsecp256k1zkp_v0_10_1_modinv64_abs(int64_t v) { VERIFY_CHECK(v > INT64_MIN); if (v < 0) return -v; return v; } -static const rustsecp256k1zkp_v0_10_0_modinv64_signed62 SECP256K1_SIGNED62_ONE = {{1}}; +static const rustsecp256k1zkp_v0_10_1_modinv64_signed62 SECP256K1_SIGNED62_ONE = {{1}}; /* Compute a*factor and put it in r. All but the top limb in r will be in range [0,2^62). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv64_mul_62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *a, int alen, int64_t factor) { +static void rustsecp256k1zkp_v0_10_1_modinv64_mul_62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *a, int alen, int64_t factor) { const uint64_t M62 = UINT64_MAX >> 2; - rustsecp256k1zkp_v0_10_0_int128 c, d; + rustsecp256k1zkp_v0_10_1_int128 c, d; int i; - rustsecp256k1zkp_v0_10_0_i128_from_i64(&c, 0); + rustsecp256k1zkp_v0_10_1_i128_from_i64(&c, 0); for (i = 0; i < 4; ++i) { - if (i < alen) rustsecp256k1zkp_v0_10_0_i128_accum_mul(&c, a->v[i], factor); - r->v[i] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&c) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&c, 62); + if (i < alen) rustsecp256k1zkp_v0_10_1_i128_accum_mul(&c, a->v[i], factor); + r->v[i] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&c) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&c, 62); } - if (4 < alen) rustsecp256k1zkp_v0_10_0_i128_accum_mul(&c, a->v[4], factor); - rustsecp256k1zkp_v0_10_0_i128_from_i64(&d, rustsecp256k1zkp_v0_10_0_i128_to_i64(&c)); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_i128_eq_var(&c, &d)); - r->v[4] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&c); + if (4 < alen) rustsecp256k1zkp_v0_10_1_i128_accum_mul(&c, a->v[4], factor); + rustsecp256k1zkp_v0_10_1_i128_from_i64(&d, rustsecp256k1zkp_v0_10_1_i128_to_i64(&c)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_i128_eq_var(&c, &d)); + r->v[4] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&c); } /* Return -1 for ab*factor. A has alen limbs; b has 5. 
*/ -static int rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *a, int alen, const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *b, int64_t factor) { +static int rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *a, int alen, const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *b, int64_t factor) { int i; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 am, bm; - rustsecp256k1zkp_v0_10_0_modinv64_mul_62(&am, a, alen, 1); /* Normalize all but the top limb of a. */ - rustsecp256k1zkp_v0_10_0_modinv64_mul_62(&bm, b, 5, factor); + rustsecp256k1zkp_v0_10_1_modinv64_signed62 am, bm; + rustsecp256k1zkp_v0_10_1_modinv64_mul_62(&am, a, alen, 1); /* Normalize all but the top limb of a. */ + rustsecp256k1zkp_v0_10_1_modinv64_mul_62(&bm, b, 5, factor); for (i = 0; i < 4; ++i) { /* Verify that all but the top limb of a and b are normalized. */ VERIFY_CHECK(am.v[i] >> 62 == 0); @@ -72,11 +72,11 @@ static int rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(const rustsecp256k1zkp_v } /* Check if the determinant of t is equal to 1 << n. If abs, check if |det t| == 1 << n. 
*/ -static int rustsecp256k1zkp_v0_10_0_modinv64_det_check_pow2(const rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t, unsigned int n, int abs) { - rustsecp256k1zkp_v0_10_0_int128 a; - rustsecp256k1zkp_v0_10_0_i128_det(&a, t->u, t->v, t->q, t->r); - if (rustsecp256k1zkp_v0_10_0_i128_check_pow2(&a, n, 1)) return 1; - if (abs && rustsecp256k1zkp_v0_10_0_i128_check_pow2(&a, n, -1)) return 1; +static int rustsecp256k1zkp_v0_10_1_modinv64_det_check_pow2(const rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t, unsigned int n, int abs) { + rustsecp256k1zkp_v0_10_1_int128 a; + rustsecp256k1zkp_v0_10_1_i128_det(&a, t->u, t->v, t->q, t->r); + if (rustsecp256k1zkp_v0_10_1_i128_check_pow2(&a, n, 1)) return 1; + if (abs && rustsecp256k1zkp_v0_10_1_i128_check_pow2(&a, n, -1)) return 1; return 0; } #endif @@ -85,7 +85,7 @@ static int rustsecp256k1zkp_v0_10_0_modinv64_det_check_pow2(const rustsecp256k1z * to it to bring it to range [0,modulus). If sign < 0, the input will also be negated in the * process. The input must have limbs in range (-2^62,2^62). The output will have limbs in range * [0,2^62). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *r, int64_t sign, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv64_normalize_62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *r, int64_t sign, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo) { const int64_t M62 = (int64_t)(UINT64_MAX >> 2); int64_t r0 = r->v[0], r1 = r->v[1], r2 = r->v[2], r3 = r->v[3], r4 = r->v[4]; volatile int64_t cond_add, cond_negate; @@ -97,8 +97,8 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(rustsecp256k1zkp_v0_1 VERIFY_CHECK(r->v[i] >= -M62); VERIFY_CHECK(r->v[i] <= M62); } - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, -2) > 0); /* r > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 1) < 0); /* r < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, -2) > 0); /* r > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 1) < 0); /* r < modulus */ #endif /* In a first step, add the modulus if the input is negative, and then negate if requested. @@ -149,8 +149,8 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(rustsecp256k1zkp_v0_1 VERIFY_CHECK(r2 >> 62 == 0); VERIFY_CHECK(r3 >> 62 == 0); VERIFY_CHECK(r4 >> 62 == 0); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 0) >= 0); /* r >= 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 1) < 0); /* r < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 0) >= 0); /* r >= 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(r, 5, &modinfo->modulus, 1) < 0); /* r < modulus */ } /* Compute the transition matrix and eta for 59 divsteps (where zeta=-(delta+1/2)). 
@@ -164,7 +164,7 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(rustsecp256k1zkp_v0_1 * * Implements the divsteps_n_matrix function from the explanation. */ -static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_59(int64_t zeta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t) { +static int64_t rustsecp256k1zkp_v0_10_1_modinv64_divsteps_59(int64_t zeta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t) { /* u,v,q,r are the elements of the transformation matrix being built up, * starting with the identity matrix times 8 (because the caller expects * a result scaled by 2^62). Semantically they are signed integers @@ -221,7 +221,7 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_59(int64_t zeta, uint6 * aggregate of 59 of them will have determinant 2^59. Multiplying with the initial * 8*identity (which has determinant 2^6) means the overall outputs has determinant * 2^65. */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_det_check_pow2(t, 65, 0)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_det_check_pow2(t, 65, 0)); return zeta; } @@ -236,8 +236,8 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_59(int64_t zeta, uint6 * * Implements the divsteps_n_matrix_var function from the explanation. */ -static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62_var(int64_t eta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t) { - /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62. */ +static int64_t rustsecp256k1zkp_v0_10_1_modinv64_divsteps_62_var(int64_t eta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t) { + /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_1_modinv64_divsteps_62. 
*/ uint64_t u = 1, v = 0, q = 0, r = 1; uint64_t f = f0, g = g0, m; uint32_t w; @@ -245,7 +245,7 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62_var(int64_t eta, ui for (;;) { /* Use a sentinel bit to count zeros only up to i. */ - zeros = rustsecp256k1zkp_v0_10_0_ctz64_var(g | (UINT64_MAX << i)); + zeros = rustsecp256k1zkp_v0_10_1_ctz64_var(g | (UINT64_MAX << i)); /* Perform zeros divsteps at once; they all just divide g by two. */ g >>= zeros; u <<= zeros; @@ -304,7 +304,7 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62_var(int64_t eta, ui * does not change the gcd of f and g, apart from adding a power-of-2 factor to it (which * will be divided out again). As each divstep's individual matrix has determinant 2, the * aggregate of 62 of them will have determinant 2^62. */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_det_check_pow2(t, 62, 0)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_det_check_pow2(t, 62, 0)); return eta; } @@ -322,8 +322,8 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62_var(int64_t eta, ui * change, but are meaningless. * Return: final eta */ -static int64_t rustsecp256k1zkp_v0_10_0_modinv64_posdivsteps_62_var(int64_t eta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t, int *jacp) { - /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62. */ +static int64_t rustsecp256k1zkp_v0_10_1_modinv64_posdivsteps_62_var(int64_t eta, uint64_t f0, uint64_t g0, rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t, int *jacp) { + /* Transformation matrix; see comments in rustsecp256k1zkp_v0_10_1_modinv64_divsteps_62. */ uint64_t u = 1, v = 0, q = 0, r = 1; uint64_t f = f0, g = g0, m; uint32_t w; @@ -332,7 +332,7 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_posdivsteps_62_var(int64_t eta, for (;;) { /* Use a sentinel bit to count zeros only up to i. 
*/ - zeros = rustsecp256k1zkp_v0_10_0_ctz64_var(g | (UINT64_MAX << i)); + zeros = rustsecp256k1zkp_v0_10_1_ctz64_var(g | (UINT64_MAX << i)); /* Perform zeros divsteps at once; they all just divide g by two. */ g >>= zeros; u <<= zeros; @@ -395,7 +395,7 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_posdivsteps_62_var(int64_t eta, * does not change the gcd of f and g, apart from adding a power-of-2 factor to it (which * will be divided out again). As each divstep's individual matrix has determinant 2 or -2, * the aggregate of 62 of them will have determinant 2^62 or -2^62. */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_det_check_pow2(t, 62, 1)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_det_check_pow2(t, 62, 1)); *jacp = jac; return eta; @@ -408,19 +408,19 @@ static int64_t rustsecp256k1zkp_v0_10_0_modinv64_posdivsteps_62_var(int64_t eta, * * This implements the update_de function from the explanation. */ -static void rustsecp256k1zkp_v0_10_0_modinv64_update_de_62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *d, rustsecp256k1zkp_v0_10_0_modinv64_signed62 *e, const rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo* modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv64_update_de_62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *d, rustsecp256k1zkp_v0_10_1_modinv64_signed62 *e, const rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo* modinfo) { const uint64_t M62 = UINT64_MAX >> 2; const int64_t d0 = d->v[0], d1 = d->v[1], d2 = d->v[2], d3 = d->v[3], d4 = d->v[4]; const int64_t e0 = e->v[0], e1 = e->v[1], e2 = e->v[2], e3 = e->v[3], e4 = e->v[4]; const int64_t u = t->u, v = t->v, q = t->q, r = t->r; int64_t md, me, sd, se; - rustsecp256k1zkp_v0_10_0_int128 cd, ce; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, 1) 
< 0); /* d < modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, 1) < 0); /* e < modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_abs(u) <= (((int64_t)1 << 62) - rustsecp256k1zkp_v0_10_0_modinv64_abs(v))); /* |u|+|v| <= 2^62 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_abs(q) <= (((int64_t)1 << 62) - rustsecp256k1zkp_v0_10_0_modinv64_abs(r))); /* |q|+|r| <= 2^62 */ + rustsecp256k1zkp_v0_10_1_int128 cd, ce; + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, 1) < 0); /* d < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, 1) < 0); /* e < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_abs(u) <= (((int64_t)1 << 62) - rustsecp256k1zkp_v0_10_1_modinv64_abs(v))); /* |u|+|v| <= 2^62 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_abs(q) <= (((int64_t)1 << 62) - rustsecp256k1zkp_v0_10_1_modinv64_abs(r))); /* |q|+|r| <= 2^62 */ /* [md,me] start as zero; plus [u,q] if d is negative; plus [v,r] if e is negative. */ sd = d4 >> 63; @@ -428,120 +428,120 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_update_de_62(rustsecp256k1zkp_v0_1 md = (u & sd) + (v & se); me = (q & sd) + (r & se); /* Begin computing t*[d,e]. 
*/ - rustsecp256k1zkp_v0_10_0_i128_mul(&cd, u, d0); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, v, e0); - rustsecp256k1zkp_v0_10_0_i128_mul(&ce, q, d0); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, r, e0); + rustsecp256k1zkp_v0_10_1_i128_mul(&cd, u, d0); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, v, e0); + rustsecp256k1zkp_v0_10_1_i128_mul(&ce, q, d0); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, r, e0); /* Correct md,me so that t*[d,e]+modulus*[md,me] has 62 zero bottom bits. */ - md -= (modinfo->modulus_inv62 * rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) + md) & M62; - me -= (modinfo->modulus_inv62 * rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) + me) & M62; + md -= (modinfo->modulus_inv62 * rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) + md) & M62; + me -= (modinfo->modulus_inv62 * rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) + me) & M62; /* Update the beginning of computation for t*[d,e]+modulus*[md,me] now md,me are known. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, modinfo->modulus.v[0], md); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, modinfo->modulus.v[0], me); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, modinfo->modulus.v[0], md); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, modinfo->modulus.v[0], me); /* Verify that the low 62 bits of the computation are indeed zero, and then throw them away. */ - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&cd, 62); - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&ce, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&cd, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&ce, 62); /* Compute limb 1 of t*[d,e]+modulus*[md,me], and store it as output limb 0 (= down shift). 
*/ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, u, d1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, v, e1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, q, d1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, r, e1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, u, d1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, v, e1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, q, d1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, r, e1); if (modinfo->modulus.v[1]) { /* Optimize for the case where limb of modulus is zero. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, modinfo->modulus.v[1], md); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, modinfo->modulus.v[1], me); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, modinfo->modulus.v[1], md); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, modinfo->modulus.v[1], me); } - d->v[0] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cd, 62); - e->v[0] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&ce, 62); + d->v[0] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cd, 62); + e->v[0] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&ce, 62); /* Compute limb 2 of t*[d,e]+modulus*[md,me], and store it as output limb 1. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, u, d2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, v, e2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, q, d2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, r, e2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, u, d2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, v, e2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, q, d2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, r, e2); if (modinfo->modulus.v[2]) { /* Optimize for the case where limb of modulus is zero. 
*/ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, modinfo->modulus.v[2], md); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, modinfo->modulus.v[2], me); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, modinfo->modulus.v[2], md); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, modinfo->modulus.v[2], me); } - d->v[1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cd, 62); - e->v[1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&ce, 62); + d->v[1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cd, 62); + e->v[1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&ce, 62); /* Compute limb 3 of t*[d,e]+modulus*[md,me], and store it as output limb 2. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, u, d3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, v, e3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, q, d3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, r, e3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, u, d3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, v, e3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, q, d3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, r, e3); if (modinfo->modulus.v[3]) { /* Optimize for the case where limb of modulus is zero. 
*/ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, modinfo->modulus.v[3], md); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, modinfo->modulus.v[3], me); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, modinfo->modulus.v[3], md); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, modinfo->modulus.v[3], me); } - d->v[2] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cd, 62); - e->v[2] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&ce, 62); + d->v[2] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cd, 62); + e->v[2] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&ce, 62); /* Compute limb 4 of t*[d,e]+modulus*[md,me], and store it as output limb 3. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, u, d4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, v, e4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, q, d4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, r, e4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cd, modinfo->modulus.v[4], md); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&ce, modinfo->modulus.v[4], me); - d->v[3] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cd, 62); - e->v[3] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&ce, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, u, d4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, v, e4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, q, d4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, r, e4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cd, modinfo->modulus.v[4], md); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&ce, modinfo->modulus.v[4], me); + d->v[3] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cd) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cd, 62); + e->v[3] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&ce) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&ce, 62); /* What 
remains is limb 5 of t*[d,e]+modulus*[md,me]; store it as output limb 4. */ - d->v[4] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&cd); - e->v[4] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&ce); + d->v[4] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&cd); + e->v[4] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&ce); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, 1) < 0); /* d < modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, 1) < 0); /* e < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, -2) > 0); /* d > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(d, 5, &modinfo->modulus, 1) < 0); /* d < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, -2) > 0); /* e > -2*modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(e, 5, &modinfo->modulus, 1) < 0); /* e < modulus */ } /* Compute (t/2^62) * [f, g], where t is a transition matrix scaled by 2^62. * * This implements the update_fg function from the explanation. 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *f, rustsecp256k1zkp_v0_10_0_modinv64_signed62 *g, const rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t) { +static void rustsecp256k1zkp_v0_10_1_modinv64_update_fg_62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *f, rustsecp256k1zkp_v0_10_1_modinv64_signed62 *g, const rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t) { const uint64_t M62 = UINT64_MAX >> 2; const int64_t f0 = f->v[0], f1 = f->v[1], f2 = f->v[2], f3 = f->v[3], f4 = f->v[4]; const int64_t g0 = g->v[0], g1 = g->v[1], g2 = g->v[2], g3 = g->v[3], g4 = g->v[4]; const int64_t u = t->u, v = t->v, q = t->q, r = t->r; - rustsecp256k1zkp_v0_10_0_int128 cf, cg; + rustsecp256k1zkp_v0_10_1_int128 cf, cg; /* Start computing t*[f,g]. */ - rustsecp256k1zkp_v0_10_0_i128_mul(&cf, u, f0); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, g0); - rustsecp256k1zkp_v0_10_0_i128_mul(&cg, q, f0); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, g0); + rustsecp256k1zkp_v0_10_1_i128_mul(&cf, u, f0); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, g0); + rustsecp256k1zkp_v0_10_1_i128_mul(&cg, q, f0); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, g0); /* Verify that the bottom 62 bits of the result are zero, and then throw them away. */ - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* Compute limb 1 of t*[f,g], and store it as output limb 0 (= down shift). 
*/ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, u, f1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, g1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, q, f1); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, g1); - f->v[0] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - g->v[0] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, u, f1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, g1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, q, f1); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, g1); + f->v[0] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + g->v[0] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* Compute limb 2 of t*[f,g], and store it as output limb 1. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, u, f2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, g2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, q, f2); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, g2); - f->v[1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - g->v[1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, u, f2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, g2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, q, f2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, g2); + f->v[1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + g->v[1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* Compute limb 3 of t*[f,g], and store it as output limb 2. 
*/ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, u, f3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, g3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, q, f3); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, g3); - f->v[2] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - g->v[2] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, u, f3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, g3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, q, f3); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, g3); + f->v[2] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + g->v[2] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* Compute limb 4 of t*[f,g], and store it as output limb 3. */ - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, u, f4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, g4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, q, f4); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, g4); - f->v[3] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - g->v[3] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, u, f4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, g4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, q, f4); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, g4); + f->v[3] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + g->v[3] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* What remains is limb 5 of t*[f,g]; store it as output limb 4. 
*/ - f->v[4] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&cf); - g->v[4] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&cg); + f->v[4] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&cf); + g->v[4] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&cg); } /* Compute (t/2^62) * [f, g], where t is a transition matrix for 62 divsteps. @@ -550,69 +550,69 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62(rustsecp256k1zkp_v0_1 * * This implements the update_fg function from the explanation. */ -static void rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62_var(int len, rustsecp256k1zkp_v0_10_0_modinv64_signed62 *f, rustsecp256k1zkp_v0_10_0_modinv64_signed62 *g, const rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 *t) { +static void rustsecp256k1zkp_v0_10_1_modinv64_update_fg_62_var(int len, rustsecp256k1zkp_v0_10_1_modinv64_signed62 *f, rustsecp256k1zkp_v0_10_1_modinv64_signed62 *g, const rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 *t) { const uint64_t M62 = UINT64_MAX >> 2; const int64_t u = t->u, v = t->v, q = t->q, r = t->r; int64_t fi, gi; - rustsecp256k1zkp_v0_10_0_int128 cf, cg; + rustsecp256k1zkp_v0_10_1_int128 cf, cg; int i; VERIFY_CHECK(len > 0); /* Start computing t*[f,g]. */ fi = f->v[0]; gi = g->v[0]; - rustsecp256k1zkp_v0_10_0_i128_mul(&cf, u, fi); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, gi); - rustsecp256k1zkp_v0_10_0_i128_mul(&cg, q, fi); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, gi); + rustsecp256k1zkp_v0_10_1_i128_mul(&cf, u, fi); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, gi); + rustsecp256k1zkp_v0_10_1_i128_mul(&cg, q, fi); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, gi); /* Verify that the bottom 62 bits of the result are zero, and then throw them away. 
*/ - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - VERIFY_CHECK((rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62) == 0); rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + VERIFY_CHECK((rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62) == 0); rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); /* Now iteratively compute limb i=1..len of t*[f,g], and store them in output limb i-1 (shifting * down by 62 bits). */ for (i = 1; i < len; ++i) { fi = f->v[i]; gi = g->v[i]; - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, u, fi); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cf, v, gi); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, q, fi); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&cg, r, gi); - f->v[i - 1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cf, 62); - g->v[i - 1] = rustsecp256k1zkp_v0_10_0_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_0_i128_rshift(&cg, 62); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, u, fi); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cf, v, gi); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, q, fi); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&cg, r, gi); + f->v[i - 1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cf) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cf, 62); + g->v[i - 1] = rustsecp256k1zkp_v0_10_1_i128_to_u64(&cg) & M62; rustsecp256k1zkp_v0_10_1_i128_rshift(&cg, 62); } /* What remains is limb (len) of t*[f,g]; store it as output limb (len-1). */ - f->v[len - 1] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&cf); - g->v[len - 1] = rustsecp256k1zkp_v0_10_0_i128_to_i64(&cg); + f->v[len - 1] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&cf); + g->v[len - 1] = rustsecp256k1zkp_v0_10_1_i128_to_i64(&cg); } /* Compute the inverse of x modulo modinfo->modulus, and replace x with it (constant time in x). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv64(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv64(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo) { /* Start with d=0, e=1, f=modulus, g=x, zeta=-1. */ - rustsecp256k1zkp_v0_10_0_modinv64_signed62 d = {{0, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 e = {{1, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 g = *x; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 d = {{0, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 e = {{1, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 g = *x; int i; int64_t zeta = -1; /* zeta = -(delta+1/2); delta starts at 1/2. */ /* Do 10 iterations of 59 divsteps each = 590 divsteps. This suffices for 256-bit inputs. */ for (i = 0; i < 10; ++i) { /* Compute transition matrix and new zeta after 59 divsteps. */ - rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 t; - zeta = rustsecp256k1zkp_v0_10_0_modinv64_divsteps_59(zeta, f.v[0], g.v[0], &t); + rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 t; + zeta = rustsecp256k1zkp_v0_10_1_modinv64_divsteps_59(zeta, f.v[0], g.v[0], &t); /* Update d,e using that transition matrix. */ - rustsecp256k1zkp_v0_10_0_modinv64_update_de_62(&d, &e, &t, modinfo); + rustsecp256k1zkp_v0_10_1_modinv64_update_de_62(&d, &e, &t, modinfo); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62(&f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv64_update_fg_62(&f, &g, &t); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, 5, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* At this point sufficient iterations 
have been performed that g must have reached 0 @@ -620,27 +620,27 @@ static void rustsecp256k1zkp_v0_10_0_modinv64(rustsecp256k1zkp_v0_10_0_modinv64_ * values i.e. +/- 1, and d now contains +/- the modular inverse. */ /* g == 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, 5, &SECP256K1_SIGNED62_ONE, 0) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, 5, &SECP256K1_SIGNED62_ONE, 0) == 0); /* |f| == 1, or (x == 0 and d == 0 and |f|=modulus) */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &SECP256K1_SIGNED62_ONE, -1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &SECP256K1_SIGNED62_ONE, 1) == 0 || - (rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(x, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&d, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && - (rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) == 0))); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &SECP256K1_SIGNED62_ONE, -1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &SECP256K1_SIGNED62_ONE, 1) == 0 || + (rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(x, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&d, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && + (rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, 1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, 5, &modinfo->modulus, -1) == 0))); /* Optionally negate d, normalize to [0,modulus), and return it. */ - rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(&d, f.v[4], modinfo); + rustsecp256k1zkp_v0_10_1_modinv64_normalize_62(&d, f.v[4], modinfo); *x = d; } /* Compute the inverse of x modulo modinfo->modulus, and replace x with it (variable time). 
*/ -static void rustsecp256k1zkp_v0_10_0_modinv64_var(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo) { +static void rustsecp256k1zkp_v0_10_1_modinv64_var(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo) { /* Start with d=0, e=1, f=modulus, g=x, eta=-1. */ - rustsecp256k1zkp_v0_10_0_modinv64_signed62 d = {{0, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 e = {{1, 0, 0, 0, 0}}; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 g = *x; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 d = {{0, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 e = {{1, 0, 0, 0, 0}}; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 g = *x; #ifdef VERIFY int i = 0; #endif @@ -651,17 +651,17 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_var(rustsecp256k1zkp_v0_10_0_modin /* Do iterations of 62 divsteps each until g=0. */ while (1) { /* Compute transition matrix and new eta after 62 divsteps. */ - rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 t; - eta = rustsecp256k1zkp_v0_10_0_modinv64_divsteps_62_var(eta, f.v[0], g.v[0], &t); + rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 t; + eta = rustsecp256k1zkp_v0_10_1_modinv64_divsteps_62_var(eta, f.v[0], g.v[0], &t); /* Update d,e using that transition matrix. */ - rustsecp256k1zkp_v0_10_0_modinv64_update_de_62(&d, &e, &t, modinfo); + rustsecp256k1zkp_v0_10_1_modinv64_update_de_62(&d, &e, &t, modinfo); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62_var(len, &f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv64_update_fg_62_var(len, &f, &g, &t); /* If the bottom limb of g is zero, there is a chance that g=0. 
*/ if (g.v[0] == 0) { cond = 0; @@ -687,27 +687,27 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_var(rustsecp256k1zkp_v0_10_0_modin } VERIFY_CHECK(++i < 12); /* We should never need more than 12*62 = 744 divsteps */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) > 0); /* f > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, -1) > 0); /* g > -modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* At this point g is 0 and (if g was not originally 0) f must now equal +/- GCD of * the initial f, g values i.e. +/- 1, and d now contains +/- the modular inverse. 
*/ /* g == 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &SECP256K1_SIGNED62_ONE, 0) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &SECP256K1_SIGNED62_ONE, 0) == 0); /* |f| == 1, or (x == 0 and d == 0 and |f|=modulus) */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &SECP256K1_SIGNED62_ONE, -1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &SECP256K1_SIGNED62_ONE, 1) == 0 || - (rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(x, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&d, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && - (rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) == 0 || - rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) == 0))); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &SECP256K1_SIGNED62_ONE, -1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &SECP256K1_SIGNED62_ONE, 1) == 0 || + (rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(x, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&d, 5, &SECP256K1_SIGNED62_ONE, 0) == 0 && + (rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) == 0 || + rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, -1) == 0))); /* Optionally negate d, normalize to [0,modulus), and return it. */ - rustsecp256k1zkp_v0_10_0_modinv64_normalize_62(&d, f.v[len - 1], modinfo); + rustsecp256k1zkp_v0_10_1_modinv64_normalize_62(&d, f.v[len - 1], modinfo); *x = d; } @@ -720,10 +720,10 @@ static void rustsecp256k1zkp_v0_10_0_modinv64_var(rustsecp256k1zkp_v0_10_0_modin #endif /* Compute the Jacobi symbol of x modulo modinfo->modulus (variable time). gcd(x,modulus) must be 1. 
*/ -static int rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_0_modinv64_modinfo *modinfo) { +static int rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var(const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *x, const rustsecp256k1zkp_v0_10_1_modinv64_modinfo *modinfo) { /* Start with f=modulus, g=x, eta=-1. */ - rustsecp256k1zkp_v0_10_0_modinv64_signed62 f = modinfo->modulus; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 g = *x; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 f = modinfo->modulus; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 g = *x; int j, len = 5; int64_t eta = -1; /* eta = -delta; delta is initially 1 */ int64_t cond, fn, gn; @@ -740,15 +740,15 @@ static int rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(const rustsecp256k1zkp_v0 for (count = 0; count < JACOBI64_ITERATIONS; ++count) { /* Compute transition matrix and new eta after 62 posdivsteps. */ - rustsecp256k1zkp_v0_10_0_modinv64_trans2x2 t; - eta = rustsecp256k1zkp_v0_10_0_modinv64_posdivsteps_62_var(eta, f.v[0] | ((uint64_t)f.v[1] << 62), g.v[0] | ((uint64_t)g.v[1] << 62), &t, &jac); + rustsecp256k1zkp_v0_10_1_modinv64_trans2x2 t; + eta = rustsecp256k1zkp_v0_10_1_modinv64_posdivsteps_62_var(eta, f.v[0] | ((uint64_t)f.v[1] << 62), g.v[0] | ((uint64_t)g.v[1] << 62), &t, &jac); /* Update f,g using that transition matrix. 
*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ - rustsecp256k1zkp_v0_10_0_modinv64_update_fg_62_var(len, &f, &g, &t); + rustsecp256k1zkp_v0_10_1_modinv64_update_fg_62_var(len, &f, &g, &t); /* If the bottom limb of f is 1, there is a chance that f=1. */ if (f.v[0] == 1) { cond = 0; @@ -769,10 +769,10 @@ static int rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(const rustsecp256k1zkp_v0 /* If so, reduce length. 
*/ if (cond == 0) --len; - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 0) > 0); /* f > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&f, len, &modinfo->modulus, 1) <= 0); /* f <= modulus */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 0) > 0); /* g > 0 */ + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_modinv64_mul_cmp_62(&g, len, &modinfo->modulus, 1) < 0); /* g < modulus */ } /* The loop failed to converge to f=g after 1550 iterations. Return 0, indicating unknown result. 
*/ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/Makefile.am.include index 1aa81488..b4cd8141 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_bppp.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_bppp.h noinst_HEADERS += src/modules/bppp/bppp_util.h noinst_HEADERS += src/modules/bppp/main_impl.h noinst_HEADERS += src/modules/bppp/bppp_transcript_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_norm_product_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_norm_product_impl.h index 3cceebed..cee99820 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_norm_product_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_norm_product_impl.h @@ -21,21 +21,21 @@ * with elements starting from offset a and offset b * skipping elements according to specified step. 
* Returns: Sum_{i=0..len-1}(a[offset_a + i*step] * b[offset_b + i*step]) */ -static int rustsecp256k1zkp_v0_10_0_scalar_inner_product( - rustsecp256k1zkp_v0_10_0_scalar* res, - const rustsecp256k1zkp_v0_10_0_scalar* a_vec, +static int rustsecp256k1zkp_v0_10_1_scalar_inner_product( + rustsecp256k1zkp_v0_10_1_scalar* res, + const rustsecp256k1zkp_v0_10_1_scalar* a_vec, const size_t a_offset, - const rustsecp256k1zkp_v0_10_0_scalar* b_vec, + const rustsecp256k1zkp_v0_10_1_scalar* b_vec, const size_t b_offset, const size_t step, const size_t len ) { size_t i; - rustsecp256k1zkp_v0_10_0_scalar_set_int(res, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(res, 0); for (i = 0; i < len; i++) { - rustsecp256k1zkp_v0_10_0_scalar term; - rustsecp256k1zkp_v0_10_0_scalar_mul(&term, &a_vec[a_offset + step*i], &b_vec[b_offset + step*i]); - rustsecp256k1zkp_v0_10_0_scalar_add(res, res, &term); + rustsecp256k1zkp_v0_10_1_scalar term; + rustsecp256k1zkp_v0_10_1_scalar_mul(&term, &a_vec[a_offset + step*i], &b_vec[b_offset + step*i]); + rustsecp256k1zkp_v0_10_1_scalar_add(res, res, &term); } return 1; } @@ -44,50 +44,50 @@ static int rustsecp256k1zkp_v0_10_0_scalar_inner_product( * for elements starting from offset a and offset b respectively with the * given step. 
* Returns: Sum_{i=0..len-1}(a[offset_a + step*i] * b[offset_b2 + step*i]*mu^(i+1)) */ -static int rustsecp256k1zkp_v0_10_0_weighted_scalar_inner_product( - rustsecp256k1zkp_v0_10_0_scalar* res, - const rustsecp256k1zkp_v0_10_0_scalar* a_vec, +static int rustsecp256k1zkp_v0_10_1_weighted_scalar_inner_product( + rustsecp256k1zkp_v0_10_1_scalar* res, + const rustsecp256k1zkp_v0_10_1_scalar* a_vec, const size_t a_offset, - const rustsecp256k1zkp_v0_10_0_scalar* b_vec, + const rustsecp256k1zkp_v0_10_1_scalar* b_vec, const size_t b_offset, const size_t step, const size_t len, - const rustsecp256k1zkp_v0_10_0_scalar* mu + const rustsecp256k1zkp_v0_10_1_scalar* mu ) { - rustsecp256k1zkp_v0_10_0_scalar mu_pow; + rustsecp256k1zkp_v0_10_1_scalar mu_pow; size_t i; - rustsecp256k1zkp_v0_10_0_scalar_set_int(res, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(res, 0); mu_pow = *mu; for (i = 0; i < len; i++) { - rustsecp256k1zkp_v0_10_0_scalar term; - rustsecp256k1zkp_v0_10_0_scalar_mul(&term, &a_vec[a_offset + step*i], &b_vec[b_offset + step*i]); - rustsecp256k1zkp_v0_10_0_scalar_mul(&term, &term, &mu_pow); - rustsecp256k1zkp_v0_10_0_scalar_mul(&mu_pow, &mu_pow, mu); - rustsecp256k1zkp_v0_10_0_scalar_add(res, res, &term); + rustsecp256k1zkp_v0_10_1_scalar term; + rustsecp256k1zkp_v0_10_1_scalar_mul(&term, &a_vec[a_offset + step*i], &b_vec[b_offset + step*i]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&term, &term, &mu_pow); + rustsecp256k1zkp_v0_10_1_scalar_mul(&mu_pow, &mu_pow, mu); + rustsecp256k1zkp_v0_10_1_scalar_add(res, res, &term); } return 1; } /* Compute the powers of rho as rho, rho^2, rho^4 ... 
rho^(2^(n-1)) */ -static void rustsecp256k1zkp_v0_10_0_bppp_powers_of_rho(rustsecp256k1zkp_v0_10_0_scalar *powers, const rustsecp256k1zkp_v0_10_0_scalar *rho, size_t n) { +static void rustsecp256k1zkp_v0_10_1_bppp_powers_of_rho(rustsecp256k1zkp_v0_10_1_scalar *powers, const rustsecp256k1zkp_v0_10_1_scalar *rho, size_t n) { size_t i; if (n == 0) { return; } powers[0] = *rho; for (i = 1; i < n; i++) { - rustsecp256k1zkp_v0_10_0_scalar_sqr(&powers[i], &powers[i - 1]); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&powers[i], &powers[i - 1]); } } typedef struct ecmult_bp_commit_cb_data { - const rustsecp256k1zkp_v0_10_0_scalar *n; - const rustsecp256k1zkp_v0_10_0_ge *g; - const rustsecp256k1zkp_v0_10_0_scalar *l; + const rustsecp256k1zkp_v0_10_1_scalar *n; + const rustsecp256k1zkp_v0_10_1_ge *g; + const rustsecp256k1zkp_v0_10_1_scalar *l; size_t g_len; } ecmult_bp_commit_cb_data; -static int ecmult_bp_commit_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_bp_commit_cb(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ecmult_bp_commit_cb_data *data = (ecmult_bp_commit_cb_data*) cbdata; *pt = data->g[idx]; if (idx < data->g_len) { @@ -102,20 +102,20 @@ static int ecmult_bp_commit_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k v = |n_vec*n_vec|_mu + . |w|_mu denotes mu-weighted norm of w and denotes inner product of l and r. 
*/ -static int rustsecp256k1zkp_v0_10_0_bppp_commit( - const rustsecp256k1zkp_v0_10_0_context* ctx, - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, - rustsecp256k1zkp_v0_10_0_ge* commit, - const rustsecp256k1zkp_v0_10_0_bppp_generators* g_vec, - const rustsecp256k1zkp_v0_10_0_scalar* n_vec, +static int rustsecp256k1zkp_v0_10_1_bppp_commit( + const rustsecp256k1zkp_v0_10_1_context* ctx, + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, + rustsecp256k1zkp_v0_10_1_ge* commit, + const rustsecp256k1zkp_v0_10_1_bppp_generators* g_vec, + const rustsecp256k1zkp_v0_10_1_scalar* n_vec, size_t n_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* l_vec, + const rustsecp256k1zkp_v0_10_1_scalar* l_vec, size_t l_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* mu + const rustsecp256k1zkp_v0_10_1_scalar* mu ) { - rustsecp256k1zkp_v0_10_0_scalar v, l_c; + rustsecp256k1zkp_v0_10_1_scalar v, l_c; /* First n_vec_len generators are Gs, rest are Hs*/ VERIFY_CHECK(g_vec->n == (n_vec_len + l_vec_len)); #ifdef VERIFY @@ -126,48 +126,48 @@ static int rustsecp256k1zkp_v0_10_0_bppp_commit( /* It is possible to extend to support n_vec and c_vec to not be power of two. 
For the initial iterations of the code, we stick to powers of two for simplicity.*/ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(n_vec_len)); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(c_vec_len)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(n_vec_len)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(c_vec_len)); /* Compute v = n_vec*n_vec*mu + l_vec*c_vec */ - rustsecp256k1zkp_v0_10_0_weighted_scalar_inner_product(&v, n_vec, 0 /*a offset */, n_vec, 0 /*b offset*/, 1 /*step*/, n_vec_len, mu); - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&l_c, l_vec, 0 /*a offset */, c_vec, 0 /*b offset*/, 1 /*step*/, l_vec_len); - rustsecp256k1zkp_v0_10_0_scalar_add(&v, &v, &l_c); + rustsecp256k1zkp_v0_10_1_weighted_scalar_inner_product(&v, n_vec, 0 /*a offset */, n_vec, 0 /*b offset*/, 1 /*step*/, n_vec_len, mu); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&l_c, l_vec, 0 /*a offset */, c_vec, 0 /*b offset*/, 1 /*step*/, l_vec_len); + rustsecp256k1zkp_v0_10_1_scalar_add(&v, &v, &l_c); { ecmult_bp_commit_cb_data data; - rustsecp256k1zkp_v0_10_0_gej commitj; + rustsecp256k1zkp_v0_10_1_gej commitj; data.g = g_vec->gens; data.n = n_vec; data.l = l_vec; data.g_len = n_vec_len; - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &commitj, &v, ecmult_bp_commit_cb, (void*) &data, n_vec_len + l_vec_len)) { + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &commitj, &v, ecmult_bp_commit_cb, (void*) &data, n_vec_len + l_vec_len)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(commit, &commitj); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(commit, &commitj); } return 1; } typedef struct ecmult_x_cb_data { - const rustsecp256k1zkp_v0_10_0_scalar *n; - const rustsecp256k1zkp_v0_10_0_ge *g; - const rustsecp256k1zkp_v0_10_0_scalar *l; - const rustsecp256k1zkp_v0_10_0_scalar *rho; - const rustsecp256k1zkp_v0_10_0_scalar *rho_inv; + const rustsecp256k1zkp_v0_10_1_scalar *n; + const 
rustsecp256k1zkp_v0_10_1_ge *g; + const rustsecp256k1zkp_v0_10_1_scalar *l; + const rustsecp256k1zkp_v0_10_1_scalar *rho; + const rustsecp256k1zkp_v0_10_1_scalar *rho_inv; size_t G_GENS_LEN; /* Figure out initialization syntax so that this can also be const */ size_t n_len; } ecmult_x_cb_data; -static int ecmult_x_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_x_cb(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ecmult_x_cb_data *data = (ecmult_x_cb_data*) cbdata; if (idx < data->n_len) { if (idx % 2 == 0) { - rustsecp256k1zkp_v0_10_0_scalar_mul(sc, &data->n[idx + 1], data->rho); + rustsecp256k1zkp_v0_10_1_scalar_mul(sc, &data->n[idx + 1], data->rho); *pt = data->g[idx]; } else { - rustsecp256k1zkp_v0_10_0_scalar_mul(sc, &data->n[idx - 1], data->rho_inv); + rustsecp256k1zkp_v0_10_1_scalar_mul(sc, &data->n[idx - 1], data->rho_inv); *pt = data->g[idx]; } } else { @@ -184,14 +184,14 @@ static int ecmult_x_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_ } typedef struct ecmult_r_cb_data { - const rustsecp256k1zkp_v0_10_0_scalar *n1; - const rustsecp256k1zkp_v0_10_0_ge *g1; - const rustsecp256k1zkp_v0_10_0_scalar *l1; + const rustsecp256k1zkp_v0_10_1_scalar *n1; + const rustsecp256k1zkp_v0_10_1_ge *g1; + const rustsecp256k1zkp_v0_10_1_scalar *l1; size_t G_GENS_LEN; size_t n_len; } ecmult_r_cb_data; -static int ecmult_r_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_r_cb(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ecmult_r_cb_data *data = (ecmult_r_cb_data*) cbdata; if (idx < data->n_len) { *sc = data->n1[2*idx + 1]; @@ -220,23 +220,23 @@ static int ecmult_r_cb(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_ * some parent protocol. 
To use this norm protocol in a standalone manner, the user * should add the commitment, generators and initial public data to the transcript hash. */ -static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( - const rustsecp256k1zkp_v0_10_0_context* ctx, - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, +static int rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove( + const rustsecp256k1zkp_v0_10_1_context* ctx, + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, unsigned char* proof, size_t *proof_len, - rustsecp256k1zkp_v0_10_0_sha256* transcript, /* Transcript hash of the parent protocol */ - const rustsecp256k1zkp_v0_10_0_scalar* rho, - rustsecp256k1zkp_v0_10_0_ge* g_vec, + rustsecp256k1zkp_v0_10_1_sha256* transcript, /* Transcript hash of the parent protocol */ + const rustsecp256k1zkp_v0_10_1_scalar* rho, + rustsecp256k1zkp_v0_10_1_ge* g_vec, size_t g_vec_len, - rustsecp256k1zkp_v0_10_0_scalar* n_vec, + rustsecp256k1zkp_v0_10_1_scalar* n_vec, size_t n_vec_len, - rustsecp256k1zkp_v0_10_0_scalar* l_vec, + rustsecp256k1zkp_v0_10_1_scalar* l_vec, size_t l_vec_len, - rustsecp256k1zkp_v0_10_0_scalar* c_vec, + rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len ) { - rustsecp256k1zkp_v0_10_0_scalar mu_f, rho_f = *rho; + rustsecp256k1zkp_v0_10_1_scalar mu_f, rho_f = *rho; size_t proof_idx = 0; ecmult_x_cb_data x_cb_data; ecmult_r_cb_data r_cb_data; @@ -246,14 +246,14 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( #ifdef VERIFY { size_t log_g_len_ver, log_h_len_ver, num_rounds_ver; - VERIFY_CHECK(g_len > 0 && h_len > 0); /* Precondition for rustsecp256k1zkp_v0_10_0_bppp_log2() */ - log_g_len_ver = rustsecp256k1zkp_v0_10_0_bppp_log2(g_len); - log_h_len_ver = rustsecp256k1zkp_v0_10_0_bppp_log2(h_len); + VERIFY_CHECK(g_len > 0 && h_len > 0); /* Precondition for rustsecp256k1zkp_v0_10_1_bppp_log2() */ + log_g_len_ver = rustsecp256k1zkp_v0_10_1_bppp_log2(g_len); + log_h_len_ver = 
rustsecp256k1zkp_v0_10_1_bppp_log2(h_len); num_rounds_ver = log_g_len_ver > log_h_len_ver ? log_g_len_ver : log_h_len_ver; /* Check proof sizes.*/ VERIFY_CHECK(*proof_len >= 65 * num_rounds_ver + 64); VERIFY_CHECK(g_vec_len == (n_vec_len + l_vec_len) && l_vec_len == c_vec_len); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(n_vec_len) && rustsecp256k1zkp_v0_10_0_is_power_of_two(c_vec_len)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(n_vec_len) && rustsecp256k1zkp_v0_10_1_is_power_of_two(c_vec_len)); } #else (void)g_vec_len; @@ -269,87 +269,87 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( r_cb_data.g1 = g_vec; r_cb_data.l1 = l_vec; r_cb_data.G_GENS_LEN = G_GENS_LEN; - rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu_f, &rho_f); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu_f, &rho_f); while (g_len > 1 || h_len > 1) { size_t i, num_points; - rustsecp256k1zkp_v0_10_0_scalar mu_sq, rho_inv, c0_l1, c1_l0, x_v, c1_l1, r_v; - rustsecp256k1zkp_v0_10_0_gej rj, xj; - rustsecp256k1zkp_v0_10_0_ge r_ge, x_ge; - rustsecp256k1zkp_v0_10_0_scalar gamma; + rustsecp256k1zkp_v0_10_1_scalar mu_sq, rho_inv, c0_l1, c1_l0, x_v, c1_l1, r_v; + rustsecp256k1zkp_v0_10_1_gej rj, xj; + rustsecp256k1zkp_v0_10_1_ge r_ge, x_ge; + rustsecp256k1zkp_v0_10_1_scalar gamma; - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&rho_inv, &rho_f); - rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu_sq, &mu_f); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&rho_inv, &rho_f); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu_sq, &mu_f); /* Compute the X commitment X = WIP(rho_inv*n0,n1)_mu2 * g + r + */ - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&c0_l1, c_vec, 0, l_vec, 1, 2, h_len/2); - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&c1_l0, c_vec, 1, l_vec, 0, 2, h_len/2); - rustsecp256k1zkp_v0_10_0_weighted_scalar_inner_product(&x_v, n_vec, 0, n_vec, 1, 2, g_len/2, &mu_sq); - rustsecp256k1zkp_v0_10_0_scalar_mul(&x_v, &x_v, &rho_inv); - rustsecp256k1zkp_v0_10_0_scalar_add(&x_v, &x_v, &x_v); 
- rustsecp256k1zkp_v0_10_0_scalar_add(&x_v, &x_v, &c0_l1); - rustsecp256k1zkp_v0_10_0_scalar_add(&x_v, &x_v, &c1_l0); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&c0_l1, c_vec, 0, l_vec, 1, 2, h_len/2); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&c1_l0, c_vec, 1, l_vec, 0, 2, h_len/2); + rustsecp256k1zkp_v0_10_1_weighted_scalar_inner_product(&x_v, n_vec, 0, n_vec, 1, 2, g_len/2, &mu_sq); + rustsecp256k1zkp_v0_10_1_scalar_mul(&x_v, &x_v, &rho_inv); + rustsecp256k1zkp_v0_10_1_scalar_add(&x_v, &x_v, &x_v); + rustsecp256k1zkp_v0_10_1_scalar_add(&x_v, &x_v, &c0_l1); + rustsecp256k1zkp_v0_10_1_scalar_add(&x_v, &x_v, &c1_l0); x_cb_data.rho = &rho_f; x_cb_data.rho_inv = &rho_inv; x_cb_data.n_len = g_len >= 2 ? g_len : 0; num_points = x_cb_data.n_len + (h_len >= 2 ? h_len : 0); - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &xj, &x_v, ecmult_x_cb, (void*)&x_cb_data, num_points)) { + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &xj, &x_v, ecmult_x_cb, (void*)&x_cb_data, num_points)) { return 0; } - rustsecp256k1zkp_v0_10_0_weighted_scalar_inner_product(&r_v, n_vec, 1, n_vec, 1, 2, g_len/2, &mu_sq); - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&c1_l1, c_vec, 1, l_vec, 1, 2, h_len/2); - rustsecp256k1zkp_v0_10_0_scalar_add(&r_v, &r_v, &c1_l1); + rustsecp256k1zkp_v0_10_1_weighted_scalar_inner_product(&r_v, n_vec, 1, n_vec, 1, 2, g_len/2, &mu_sq); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&c1_l1, c_vec, 1, l_vec, 1, 2, h_len/2); + rustsecp256k1zkp_v0_10_1_scalar_add(&r_v, &r_v, &c1_l1); r_cb_data.n_len = g_len/2; num_points = r_cb_data.n_len + h_len/2; - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &rj, &r_v, ecmult_r_cb, (void*)&r_cb_data, num_points)) { + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &rj, &r_v, ecmult_r_cb, (void*)&r_cb_data, num_points)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&x_ge, &xj); - 
rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&r_ge, &rj); - rustsecp256k1zkp_v0_10_0_bppp_serialize_points(&proof[proof_idx], &x_ge, &r_ge); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&x_ge, &xj); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&r_ge, &rj); + rustsecp256k1zkp_v0_10_1_bppp_serialize_points(&proof[proof_idx], &x_ge, &r_ge); proof_idx += 65; /* Obtain challenge gamma for the the next round */ - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, &proof[proof_idx - 65], 65); - rustsecp256k1zkp_v0_10_0_bppp_challenge_scalar(&gamma, transcript, 0); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, &proof[proof_idx - 65], 65); + rustsecp256k1zkp_v0_10_1_bppp_challenge_scalar(&gamma, transcript, 0); if (g_len > 1) { for (i = 0; i < g_len; i = i + 2) { - rustsecp256k1zkp_v0_10_0_scalar nl, nr; - rustsecp256k1zkp_v0_10_0_gej gl, gr; - rustsecp256k1zkp_v0_10_0_scalar_mul(&nl, &n_vec[i], &rho_inv); - rustsecp256k1zkp_v0_10_0_scalar_mul(&nr, &n_vec[i + 1], &gamma); - rustsecp256k1zkp_v0_10_0_scalar_add(&n_vec[i/2], &nl, &nr); - - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gl, &g_vec[i]); - rustsecp256k1zkp_v0_10_0_ecmult(&gl, &gl, &rho_f, NULL); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gr, &g_vec[i + 1]); - rustsecp256k1zkp_v0_10_0_ecmult(&gr, &gr, &gamma, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(&gl, &gl, &gr, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&g_vec[i/2], &gl); + rustsecp256k1zkp_v0_10_1_scalar nl, nr; + rustsecp256k1zkp_v0_10_1_gej gl, gr; + rustsecp256k1zkp_v0_10_1_scalar_mul(&nl, &n_vec[i], &rho_inv); + rustsecp256k1zkp_v0_10_1_scalar_mul(&nr, &n_vec[i + 1], &gamma); + rustsecp256k1zkp_v0_10_1_scalar_add(&n_vec[i/2], &nl, &nr); + + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gl, &g_vec[i]); + rustsecp256k1zkp_v0_10_1_ecmult(&gl, &gl, &rho_f, NULL); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gr, &g_vec[i + 1]); + rustsecp256k1zkp_v0_10_1_ecmult(&gr, &gr, &gamma, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&gl, &gl, &gr, NULL); + 
rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&g_vec[i/2], &gl); } } if (h_len > 1) { for (i = 0; i < h_len; i = i + 2) { - rustsecp256k1zkp_v0_10_0_scalar temp1; - rustsecp256k1zkp_v0_10_0_gej grj; - rustsecp256k1zkp_v0_10_0_scalar_mul(&temp1, &c_vec[i + 1], &gamma); - rustsecp256k1zkp_v0_10_0_scalar_add(&c_vec[i/2], &c_vec[i], &temp1); - - rustsecp256k1zkp_v0_10_0_scalar_mul(&temp1, &l_vec[i + 1], &gamma); - rustsecp256k1zkp_v0_10_0_scalar_add(&l_vec[i/2], &l_vec[i], &temp1); - - rustsecp256k1zkp_v0_10_0_gej_set_ge(&grj, &g_vec[G_GENS_LEN + i + 1]); - rustsecp256k1zkp_v0_10_0_ecmult(&grj, &grj, &gamma, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&grj, &grj, &g_vec[G_GENS_LEN + i], NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&g_vec[G_GENS_LEN + i/2], &grj); + rustsecp256k1zkp_v0_10_1_scalar temp1; + rustsecp256k1zkp_v0_10_1_gej grj; + rustsecp256k1zkp_v0_10_1_scalar_mul(&temp1, &c_vec[i + 1], &gamma); + rustsecp256k1zkp_v0_10_1_scalar_add(&c_vec[i/2], &c_vec[i], &temp1); + + rustsecp256k1zkp_v0_10_1_scalar_mul(&temp1, &l_vec[i + 1], &gamma); + rustsecp256k1zkp_v0_10_1_scalar_add(&l_vec[i/2], &l_vec[i], &temp1); + + rustsecp256k1zkp_v0_10_1_gej_set_ge(&grj, &g_vec[G_GENS_LEN + i + 1]); + rustsecp256k1zkp_v0_10_1_ecmult(&grj, &grj, &gamma, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&grj, &grj, &g_vec[G_GENS_LEN + i], NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&g_vec[G_GENS_LEN + i/2], &grj); } } g_len = g_len / 2; @@ -358,8 +358,8 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( mu_f = mu_sq; } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&proof[proof_idx], &n_vec[0]); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&proof[proof_idx + 32], &l_vec[0]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&proof[proof_idx], &n_vec[0]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&proof[proof_idx + 32], &l_vec[0]); proof_idx += 64; *proof_len = proof_idx; return 1; @@ -367,33 +367,33 @@ static int 
rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( typedef struct ec_mult_verify_cb_data1 { const unsigned char *proof; - const rustsecp256k1zkp_v0_10_0_ge *commit; - const rustsecp256k1zkp_v0_10_0_scalar *gammas; + const rustsecp256k1zkp_v0_10_1_ge *commit; + const rustsecp256k1zkp_v0_10_1_scalar *gammas; } ec_mult_verify_cb_data1; -static int ec_mult_verify_cb1(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ec_mult_verify_cb1(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ec_mult_verify_cb_data1 *data = (ec_mult_verify_cb_data1*) cbdata; if (idx == 0) { *pt = *data->commit; - rustsecp256k1zkp_v0_10_0_scalar_set_int(sc, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(sc, 1); return 1; } idx -= 1; if (idx % 2 == 0) { idx /= 2; *sc = data->gammas[idx]; - if (!rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(pt, &data->proof[65*idx], 0)) { + if (!rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(pt, &data->proof[65*idx], 0)) { return 0; } } else { - rustsecp256k1zkp_v0_10_0_scalar neg_one; + rustsecp256k1zkp_v0_10_1_scalar neg_one; idx /= 2; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&neg_one, 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&neg_one, &neg_one); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&neg_one, 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&neg_one, &neg_one); *sc = data->gammas[idx]; - rustsecp256k1zkp_v0_10_0_scalar_sqr(sc, sc); - rustsecp256k1zkp_v0_10_0_scalar_add(sc, sc, &neg_one); - if (!rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(pt, &data->proof[65*idx], 1)) { + rustsecp256k1zkp_v0_10_1_scalar_sqr(sc, sc); + rustsecp256k1zkp_v0_10_1_scalar_add(sc, sc, &neg_one); + if (!rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(pt, &data->proof[65*idx], 1)) { return 0; } } @@ -401,13 +401,13 @@ static int ec_mult_verify_cb1(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1 } typedef struct ec_mult_verify_cb_data2 { - const 
rustsecp256k1zkp_v0_10_0_scalar *s_g; - const rustsecp256k1zkp_v0_10_0_scalar *s_h; - const rustsecp256k1zkp_v0_10_0_ge *g_vec; + const rustsecp256k1zkp_v0_10_1_scalar *s_g; + const rustsecp256k1zkp_v0_10_1_scalar *s_h; + const rustsecp256k1zkp_v0_10_1_ge *g_vec; size_t g_vec_len; } ec_mult_verify_cb_data2; -static int ec_mult_verify_cb2(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ec_mult_verify_cb2(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ec_mult_verify_cb_data2 *data = (ec_mult_verify_cb_data2*) cbdata; if (idx < data->g_vec_len) { *sc = data->s_g[idx]; @@ -421,22 +421,22 @@ static int ec_mult_verify_cb2(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1 /* Verify the proof. This function modifies the generators, c_vec and the challenge r. The caller should make sure to back them up if they need to be reused. */ -static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify( - const rustsecp256k1zkp_v0_10_0_context* ctx, - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, +static int rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_verify( + const rustsecp256k1zkp_v0_10_1_context* ctx, + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, const unsigned char* proof, size_t proof_len, - rustsecp256k1zkp_v0_10_0_sha256* transcript, - const rustsecp256k1zkp_v0_10_0_scalar* rho, - const rustsecp256k1zkp_v0_10_0_bppp_generators* g_vec, + rustsecp256k1zkp_v0_10_1_sha256* transcript, + const rustsecp256k1zkp_v0_10_1_scalar* rho, + const rustsecp256k1zkp_v0_10_1_bppp_generators* g_vec, size_t g_len, - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len, - const rustsecp256k1zkp_v0_10_0_ge* commit + const rustsecp256k1zkp_v0_10_1_ge* commit ) { - rustsecp256k1zkp_v0_10_0_scalar rho_f, mu_f, v, n, l, rho_inv, h_c; - rustsecp256k1zkp_v0_10_0_scalar *gammas, *s_g, *s_h, *rho_inv_pows; 
- rustsecp256k1zkp_v0_10_0_gej res1, res2; + rustsecp256k1zkp_v0_10_1_scalar rho_f, mu_f, v, n, l, rho_inv, h_c; + rustsecp256k1zkp_v0_10_1_scalar *gammas, *s_g, *s_h, *rho_inv_pows; + rustsecp256k1zkp_v0_10_1_gej res1, res2; size_t i = 0, scratch_checkpoint; int overflow; size_t log_g_len, log_h_len; @@ -446,79 +446,79 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify( if (g_len == 0 || c_vec_len == 0) { return 0; } - log_g_len = rustsecp256k1zkp_v0_10_0_bppp_log2(g_len); - log_h_len = rustsecp256k1zkp_v0_10_0_bppp_log2(c_vec_len); + log_g_len = rustsecp256k1zkp_v0_10_1_bppp_log2(g_len); + log_h_len = rustsecp256k1zkp_v0_10_1_bppp_log2(c_vec_len); n_rounds = log_g_len > log_h_len ? log_g_len : log_h_len; if (g_vec->n != (h_len + g_len) || (proof_len != 65 * n_rounds + 64)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_is_power_of_two(g_len) || !rustsecp256k1zkp_v0_10_0_is_power_of_two(h_len)) { + if (!rustsecp256k1zkp_v0_10_1_is_power_of_two(g_len) || !rustsecp256k1zkp_v0_10_1_is_power_of_two(h_len)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&n, &proof[n_rounds*65], &overflow); /* n */ + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&n, &proof[n_rounds*65], &overflow); /* n */ if (overflow) return 0; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&l, &proof[n_rounds*65 + 32], &overflow); /* l */ + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&l, &proof[n_rounds*65 + 32], &overflow); /* l */ if (overflow) return 0; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(rho)) return 0; + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(rho)) return 0; /* Collect the gammas in a new vector */ - scratch_checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(&ctx->error_callback, scratch); - gammas = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&ctx->error_callback, scratch, n_rounds * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - s_g = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&ctx->error_callback, 
scratch, g_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - s_h = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&ctx->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - rho_inv_pows = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&ctx->error_callback, scratch, log_g_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); + scratch_checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(&ctx->error_callback, scratch); + gammas = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&ctx->error_callback, scratch, n_rounds * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + s_g = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&ctx->error_callback, scratch, g_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + s_h = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&ctx->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + rho_inv_pows = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&ctx->error_callback, scratch, log_g_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); if (gammas == NULL || s_g == NULL || s_h == NULL || rho_inv_pows == NULL) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); return 0; } /* Compute powers of rho_inv. 
Later used in g_factor computations*/ - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&rho_inv, rho); - rustsecp256k1zkp_v0_10_0_bppp_powers_of_rho(rho_inv_pows, &rho_inv, log_g_len); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&rho_inv, rho); + rustsecp256k1zkp_v0_10_1_bppp_powers_of_rho(rho_inv_pows, &rho_inv, log_g_len); /* Compute rho_f = rho^(2^log_g_len) */ rho_f = *rho; for (i = 0; i < log_g_len; i++) { - rustsecp256k1zkp_v0_10_0_scalar_sqr(&rho_f, &rho_f); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&rho_f, &rho_f); } for (i = 0; i < n_rounds; i++) { - rustsecp256k1zkp_v0_10_0_scalar gamma; - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, &proof[i * 65], 65); - rustsecp256k1zkp_v0_10_0_bppp_challenge_scalar(&gamma, transcript, 0); + rustsecp256k1zkp_v0_10_1_scalar gamma; + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, &proof[i * 65], 65); + rustsecp256k1zkp_v0_10_1_bppp_challenge_scalar(&gamma, transcript, 0); gammas[i] = gamma; } /* s_g[0] = n * \prod_{j=0}^{log_g_len - 1} rho^(2^j) * = n * rho^(2^log_g_len - 1) * = n * rho_f * rho_inv */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_g[0], &n, &rho_f); - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_g[0], &s_g[0], &rho_inv); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_g[0], &n, &rho_f); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_g[0], &s_g[0], &rho_inv); for (i = 1; i < g_len; i++) { - size_t log_i = rustsecp256k1zkp_v0_10_0_bppp_log2(i); + size_t log_i = rustsecp256k1zkp_v0_10_1_bppp_log2(i); size_t nearest_pow_of_two = (size_t)1 << log_i; /* This combines the two multiplications of gammas and rho_invs in a * single loop. 
* s_g[i] = s_g[i - nearest_pow_of_two] * * e[log_i] * rho_inv^(2^log_i) */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_g[i], &s_g[i - nearest_pow_of_two], &gammas[log_i]); - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_g[i], &s_g[i], &rho_inv_pows[log_i]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_g[i], &s_g[i - nearest_pow_of_two], &gammas[log_i]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_g[i], &s_g[i], &rho_inv_pows[log_i]); } s_h[0] = l; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&h_c, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&h_c, 0); for (i = 1; i < h_len; i++) { - size_t log_i = rustsecp256k1zkp_v0_10_0_bppp_log2(i); + size_t log_i = rustsecp256k1zkp_v0_10_1_bppp_log2(i); size_t nearest_pow_of_two = (size_t)1 << log_i; - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_h[i], &s_h[i - nearest_pow_of_two], &gammas[log_i]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_h[i], &s_h[i - nearest_pow_of_two], &gammas[log_i]); } - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&h_c, c_vec, 0 /* a_offset */ , s_h, 0 /* b_offset */, 1 /* step */, h_len); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&h_c, c_vec, 0 /* a_offset */ , s_h, 0 /* b_offset */, 1 /* step */, h_len); /* Compute v = n*n*mu_f + l*h_c where mu_f = rho_f^2 */ - rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu_f, &rho_f); - rustsecp256k1zkp_v0_10_0_scalar_mul(&v, &n, &n); - rustsecp256k1zkp_v0_10_0_scalar_mul(&v, &v, &mu_f); - rustsecp256k1zkp_v0_10_0_scalar_add(&v, &v, &h_c); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu_f, &rho_f); + rustsecp256k1zkp_v0_10_1_scalar_mul(&v, &n, &n); + rustsecp256k1zkp_v0_10_1_scalar_mul(&v, &v, &mu_f); + rustsecp256k1zkp_v0_10_1_scalar_add(&v, &v, &h_c); { ec_mult_verify_cb_data1 data; @@ -526,8 +526,8 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify( data.commit = commit; data.gammas = gammas; - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &res1, NULL, ec_mult_verify_cb1, &data, 2*n_rounds + 1)) { - 
rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &res1, NULL, ec_mult_verify_cb1, &data, 2*n_rounds + 1)) { + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); return 0; } } @@ -538,14 +538,14 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify( data.s_g = s_g; data.s_h = s_h; - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &res2, &v, ec_mult_verify_cb2, &data, g_len + h_len)) { - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &res2, &v, ec_mult_verify_cb2, &data, g_len + h_len)) { + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); return 0; } } - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&ctx->error_callback, scratch, scratch_checkpoint); - return rustsecp256k1zkp_v0_10_0_gej_eq_var(&res1, &res2); + return rustsecp256k1zkp_v0_10_1_gej_eq_var(&res1, &res2); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_transcript_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_transcript_impl.h index 26661397..f6f9fba4 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_transcript_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_transcript_impl.h @@ -13,8 +13,8 @@ /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("Bulletproofs_pp/v0/commitment")||SHA256("Bulletproofs_pp/v0/commitment"). 
*/ -static void rustsecp256k1zkp_v0_10_0_bppp_sha256_tagged_commitment_init(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_bppp_sha256_tagged_commitment_init(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x52fc8185ul; sha->s[1] = 0x0e7debf0ul; sha->s[2] = 0xb0967270ul; @@ -28,13 +28,13 @@ static void rustsecp256k1zkp_v0_10_0_bppp_sha256_tagged_commitment_init(rustsecp } /* Obtain a challenge scalar from the current transcript.*/ -static void rustsecp256k1zkp_v0_10_0_bppp_challenge_scalar(rustsecp256k1zkp_v0_10_0_scalar* ch, const rustsecp256k1zkp_v0_10_0_sha256 *transcript, uint64_t idx) { +static void rustsecp256k1zkp_v0_10_1_bppp_challenge_scalar(rustsecp256k1zkp_v0_10_1_scalar* ch, const rustsecp256k1zkp_v0_10_1_sha256 *transcript, uint64_t idx) { unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha = *transcript; - rustsecp256k1zkp_v0_10_0_bppp_le64(buf, idx); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, 8); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(ch, buf, NULL); + rustsecp256k1zkp_v0_10_1_sha256 sha = *transcript; + rustsecp256k1zkp_v0_10_1_bppp_le64(buf, idx); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, 8); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(ch, buf, NULL); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_util.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_util.h index 6a48f892..79169148 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_util.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/bppp_util.h @@ -15,23 +15,23 @@ /* Outputs a pair of points, amortizing the parity byte between them * Assumes both points' coordinates have been normalized. 
*/ -static void rustsecp256k1zkp_v0_10_0_bppp_serialize_points(unsigned char *output, rustsecp256k1zkp_v0_10_0_ge *lpt, rustsecp256k1zkp_v0_10_0_ge *rpt) { +static void rustsecp256k1zkp_v0_10_1_bppp_serialize_points(unsigned char *output, rustsecp256k1zkp_v0_10_1_ge *lpt, rustsecp256k1zkp_v0_10_1_ge *rpt) { unsigned char tmp[33]; - rustsecp256k1zkp_v0_10_0_ge_serialize_ext(tmp, lpt); + rustsecp256k1zkp_v0_10_1_ge_serialize_ext(tmp, lpt); output[0] = (tmp[0] & 1) << 1; memcpy(&output[1], &tmp[1], 32); - rustsecp256k1zkp_v0_10_0_ge_serialize_ext(tmp, rpt); + rustsecp256k1zkp_v0_10_1_ge_serialize_ext(tmp, rpt); output[0] |= (tmp[0] & 1); memcpy(&output[33], &tmp[1], 32); } -static int rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(rustsecp256k1zkp_v0_10_0_ge *pt, const unsigned char *in65, int idx) { +static int rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(rustsecp256k1zkp_v0_10_1_ge *pt, const unsigned char *in65, int idx) { unsigned char tmp[33] = { 0 }; if (in65[0] > 3) { return 0; } /* Check if the input array encodes the point at infinity */ - if ((rustsecp256k1zkp_v0_10_0_memcmp_var(tmp, &in65[1 + 32*idx], 32)) != 0) { + if ((rustsecp256k1zkp_v0_10_1_memcmp_var(tmp, &in65[1 + 32*idx], 32)) != 0) { tmp[0] = 2 | ((in65[0] & (2 - idx)) >> (1 - idx)); memcpy(&tmp[1], &in65[1 + 32*idx], 32); } else { @@ -41,18 +41,18 @@ static int rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(rustsecp256k1zkp_v0 return 0; } } - return rustsecp256k1zkp_v0_10_0_ge_parse_ext(pt, tmp); + return rustsecp256k1zkp_v0_10_1_ge_parse_ext(pt, tmp); } /* Outputs a serialized point in compressed form. Returns 0 at point at infinity. 
*/ -static int rustsecp256k1zkp_v0_10_0_bppp_serialize_pt(unsigned char *output, rustsecp256k1zkp_v0_10_0_ge *lpt) { +static int rustsecp256k1zkp_v0_10_1_bppp_serialize_pt(unsigned char *output, rustsecp256k1zkp_v0_10_1_ge *lpt) { size_t size; - return rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(lpt, output, &size, 1 /*compressed*/); + return rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(lpt, output, &size, 1 /*compressed*/); } /* little-endian encodes a uint64 */ -static void rustsecp256k1zkp_v0_10_0_bppp_le64(unsigned char *output, const uint64_t n) { +static void rustsecp256k1zkp_v0_10_1_bppp_le64(unsigned char *output, const uint64_t n) { output[0] = n; output[1] = n >> 8; output[2] = n >> 16; @@ -64,7 +64,7 @@ static void rustsecp256k1zkp_v0_10_0_bppp_le64(unsigned char *output, const uint } /* Check if n is power of two*/ -static int rustsecp256k1zkp_v0_10_0_is_power_of_two(size_t n) { +static int rustsecp256k1zkp_v0_10_1_is_power_of_two(size_t n) { return n > 0 && (n & (n - 1)) == 0; } @@ -73,8 +73,8 @@ static int rustsecp256k1zkp_v0_10_0_is_power_of_two(size_t n) { * Bulletproofs, this is bounded by len of input vectors which can be safely * assumed to be less than 2^64. 
*/ -static size_t rustsecp256k1zkp_v0_10_0_bppp_log2(size_t n) { - return 64 - 1 - rustsecp256k1zkp_v0_10_0_clz64_var((uint64_t)n); +static size_t rustsecp256k1zkp_v0_10_1_bppp_log2(size_t n) { + return 64 - 1 - rustsecp256k1zkp_v0_10_1_clz64_var((uint64_t)n); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main.h index 4ff6ee5d..74bea8cd 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main.h @@ -2,12 +2,12 @@ #define SECP256K1_MODULE_BPPP_MAIN_H /* this type must be completed before any of the modules/bppp includes */ -struct rustsecp256k1zkp_v0_10_0_bppp_generators { +struct rustsecp256k1zkp_v0_10_1_bppp_generators { size_t n; /* n total generators; includes both G_i and H_i */ /* For BP++, the generators are G_i from [0..(n - 8)] and the last 8 values are generators are for H_i */ - rustsecp256k1zkp_v0_10_0_ge* gens; + rustsecp256k1zkp_v0_10_1_ge* gens; }; #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main_impl.h index 5052148b..8eaba92e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/main_impl.h @@ -15,43 +15,43 @@ #include "../bppp/main.h" #include "../bppp/bppp_norm_product_impl.h" -rustsecp256k1zkp_v0_10_0_bppp_generators *rustsecp256k1zkp_v0_10_0_bppp_generators_create(const rustsecp256k1zkp_v0_10_0_context *ctx, size_t n) { - rustsecp256k1zkp_v0_10_0_bppp_generators *ret; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; +rustsecp256k1zkp_v0_10_1_bppp_generators *rustsecp256k1zkp_v0_10_1_bppp_generators_create(const rustsecp256k1zkp_v0_10_1_context *ctx, size_t n) { + rustsecp256k1zkp_v0_10_1_bppp_generators *ret; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; unsigned char seed[64]; size_t i; 
VERIFY_CHECK(ctx != NULL); - ret = (rustsecp256k1zkp_v0_10_0_bppp_generators *)checked_malloc(&ctx->error_callback, sizeof(*ret)); + ret = (rustsecp256k1zkp_v0_10_1_bppp_generators *)checked_malloc(&ctx->error_callback, sizeof(*ret)); if (ret == NULL) { return NULL; } - ret->gens = (rustsecp256k1zkp_v0_10_0_ge*)checked_malloc(&ctx->error_callback, n * sizeof(*ret->gens)); + ret->gens = (rustsecp256k1zkp_v0_10_1_ge*)checked_malloc(&ctx->error_callback, n * sizeof(*ret->gens)); if (ret->gens == NULL) { free(ret); return NULL; } ret->n = n; - rustsecp256k1zkp_v0_10_0_fe_get_b32(&seed[0], &rustsecp256k1zkp_v0_10_0_ge_const_g.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&seed[32], &rustsecp256k1zkp_v0_10_0_ge_const_g.y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&seed[0], &rustsecp256k1zkp_v0_10_1_ge_const_g.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&seed[32], &rustsecp256k1zkp_v0_10_1_ge_const_g.y); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, seed, 64); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, seed, 64); for (i = 0; i < n; i++) { - rustsecp256k1zkp_v0_10_0_generator gen; + rustsecp256k1zkp_v0_10_1_generator gen; unsigned char tmp[32] = { 0 }; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, tmp, 32); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate(ctx, &gen, tmp)); - rustsecp256k1zkp_v0_10_0_generator_load(&ret->gens[i], &gen); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate(ctx, &gen, tmp)); + rustsecp256k1zkp_v0_10_1_generator_load(&ret->gens[i], &gen); } return ret; } -rustsecp256k1zkp_v0_10_0_bppp_generators* rustsecp256k1zkp_v0_10_0_bppp_generators_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char* data, size_t data_len) { +rustsecp256k1zkp_v0_10_1_bppp_generators* rustsecp256k1zkp_v0_10_1_bppp_generators_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char* data, size_t data_len) { size_t n = 
data_len / 33; - rustsecp256k1zkp_v0_10_0_bppp_generators* ret; + rustsecp256k1zkp_v0_10_1_bppp_generators* ret; VERIFY_CHECK(ctx != NULL); ARG_CHECK(data != NULL); @@ -60,30 +60,30 @@ rustsecp256k1zkp_v0_10_0_bppp_generators* rustsecp256k1zkp_v0_10_0_bppp_generato return NULL; } - ret = (rustsecp256k1zkp_v0_10_0_bppp_generators *)checked_malloc(&ctx->error_callback, sizeof(*ret)); + ret = (rustsecp256k1zkp_v0_10_1_bppp_generators *)checked_malloc(&ctx->error_callback, sizeof(*ret)); if (ret == NULL) { return NULL; } ret->n = n; - ret->gens = (rustsecp256k1zkp_v0_10_0_ge*)checked_malloc(&ctx->error_callback, n * sizeof(*ret->gens)); + ret->gens = (rustsecp256k1zkp_v0_10_1_ge*)checked_malloc(&ctx->error_callback, n * sizeof(*ret->gens)); if (ret->gens == NULL) { free(ret); return NULL; } while (n--) { - rustsecp256k1zkp_v0_10_0_generator gen; - if (!rustsecp256k1zkp_v0_10_0_generator_parse(ctx, &gen, &data[33 * n])) { + rustsecp256k1zkp_v0_10_1_generator gen; + if (!rustsecp256k1zkp_v0_10_1_generator_parse(ctx, &gen, &data[33 * n])) { free(ret->gens); free(ret); return NULL; } - rustsecp256k1zkp_v0_10_0_generator_load(&ret->gens[n], &gen); + rustsecp256k1zkp_v0_10_1_generator_load(&ret->gens[n], &gen); } return ret; } -int rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_bppp_generators* gens, unsigned char* data, size_t *data_len) { +int rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_bppp_generators* gens, unsigned char* data, size_t *data_len) { size_t i; VERIFY_CHECK(ctx != NULL); @@ -94,16 +94,16 @@ int rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(const rustsecp256k1zkp_v0 memset(data, 0, *data_len); for (i = 0; i < gens->n; i++) { - rustsecp256k1zkp_v0_10_0_generator gen; - rustsecp256k1zkp_v0_10_0_generator_save(&gen, &gens->gens[i]); - rustsecp256k1zkp_v0_10_0_generator_serialize(ctx, &data[33 * 
i], &gen); + rustsecp256k1zkp_v0_10_1_generator gen; + rustsecp256k1zkp_v0_10_1_generator_save(&gen, &gens->gens[i]); + rustsecp256k1zkp_v0_10_1_generator_serialize(ctx, &data[33 * i], &gen); } *data_len = 33 * gens->n; return 1; } -void rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_bppp_generators *gens) { +void rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_bppp_generators *gens) { VERIFY_CHECK(ctx != NULL); (void) ctx; if (gens != NULL) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/prove.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/prove.h index 3577d7dd..244efd6e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/prove.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/prove.h @@ -1,46 +1,46 @@ static const unsigned char prove_vector_gens[264] = { 0x03, 0xAF, 0x2C, 0x40, 0xAD, 0x03, 0xCD, 0xC5, 0x76, 0x8C, 0x07, 0x1E, 0x58, 0xD6, 0x8C, 0x73, 0x45, 0xBA, 0xEB, 0xB5, 0x3F, 0x40, 0xFA, 0x8B, 0xBF, 0x73, 0x6E, 0x7B, 0x4A, 0x54, 0x06, 0xED, 0x32, 0x03, 0xCC, 0x11, 0x19, 0x22, 0x2C, 0xA1, 0x0A, 0x45, 0x23, 0xAF, 0x9B, 0x40, 0x0D, 0xA4, 0x5E, 0x06, 0x24, 0xF4, 0x5F, 0x07, 0x89, 0x88, 0xCD, 0x71, 0xAE, 0x77, 0xC1, 0xF5, 0x87, 0x4E, 0xFC, 0xA5, 0x03, 0xDE, 0x61, 0xB1, 0x8F, 0x2C, 0xAC, 0x18, 0xF5, 0xE4, 0x06, 0x8F, 0x65, 0x55, 0xA1, 0x30, 0x5E, 0xF5, 0xF4, 0x84, 0xED, 0x6B, 0xDD, 0xC2, 0xCC, 0xE8, 0x51, 0x38, 0xB8, 0xA5, 0x4C, 0x43, 0xBD, 0x02, 0xA5, 0xF9, 0x8C, 0x1F, 0x82, 0x2D, 0xC6, 0xF3, 0x0F, 0x53, 0xDB, 0x74, 0x77, 0xC7, 0x91, 0x04, 0xB0, 0xB1, 0xA6, 0x17, 0xB2, 0x91, 0xF4, 0x8B, 0x93, 0x3E, 0xBB, 0x73, 0x15, 0x3E, 0x5A, 0xD1, 0x02, 0x44, 0xF5, 0xC6, 0x4E, 0x77, 0x60, 0x81, 0x83, 0xFF, 0xC2, 0x8E, 0x06, 0xFE, 0x67, 0x0C, 0x9A, 0x4B, 0xF2, 0x34, 0xB9, 0xEA, 0xE9, 0x37, 0xDA, 0x30, 0xE2, 0x32, 0x27, 0xF3, 0x88, 0x5F, 
0x2A, 0x02, 0x1D, 0x49, 0x5D, 0x04, 0xED, 0x61, 0x95, 0x37, 0xDD, 0x95, 0xB1, 0x4F, 0x64, 0x0E, 0x1E, 0xFB, 0x47, 0x9F, 0xA7, 0xD7, 0xE0, 0x7A, 0xB1, 0x02, 0x81, 0x95, 0xD1, 0xA5, 0x7E, 0xB2, 0x74, 0x8F, 0x03, 0x26, 0xA5, 0xEC, 0xE9, 0x71, 0x46, 0x37, 0xAC, 0x3D, 0x74, 0x84, 0x26, 0xCB, 0x7C, 0xE8, 0xFE, 0x4E, 0xB0, 0x6D, 0x70, 0x3D, 0x00, 0x10, 0x1A, 0x3A, 0x5B, 0xB8, 0xAA, 0x29, 0x59, 0x93, 0x15, 0x03, 0xE1, 0xA5, 0x39, 0x44, 0x75, 0x16, 0x28, 0x5F, 0xBA, 0x69, 0xA2, 0x4A, 0x2A, 0xC3, 0x5B, 0x63, 0x1F, 0x40, 0x10, 0x36, 0xF9, 0x4C, 0xD2, 0x76, 0x0F, 0xCF, 0x7F, 0x50, 0x30, 0x6E, 0x2B, 0x1D }; static const unsigned char prove_vector_0_n_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_0_n_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_0_n_vec[1]; static const unsigned char prove_vector_0_l_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_0_l_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_0_l_vec[1]; static const unsigned char prove_vector_0_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_0_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_0_c_vec[1]; static const unsigned char prove_vector_0_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 
0x36, 0x41, 0x3A }; static const unsigned char prove_vector_0_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int prove_vector_0_result = 1; static const unsigned char prove_vector_1_n_vec32[2][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_1_n_vec[2]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_1_n_vec[2]; static const unsigned char prove_vector_1_l_vec32[4][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0B } }; -static rustsecp256k1zkp_v0_10_0_scalar 
prove_vector_1_l_vec[4]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_1_l_vec[4]; static const unsigned char prove_vector_1_c_vec32[4][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x30 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0D } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_1_c_vec[4]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_1_c_vec[4]; static const unsigned char prove_vector_1_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char prove_vector_1_proof[] = { 0x00, 0xD2, 0xEC, 0xE2, 0x53, 0x97, 0x28, 0x68, 0x22, 0x59, 0x34, 0xEF, 0xE4, 0x7B, 0x87, 0x4D, 0xE9, 0x57, 0xD5, 0xB7, 0xC7, 0x72, 0xF4, 0xC9, 0xEA, 0x66, 0x14, 0x59, 0xE1, 0xA9, 0xD5, 0xB2, 0x10, 0xDF, 0xE2, 0xFF, 0xF5, 0xA4, 0x38, 0x6B, 0xFE, 0x36, 0x89, 0xE4, 0x9D, 0x90, 0x9F, 0x71, 0x19, 0xE6, 0xA3, 0x1E, 0xAA, 0xAA, 0x4E, 0xFE, 0xC2, 0xD3, 0x37, 0xBB, 0xDE, 0xDB, 0x46, 0x43, 0xC2, 0x01, 0x42, 0x5F, 0xFC, 0xC6, 0x25, 0xA0, 0xB4, 0xF0, 0x76, 0x99, 0xF4, 0x7C, 0xE9, 0x83, 0x82, 0xED, 0x7C, 0x95, 0xBA, 0xD0, 0xE6, 0x5B, 0x88, 0xFD, 0x38, 0xEA, 0x23, 0x54, 0xD4, 0xBD, 0xD4, 0x37, 0xB8, 0x2B, 0x49, 0xAF, 0x81, 0xFD, 0xBE, 
0x88, 0xB2, 0xE5, 0x3F, 0xF4, 0x30, 0x52, 0x00, 0x63, 0x9D, 0xAE, 0x82, 0x44, 0xE9, 0x62, 0x87, 0x2A, 0x23, 0x89, 0x10, 0xE4, 0x9A, 0x64, 0x9F, 0x71, 0xD9, 0x32, 0x57, 0x3B, 0xCB, 0xAC, 0x30, 0xAE, 0x71, 0x61, 0xE9, 0x50, 0x1F, 0xCB, 0x49, 0x9C, 0x52, 0xBA, 0x0C, 0xC4, 0x00, 0x58, 0x73, 0x63, 0xD3, 0x42, 0xDE, 0x42, 0x5E, 0xC5, 0x97, 0xE5, 0xDA, 0x88, 0x76, 0x49, 0x6C, 0x8B, 0x92, 0x99, 0xEE, 0xD0, 0xA9, 0xEB, 0x6E, 0xCA, 0xE1, 0x93, 0x81, 0x56, 0x2E, 0xCA, 0xF3, 0x8E, 0xF0, 0x04, 0xD2, 0x96, 0xD8, 0xDB, 0xEE, 0xEE, 0x1C, 0x44 }; static const int prove_vector_1_result = 1; static const unsigned char prove_vector_2_n_vec32[4][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0B } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_2_n_vec[4]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_2_n_vec[4]; static const unsigned char prove_vector_2_l_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_2_l_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_2_l_vec[1]; static const unsigned char 
prove_vector_2_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_2_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_2_c_vec[1]; static const unsigned char prove_vector_2_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char prove_vector_2_proof[] = { 0x00, 0xBC, 0x4C, 0x42, 0x67, 0x71, 0x69, 0x52, 0x6A, 0x65, 0xFE, 0xA0, 0xCB, 0x3F, 0x58, 0x8B, 0x48, 0x48, 0x6E, 0x59, 0xFC, 0x55, 0x51, 0x10, 0xB9, 0xBF, 0x6A, 0x7D, 0xBF, 0x32, 0x34, 0x4E, 0x7D, 0xBA, 0xD5, 0xCB, 0xCC, 0x19, 0xED, 0xAA, 0x9F, 0x8D, 0x93, 0x26, 0x5E, 0x3F, 0x3E, 0xAA, 0xDF, 0x0B, 0x1C, 0xB3, 0xDC, 0x37, 0xB6, 0xDB, 0xAE, 0x43, 0x63, 0x92, 0xB5, 0xFF, 0x0D, 0x1C, 0x77, 0x02, 0x7E, 0x2B, 0xB8, 0x87, 0x85, 0x81, 0x13, 0x70, 0x1F, 0x03, 0x65, 0x7D, 0xD8, 0x91, 0x83, 0xE5, 0x7E, 0x8B, 0x9E, 0x6F, 0x1C, 0x08, 0x9C, 0x9C, 0x5F, 0xA4, 0x12, 0x5F, 0xD3, 0xEE, 0xE2, 0x74, 0x7A, 0x2C, 0x58, 0x3A, 0x29, 0x4F, 0x64, 0x10, 0xE7, 0x89, 0xBF, 0xB2, 0xE5, 0xD9, 0xD5, 0xC5, 0x62, 0x83, 0x0C, 0xA8, 0xDD, 0x1E, 0x24, 0x6D, 0xD1, 0x58, 0x8D, 0x80, 0x74, 0xF3, 0xD9, 0x3A, 0x68, 0x7B, 0xF5, 0x12, 0xC6, 0xC2, 0x3F, 0x71, 0x47, 0xDF, 0xCF, 0xC8, 0xE2, 0xC4, 0x59, 0xDF, 0x4F, 0xEC, 0x86, 0xE9, 0xF9, 0x31, 0x94, 0x6A, 0x5F, 0xD9, 0x1E, 0x6B, 0x09, 0xCD, 0xCF, 0x5D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int prove_vector_2_result = 1; static const unsigned char prove_vector_3_n_vec32[1][32] = { { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_3_n_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_3_n_vec[1]; static const unsigned char prove_vector_3_l_vec32[1][32] = { { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_3_l_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_3_l_vec[1]; static const unsigned char prove_vector_3_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_3_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_3_c_vec[1]; static const unsigned char prove_vector_3_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char prove_vector_3_proof[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; static const int prove_vector_3_result = 1; static const unsigned char prove_vector_4_n_vec32[2][32] = { { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_4_n_vec[2]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_4_n_vec[2]; static const unsigned char prove_vector_4_l_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_4_l_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_4_l_vec[1]; static const unsigned char prove_vector_4_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar prove_vector_4_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar prove_vector_4_c_vec[1]; static const unsigned char prove_vector_4_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char prove_vector_4_proof[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const int prove_vector_4_result = 1; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/verify.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/verify.h index 96d8ab96..c15c827d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/verify.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/test_vectors/verify.h @@ -2,91 +2,91 @@ static const unsigned char verify_vector_gens[264] = { 0x03, 0xAF, 0x2C, 0x40, 0 static const unsigned char verify_vector_0_commit33[33] = { 0x03, 0xD7, 0x53, 0x31, 0x5B, 0xAA, 0x04, 0xD5, 0x7C, 0x4A, 0x34, 0x94, 0x98, 0xBC, 0xA9, 0x1E, 0xD6, 0xA3, 0xBF, 0x81, 0xFC, 0x38, 0x30, 0x7C, 0x3B, 0x7C, 0xFC, 0xC6, 0xFF, 0x1A, 0x13, 0x36, 0x72 }; static const size_t verify_vector_0_n_vec_len = 1; static const unsigned char verify_vector_0_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_0_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_0_c_vec[1]; static const unsigned char verify_vector_0_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char verify_vector_0_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 
0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int verify_vector_0_result = 1; static const unsigned char verify_vector_1_commit33[33] = { 0x02, 0x6C, 0x09, 0xD7, 0x06, 0x2D, 0x1C, 0x07, 0x0A, 0x64, 0x34, 0x82, 0xF6, 0x46, 0x03, 0xEB, 0x24, 0x3E, 0x54, 0x0F, 0xDA, 0xAF, 0x3A, 0x69, 0x5F, 0x86, 0xB6, 0xD2, 0xC2, 0x06, 0xE9, 0x49, 0xC7 }; static const size_t verify_vector_1_n_vec_len = 1; static const unsigned char verify_vector_1_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_1_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_1_c_vec[1]; static const unsigned char verify_vector_1_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char verify_vector_1_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int verify_vector_1_result = 0; static const unsigned char verify_vector_2_commit33[33] = { 0x03, 0xD7, 0x53, 0x31, 0x5B, 0xAA, 0x04, 0xD5, 0x7C, 0x4A, 0x34, 0x94, 0x98, 0xBC, 0xA9, 0x1E, 0xD6, 0xA3, 0xBF, 0x81, 0xFC, 0x38, 0x30, 0x7C, 0x3B, 0x7C, 0xFC, 0xC6, 0xFF, 0x1A, 0x13, 0x36, 0x72 }; static const size_t verify_vector_2_n_vec_len = 1; static const unsigned char verify_vector_2_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_2_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_2_c_vec[1]; static const unsigned char verify_vector_2_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char verify_vector_2_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x41 }; static const int verify_vector_2_result = 0; static const unsigned char verify_vector_3_commit33[33] = { 0x03, 0xD7, 0x53, 0x31, 0x5B, 0xAA, 0x04, 0xD5, 0x7C, 0x4A, 0x34, 0x94, 0x98, 0xBC, 0xA9, 0x1E, 0xD6, 0xA3, 0xBF, 0x81, 0xFC, 0x38, 0x30, 0x7C, 0x3B, 0x7C, 0xFC, 0xC6, 0xFF, 0x1A, 0x13, 0x36, 0x72 }; static const size_t verify_vector_3_n_vec_len = 1; static const unsigned char verify_vector_3_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_3_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_3_c_vec[1]; static const unsigned char verify_vector_3_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static 
const unsigned char verify_vector_3_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x41, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x41 }; static const int verify_vector_3_result = 0; static const unsigned char verify_vector_4_commit33[33] = { 0x03, 0xD7, 0x53, 0x31, 0x5B, 0xAA, 0x04, 0xD5, 0x7C, 0x4A, 0x34, 0x94, 0x98, 0xBC, 0xA9, 0x1E, 0xD6, 0xA3, 0xBF, 0x81, 0xFC, 0x38, 0x30, 0x7C, 0x3B, 0x7C, 0xFC, 0xC6, 0xFF, 0x1A, 0x13, 0x36, 0x72 }; static const size_t verify_vector_4_n_vec_len = 1; static const unsigned char verify_vector_4_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_4_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_4_c_vec[1]; static const unsigned char verify_vector_4_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char verify_vector_4_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41 }; static const int verify_vector_4_result = 0; static const unsigned char verify_vector_5_commit33[33] = { 0x03, 0x83, 0x6A, 0xD4, 
0x2D, 0xD2, 0x02, 0x49, 0xC8, 0x6E, 0x53, 0x22, 0x53, 0x24, 0xDA, 0x52, 0x08, 0xC0, 0x62, 0x4C, 0xCB, 0xB3, 0x13, 0xD7, 0x14, 0x59, 0x68, 0x47, 0x56, 0x00, 0xC0, 0x8D, 0xBA }; static const size_t verify_vector_5_n_vec_len = 2; static const unsigned char verify_vector_5_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_5_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_5_c_vec[1]; static const unsigned char verify_vector_5_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x36 }; static const unsigned char verify_vector_5_proof[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x4C, 0xB9, 0xD4, 0x34, 0xA2, 0xD6, 0xD5, 0x4C, 0x0F, 0x2E, 0x2C, 0xE3, 0x82, 0x17, 0x48, 0x63, 0xE0, 0xAE, 0x6B, 0xD7, 0x64, 0x9D, 0x43, 0x2B, 0x6E, 0x6E, 0x1C, 0x62, 0x55, 0x4B, 0xC5, 0x73, 0x3D, 0x74, 0x7B, 0x78, 0x43, 0xF4, 0x8B, 0x7C, 0x84, 0x10, 0x00, 0x8B, 0x12, 0xAF, 0xA4, 0xF1, 0xF4, 0x01, 0x96, 0x21, 0x8B, 0xE9, 0x05, 0x01, 0xF8, 0x23, 0x7A, 0x8F, 0x66, 0xC9, 0xDE, 0xE1, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int verify_vector_5_result = 0; static const unsigned char verify_vector_6_commit33[33] = { 0x03, 0xCF, 0x7F, 0x08, 0xF5, 0x8A, 0x06, 0x74, 0x5C, 0xDB, 0xCE, 0xC6, 0x51, 0xF3, 0xE5, 0xE4, 0xDC, 0xAD, 0xF4, 0x40, 0x3C, 0xFA, 0xE6, 0x78, 0xBE, 0x49, 0x2D, 0x90, 0xC8, 0xD0, 0x16, 0x3D, 
0x78 }; static const size_t verify_vector_6_n_vec_len = 2; static const unsigned char verify_vector_6_c_vec32[4][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x30 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0D } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_6_c_vec[4]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_6_c_vec[4]; static const unsigned char verify_vector_6_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const unsigned char verify_vector_6_proof[] = { 0x00, 0xD2, 0xEC, 0xE2, 0x53, 0x97, 0x28, 0x68, 0x22, 0x59, 0x34, 0xEF, 0xE4, 0x7B, 0x87, 0x4D, 0xE9, 0x57, 0xD5, 0xB7, 0xC7, 0x72, 0xF4, 0xC9, 0xEA, 0x66, 0x14, 0x59, 0xE1, 0xA9, 0xD5, 0xB2, 0x10, 0xDF, 0xE2, 0xFF, 0xF5, 0xA4, 0x38, 0x6B, 0xFE, 0x36, 0x89, 0xE4, 0x9D, 0x90, 0x9F, 0x71, 0x19, 0xE6, 0xA3, 0x1E, 0xAA, 0xAA, 0x4E, 0xFE, 0xC2, 0xD3, 0x37, 0xBB, 0xDE, 0xDB, 0x46, 0x43, 0xC2, 0x01, 0x42, 0x5F, 0xFC, 0xC6, 0x25, 0xA0, 0xB4, 0xF0, 0x76, 0x99, 0xF4, 0x7C, 0xE9, 0x83, 0x82, 0xED, 0x7C, 0x95, 0xBA, 0xD0, 0xE6, 0x5B, 0x88, 0xFD, 0x38, 0xEA, 0x23, 0x54, 0xD4, 0xBD, 0xD4, 0x37, 0xB8, 0x2B, 0x49, 0xAF, 0x81, 0xFD, 0xBE, 0x88, 0xB2, 0xE5, 0x3F, 
0xF4, 0x30, 0x52, 0x00, 0x63, 0x9D, 0xAE, 0x82, 0x44, 0xE9, 0x62, 0x87, 0x2A, 0x23, 0x89, 0x10, 0xE4, 0x9A, 0x64, 0x9F, 0x71, 0xD9, 0x32, 0x57, 0x3B, 0xCB, 0xAC, 0x30, 0xAE, 0x71, 0x61, 0xE9, 0x50, 0x1F, 0xCB, 0x49, 0x9C, 0x52, 0xBA, 0x0C, 0xC4, 0x00, 0x58, 0x73, 0x63, 0xD3, 0x42, 0xDE, 0x42, 0x5E, 0xC5, 0x97, 0xE5, 0xDA, 0x88, 0x76, 0x49, 0x6C, 0x8B, 0x92, 0x99, 0xEE, 0xD0, 0xA9, 0xEB, 0x6E, 0xCA, 0xE1, 0x93, 0x81, 0x56, 0x2E, 0xCA, 0xF3, 0x8E, 0xF0, 0x04, 0xD2, 0x96, 0xD8, 0xDB, 0xEE, 0xEE, 0x1C, 0x44 }; static const int verify_vector_6_result = 1; static const unsigned char verify_vector_7_commit33[33] = { 0x02, 0x7A, 0xAA, 0xB2, 0x7E, 0xA5, 0x5B, 0x77, 0x08, 0xE5, 0x43, 0xB6, 0x22, 0x7F, 0xC9, 0xAC, 0x53, 0x10, 0x32, 0x61, 0x7B, 0x7D, 0xAC, 0xB1, 0xB6, 0xF6, 0xAC, 0xDE, 0x63, 0x79, 0x82, 0x9C, 0x24 }; static const size_t verify_vector_7_n_vec_len = 4; static const unsigned char verify_vector_7_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_7_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_7_c_vec[1]; static const unsigned char verify_vector_7_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char verify_vector_7_proof[] = { 0x00, 0xBC, 0x4C, 0x42, 0x67, 0x71, 0x69, 0x52, 0x6A, 0x65, 0xFE, 0xA0, 0xCB, 0x3F, 0x58, 0x8B, 0x48, 0x48, 0x6E, 0x59, 0xFC, 0x55, 0x51, 0x10, 0xB9, 0xBF, 0x6A, 0x7D, 0xBF, 0x32, 0x34, 0x4E, 0x7D, 0xBA, 0xD5, 0xCB, 0xCC, 0x19, 0xED, 0xAA, 0x9F, 0x8D, 0x93, 0x26, 0x5E, 0x3F, 0x3E, 0xAA, 0xDF, 0x0B, 0x1C, 0xB3, 0xDC, 0x37, 0xB6, 0xDB, 0xAE, 0x43, 0x63, 0x92, 0xB5, 0xFF, 0x0D, 0x1C, 0x77, 0x02, 0x7E, 0x2B, 0xB8, 0x87, 0x85, 0x81, 
0x13, 0x70, 0x1F, 0x03, 0x65, 0x7D, 0xD8, 0x91, 0x83, 0xE5, 0x7E, 0x8B, 0x9E, 0x6F, 0x1C, 0x08, 0x9C, 0x9C, 0x5F, 0xA4, 0x12, 0x5F, 0xD3, 0xEE, 0xE2, 0x74, 0x7A, 0x2C, 0x58, 0x3A, 0x29, 0x4F, 0x64, 0x10, 0xE7, 0x89, 0xBF, 0xB2, 0xE5, 0xD9, 0xD5, 0xC5, 0x62, 0x83, 0x0C, 0xA8, 0xDD, 0x1E, 0x24, 0x6D, 0xD1, 0x58, 0x8D, 0x80, 0x74, 0xF3, 0xD9, 0x3A, 0x68, 0x7B, 0xF5, 0x12, 0xC6, 0xC2, 0x3F, 0x71, 0x47, 0xDF, 0xCF, 0xC8, 0xE2, 0xC4, 0x59, 0xDF, 0x4F, 0xEC, 0x86, 0xE9, 0xF9, 0x31, 0x94, 0x6A, 0x5F, 0xD9, 0x1E, 0x6B, 0x09, 0xCD, 0xCF, 0x5D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int verify_vector_7_result = 1; static const unsigned char verify_vector_8_commit33[33] = { 0x02, 0x2D, 0x4F, 0xF9, 0xB7, 0x15, 0x22, 0xBC, 0xB0, 0x8B, 0xF8, 0xBA, 0x31, 0x0A, 0x80, 0x76, 0x7A, 0xE9, 0xA9, 0x83, 0x00, 0xBC, 0x5A, 0x01, 0xCC, 0xE9, 0x00, 0x83, 0x56, 0xEA, 0x77, 0xEB, 0x75 }; static const size_t verify_vector_8_n_vec_len = 4; static const unsigned char verify_vector_8_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_8_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_8_c_vec[1]; static const unsigned char verify_vector_8_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char verify_vector_8_proof[] = { 0x00, 0xBC, 0x4C, 0x42, 0x67, 0x71, 0x69, 0x52, 0x6A, 0x65, 0xFE, 0xA0, 0xCB, 0x3F, 0x58, 0x8B, 0x48, 0x48, 0x6E, 0x59, 0xFC, 0x55, 0x51, 0x10, 0xB9, 0xBF, 0x6A, 0x7D, 0xBF, 0x32, 0x34, 0x4E, 0x7D, 0xBA, 0xD5, 
0xCB, 0xCC, 0x19, 0xED, 0xAA, 0x9F, 0x8D, 0x93, 0x26, 0x5E, 0x3F, 0x3E, 0xAA, 0xDF, 0x0B, 0x1C, 0xB3, 0xDC, 0x37, 0xB6, 0xDB, 0xAE, 0x43, 0x63, 0x92, 0xB5, 0xFF, 0x0D, 0x1C, 0x77, 0x02, 0x7E, 0x2B, 0xB8, 0x87, 0x85, 0x81, 0x13, 0x70, 0x1F, 0x03, 0x65, 0x7D, 0xD8, 0x91, 0x83, 0xE5, 0x7E, 0x8B, 0x9E, 0x6F, 0x1C, 0x08, 0x9C, 0x9C, 0x5F, 0xA4, 0x12, 0x5F, 0xD3, 0xEE, 0xE2, 0x74, 0x7A, 0x2C, 0x58, 0x3A, 0x29, 0x4F, 0x64, 0x10, 0xE7, 0x89, 0xBF, 0xB2, 0xE5, 0xD9, 0xD5, 0xC5, 0x62, 0x83, 0x0C, 0xA8, 0xDD, 0x1E, 0x24, 0x6D, 0xD1, 0x58, 0x8D, 0x80, 0x74, 0xF3, 0xD9, 0x3A, 0x68, 0x7B, 0xF5, 0x12, 0xC6, 0xC2, 0x3F, 0x71, 0x47, 0xDF, 0xCF, 0xC8, 0xE2, 0xC4, 0x59, 0xDF, 0x4F, 0xEC, 0x86, 0xE9, 0xF9, 0x31, 0x94, 0x6A, 0x5F, 0xD9, 0x1E, 0x6B, 0x09, 0xCD, 0xCF, 0x5D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3E }; static const int verify_vector_8_result = 0; static const unsigned char verify_vector_9_commit33[33] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; static const size_t verify_vector_9_n_vec_len = 1; static const unsigned char verify_vector_9_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_9_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_9_c_vec[1]; static const unsigned char verify_vector_9_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char 
verify_vector_9_proof[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; static const int verify_vector_9_result = 1; static const unsigned char verify_vector_10_commit33[33] = { 0x03, 0x62, 0x8A, 0xC2, 0xF1, 0xF2, 0x00, 0xE0, 0x81, 0xBD, 0xA0, 0xA9, 0x6D, 0x25, 0x53, 0xB4, 0x17, 0xC1, 0x02, 0x93, 0x50, 0x3E, 0x91, 0xD4, 0xD1, 0x3A, 0x82, 0x89, 0x02, 0x24, 0x78, 0x49, 0xA5 }; static const size_t verify_vector_10_n_vec_len = 2; static const unsigned char verify_vector_10_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_10_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_10_c_vec[1]; static const unsigned char verify_vector_10_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char verify_vector_10_proof[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const int verify_vector_10_result = 1; static const unsigned char verify_vector_11_commit33[33] = { 0x03, 0x62, 0x8A, 0xC2, 0xF1, 0xF2, 0x00, 0xE0, 0x81, 0xBD, 0xA0, 0xA9, 0x6D, 0x25, 0x53, 0xB4, 0x17, 0xC1, 0x02, 0x93, 0x50, 0x3E, 0x91, 0xD4, 0xD1, 0x3A, 0x82, 0x89, 0x02, 0x24, 0x78, 0x49, 0xA5 }; static const size_t verify_vector_11_n_vec_len = 2; static const unsigned char verify_vector_11_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_11_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_11_c_vec[1]; static const unsigned char verify_vector_11_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char verify_vector_11_proof[] = { 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 
0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const int verify_vector_11_result = 0; static const unsigned char verify_vector_12_commit33[33] = { 0x02, 0x7D, 0x5F, 0x4B, 0x11, 0xC0, 0xE4, 0x2E, 0x4C, 0x1B, 0x56, 0xAE, 0xF0, 0x5F, 0xAA, 0xD8, 0x77, 0x0C, 0x93, 0x71, 0xA2, 0x92, 0xF9, 0x89, 0xA2, 0xB4, 0x69, 0x9B, 0x46, 0x8A, 0x03, 0xF1, 0x50 }; static const size_t verify_vector_12_n_vec_len = 0; static const unsigned char verify_vector_12_c_vec32[1][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3C } }; -static rustsecp256k1zkp_v0_10_0_scalar verify_vector_12_c_vec[1]; +static rustsecp256k1zkp_v0_10_1_scalar verify_vector_12_c_vec[1]; static const unsigned char verify_vector_12_r32[32] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34 }; static const unsigned char verify_vector_12_proof[] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x34, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x3A }; static const int verify_vector_12_result = 0; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/tests_impl.h index f55caa9d..c8c3f0d9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/bppp/tests_impl.h @@ -17,60 +17,60 @@ #include "test_vectors/prove.h" static void test_bppp_generators_api(void) { 
- rustsecp256k1zkp_v0_10_0_bppp_generators *gens; - rustsecp256k1zkp_v0_10_0_bppp_generators *gens_orig; + rustsecp256k1zkp_v0_10_1_bppp_generators *gens; + rustsecp256k1zkp_v0_10_1_bppp_generators *gens_orig; unsigned char gens_ser[330]; size_t len = sizeof(gens_ser); /* Create */ - gens = rustsecp256k1zkp_v0_10_0_bppp_generators_create(CTX, 10); + gens = rustsecp256k1zkp_v0_10_1_bppp_generators_create(CTX, 10); CHECK(gens != NULL); gens_orig = gens; /* Preserve for round-trip test */ /* Serialize */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, NULL, gens_ser, &len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, NULL, &len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, NULL, gens_ser, &len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, NULL, &len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, NULL)); len = 0; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, &len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, &len)); len = sizeof(gens_ser) - 1; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, &len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, &len)); len = sizeof(gens_ser); { /* Output buffer can be greater than minimum needed */ unsigned char gens_ser_tmp[331]; size_t len_tmp = sizeof(gens_ser_tmp); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser_tmp, &len_tmp)); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser_tmp, &len_tmp)); CHECK(len_tmp == sizeof(gens_ser_tmp) - 1); } /* Parse */ - 
CHECK(rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, &len)); - CHECK_ILLEGAL_VOID(CTX, gens = rustsecp256k1zkp_v0_10_0_bppp_generators_parse(CTX, NULL, sizeof(gens_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, &len)); + CHECK_ILLEGAL_VOID(CTX, gens = rustsecp256k1zkp_v0_10_1_bppp_generators_parse(CTX, NULL, sizeof(gens_ser)); CHECK(gens == NULL)); /* Not a multiple of 33 */ - gens = rustsecp256k1zkp_v0_10_0_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser) - 1); + gens = rustsecp256k1zkp_v0_10_1_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser) - 1); CHECK(gens == NULL); - gens = rustsecp256k1zkp_v0_10_0_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser)); + gens = rustsecp256k1zkp_v0_10_1_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser)); CHECK(gens != NULL); /* Not valid generators */ memset(gens_ser, 1, sizeof(gens_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser)) == NULL); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_generators_parse(CTX, gens_ser, sizeof(gens_ser)) == NULL); /* Check that round-trip succeeded */ CHECK(gens->n == gens_orig->n); for (len = 0; len < gens->n; len++) { - rustsecp256k1zkp_v0_10_0_ge_eq_var(&gens->gens[len], &gens_orig->gens[len]); + rustsecp256k1zkp_v0_10_1_ge_eq_var(&gens->gens[len], &gens_orig->gens[len]); } /* Destroy (we allow destroying a NULL context, it's just a noop. like free().) 
*/ - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, NULL); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gens); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gens_orig); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, NULL); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gens); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gens_orig); } static void test_bppp_generators_fixed(void) { - rustsecp256k1zkp_v0_10_0_bppp_generators *gens = rustsecp256k1zkp_v0_10_0_bppp_generators_create(CTX, 3); + rustsecp256k1zkp_v0_10_1_bppp_generators *gens = rustsecp256k1zkp_v0_10_1_bppp_generators_create(CTX, 3); unsigned char gens_ser[330]; const unsigned char fixed_first_3[99] = { 0x0b, @@ -92,39 +92,39 @@ static void test_bppp_generators_fixed(void) { size_t len; len = 99; - CHECK(rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, &len)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(gens_ser, fixed_first_3, sizeof(fixed_first_3)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, &len)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(gens_ser, fixed_first_3, sizeof(fixed_first_3)) == 0); len = sizeof(gens_ser); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_generators_serialize(CTX, gens, gens_ser, &len)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(gens_ser, fixed_first_3, sizeof(fixed_first_3)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_generators_serialize(CTX, gens, gens_ser, &len)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(gens_ser, fixed_first_3, sizeof(fixed_first_3)) == 0); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gens); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gens); } static void test_bppp_tagged_hash(void) { unsigned char tag_data[29] = "Bulletproofs_pp/v0/commitment"; - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_cached; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_cached; 
unsigned char output[32]; unsigned char output_cached[32]; - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); - rustsecp256k1zkp_v0_10_0_bppp_sha256_tagged_commitment_init(&sha_cached); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha_cached, output_cached); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, output_cached, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); + rustsecp256k1zkp_v0_10_1_bppp_sha256_tagged_commitment_init(&sha_cached); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha_cached, output_cached); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, output_cached, 32) == 0); { unsigned char expected[32] = { 0x21, 0x2F, 0xB6, 0x4F, 0x9D, 0x8C, 0x3B, 0xC5, 0xF6, 0x91, 0x15, 0xEE, 0x74, 0xF5, 0x12, 0x67, 0x8A, 0x41, 0xC6, 0x85, 0x1A, 0x79, 0x14, 0xFC, 0x48, 0x15, 0xC7, 0x2D, 0xF8, 0x63, 0x8F, 0x1B }; - rustsecp256k1zkp_v0_10_0_bppp_sha256_tagged_commitment_init(&sha); - rustsecp256k1zkp_v0_10_0_bppp_challenge_scalar(&s, &sha, 0); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(output, &s); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, expected, sizeof(output)) == 0); + rustsecp256k1zkp_v0_10_1_bppp_sha256_tagged_commitment_init(&sha); + rustsecp256k1zkp_v0_10_1_bppp_challenge_scalar(&s, &sha, 0); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(output, &s); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, expected, sizeof(output)) == 0); } { @@ -133,78 +133,78 @@ static void test_bppp_tagged_hash(void) { 0x72, 0x7E, 0x3E, 0xB7, 0x10, 0x03, 0xF0, 0xE9, 0x69, 0x4D, 0xAA, 0x96, 0xCE, 0x98, 0xBB, 0x39, 0x1C, 0x2F, 0x7C, 0x2E, 0x1C, 0x17, 0x78, 0x6D }; - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, tmp, sizeof(tmp)); - rustsecp256k1zkp_v0_10_0_bppp_challenge_scalar(&s, &sha, 0); - 
rustsecp256k1zkp_v0_10_0_scalar_get_b32(output, &s); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, expected, sizeof(output)) == 0); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, tmp, sizeof(tmp)); + rustsecp256k1zkp_v0_10_1_bppp_challenge_scalar(&s, &sha, 0); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(output, &s); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, expected, sizeof(output)) == 0); } } static void test_log_exp(void) { - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(0) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(1) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(64) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(63) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_is_power_of_two(256) == 1); - - CHECK(rustsecp256k1zkp_v0_10_0_bppp_log2(1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_log2(2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_log2(255) == 7); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_log2(256) == 8); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_log2(257) == 8); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(1) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(63) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_is_power_of_two(256) == 1); + + CHECK(rustsecp256k1zkp_v0_10_1_bppp_log2(1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_log2(2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_log2(255) == 7); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_log2(256) == 8); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_log2(257) == 8); } static void test_norm_util_helpers(void) { - rustsecp256k1zkp_v0_10_0_scalar a_vec[4], b_vec[4], rho_pows[4], res, res2, mu, rho; + rustsecp256k1zkp_v0_10_1_scalar a_vec[4], b_vec[4], rho_pows[4], res, res2, mu, rho; int i; /* a = {1, 2, 3, 4} b = {5, 6, 7, 8}, mu = 4, rho = 2 */ for (i = 0; 
i < 4; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&a_vec[i], i + 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&b_vec[i], i + 5); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&a_vec[i], i + 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&b_vec[i], i + 5); } - rustsecp256k1zkp_v0_10_0_scalar_set_int(&mu, 4); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&rho, 2); - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&res, a_vec, 0, b_vec, 0, 1, 4); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res2, 70); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res2, &res) == 1); - - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&res, a_vec, 0, b_vec, 1, 2, 2); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res2, 30); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res2, &res) == 1); - - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&res, a_vec, 1, b_vec, 0, 2, 2); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res2, 38); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res2, &res) == 1); - - rustsecp256k1zkp_v0_10_0_scalar_inner_product(&res, a_vec, 1, b_vec, 1, 2, 2); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res2, 44); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res2, &res) == 1); - - rustsecp256k1zkp_v0_10_0_weighted_scalar_inner_product(&res, a_vec, 0, a_vec, 0, 1, 4, &mu); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res2, 4740); /*i*i*4^(i+1) */ - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res2, &res) == 1); - - rustsecp256k1zkp_v0_10_0_bppp_powers_of_rho(rho_pows, &rho, 4); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res, 2); CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res, &rho_pows[0])); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res, 4); CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res, &rho_pows[1])); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res, 16); CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res, &rho_pows[2])); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&res, 256); CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&res, &rho_pows[3])); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&mu, 4); + 
rustsecp256k1zkp_v0_10_1_scalar_set_int(&rho, 2); + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&res, a_vec, 0, b_vec, 0, 1, 4); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res2, 70); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res2, &res) == 1); + + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&res, a_vec, 0, b_vec, 1, 2, 2); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res2, 30); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res2, &res) == 1); + + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&res, a_vec, 1, b_vec, 0, 2, 2); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res2, 38); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res2, &res) == 1); + + rustsecp256k1zkp_v0_10_1_scalar_inner_product(&res, a_vec, 1, b_vec, 1, 2, 2); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res2, 44); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res2, &res) == 1); + + rustsecp256k1zkp_v0_10_1_weighted_scalar_inner_product(&res, a_vec, 0, a_vec, 0, 1, 4, &mu); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res2, 4740); /*i*i*4^(i+1) */ + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res2, &res) == 1); + + rustsecp256k1zkp_v0_10_1_bppp_powers_of_rho(rho_pows, &rho, 4); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res, 2); CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res, &rho_pows[0])); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res, 4); CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res, &rho_pows[1])); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res, 16); CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res, &rho_pows[2])); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&res, 256); CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&res, &rho_pows[3])); } -static void test_serialize_two_points_roundtrip(rustsecp256k1zkp_v0_10_0_ge *X, rustsecp256k1zkp_v0_10_0_ge *R) { - rustsecp256k1zkp_v0_10_0_ge X_tmp, R_tmp; +static void test_serialize_two_points_roundtrip(rustsecp256k1zkp_v0_10_1_ge *X, rustsecp256k1zkp_v0_10_1_ge *R) { + rustsecp256k1zkp_v0_10_1_ge X_tmp, R_tmp; unsigned char buf[65]; - 
rustsecp256k1zkp_v0_10_0_bppp_serialize_points(buf, X, R); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&X_tmp, buf, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&R_tmp, buf, 1)); - rustsecp256k1zkp_v0_10_0_ge_eq_var(X, &X_tmp); - rustsecp256k1zkp_v0_10_0_ge_eq_var(R, &R_tmp); + rustsecp256k1zkp_v0_10_1_bppp_serialize_points(buf, X, R); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&X_tmp, buf, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&R_tmp, buf, 1)); + rustsecp256k1zkp_v0_10_1_ge_eq_var(X, &X_tmp); + rustsecp256k1zkp_v0_10_1_ge_eq_var(R, &R_tmp); } static void test_serialize_two_points(void) { - rustsecp256k1zkp_v0_10_0_ge X, R; + rustsecp256k1zkp_v0_10_1_ge X, R; int i; for (i = 0; i < COUNT; i++) { @@ -215,145 +215,145 @@ static void test_serialize_two_points(void) { for (i = 0; i < COUNT; i++) { random_group_element_test(&X); - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&R); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&R); test_serialize_two_points_roundtrip(&X, &R); } for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&X); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&X); random_group_element_test(&R); test_serialize_two_points_roundtrip(&X, &R); } - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&X); - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&R); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&X); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&R); test_serialize_two_points_roundtrip(&X, &R); /* Test invalid sign byte */ { - rustsecp256k1zkp_v0_10_0_ge X_tmp, R_tmp; + rustsecp256k1zkp_v0_10_1_ge X_tmp, R_tmp; unsigned char buf[65]; random_group_element_test(&X); random_group_element_test(&R); - rustsecp256k1zkp_v0_10_0_bppp_serialize_points(buf, &X, &R); + rustsecp256k1zkp_v0_10_1_bppp_serialize_points(buf, &X, &R); /* buf is valid if 0 <= buf[0] < 4. 
*/ - buf[0] = (unsigned char)rustsecp256k1zkp_v0_10_0_testrandi64(4, 255); - CHECK(!rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&X_tmp, buf, 0)); - CHECK(!rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&R_tmp, buf, 0)); + buf[0] = (unsigned char)rustsecp256k1zkp_v0_10_1_testrandi64(4, 255); + CHECK(!rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&X_tmp, buf, 0)); + CHECK(!rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&R_tmp, buf, 0)); } /* Check that sign bit is 0 for point at infinity */ for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_ge X_tmp, R_tmp; + rustsecp256k1zkp_v0_10_1_ge X_tmp, R_tmp; unsigned char buf[65]; int expect; random_group_element_test(&X); random_group_element_test(&R); - rustsecp256k1zkp_v0_10_0_bppp_serialize_points(buf, &X, &R); + rustsecp256k1zkp_v0_10_1_bppp_serialize_points(buf, &X, &R); memset(&buf[1], 0, 32); if ((buf[0] & 2) == 0) { expect = 1; } else { expect = 0; } - CHECK(rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&X_tmp, buf, 0) == expect); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&R_tmp, buf, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&X_tmp, buf, 0) == expect); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&R_tmp, buf, 1)); memset(&buf[33], 0, 32); if ((buf[0] & 1) == 0) { expect = 1; } else { expect = 0; } - CHECK(rustsecp256k1zkp_v0_10_0_bppp_parse_one_of_points(&R_tmp, buf, 1) == expect); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_parse_one_of_points(&R_tmp, buf, 1) == expect); } } -static void rustsecp256k1zkp_v0_10_0_norm_arg_commit_initial_data( - rustsecp256k1zkp_v0_10_0_sha256* transcript, - const rustsecp256k1zkp_v0_10_0_scalar* rho, - const rustsecp256k1zkp_v0_10_0_bppp_generators* gens_vec, +static void rustsecp256k1zkp_v0_10_1_norm_arg_commit_initial_data( + rustsecp256k1zkp_v0_10_1_sha256* transcript, + const rustsecp256k1zkp_v0_10_1_scalar* rho, + const rustsecp256k1zkp_v0_10_1_bppp_generators* gens_vec, size_t g_len, /* Same as n_vec_len, 
g_len + c_vec_len = gens->n */ - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len, - const rustsecp256k1zkp_v0_10_0_ge* commit + const rustsecp256k1zkp_v0_10_1_ge* commit ) { /* Commit to the initial public values */ unsigned char ser_commit[33], ser_scalar[32], ser_le64[8]; size_t i; - rustsecp256k1zkp_v0_10_0_ge comm = *commit; - rustsecp256k1zkp_v0_10_0_bppp_sha256_tagged_commitment_init(transcript); - rustsecp256k1zkp_v0_10_0_fe_normalize(&comm.x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&comm.y); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&comm) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_serialize_pt(&ser_commit[0], &comm)); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_commit, 33); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ser_scalar, rho); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_scalar, 32); - rustsecp256k1zkp_v0_10_0_bppp_le64(ser_le64, g_len); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_le64, 8); - rustsecp256k1zkp_v0_10_0_bppp_le64(ser_le64, gens_vec->n); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_le64, 8); + rustsecp256k1zkp_v0_10_1_ge comm = *commit; + rustsecp256k1zkp_v0_10_1_bppp_sha256_tagged_commitment_init(transcript); + rustsecp256k1zkp_v0_10_1_fe_normalize(&comm.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&comm.y); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&comm) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_serialize_pt(&ser_commit[0], &comm)); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_commit, 33); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ser_scalar, rho); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_scalar, 32); + rustsecp256k1zkp_v0_10_1_bppp_le64(ser_le64, g_len); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_le64, 8); + rustsecp256k1zkp_v0_10_1_bppp_le64(ser_le64, gens_vec->n); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_le64, 8); for (i = 0; i < gens_vec->n; i++) { - 
rustsecp256k1zkp_v0_10_0_fe_normalize(&gens_vec->gens[i].x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&gens_vec->gens[i].y); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_serialize_pt(&ser_commit[0], &gens_vec->gens[i])); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_commit, 33); + rustsecp256k1zkp_v0_10_1_fe_normalize(&gens_vec->gens[i].x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&gens_vec->gens[i].y); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_serialize_pt(&ser_commit[0], &gens_vec->gens[i])); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_commit, 33); } - rustsecp256k1zkp_v0_10_0_bppp_le64(ser_le64, c_vec_len); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_le64, 8); + rustsecp256k1zkp_v0_10_1_bppp_le64(ser_le64, c_vec_len); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_le64, 8); for (i = 0; i < c_vec_len; i++) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ser_scalar, &c_vec[i]); - rustsecp256k1zkp_v0_10_0_sha256_write(transcript, ser_scalar, 32); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ser_scalar, &c_vec[i]); + rustsecp256k1zkp_v0_10_1_sha256_write(transcript, ser_scalar, 32); } } -static void copy_vectors_into_scratch(rustsecp256k1zkp_v0_10_0_scratch_space* scratch, - rustsecp256k1zkp_v0_10_0_scalar **ns, - rustsecp256k1zkp_v0_10_0_scalar **ls, - rustsecp256k1zkp_v0_10_0_scalar **cs, - rustsecp256k1zkp_v0_10_0_ge **gs, - const rustsecp256k1zkp_v0_10_0_scalar *n_vec, - const rustsecp256k1zkp_v0_10_0_scalar *l_vec, - const rustsecp256k1zkp_v0_10_0_scalar *c_vec, - const rustsecp256k1zkp_v0_10_0_ge *gens_vec, +static void copy_vectors_into_scratch(rustsecp256k1zkp_v0_10_1_scratch_space* scratch, + rustsecp256k1zkp_v0_10_1_scalar **ns, + rustsecp256k1zkp_v0_10_1_scalar **ls, + rustsecp256k1zkp_v0_10_1_scalar **cs, + rustsecp256k1zkp_v0_10_1_ge **gs, + const rustsecp256k1zkp_v0_10_1_scalar *n_vec, + const rustsecp256k1zkp_v0_10_1_scalar *l_vec, + const rustsecp256k1zkp_v0_10_1_scalar *c_vec, + const rustsecp256k1zkp_v0_10_1_ge *gens_vec, 
size_t g_len, size_t h_len) { - *ns = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, g_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - *ls = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - *cs = (rustsecp256k1zkp_v0_10_0_scalar*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - *gs = (rustsecp256k1zkp_v0_10_0_ge*)rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, (g_len + h_len) * sizeof(rustsecp256k1zkp_v0_10_0_ge)); + *ns = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, g_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + *ls = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + *cs = (rustsecp256k1zkp_v0_10_1_scalar*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, h_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + *gs = (rustsecp256k1zkp_v0_10_1_ge*)rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, (g_len + h_len) * sizeof(rustsecp256k1zkp_v0_10_1_ge)); CHECK(ns != NULL && ls != NULL && cs != NULL && gs != NULL); - memcpy(*ns, n_vec, g_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - memcpy(*ls, l_vec, h_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - memcpy(*cs, c_vec, h_len * sizeof(rustsecp256k1zkp_v0_10_0_scalar)); - memcpy(*gs, gens_vec, (g_len + h_len) * sizeof(rustsecp256k1zkp_v0_10_0_ge)); + memcpy(*ns, n_vec, g_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + memcpy(*ls, l_vec, h_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + memcpy(*cs, c_vec, h_len * sizeof(rustsecp256k1zkp_v0_10_1_scalar)); + memcpy(*gs, gens_vec, (g_len + h_len) * sizeof(rustsecp256k1zkp_v0_10_1_ge)); } -/* Same as 
rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove but does not modify the inputs */ -static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove_const( - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, +/* Same as rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove but does not modify the inputs */ +static int rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove_const( + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, unsigned char* proof, size_t *proof_len, - rustsecp256k1zkp_v0_10_0_sha256 *transcript, - const rustsecp256k1zkp_v0_10_0_scalar* rho, - const rustsecp256k1zkp_v0_10_0_ge* g_vec, + rustsecp256k1zkp_v0_10_1_sha256 *transcript, + const rustsecp256k1zkp_v0_10_1_scalar* rho, + const rustsecp256k1zkp_v0_10_1_ge* g_vec, size_t g_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* n_vec, + const rustsecp256k1zkp_v0_10_1_scalar* n_vec, size_t n_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* l_vec, + const rustsecp256k1zkp_v0_10_1_scalar* l_vec, size_t l_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len ) { - rustsecp256k1zkp_v0_10_0_scalar *ns, *ls, *cs; - rustsecp256k1zkp_v0_10_0_ge *gs; + rustsecp256k1zkp_v0_10_1_scalar *ns, *ls, *cs; + rustsecp256k1zkp_v0_10_1_ge *gs; size_t scratch_checkpoint; size_t g_len = n_vec_len, h_len = l_vec_len; int res; - scratch_checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(&CTX->error_callback, scratch); + scratch_checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(&CTX->error_callback, scratch); copy_vectors_into_scratch(scratch, &ns, &ls, &cs, &gs, n_vec, l_vec, c_vec, g_vec, g_len, h_len); - res = rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove( + res = rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove( CTX, scratch, proof, @@ -369,56 +369,56 @@ static int rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove_const( cs, c_vec_len ); - 
rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, scratch_checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, scratch_checkpoint); return res; } -/* A complete norm argument. In contrast to rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove, this is meant +/* A complete norm argument. In contrast to rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove, this is meant to be used as a standalone norm argument. - This is a simple wrapper around rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove + This is a simple wrapper around rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove that also commits to the initial public values used in the protocol. In this case, these public values are commitment. */ -static int rustsecp256k1zkp_v0_10_0_norm_arg_prove( - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, +static int rustsecp256k1zkp_v0_10_1_norm_arg_prove( + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, unsigned char* proof, size_t *proof_len, - const rustsecp256k1zkp_v0_10_0_scalar* rho, - const rustsecp256k1zkp_v0_10_0_bppp_generators* gens_vec, - const rustsecp256k1zkp_v0_10_0_scalar* n_vec, + const rustsecp256k1zkp_v0_10_1_scalar* rho, + const rustsecp256k1zkp_v0_10_1_bppp_generators* gens_vec, + const rustsecp256k1zkp_v0_10_1_scalar* n_vec, size_t n_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* l_vec, + const rustsecp256k1zkp_v0_10_1_scalar* l_vec, size_t l_vec_len, - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len, - const rustsecp256k1zkp_v0_10_0_ge* commit + const rustsecp256k1zkp_v0_10_1_ge* commit ) { - rustsecp256k1zkp_v0_10_0_sha256 transcript; - rustsecp256k1zkp_v0_10_0_norm_arg_commit_initial_data(&transcript, rho, gens_vec, n_vec_len, c_vec, c_vec_len, commit); + rustsecp256k1zkp_v0_10_1_sha256 transcript; + rustsecp256k1zkp_v0_10_1_norm_arg_commit_initial_data(&transcript, rho, 
gens_vec, n_vec_len, c_vec, c_vec_len, commit); - return rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove_const(scratch, proof, proof_len, &transcript, rho, gens_vec->gens, gens_vec->n, n_vec, n_vec_len, l_vec, l_vec_len, c_vec, c_vec_len); + return rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove_const(scratch, proof, proof_len, &transcript, rho, gens_vec->gens, gens_vec->n, n_vec, n_vec_len, l_vec, l_vec_len, c_vec, c_vec_len); } /* Verify the proof */ -static int rustsecp256k1zkp_v0_10_0_norm_arg_verify( - rustsecp256k1zkp_v0_10_0_scratch_space* scratch, +static int rustsecp256k1zkp_v0_10_1_norm_arg_verify( + rustsecp256k1zkp_v0_10_1_scratch_space* scratch, const unsigned char* proof, size_t proof_len, - const rustsecp256k1zkp_v0_10_0_scalar* rho, - const rustsecp256k1zkp_v0_10_0_bppp_generators* gens_vec, + const rustsecp256k1zkp_v0_10_1_scalar* rho, + const rustsecp256k1zkp_v0_10_1_bppp_generators* gens_vec, size_t g_len, - const rustsecp256k1zkp_v0_10_0_scalar* c_vec, + const rustsecp256k1zkp_v0_10_1_scalar* c_vec, size_t c_vec_len, - const rustsecp256k1zkp_v0_10_0_ge* commit + const rustsecp256k1zkp_v0_10_1_ge* commit ) { - rustsecp256k1zkp_v0_10_0_ge comm = *commit; + rustsecp256k1zkp_v0_10_1_ge comm = *commit; int res; - rustsecp256k1zkp_v0_10_0_sha256 transcript; + rustsecp256k1zkp_v0_10_1_sha256 transcript; /* Commit to the initial public values */ - rustsecp256k1zkp_v0_10_0_norm_arg_commit_initial_data(&transcript, rho, gens_vec, g_len, c_vec, c_vec_len, &comm); + rustsecp256k1zkp_v0_10_1_norm_arg_commit_initial_data(&transcript, rho, gens_vec, g_len, c_vec, c_vec_len, &comm); - res = rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify( + res = rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_verify( CTX, scratch, proof, @@ -436,44 +436,44 @@ static int rustsecp256k1zkp_v0_10_0_norm_arg_verify( /* Verify |c| = 0 */ static void norm_arg_verify_zero_len(void) { - rustsecp256k1zkp_v0_10_0_scalar n_vec[64], l_vec[64], 
c_vec[64]; - rustsecp256k1zkp_v0_10_0_scalar rho, mu; - rustsecp256k1zkp_v0_10_0_ge commit; - rustsecp256k1zkp_v0_10_0_scratch *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000*10); /* shouldn't need much */ + rustsecp256k1zkp_v0_10_1_scalar n_vec[64], l_vec[64], c_vec[64]; + rustsecp256k1zkp_v0_10_1_scalar rho, mu; + rustsecp256k1zkp_v0_10_1_ge commit; + rustsecp256k1zkp_v0_10_1_scratch *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000*10); /* shouldn't need much */ unsigned char proof[1000]; unsigned int n_vec_len = 1; unsigned int c_vec_len = 1; - rustsecp256k1zkp_v0_10_0_bppp_generators *gs = rustsecp256k1zkp_v0_10_0_bppp_generators_create(CTX, n_vec_len + c_vec_len); + rustsecp256k1zkp_v0_10_1_bppp_generators *gs = rustsecp256k1zkp_v0_10_1_bppp_generators_create(CTX, n_vec_len + c_vec_len); size_t plen = sizeof(proof); random_scalar_order(&rho); - rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu, &rho); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu, &rho); random_scalar_order(&n_vec[0]); random_scalar_order(&c_vec[0]); random_scalar_order(&l_vec[0]); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_commit(CTX, scratch, &commit, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &mu)); - CHECK(rustsecp256k1zkp_v0_10_0_norm_arg_prove(scratch, proof, &plen, &rho, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &commit)); - CHECK(rustsecp256k1zkp_v0_10_0_norm_arg_verify(scratch, proof, plen, &rho, gs, n_vec_len, c_vec, c_vec_len, &commit)); - CHECK(!rustsecp256k1zkp_v0_10_0_norm_arg_verify(scratch, proof, plen, &rho, gs, n_vec_len, c_vec, 0, &commit)); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_commit(CTX, scratch, &commit, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &mu)); + CHECK(rustsecp256k1zkp_v0_10_1_norm_arg_prove(scratch, proof, &plen, &rho, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &commit)); + CHECK(rustsecp256k1zkp_v0_10_1_norm_arg_verify(scratch, proof, plen, &rho, gs, n_vec_len, c_vec, c_vec_len, 
&commit)); + CHECK(!rustsecp256k1zkp_v0_10_1_norm_arg_verify(scratch, proof, plen, &rho, gs, n_vec_len, c_vec, 0, &commit)); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gs); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gs); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } static void norm_arg_test(unsigned int n, unsigned int m) { - rustsecp256k1zkp_v0_10_0_scalar n_vec[64], l_vec[64], c_vec[64]; - rustsecp256k1zkp_v0_10_0_scalar rho, mu; - rustsecp256k1zkp_v0_10_0_ge commit; + rustsecp256k1zkp_v0_10_1_scalar n_vec[64], l_vec[64], c_vec[64]; + rustsecp256k1zkp_v0_10_1_scalar rho, mu; + rustsecp256k1zkp_v0_10_1_ge commit; size_t i, plen; int res; - rustsecp256k1zkp_v0_10_0_bppp_generators *gs = rustsecp256k1zkp_v0_10_0_bppp_generators_create(CTX, n + m); - rustsecp256k1zkp_v0_10_0_scratch *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ + rustsecp256k1zkp_v0_10_1_bppp_generators *gs = rustsecp256k1zkp_v0_10_1_bppp_generators_create(CTX, n + m); + rustsecp256k1zkp_v0_10_1_scratch *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ unsigned char proof[1000]; plen = 1000; random_scalar_order(&rho); - rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu, &rho); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu, &rho); for (i = 0; i < n; i++) { random_scalar_order(&n_vec[i]); @@ -484,48 +484,48 @@ static void norm_arg_test(unsigned int n, unsigned int m) { random_scalar_order(&c_vec[i]); } - res = rustsecp256k1zkp_v0_10_0_bppp_commit(CTX, scratch, &commit, gs, n_vec, n, l_vec, m, c_vec, m, &mu); + res = rustsecp256k1zkp_v0_10_1_bppp_commit(CTX, scratch, &commit, gs, n_vec, n, l_vec, m, c_vec, m, &mu); CHECK(res == 1); - res = rustsecp256k1zkp_v0_10_0_norm_arg_prove(scratch, proof, &plen, &rho, gs, n_vec, n, l_vec, m, c_vec, m, &commit); + res = rustsecp256k1zkp_v0_10_1_norm_arg_prove(scratch, 
proof, &plen, &rho, gs, n_vec, n, l_vec, m, c_vec, m, &commit); CHECK(res == 1); - res = rustsecp256k1zkp_v0_10_0_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); + res = rustsecp256k1zkp_v0_10_1_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); CHECK(res == 1); /* Changing any of last two scalars should break the proof */ proof[plen - 1] ^= 1; - res = rustsecp256k1zkp_v0_10_0_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); + res = rustsecp256k1zkp_v0_10_1_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); CHECK(res == 0); proof[plen - 1 - 32] ^= 1; - res = rustsecp256k1zkp_v0_10_0_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); + res = rustsecp256k1zkp_v0_10_1_norm_arg_verify(scratch, proof, plen, &rho, gs, n, c_vec, m, &commit); CHECK(res == 0); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gs); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gs); } /* Parses generators from points compressed as pubkeys */ -rustsecp256k1zkp_v0_10_0_bppp_generators* bppp_generators_parse_regular(const unsigned char* data, size_t data_len) { +rustsecp256k1zkp_v0_10_1_bppp_generators* bppp_generators_parse_regular(const unsigned char* data, size_t data_len) { size_t n = data_len / 33; - rustsecp256k1zkp_v0_10_0_bppp_generators* ret; + rustsecp256k1zkp_v0_10_1_bppp_generators* ret; if (data_len % 33 != 0) { return NULL; } - ret = (rustsecp256k1zkp_v0_10_0_bppp_generators *)checked_malloc(&CTX->error_callback, sizeof(*ret)); + ret = (rustsecp256k1zkp_v0_10_1_bppp_generators *)checked_malloc(&CTX->error_callback, sizeof(*ret)); if (ret == NULL) { return NULL; } ret->n = n; - ret->gens = (rustsecp256k1zkp_v0_10_0_ge*)checked_malloc(&CTX->error_callback, n * sizeof(*ret->gens)); + ret->gens = 
(rustsecp256k1zkp_v0_10_1_ge*)checked_malloc(&CTX->error_callback, n * sizeof(*ret->gens)); if (ret->gens == NULL) { free(ret); return NULL; } while (n--) { - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&ret->gens[n], &data[33 * n], 33)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&ret->gens[n], &data[33 * n], 33)) { free(ret->gens); free(ret); return NULL; @@ -534,36 +534,36 @@ rustsecp256k1zkp_v0_10_0_bppp_generators* bppp_generators_parse_regular(const un return ret; } -int norm_arg_verify_vectors_helper(rustsecp256k1zkp_v0_10_0_scratch *scratch, const unsigned char *gens, const unsigned char *proof, size_t plen, const unsigned char *r32, size_t n_vec_len, const unsigned char c_vec32[][32], rustsecp256k1zkp_v0_10_0_scalar *c_vec, size_t c_vec_len, const unsigned char *commit33) { - rustsecp256k1zkp_v0_10_0_sha256 transcript; - rustsecp256k1zkp_v0_10_0_bppp_generators *gs = bppp_generators_parse_regular(gens, 33*(n_vec_len + c_vec_len)); - rustsecp256k1zkp_v0_10_0_scalar rho; - rustsecp256k1zkp_v0_10_0_ge commit; +int norm_arg_verify_vectors_helper(rustsecp256k1zkp_v0_10_1_scratch *scratch, const unsigned char *gens, const unsigned char *proof, size_t plen, const unsigned char *r32, size_t n_vec_len, const unsigned char c_vec32[][32], rustsecp256k1zkp_v0_10_1_scalar *c_vec, size_t c_vec_len, const unsigned char *commit33) { + rustsecp256k1zkp_v0_10_1_sha256 transcript; + rustsecp256k1zkp_v0_10_1_bppp_generators *gs = bppp_generators_parse_regular(gens, 33*(n_vec_len + c_vec_len)); + rustsecp256k1zkp_v0_10_1_scalar rho; + rustsecp256k1zkp_v0_10_1_ge commit; int overflow; int i; int ret; CHECK(gs != NULL); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&transcript); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&transcript); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&rho, r32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&rho, r32, &overflow); CHECK(!overflow); for (i = 0; i < (int)c_vec_len; i++) { - 
rustsecp256k1zkp_v0_10_0_scalar_set_b32(&c_vec[i], c_vec32[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&c_vec[i], c_vec32[i], &overflow); CHECK(!overflow); } - CHECK(rustsecp256k1zkp_v0_10_0_ge_parse_ext(&commit, commit33)); - ret = rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify(CTX, scratch, proof, plen, &transcript, &rho, gs, n_vec_len, c_vec, c_vec_len, &commit); + CHECK(rustsecp256k1zkp_v0_10_1_ge_parse_ext(&commit, commit33)); + ret = rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_verify(CTX, scratch, proof, plen, &transcript, &rho, gs, n_vec_len, c_vec, c_vec_len, &commit); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gs); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gs); return ret; } -#define IDX_TO_TEST(i) (norm_arg_verify_vectors_helper(scratch, verify_vector_gens, verify_vector_##i##_proof, sizeof(verify_vector_##i##_proof), verify_vector_##i##_r32, verify_vector_##i##_n_vec_len, verify_vector_##i##_c_vec32, verify_vector_##i##_c_vec, sizeof(verify_vector_##i##_c_vec)/sizeof(rustsecp256k1zkp_v0_10_0_scalar), verify_vector_##i##_commit33) == verify_vector_##i##_result) +#define IDX_TO_TEST(i) (norm_arg_verify_vectors_helper(scratch, verify_vector_gens, verify_vector_##i##_proof, sizeof(verify_vector_##i##_proof), verify_vector_##i##_r32, verify_vector_##i##_n_vec_len, verify_vector_##i##_c_vec32, verify_vector_##i##_c_vec, sizeof(verify_vector_##i##_c_vec)/sizeof(rustsecp256k1zkp_v0_10_1_scalar), verify_vector_##i##_commit33) == verify_vector_##i##_result) static void norm_arg_verify_vectors(void) { - rustsecp256k1zkp_v0_10_0_scratch *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ + rustsecp256k1zkp_v0_10_1_scratch *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ size_t alloc = scratch->alloc_size; CHECK(IDX_TO_TEST(0)); @@ -581,60 +581,60 @@ static void norm_arg_verify_vectors(void) { 
CHECK(IDX_TO_TEST(12)); CHECK(alloc == scratch->alloc_size); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } #undef IDX_TO_TEST -static void norm_arg_prove_vectors_helper(rustsecp256k1zkp_v0_10_0_scratch *scratch, const unsigned char *gens, const unsigned char *proof, size_t plen, const unsigned char *r32, const unsigned char n_vec32[][32], rustsecp256k1zkp_v0_10_0_scalar *n_vec, size_t n_vec_len, const unsigned char l_vec32[][32], rustsecp256k1zkp_v0_10_0_scalar *l_vec, const unsigned char c_vec32[][32], rustsecp256k1zkp_v0_10_0_scalar *c_vec, size_t c_vec_len, int result) { - rustsecp256k1zkp_v0_10_0_sha256 transcript; - rustsecp256k1zkp_v0_10_0_bppp_generators *gs = bppp_generators_parse_regular(gens, 33*(n_vec_len + c_vec_len)); - rustsecp256k1zkp_v0_10_0_scalar rho, mu; - rustsecp256k1zkp_v0_10_0_ge commit; +static void norm_arg_prove_vectors_helper(rustsecp256k1zkp_v0_10_1_scratch *scratch, const unsigned char *gens, const unsigned char *proof, size_t plen, const unsigned char *r32, const unsigned char n_vec32[][32], rustsecp256k1zkp_v0_10_1_scalar *n_vec, size_t n_vec_len, const unsigned char l_vec32[][32], rustsecp256k1zkp_v0_10_1_scalar *l_vec, const unsigned char c_vec32[][32], rustsecp256k1zkp_v0_10_1_scalar *c_vec, size_t c_vec_len, int result) { + rustsecp256k1zkp_v0_10_1_sha256 transcript; + rustsecp256k1zkp_v0_10_1_bppp_generators *gs = bppp_generators_parse_regular(gens, 33*(n_vec_len + c_vec_len)); + rustsecp256k1zkp_v0_10_1_scalar rho, mu; + rustsecp256k1zkp_v0_10_1_ge commit; unsigned char myproof[1024]; size_t myplen = sizeof(myproof); int overflow; int i; CHECK(gs != NULL); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&transcript); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&rho, r32, &overflow); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&transcript); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&rho, r32, &overflow); CHECK(!overflow); - 
rustsecp256k1zkp_v0_10_0_scalar_sqr(&mu, &rho); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&mu, &rho); for (i = 0; i < (int)n_vec_len; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&n_vec[i], n_vec32[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&n_vec[i], n_vec32[i], &overflow); CHECK(!overflow); } for (i = 0; i < (int)c_vec_len; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&l_vec[i], l_vec32[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&l_vec[i], l_vec32[i], &overflow); CHECK(!overflow); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&c_vec[i], c_vec32[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&c_vec[i], c_vec32[i], &overflow); CHECK(!overflow); } - CHECK(rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_prove_const(scratch, myproof, &myplen, &transcript, &rho, gs->gens, gs->n, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len) == result); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_prove_const(scratch, myproof, &myplen, &transcript, &rho, gs->gens, gs->n, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len) == result); if (!result) { - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gs); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gs); return; } CHECK(plen == myplen); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(proof, myproof, plen) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(proof, myproof, plen) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_commit(CTX, scratch, &commit, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &mu)); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&transcript); - CHECK(rustsecp256k1zkp_v0_10_0_bppp_rangeproof_norm_product_verify(CTX, scratch, proof, plen, &transcript, &rho, gs, n_vec_len, c_vec, c_vec_len, &commit)); - rustsecp256k1zkp_v0_10_0_bppp_generators_destroy(CTX, gs); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_commit(CTX, scratch, &commit, gs, n_vec, n_vec_len, l_vec, c_vec_len, c_vec, c_vec_len, &mu)); + 
rustsecp256k1zkp_v0_10_1_sha256_initialize(&transcript); + CHECK(rustsecp256k1zkp_v0_10_1_bppp_rangeproof_norm_product_verify(CTX, scratch, proof, plen, &transcript, &rho, gs, n_vec_len, c_vec, c_vec_len, &commit)); + rustsecp256k1zkp_v0_10_1_bppp_generators_destroy(CTX, gs); } #define IDX_TO_TEST(i) (norm_arg_prove_vectors_helper(scratch, prove_vector_gens, prove_vector_##i##_proof, sizeof(prove_vector_##i##_proof), prove_vector_##i##_r32,\ - prove_vector_##i##_n_vec32, prove_vector_##i##_n_vec, sizeof(prove_vector_##i##_n_vec)/sizeof(rustsecp256k1zkp_v0_10_0_scalar),\ + prove_vector_##i##_n_vec32, prove_vector_##i##_n_vec, sizeof(prove_vector_##i##_n_vec)/sizeof(rustsecp256k1zkp_v0_10_1_scalar),\ prove_vector_##i##_l_vec32, prove_vector_##i##_l_vec,\ - prove_vector_##i##_c_vec32, prove_vector_##i##_c_vec, sizeof(prove_vector_##i##_c_vec)/sizeof(rustsecp256k1zkp_v0_10_0_scalar), \ + prove_vector_##i##_c_vec32, prove_vector_##i##_c_vec, sizeof(prove_vector_##i##_c_vec)/sizeof(rustsecp256k1zkp_v0_10_1_scalar), \ prove_vector_##i##_result)) static void norm_arg_prove_vectors(void) { - rustsecp256k1zkp_v0_10_0_scratch *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ + rustsecp256k1zkp_v0_10_1_scratch *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000*1000); /* shouldn't need much */ size_t alloc = scratch->alloc_size; IDX_TO_TEST(0); @@ -644,7 +644,7 @@ static void norm_arg_prove_vectors(void) { IDX_TO_TEST(4); CHECK(alloc == scratch->alloc_size); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } #undef IDX_TO_TEST diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/Makefile.am.include index 331299aa..6eecce61 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/Makefile.am.include +++ 
b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_ecdh.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_ecdh.h noinst_HEADERS += src/modules/ecdh/main_impl.h noinst_HEADERS += src/modules/ecdh/tests_impl.h noinst_HEADERS += src/modules/ecdh/bench_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/bench_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/bench_impl.h index e7e8e56e..ba34dee7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/bench_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/bench_impl.h @@ -10,8 +10,8 @@ #include "../../../include/secp256k1_ecdh.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context *ctx; - rustsecp256k1zkp_v0_10_0_pubkey point; + rustsecp256k1zkp_v0_10_1_context *ctx; + rustsecp256k1zkp_v0_10_1_pubkey point; unsigned char scalar[32]; } bench_ecdh_data; @@ -29,7 +29,7 @@ static void bench_ecdh_setup(void* arg) { for (i = 0; i < 32; i++) { data->scalar[i] = i + 1; } - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(data->ctx, &data->point, point, sizeof(point)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(data->ctx, &data->point, point, sizeof(point)) == 1); } static void bench_ecdh(void* arg, int iters) { @@ -38,7 +38,7 @@ static void bench_ecdh(void* arg, int iters) { bench_ecdh_data *data = (bench_ecdh_data*)arg; for (i = 0; i < iters; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(data->ctx, res, &data->point, data->scalar, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(data->ctx, res, &data->point, data->scalar, NULL, NULL) == 1); } } @@ -47,11 +47,11 @@ static void run_ecdh_bench(int iters, int argc, char** argv) { int d = argc == 1; /* create a context with no capabilities */ - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_FLAGS_TYPE_CONTEXT); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_FLAGS_TYPE_CONTEXT); if (d 
|| have_flag(argc, argv, "ecdh")) run_benchmark("ecdh", bench_ecdh, bench_ecdh_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); } #endif /* SECP256K1_MODULE_ECDH_BENCH_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/main_impl.h index 46130a30..65a0f14d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/main_impl.h @@ -12,26 +12,26 @@ static int ecdh_hash_function_sha256(unsigned char *output, const unsigned char *x32, const unsigned char *y32, void *data) { unsigned char version = (y32[31] & 0x01) | 0x02; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; (void)data; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, &version, 1); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, x32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, &version, 1); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, x32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); return 1; } -const rustsecp256k1zkp_v0_10_0_ecdh_hash_function rustsecp256k1zkp_v0_10_0_ecdh_hash_function_sha256 = ecdh_hash_function_sha256; -const rustsecp256k1zkp_v0_10_0_ecdh_hash_function rustsecp256k1zkp_v0_10_0_ecdh_hash_function_default = ecdh_hash_function_sha256; +const rustsecp256k1zkp_v0_10_1_ecdh_hash_function rustsecp256k1zkp_v0_10_1_ecdh_hash_function_sha256 = ecdh_hash_function_sha256; +const rustsecp256k1zkp_v0_10_1_ecdh_hash_function rustsecp256k1zkp_v0_10_1_ecdh_hash_function_default = ecdh_hash_function_sha256; -int rustsecp256k1zkp_v0_10_0_ecdh(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_0_pubkey 
*point, const unsigned char *scalar, rustsecp256k1zkp_v0_10_0_ecdh_hash_function hashfp, void *data) { +int rustsecp256k1zkp_v0_10_1_ecdh(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_1_pubkey *point, const unsigned char *scalar, rustsecp256k1zkp_v0_10_1_ecdh_hash_function hashfp, void *data) { int ret = 0; int overflow = 0; - rustsecp256k1zkp_v0_10_0_gej res; - rustsecp256k1zkp_v0_10_0_ge pt; - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_gej res; + rustsecp256k1zkp_v0_10_1_ge pt; + rustsecp256k1zkp_v0_10_1_scalar s; unsigned char x[32]; unsigned char y[32]; @@ -41,29 +41,29 @@ int rustsecp256k1zkp_v0_10_0_ecdh(const rustsecp256k1zkp_v0_10_0_context* ctx, u ARG_CHECK(scalar != NULL); if (hashfp == NULL) { - hashfp = rustsecp256k1zkp_v0_10_0_ecdh_hash_function_default; + hashfp = rustsecp256k1zkp_v0_10_1_ecdh_hash_function_default; } - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pt, point); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, scalar, &overflow); + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pt, point); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, scalar, &overflow); - overflow |= rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&s, &rustsecp256k1zkp_v0_10_0_scalar_one, overflow); + overflow |= rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&s, &rustsecp256k1zkp_v0_10_1_scalar_one, overflow); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res, &pt, &s); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pt, &res); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res, &pt, &s); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pt, &res); /* Compute a hash of the point */ - rustsecp256k1zkp_v0_10_0_fe_normalize(&pt.x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&pt.y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(x, &pt.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(y, &pt.y); + rustsecp256k1zkp_v0_10_1_fe_normalize(&pt.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&pt.y); + 
rustsecp256k1zkp_v0_10_1_fe_get_b32(x, &pt.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(y, &pt.y); ret = hashfp(output, x, y, data); memset(x, 0, 32); memset(y, 0, 32); - rustsecp256k1zkp_v0_10_0_scalar_clear(&s); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s); return !!ret & !overflow; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/tests_impl.h index b44fb2e6..e5c1bdab 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdh/tests_impl.h @@ -25,59 +25,59 @@ static int ecdh_hash_function_custom(unsigned char *output, const unsigned char } static void test_ecdh_api(void) { - rustsecp256k1zkp_v0_10_0_pubkey point; + rustsecp256k1zkp_v0_10_1_pubkey point; unsigned char res[32]; unsigned char s_one[32] = { 0 }; s_one[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point, s_one) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point, s_one) == 1); /* Check all NULLs are detected */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, res, &point, s_one, NULL, NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdh(CTX, NULL, &point, s_one, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdh(CTX, res, NULL, s_one, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdh(CTX, res, &point, NULL, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, res, &point, s_one, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, res, &point, s_one, NULL, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdh(CTX, NULL, &point, s_one, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdh(CTX, res, NULL, s_one, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdh(CTX, res, &point, NULL, NULL, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, res, &point, s_one, NULL, NULL) == 1); } static void test_ecdh_generator_basepoint(void) { 
unsigned char s_one[32] = { 0 }; - rustsecp256k1zkp_v0_10_0_pubkey point[2]; + rustsecp256k1zkp_v0_10_1_pubkey point[2]; int i; s_one[31] = 1; /* Check against pubkey creation when the basepoint is the generator */ for (i = 0; i < 2 * COUNT; ++i) { - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char s_b32[32]; unsigned char output_ecdh[65]; unsigned char output_ser[32]; unsigned char point_ser[65]; size_t point_ser_len = sizeof(point_ser); - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; random_scalar_order(&s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(s_b32, &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(s_b32, &s); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point[0], s_one) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point[1], s_b32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point[0], s_one) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point[1], s_b32) == 1); /* compute using ECDH function with custom hash function */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output_ecdh, &point[0], s_b32, ecdh_hash_function_custom, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output_ecdh, &point[0], s_b32, ecdh_hash_function_custom, NULL) == 1); /* compute "explicitly" */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, point_ser, &point_ser_len, &point[1], SECP256K1_EC_UNCOMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, point_ser, &point_ser_len, &point[1], SECP256K1_EC_UNCOMPRESSED) == 1); /* compare */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output_ecdh, point_ser, 65) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output_ecdh, point_ser, 65) == 0); /* compute using ECDH function with default hash function */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output_ecdh, &point[0], s_b32, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output_ecdh, &point[0], s_b32, NULL, NULL) 
== 1); /* compute "explicitly" */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, point_ser, &point_ser_len, &point[1], SECP256K1_EC_COMPRESSED) == 1); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, point_ser, point_ser_len); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output_ser); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, point_ser, &point_ser_len, &point[1], SECP256K1_EC_COMPRESSED) == 1); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, point_ser, point_ser_len); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output_ser); /* compare */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output_ecdh, output_ser, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output_ecdh, output_ser, 32) == 0); } } @@ -91,29 +91,29 @@ static void test_bad_scalar(void) { }; unsigned char s_rand[32] = { 0 }; unsigned char output[32]; - rustsecp256k1zkp_v0_10_0_scalar rand; - rustsecp256k1zkp_v0_10_0_pubkey point; + rustsecp256k1zkp_v0_10_1_scalar rand; + rustsecp256k1zkp_v0_10_1_pubkey point; /* Create random point */ random_scalar_order(&rand); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(s_rand, &rand); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point, s_rand) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(s_rand, &rand); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point, s_rand) == 1); /* Try to multiply it by bad values */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output, &point, s_zero, NULL, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output, &point, s_overflow, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output, &point, s_zero, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output, &point, s_overflow, NULL, NULL) == 0); /* ...and a good one */ s_overflow[31] -= 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output, &point, s_overflow, NULL, NULL) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output, &point, s_overflow, NULL, NULL) == 1); /* Hash function failure results in ecdh failure */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, output, &point, s_overflow, ecdh_hash_function_test_fail, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, output, &point, s_overflow, ecdh_hash_function_test_fail, NULL) == 0); } /** Test that ECDH(sG, 1/s) == ECDH((1/s)G, s) == ECDH(G, 1) for a few random s. */ static void test_result_basepoint(void) { - rustsecp256k1zkp_v0_10_0_pubkey point; - rustsecp256k1zkp_v0_10_0_scalar rand; + rustsecp256k1zkp_v0_10_1_pubkey point; + rustsecp256k1zkp_v0_10_1_scalar rand; unsigned char s[32]; unsigned char s_inv[32]; unsigned char out[32]; @@ -123,22 +123,22 @@ static void test_result_basepoint(void) { unsigned char s_one[32] = { 0 }; s_one[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point, s_one) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, out_base, &point, s_one, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point, s_one) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, out_base, &point, s_one, NULL, NULL) == 1); for (i = 0; i < 2 * COUNT; i++) { random_scalar_order(&rand); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(s, &rand); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&rand, &rand); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(s_inv, &rand); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(s, &rand); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&rand, &rand); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(s_inv, &rand); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point, s) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, out, &point, s_inv, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, out_base, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point, s) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, out, &point, s_inv, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, 
out_base, 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &point, s_inv) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdh(CTX, out_inv, &point, s, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out_inv, out_base, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &point, s_inv) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdh(CTX, out_inv, &point, s, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out_inv, out_base, 32) == 0); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/Makefile.am.include index 94487ee6..1fd9bafc 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_ecdsa_adaptor.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_ecdsa_adaptor.h noinst_HEADERS += src/modules/ecdsa_adaptor/main_impl.h noinst_HEADERS += src/modules/ecdsa_adaptor/dleq_impl.h noinst_HEADERS += src/modules/ecdsa_adaptor/tests_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/dleq_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/dleq_impl.h index 68f4da37..bfba716a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/dleq_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/dleq_impl.h @@ -3,8 +3,8 @@ /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("DLEQ")||SHA256("DLEQ"). 
*/ -static void rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_nonce_function_dleq_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x8cc4beacul; sha->s[1] = 0x2e011f3ful; sha->s[2] = 0x355c75fbul; @@ -20,38 +20,38 @@ static void rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged(rustsecp2 /* algo argument for nonce_function_ecdsa_adaptor to derive the nonce using a tagged hash function. */ static const unsigned char dleq_algo[4] = "DLEQ"; -static int rustsecp256k1zkp_v0_10_0_dleq_hash_point(rustsecp256k1zkp_v0_10_0_sha256 *sha, rustsecp256k1zkp_v0_10_0_ge *p) { +static int rustsecp256k1zkp_v0_10_1_dleq_hash_point(rustsecp256k1zkp_v0_10_1_sha256 *sha, rustsecp256k1zkp_v0_10_1_ge *p) { unsigned char buf[33]; size_t size = 33; - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(p, buf, &size, 1)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(p, buf, &size, 1)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(sha, buf, size); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, buf, size); return 1; } -static int rustsecp256k1zkp_v0_10_0_dleq_nonce(rustsecp256k1zkp_v0_10_0_scalar *k, const unsigned char *sk32, const unsigned char *gen2_33, const unsigned char *p1_33, const unsigned char *p2_33, rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +static int rustsecp256k1zkp_v0_10_1_dleq_nonce(rustsecp256k1zkp_v0_10_1_scalar *k, const unsigned char *sk32, const unsigned char *gen2_33, const unsigned char *p1_33, const unsigned char *p2_33, rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char buf[32]; unsigned char nonce[32]; size_t size = 33; if (noncefp == NULL) { - noncefp = 
rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor; + noncefp = rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor; } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, p1_33, size); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, p2_33, size); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, p1_33, size); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, p2_33, size); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); if (!noncefp(nonce, buf, sk32, gen2_33, dleq_algo, sizeof(dleq_algo), ndata)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(k, nonce, NULL); - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(k)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(k, nonce, NULL); + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(k)) { return 0; } @@ -60,36 +60,36 @@ static int rustsecp256k1zkp_v0_10_0_dleq_nonce(rustsecp256k1zkp_v0_10_0_scalar * /* Generates a challenge as defined in the DLC Specification at * https://github.com/discreetlogcontracts/dlcspecs */ -static void rustsecp256k1zkp_v0_10_0_dleq_challenge(rustsecp256k1zkp_v0_10_0_scalar *e, rustsecp256k1zkp_v0_10_0_ge *gen2, rustsecp256k1zkp_v0_10_0_ge *r1, rustsecp256k1zkp_v0_10_0_ge *r2, rustsecp256k1zkp_v0_10_0_ge *p1, rustsecp256k1zkp_v0_10_0_ge *p2) { +static void rustsecp256k1zkp_v0_10_1_dleq_challenge(rustsecp256k1zkp_v0_10_1_scalar *e, rustsecp256k1zkp_v0_10_1_ge *gen2, rustsecp256k1zkp_v0_10_1_ge *r1, rustsecp256k1zkp_v0_10_1_ge *r2, rustsecp256k1zkp_v0_10_1_ge *p1, rustsecp256k1zkp_v0_10_1_ge *p2) { unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; - rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged(&sha); - rustsecp256k1zkp_v0_10_0_dleq_hash_point(&sha, p1); - rustsecp256k1zkp_v0_10_0_dleq_hash_point(&sha, gen2); - rustsecp256k1zkp_v0_10_0_dleq_hash_point(&sha, p2); - 
rustsecp256k1zkp_v0_10_0_dleq_hash_point(&sha, r1); - rustsecp256k1zkp_v0_10_0_dleq_hash_point(&sha, r2); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_nonce_function_dleq_sha256_tagged(&sha); + rustsecp256k1zkp_v0_10_1_dleq_hash_point(&sha, p1); + rustsecp256k1zkp_v0_10_1_dleq_hash_point(&sha, gen2); + rustsecp256k1zkp_v0_10_1_dleq_hash_point(&sha, p2); + rustsecp256k1zkp_v0_10_1_dleq_hash_point(&sha, r1); + rustsecp256k1zkp_v0_10_1_dleq_hash_point(&sha, r2); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(e, buf, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(e, buf, NULL); } /* P1 = x*G, P2 = x*Y */ -static void rustsecp256k1zkp_v0_10_0_dleq_pair(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_0_ge *p1, rustsecp256k1zkp_v0_10_0_ge *p2, const rustsecp256k1zkp_v0_10_0_scalar *sk, const rustsecp256k1zkp_v0_10_0_ge *gen2) { - rustsecp256k1zkp_v0_10_0_gej p1j, p2j; +static void rustsecp256k1zkp_v0_10_1_dleq_pair(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_1_ge *p1, rustsecp256k1zkp_v0_10_1_ge *p2, const rustsecp256k1zkp_v0_10_1_scalar *sk, const rustsecp256k1zkp_v0_10_1_ge *gen2) { + rustsecp256k1zkp_v0_10_1_gej p1j, p2j; - rustsecp256k1zkp_v0_10_0_ecmult_gen(ecmult_gen_ctx, &p1j, sk); - rustsecp256k1zkp_v0_10_0_ge_set_gej(p1, &p1j); - rustsecp256k1zkp_v0_10_0_ecmult_const(&p2j, gen2, sk); - rustsecp256k1zkp_v0_10_0_ge_set_gej(p2, &p2j); + rustsecp256k1zkp_v0_10_1_ecmult_gen(ecmult_gen_ctx, &p1j, sk); + rustsecp256k1zkp_v0_10_1_ge_set_gej(p1, &p1j); + rustsecp256k1zkp_v0_10_1_ecmult_const(&p2j, gen2, sk); + rustsecp256k1zkp_v0_10_1_ge_set_gej(p2, &p2j); } /* Generates a proof that the discrete logarithm of P1 to the secp256k1 base G is the * same as the discrete logarithm of P2 to the base Y */ -static int rustsecp256k1zkp_v0_10_0_dleq_prove(const rustsecp256k1zkp_v0_10_0_context* ctx, 
rustsecp256k1zkp_v0_10_0_scalar *s, rustsecp256k1zkp_v0_10_0_scalar *e, const rustsecp256k1zkp_v0_10_0_scalar *sk, rustsecp256k1zkp_v0_10_0_ge *gen2, rustsecp256k1zkp_v0_10_0_ge *p1, rustsecp256k1zkp_v0_10_0_ge *p2, rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { - rustsecp256k1zkp_v0_10_0_ge r1, r2; - rustsecp256k1zkp_v0_10_0_scalar k = { 0 }; +static int rustsecp256k1zkp_v0_10_1_dleq_prove(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar *s, rustsecp256k1zkp_v0_10_1_scalar *e, const rustsecp256k1zkp_v0_10_1_scalar *sk, rustsecp256k1zkp_v0_10_1_ge *gen2, rustsecp256k1zkp_v0_10_1_ge *p1, rustsecp256k1zkp_v0_10_1_ge *p2, rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { + rustsecp256k1zkp_v0_10_1_ge r1, r2; + rustsecp256k1zkp_v0_10_1_scalar k = { 0 }; unsigned char sk32[32]; unsigned char gen2_33[33]; unsigned char p1_33[33]; @@ -97,62 +97,62 @@ static int rustsecp256k1zkp_v0_10_0_dleq_prove(const rustsecp256k1zkp_v0_10_0_co int ret = 1; size_t pubkey_size = 33; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sk32, sk); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(gen2, gen2_33, &pubkey_size, 1)) { + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sk32, sk); + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(gen2, gen2_33, &pubkey_size, 1)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(p1, p1_33, &pubkey_size, 1)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(p1, p1_33, &pubkey_size, 1)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(p2, p2_33, &pubkey_size, 1)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(p2, p2_33, &pubkey_size, 1)) { return 0; } - ret &= rustsecp256k1zkp_v0_10_0_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, noncefp, ndata); + ret &= rustsecp256k1zkp_v0_10_1_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, noncefp, ndata); /* R1 = k*G, R2 = k*Y */ - 
rustsecp256k1zkp_v0_10_0_dleq_pair(&ctx->ecmult_gen_ctx, &r1, &r2, &k, gen2); + rustsecp256k1zkp_v0_10_1_dleq_pair(&ctx->ecmult_gen_ctx, &r1, &r2, &k, gen2); /* We declassify the non-secret values r1 and r2 to allow using them as * branch points. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &r1, sizeof(r1)); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &r2, sizeof(r2)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &r1, sizeof(r1)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &r2, sizeof(r2)); /* e = tagged hash(p1, gen2, p2, r1, r2) */ /* s = k + e * sk */ - rustsecp256k1zkp_v0_10_0_dleq_challenge(e, gen2, &r1, &r2, p1, p2); - rustsecp256k1zkp_v0_10_0_scalar_mul(s, e, sk); - rustsecp256k1zkp_v0_10_0_scalar_add(s, s, &k); + rustsecp256k1zkp_v0_10_1_dleq_challenge(e, gen2, &r1, &r2, p1, p2); + rustsecp256k1zkp_v0_10_1_scalar_mul(s, e, sk); + rustsecp256k1zkp_v0_10_1_scalar_add(s, s, &k); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k); return ret; } -static int rustsecp256k1zkp_v0_10_0_dleq_verify(const rustsecp256k1zkp_v0_10_0_scalar *s, const rustsecp256k1zkp_v0_10_0_scalar *e, rustsecp256k1zkp_v0_10_0_ge *p1, rustsecp256k1zkp_v0_10_0_ge *gen2, rustsecp256k1zkp_v0_10_0_ge *p2) { - rustsecp256k1zkp_v0_10_0_scalar e_neg; - rustsecp256k1zkp_v0_10_0_scalar e_expected; - rustsecp256k1zkp_v0_10_0_gej gen2j; - rustsecp256k1zkp_v0_10_0_gej p1j, p2j; - rustsecp256k1zkp_v0_10_0_gej r1j, r2j; - rustsecp256k1zkp_v0_10_0_ge r1, r2; - rustsecp256k1zkp_v0_10_0_gej tmpj; +static int rustsecp256k1zkp_v0_10_1_dleq_verify(const rustsecp256k1zkp_v0_10_1_scalar *s, const rustsecp256k1zkp_v0_10_1_scalar *e, rustsecp256k1zkp_v0_10_1_ge *p1, rustsecp256k1zkp_v0_10_1_ge *gen2, rustsecp256k1zkp_v0_10_1_ge *p2) { + rustsecp256k1zkp_v0_10_1_scalar e_neg; + rustsecp256k1zkp_v0_10_1_scalar e_expected; + rustsecp256k1zkp_v0_10_1_gej gen2j; + rustsecp256k1zkp_v0_10_1_gej p1j, p2j; + rustsecp256k1zkp_v0_10_1_gej r1j, r2j; + rustsecp256k1zkp_v0_10_1_ge r1, r2; + 
rustsecp256k1zkp_v0_10_1_gej tmpj; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&p1j, p1); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&p2j, p2); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&p1j, p1); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&p2j, p2); - rustsecp256k1zkp_v0_10_0_scalar_negate(&e_neg, e); + rustsecp256k1zkp_v0_10_1_scalar_negate(&e_neg, e); /* R1 = s*G - e*P1 */ - rustsecp256k1zkp_v0_10_0_ecmult(&r1j, &p1j, &e_neg, s); + rustsecp256k1zkp_v0_10_1_ecmult(&r1j, &p1j, &e_neg, s); /* R2 = s*gen2 - e*P2 */ - rustsecp256k1zkp_v0_10_0_ecmult(&tmpj, &p2j, &e_neg, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gen2j, gen2); - rustsecp256k1zkp_v0_10_0_ecmult(&r2j, &gen2j, s, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_gej_add_var(&r2j, &r2j, &tmpj, NULL); + rustsecp256k1zkp_v0_10_1_ecmult(&tmpj, &p2j, &e_neg, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gen2j, gen2); + rustsecp256k1zkp_v0_10_1_ecmult(&r2j, &gen2j, s, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_gej_add_var(&r2j, &r2j, &tmpj, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r1, &r1j); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r2, &r2j); - rustsecp256k1zkp_v0_10_0_dleq_challenge(&e_expected, gen2, &r1, &r2, p1, p2); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r1, &r1j); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r2, &r2j); + rustsecp256k1zkp_v0_10_1_dleq_challenge(&e_expected, gen2, &r1, &r2, p1, p2); - rustsecp256k1zkp_v0_10_0_scalar_add(&e_expected, &e_expected, &e_neg); - return rustsecp256k1zkp_v0_10_0_scalar_is_zero(&e_expected); + rustsecp256k1zkp_v0_10_1_scalar_add(&e_expected, &e_expected, &e_neg); + return rustsecp256k1zkp_v0_10_1_scalar_is_zero(&e_expected); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/main_impl.h index 0d78393a..cc7a11c7 100644 --- 
a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/main_impl.h @@ -11,53 +11,53 @@ #include "dleq_impl.h" /* (R, R', s', dleq_proof) */ -static int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(unsigned char *adaptor_sig162, rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_ge *rp, const rustsecp256k1zkp_v0_10_0_scalar *sp, const rustsecp256k1zkp_v0_10_0_scalar *dleq_proof_e, const rustsecp256k1zkp_v0_10_0_scalar *dleq_proof_s) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(unsigned char *adaptor_sig162, rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_ge *rp, const rustsecp256k1zkp_v0_10_1_scalar *sp, const rustsecp256k1zkp_v0_10_1_scalar *dleq_proof_e, const rustsecp256k1zkp_v0_10_1_scalar *dleq_proof_s) { size_t size = 33; - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(r, adaptor_sig162, &size, 1)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(r, adaptor_sig162, &size, 1)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(rp, &adaptor_sig162[33], &size, 1)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(rp, &adaptor_sig162[33], &size, 1)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&adaptor_sig162[66], sp); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&adaptor_sig162[98], dleq_proof_e); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&adaptor_sig162[130], dleq_proof_s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&adaptor_sig162[66], sp); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&adaptor_sig162[98], dleq_proof_e); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&adaptor_sig162[130], dleq_proof_s); return 1; } -static int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(rustsecp256k1zkp_v0_10_0_ge *r, rustsecp256k1zkp_v0_10_0_scalar *sigr, rustsecp256k1zkp_v0_10_0_ge *rp, rustsecp256k1zkp_v0_10_0_scalar *sp, rustsecp256k1zkp_v0_10_0_scalar *dleq_proof_e, 
rustsecp256k1zkp_v0_10_0_scalar *dleq_proof_s, const unsigned char *adaptor_sig162) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(rustsecp256k1zkp_v0_10_1_ge *r, rustsecp256k1zkp_v0_10_1_scalar *sigr, rustsecp256k1zkp_v0_10_1_ge *rp, rustsecp256k1zkp_v0_10_1_scalar *sp, rustsecp256k1zkp_v0_10_1_scalar *dleq_proof_e, rustsecp256k1zkp_v0_10_1_scalar *dleq_proof_s, const unsigned char *adaptor_sig162) { /* If r is deserialized, require that a sigr is provided to receive * the X-coordinate */ VERIFY_CHECK((r == NULL) || (r != NULL && sigr != NULL)); if (r != NULL) { - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(r, &adaptor_sig162[0], 33)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(r, &adaptor_sig162[0], 33)) { return 0; } } if (sigr != NULL) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(sigr, &adaptor_sig162[1], NULL); - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigr)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(sigr, &adaptor_sig162[1], NULL); + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigr)) { return 0; } } if (rp != NULL) { - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(rp, &adaptor_sig162[33], 33)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(rp, &adaptor_sig162[33], 33)) { return 0; } } if (sp != NULL) { - if (!rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(sp, &adaptor_sig162[66])) { + if (!rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(sp, &adaptor_sig162[66])) { return 0; } } if (dleq_proof_e != NULL) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(dleq_proof_e, &adaptor_sig162[98], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(dleq_proof_e, &adaptor_sig162[98], NULL); } if (dleq_proof_s != NULL) { int overflow; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(dleq_proof_s, &adaptor_sig162[130], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(dleq_proof_s, &adaptor_sig162[130], &overflow); if (overflow) { return 0; } @@ -67,8 +67,8 @@ static int 
rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(rustsecp256k1z /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("ECDSAadaptor/non")||SHA256("ECDSAadaptor/non"). */ -static void rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x791dae43ul; sha->s[1] = 0xe52d3b44ul; sha->s[2] = 0x37f9edeaul; @@ -83,8 +83,8 @@ static void rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged( /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("ECDSAadaptor/aux")||SHA256("ECDSAadaptor/aux"). */ -static void rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged_aux(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0xd14c7bd9ul; sha->s[1] = 0x095d35e6ul; sha->s[2] = 0xb8490a88ul; @@ -102,7 +102,7 @@ static const unsigned char ecdsa_adaptor_algo[16] = "ECDSAadaptor/non"; /* Modified BIP-340 nonce function */ static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *pk33, const unsigned char *algo, size_t algolen, void *data) { - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char masked_key[32]; int i; @@ -111,9 +111,9 @@ static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned c } if (data != NULL) { - rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha); - 
rustsecp256k1zkp_v0_10_0_sha256_write(&sha, data, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, masked_key); + rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, data, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, masked_key); for (i = 0; i < 32; i++) { masked_key[i] ^= key32[i]; } @@ -123,117 +123,117 @@ static int nonce_function_ecdsa_adaptor(unsigned char *nonce32, const unsigned c * algorithims. An optimized tagging implementation is used if the default * tag is provided. */ if (algolen == sizeof(ecdsa_adaptor_algo) - && rustsecp256k1zkp_v0_10_0_memcmp_var(algo, ecdsa_adaptor_algo, algolen) == 0) { - rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged(&sha); + && rustsecp256k1zkp_v0_10_1_memcmp_var(algo, ecdsa_adaptor_algo, algolen) == 0) { + rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged(&sha); } else if (algolen == sizeof(dleq_algo) - && rustsecp256k1zkp_v0_10_0_memcmp_var(algo, dleq_algo, algolen) == 0) { - rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged(&sha); + && rustsecp256k1zkp_v0_10_1_memcmp_var(algo, dleq_algo, algolen) == 0) { + rustsecp256k1zkp_v0_10_1_nonce_function_dleq_sha256_tagged(&sha); } else { - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, algo, algolen); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, algo, algolen); } /* Hash (masked-)key||pk||msg using the tagged hash as per BIP-340 */ if (data != NULL) { - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, masked_key, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, masked_key, 32); } else { - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, key32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, key32, 32); } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, pk33, 33); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, msg32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, nonce32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, pk33, 33); + 
rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, nonce32); return 1; } -const rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor = nonce_function_ecdsa_adaptor; - -int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *adaptor_sig162, unsigned char *seckey32, const rustsecp256k1zkp_v0_10_0_pubkey *enckey, const unsigned char *msg32, rustsecp256k1zkp_v0_10_0_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { - rustsecp256k1zkp_v0_10_0_scalar k; - rustsecp256k1zkp_v0_10_0_gej rj, rpj; - rustsecp256k1zkp_v0_10_0_ge r, rp; - rustsecp256k1zkp_v0_10_0_ge enckey_ge; - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_s; - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_e; - rustsecp256k1zkp_v0_10_0_scalar sk; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar sigr; - rustsecp256k1zkp_v0_10_0_scalar n; +const rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor = nonce_function_ecdsa_adaptor; + +int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *adaptor_sig162, unsigned char *seckey32, const rustsecp256k1zkp_v0_10_1_pubkey *enckey, const unsigned char *msg32, rustsecp256k1zkp_v0_10_1_nonce_function_hardened_ecdsa_adaptor noncefp, void *ndata) { + rustsecp256k1zkp_v0_10_1_scalar k; + rustsecp256k1zkp_v0_10_1_gej rj, rpj; + rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_ge enckey_ge; + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_s; + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_e; + rustsecp256k1zkp_v0_10_1_scalar sk; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar sigr; + rustsecp256k1zkp_v0_10_1_scalar n; unsigned char nonce32[32] = { 0 }; 
unsigned char buf33[33]; size_t size = 33; int ret = 1; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(adaptor_sig162 != NULL); ARG_CHECK(seckey32 != NULL); ARG_CHECK(enckey != NULL); ARG_CHECK(msg32 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_clear(&dleq_proof_e); - rustsecp256k1zkp_v0_10_0_scalar_clear(&dleq_proof_s); + rustsecp256k1zkp_v0_10_1_scalar_clear(&dleq_proof_e); + rustsecp256k1zkp_v0_10_1_scalar_clear(&dleq_proof_s); if (noncefp == NULL) { - noncefp = rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor; + noncefp = rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor; } - ret &= rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &enckey_ge, enckey); - ret &= rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&enckey_ge, buf33, &size, 1); + ret &= rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &enckey_ge, enckey); + ret &= rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&enckey_ge, buf33, &size, 1); ret &= !!noncefp(nonce32, msg32, seckey32, buf33, ecdsa_adaptor_algo, sizeof(ecdsa_adaptor_algo), ndata); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k, nonce32, NULL); - ret &= !rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&k, &rustsecp256k1zkp_v0_10_0_scalar_one, !ret); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k, nonce32, NULL); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&k, &rustsecp256k1zkp_v0_10_1_scalar_one, !ret); /* R' := k*G */ - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &rpj, &k); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&rp, &rpj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &rpj, &k); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&rp, &rpj); /* R = k*Y; */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&rj, &enckey_ge, &k); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r, &rj); + 
rustsecp256k1zkp_v0_10_1_ecmult_const(&rj, &enckey_ge, &k); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rj); /* We declassify the non-secret values rp and r to allow using them * as branch points. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &rp, sizeof(rp)); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &r, sizeof(r)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &rp, sizeof(rp)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &r, sizeof(r)); /* dleq_proof = DLEQ_prove(k, (R', Y, R)) */ - ret &= rustsecp256k1zkp_v0_10_0_dleq_prove(ctx, &dleq_proof_s, &dleq_proof_e, &k, &enckey_ge, &rp, &r, noncefp, ndata); - - ret &= rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sk, seckey32); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&sk, &rustsecp256k1zkp_v0_10_0_scalar_one, !ret); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&msg, msg32, NULL); - rustsecp256k1zkp_v0_10_0_fe_normalize(&r.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(buf33, &r.x); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sigr, buf33, NULL); - ret &= !rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sigr); + ret &= rustsecp256k1zkp_v0_10_1_dleq_prove(ctx, &dleq_proof_s, &dleq_proof_e, &k, &enckey_ge, &rp, &r, noncefp, ndata); + + ret &= rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sk, seckey32); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&sk, &rustsecp256k1zkp_v0_10_1_scalar_one, !ret); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&msg, msg32, NULL); + rustsecp256k1zkp_v0_10_1_fe_normalize(&r.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(buf33, &r.x); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sigr, buf33, NULL); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sigr); /* s' = k⁻¹(m + R.x * x) */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&n, &sigr, &sk); - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, &msg); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&sp, &k); - rustsecp256k1zkp_v0_10_0_scalar_mul(&sp, &sp, &n); - ret &= !rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sp); + rustsecp256k1zkp_v0_10_1_scalar_mul(&n, &sigr, &sk); + 
rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, &msg); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&sp, &k); + rustsecp256k1zkp_v0_10_1_scalar_mul(&sp, &sp, &n); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sp); /* return (R, R', s', dleq_proof) */ - ret &= rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(adaptor_sig162, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s); + ret &= rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(adaptor_sig162, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s); - rustsecp256k1zkp_v0_10_0_memczero(adaptor_sig162, 162, !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&n); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sk); + rustsecp256k1zkp_v0_10_1_memczero(adaptor_sig162, 162, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&n); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char *adaptor_sig162, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_pubkey *enckey) { - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_s, dleq_proof_e; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_ge pubkey_ge; - rustsecp256k1zkp_v0_10_0_ge r, rp; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar sigr; - rustsecp256k1zkp_v0_10_0_ge enckey_ge; - rustsecp256k1zkp_v0_10_0_gej derived_rp; - rustsecp256k1zkp_v0_10_0_scalar sn, u1, u2; - rustsecp256k1zkp_v0_10_0_gej pubkeyj; +int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char *adaptor_sig162, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_pubkey *enckey) { + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_s, dleq_proof_e; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_ge pubkey_ge; + 
rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar sigr; + rustsecp256k1zkp_v0_10_1_ge enckey_ge; + rustsecp256k1zkp_v0_10_1_gej derived_rp; + rustsecp256k1zkp_v0_10_1_scalar sn, u1, u2; + rustsecp256k1zkp_v0_10_1_gej pubkeyj; VERIFY_CHECK(ctx != NULL); ARG_CHECK(adaptor_sig162 != NULL); @@ -241,40 +241,40 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(const rustsecp256k1zkp_v0_10_0 ARG_CHECK(msg32 != NULL); ARG_CHECK(enckey != NULL); - if (!rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig162)) { + if (!rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig162)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &enckey_ge, enckey)) { + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &enckey_ge, enckey)) { return 0; } /* DLEQ_verify((R', Y, R), dleq_proof) */ - if(!rustsecp256k1zkp_v0_10_0_dleq_verify(&dleq_proof_s, &dleq_proof_e, &rp, &enckey_ge, &r)) { + if(!rustsecp256k1zkp_v0_10_1_dleq_verify(&dleq_proof_s, &dleq_proof_e, &rp, &enckey_ge, &r)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&msg, msg32, NULL); - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pubkey_ge, pubkey)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&msg, msg32, NULL); + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pubkey_ge, pubkey)) { return 0; } /* return R' == s'⁻¹(m * G + R.x * X) */ - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&sn, &sp); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u1, &sn, &msg); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u2, &sn, &sigr); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pubkeyj, &pubkey_ge); - rustsecp256k1zkp_v0_10_0_ecmult(&derived_rp, &pubkeyj, &u2, &u1); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&derived_rp)) { + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&sn, &sp); + rustsecp256k1zkp_v0_10_1_scalar_mul(&u1, &sn, &msg); + 
rustsecp256k1zkp_v0_10_1_scalar_mul(&u2, &sn, &sigr); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pubkeyj, &pubkey_ge); + rustsecp256k1zkp_v0_10_1_ecmult(&derived_rp, &pubkeyj, &u2, &u1); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&derived_rp)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_neg(&derived_rp, &derived_rp); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&derived_rp, &derived_rp, &rp, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&derived_rp); + rustsecp256k1zkp_v0_10_1_gej_neg(&derived_rp, &derived_rp); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&derived_rp, &derived_rp, &rp, NULL); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&derived_rp); } -int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, const unsigned char *deckey32, const unsigned char *adaptor_sig162) { - rustsecp256k1zkp_v0_10_0_scalar deckey; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar sigr; +int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *deckey32, const unsigned char *adaptor_sig162) { + rustsecp256k1zkp_v0_10_1_scalar deckey; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar sigr; int overflow; int high; int ret = 1; @@ -284,62 +284,62 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(const rustsecp256k1zkp_v0_10_ ARG_CHECK(deckey32 != NULL); ARG_CHECK(adaptor_sig162 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sp); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&deckey, deckey32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sp); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&deckey, deckey32, &overflow); ret &= !overflow; - ret &= rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, &sp, NULL, NULL, adaptor_sig162); - ret &= 
!rustsecp256k1zkp_v0_10_0_scalar_is_zero(&deckey); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&s, &deckey); + ret &= rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, &sp, NULL, NULL, adaptor_sig162); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&deckey); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&s, &deckey); /* s = s' * y⁻¹ */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&s, &s, &sp); - high = rustsecp256k1zkp_v0_10_0_scalar_is_high(&s); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&s, high); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(sig, &sigr, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s, &s, &sp); + high = rustsecp256k1zkp_v0_10_1_scalar_is_high(&s); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&s, high); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(sig, &sigr, &s); - rustsecp256k1zkp_v0_10_0_memczero(&sig->data[0], 64, !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&deckey); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&s); + rustsecp256k1zkp_v0_10_1_memczero(&sig->data[0], 64, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&deckey); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s); return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *deckey32, const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, const unsigned char *adaptor_sig162, const rustsecp256k1zkp_v0_10_0_pubkey *enckey) { - rustsecp256k1zkp_v0_10_0_scalar sp, adaptor_sigr; - rustsecp256k1zkp_v0_10_0_scalar s, r; - rustsecp256k1zkp_v0_10_0_scalar deckey; - rustsecp256k1zkp_v0_10_0_ge enckey_expected_ge; - rustsecp256k1zkp_v0_10_0_gej enckey_expected_gej; +int rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *deckey32, const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *adaptor_sig162, const rustsecp256k1zkp_v0_10_1_pubkey *enckey) { + 
rustsecp256k1zkp_v0_10_1_scalar sp, adaptor_sigr; + rustsecp256k1zkp_v0_10_1_scalar s, r; + rustsecp256k1zkp_v0_10_1_scalar deckey; + rustsecp256k1zkp_v0_10_1_ge enckey_expected_ge; + rustsecp256k1zkp_v0_10_1_gej enckey_expected_gej; unsigned char enckey33[33]; unsigned char enckey_expected33[33]; size_t size = 33; int ret = 1; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(deckey32 != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(adaptor_sig162 != NULL); ARG_CHECK(enckey != NULL); - if (!rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &adaptor_sigr, NULL, &sp, NULL, NULL, adaptor_sig162)) { + if (!rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &adaptor_sigr, NULL, &sp, NULL, NULL, adaptor_sig162)) { return 0; } - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, sig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, sig); /* Check that we're not looking at some unrelated signature */ - ret &= rustsecp256k1zkp_v0_10_0_scalar_eq(&adaptor_sigr, &r); + ret &= rustsecp256k1zkp_v0_10_1_scalar_eq(&adaptor_sigr, &r); /* y = s⁻¹ * s' */ - ret &= !rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&deckey, &s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&deckey, &deckey, &sp); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&deckey, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&deckey, &deckey, &sp); /* Deal with ECDSA malleability */ - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &enckey_expected_gej, &deckey); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&enckey_expected_ge, &enckey_expected_gej); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &enckey_expected_gej, &deckey); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&enckey_expected_ge, &enckey_expected_gej); /* We 
declassify non-secret enckey_expected_ge to allow using it as a * branch point. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &enckey_expected_ge, sizeof(enckey_expected_ge)); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&enckey_expected_ge, enckey_expected33, &size, SECP256K1_EC_COMPRESSED)) { + rustsecp256k1zkp_v0_10_1_declassify(ctx, &enckey_expected_ge, sizeof(enckey_expected_ge)); + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&enckey_expected_ge, enckey_expected33, &size, SECP256K1_EC_COMPRESSED)) { /* Unreachable from tests (and other VERIFY builds) and therefore this * branch should be ignored in test coverage analysis. * @@ -353,21 +353,21 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(const rustsecp256k1zkp_v0_10_ */ return 0; } - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, enckey33, &size, enckey, SECP256K1_EC_COMPRESSED)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, enckey33, &size, enckey, SECP256K1_EC_COMPRESSED)) { return 0; } - if (rustsecp256k1zkp_v0_10_0_memcmp_var(&enckey_expected33[1], &enckey33[1], 32) != 0) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(&enckey_expected33[1], &enckey33[1], 32) != 0) { return 0; } if (enckey_expected33[0] != enckey33[0]) { /* try Y_implied == -Y */ - rustsecp256k1zkp_v0_10_0_scalar_negate(&deckey, &deckey); + rustsecp256k1zkp_v0_10_1_scalar_negate(&deckey, &deckey); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(deckey32, &deckey); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(deckey32, &deckey); - rustsecp256k1zkp_v0_10_0_scalar_clear(&deckey); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&s); + rustsecp256k1zkp_v0_10_1_scalar_clear(&deckey); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s); return ret; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/tests_impl.h index 83595dee..46ed4c52 
100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_adaptor/tests_impl.h @@ -3,33 +3,33 @@ #include "../../../include/secp256k1_ecdsa_adaptor.h" -static void rand_scalar(rustsecp256k1zkp_v0_10_0_scalar *scalar) { +static void rand_scalar(rustsecp256k1zkp_v0_10_1_scalar *scalar) { unsigned char buf32[32]; - rustsecp256k1zkp_v0_10_0_testrand256(buf32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(scalar, buf32, NULL); + rustsecp256k1zkp_v0_10_1_testrand256(buf32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(scalar, buf32, NULL); } -static void rand_point(rustsecp256k1zkp_v0_10_0_ge *point) { - rustsecp256k1zkp_v0_10_0_scalar x; - rustsecp256k1zkp_v0_10_0_gej pointj; +static void rand_point(rustsecp256k1zkp_v0_10_1_ge *point) { + rustsecp256k1zkp_v0_10_1_scalar x; + rustsecp256k1zkp_v0_10_1_gej pointj; rand_scalar(&x); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pointj, &x); - rustsecp256k1zkp_v0_10_0_ge_set_gej(point, &pointj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pointj, &x); + rustsecp256k1zkp_v0_10_1_ge_set_gej(point, &pointj); } static void dleq_nonce_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes) { - rustsecp256k1zkp_v0_10_0_scalar k1, k2; + rustsecp256k1zkp_v0_10_1_scalar k1, k2; - CHECK(rustsecp256k1zkp_v0_10_0_dleq_nonce(&k1, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); - rustsecp256k1zkp_v0_10_0_testrand_flip(args[n_flip], n_bytes); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_nonce(&k2, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&k1, &k2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_nonce(&k1, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); + rustsecp256k1zkp_v0_10_1_testrand_flip(args[n_flip], n_bytes); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_nonce(&k2, args[0], args[1], args[2], args[3], NULL, args[4]) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&k1, &k2) == 0); } static void dleq_tests(void) { - rustsecp256k1zkp_v0_10_0_scalar s, e, sk, k; - rustsecp256k1zkp_v0_10_0_ge gen2, p1, p2; + rustsecp256k1zkp_v0_10_1_scalar s, e, sk, k; + rustsecp256k1zkp_v0_10_1_ge gen2, p1, p2; unsigned char *args[5]; unsigned char sk32[32]; unsigned char gen2_33[33]; @@ -41,43 +41,43 @@ static void dleq_tests(void) { rand_point(&gen2); rand_scalar(&sk); - rustsecp256k1zkp_v0_10_0_dleq_pair(&CTX->ecmult_gen_ctx, &p1, &p2, &sk, &gen2); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_prove(CTX, &s, &e, &sk, &gen2, &p1, &p2, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&s, &e, &p1, &gen2, &p2) == 1); + rustsecp256k1zkp_v0_10_1_dleq_pair(&CTX->ecmult_gen_ctx, &p1, &p2, &sk, &gen2); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_prove(CTX, &s, &e, &sk, &gen2, &p1, &p2, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&s, &e, &p1, &gen2, &p2) == 1); { - rustsecp256k1zkp_v0_10_0_scalar tmp; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&tmp, 1); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&tmp, &e, &p1, &gen2, &p2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&s, &tmp, &p1, &gen2, &p2) == 0); + rustsecp256k1zkp_v0_10_1_scalar tmp; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&tmp, 1); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&tmp, &e, &p1, &gen2, &p2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&s, &tmp, &p1, &gen2, &p2) == 0); } { - rustsecp256k1zkp_v0_10_0_ge p_tmp; + rustsecp256k1zkp_v0_10_1_ge p_tmp; rand_point(&p_tmp); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&s, &e, &p_tmp, &gen2, &p2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&s, &e, &p1, &p_tmp, &p2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_verify(&s, &e, &p1, &gen2, &p_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&s, &e, &p_tmp, &gen2, &p2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&s, &e, &p1, &p_tmp, &p2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_verify(&s, &e, 
&p1, &gen2, &p_tmp) == 0); } { - rustsecp256k1zkp_v0_10_0_ge p_inf; - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&p_inf); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_prove(CTX, &s, &e, &sk, &p_inf, &p1, &p2, NULL, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_prove(CTX, &s, &e, &sk, &gen2, &p_inf, &p2, NULL, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_prove(CTX, &s, &e, &sk, &gen2, &p1, &p_inf, NULL, NULL) == 0); + rustsecp256k1zkp_v0_10_1_ge p_inf; + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&p_inf); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_prove(CTX, &s, &e, &sk, &p_inf, &p1, &p2, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_prove(CTX, &s, &e, &sk, &gen2, &p_inf, &p2, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_prove(CTX, &s, &e, &sk, &gen2, &p1, &p_inf, NULL, NULL) == 0); } /* Nonce tests */ - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sk32, &sk); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&gen2, gen2_33, &pubkey_size, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&p1, p1_33, &pubkey_size, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&p2, p2_33, &pubkey_size, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); - - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(sk32, sizeof(sk32)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(gen2_33, sizeof(gen2_33)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(p1_33, sizeof(p1_33)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(p2_33, sizeof(p2_33)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(aux_rand, sizeof(aux_rand)); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sk32, &sk); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&gen2, gen2_33, &pubkey_size, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&p1, p1_33, &pubkey_size, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&p2, p2_33, &pubkey_size, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_nonce(&k, sk32, gen2_33, p1_33, 
p2_33, NULL, NULL) == 1); + + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(sk32, sizeof(sk32)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(gen2_33, sizeof(gen2_33)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(p1_33, sizeof(p1_33)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(p2_33, sizeof(p2_33)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(aux_rand, sizeof(aux_rand)); /* Check that a bitflip in an argument results in different nonces. */ args[0] = sk32; @@ -97,56 +97,52 @@ static void dleq_tests(void) { } /* NULL aux_rand argument is allowed. */ - CHECK(rustsecp256k1zkp_v0_10_0_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); -} - -static void rand_flip_bit(unsigned char *array, size_t n) { - array[rustsecp256k1zkp_v0_10_0_testrand_int(n)] ^= 1 << rustsecp256k1zkp_v0_10_0_testrand_int(8); + CHECK(rustsecp256k1zkp_v0_10_1_dleq_nonce(&k, sk32, gen2_33, p1_33, p2_33, NULL, NULL) == 1); } /* Helper function for test_ecdsa_adaptor_spec_vectors * Checks that the adaptor signature is valid for the public and encryption keys. 
*/ static void test_ecdsa_adaptor_spec_vectors_check_verify(const unsigned char *adaptor_sig162, const unsigned char *msg32, const unsigned char *pubkey33, const unsigned char *encryption_key33, int expected) { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ge pubkey_ge; - rustsecp256k1zkp_v0_10_0_pubkey encryption_key; - rustsecp256k1zkp_v0_10_0_ge encryption_key_ge; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ge pubkey_ge; + rustsecp256k1zkp_v0_10_1_pubkey encryption_key; + rustsecp256k1zkp_v0_10_1_ge encryption_key_ge; - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&encryption_key_ge, encryption_key33, 33) == 1); - rustsecp256k1zkp_v0_10_0_pubkey_save(&encryption_key, &encryption_key_ge); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&pubkey_ge, pubkey33, 33) == 1); - rustsecp256k1zkp_v0_10_0_pubkey_save(&pubkey, &pubkey_ge); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&encryption_key_ge, encryption_key33, 33) == 1); + rustsecp256k1zkp_v0_10_1_pubkey_save(&encryption_key, &encryption_key_ge); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&pubkey_ge, pubkey33, 33) == 1); + rustsecp256k1zkp_v0_10_1_pubkey_save(&pubkey, &pubkey_ge); - CHECK(expected == rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig162, &pubkey, msg32, &encryption_key)); + CHECK(expected == rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig162, &pubkey, msg32, &encryption_key)); } /* Helper function for test_ecdsa_adaptor_spec_vectors * Checks that the signature can be decrypted from the adaptor signature and the decryption key. 
*/ static void test_ecdsa_adaptor_spec_vectors_check_decrypt(const unsigned char *adaptor_sig162, const unsigned char *decryption_key32, const unsigned char *signature64, int expected) { unsigned char signature[64]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature s; + rustsecp256k1zkp_v0_10_1_ecdsa_signature s; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &s, decryption_key32, adaptor_sig162) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, signature, &s) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &s, decryption_key32, adaptor_sig162) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, signature, &s) == 1); - CHECK(expected == !(rustsecp256k1zkp_v0_10_0_memcmp_var(signature, signature64, 64))); + CHECK(expected == !(rustsecp256k1zkp_v0_10_1_memcmp_var(signature, signature64, 64))); } /* Helper function for test_ecdsa_adaptor_spec_vectors * Checks that the decryption key can be recovered from the adaptor signature, encryption key, and the signature. 
*/ static void test_ecdsa_adaptor_spec_vectors_check_recover(const unsigned char *adaptor_sig162, const unsigned char *encryption_key33, const unsigned char *decryption_key32, const unsigned char *signature64, int expected) { unsigned char deckey32[32] = { 0 }; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; - rustsecp256k1zkp_v0_10_0_pubkey encryption_key; - rustsecp256k1zkp_v0_10_0_ge encryption_key_ge; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey encryption_key; + rustsecp256k1zkp_v0_10_1_ge encryption_key_ge; - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&encryption_key_ge, encryption_key33, 33) == 1); - rustsecp256k1zkp_v0_10_0_pubkey_save(&encryption_key, &encryption_key_ge); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&encryption_key_ge, encryption_key33, 33) == 1); + rustsecp256k1zkp_v0_10_1_pubkey_save(&encryption_key, &encryption_key_ge); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &sig, signature64) == 1); - CHECK(expected == rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey32, &sig, adaptor_sig162, &encryption_key)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &sig, signature64) == 1); + CHECK(expected == rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey32, &sig, adaptor_sig162, &encryption_key)); if (decryption_key32 != NULL) { - CHECK(expected == !(rustsecp256k1zkp_v0_10_0_memcmp_var(deckey32, decryption_key32, 32))); + CHECK(expected == !(rustsecp256k1zkp_v0_10_1_memcmp_var(deckey32, decryption_key32, 32))); } } @@ -154,15 +150,15 @@ static void test_ecdsa_adaptor_spec_vectors_check_recover(const unsigned char *a * Checks deserialization and serialization. 
*/ static void test_ecdsa_adaptor_spec_vectors_check_serialization(const unsigned char *adaptor_sig162, int expected) { unsigned char buf[162]; - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_s, dleq_proof_e; - rustsecp256k1zkp_v0_10_0_ge r, rp; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar sigr; + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_s, dleq_proof_e; + rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar sigr; - CHECK(expected == rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig162)); + CHECK(expected == rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig162)); if (expected == 1) { - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(buf, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf, adaptor_sig162, 162) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(buf, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf, adaptor_sig162, 162) == 0); } } @@ -713,20 +709,20 @@ static int ecdsa_adaptor_nonce_function_overflowing(unsigned char *nonce32, cons static void nonce_function_ecdsa_adaptor_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes, size_t algolen) { unsigned char nonces[2][32]; CHECK(nonce_function_ecdsa_adaptor(nonces[0], args[0], args[1], args[2], args[3], algolen, args[4]) == 1); - rustsecp256k1zkp_v0_10_0_testrand_flip(args[n_flip], n_bytes); + rustsecp256k1zkp_v0_10_1_testrand_flip(args[n_flip], n_bytes); CHECK(nonce_function_ecdsa_adaptor(nonces[1], args[0], args[1], args[2], args[3], algolen, args[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonces[0], nonces[1], 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonces[0], nonces[1], 32) != 0); } /* Tests for the equality of two sha256 structs. 
This function only produces a * correct result if an integer multiple of 64 many bytes have been written * into the hash functions. */ -static void ecdsa_adaptor_test_sha256_eq(const rustsecp256k1zkp_v0_10_0_sha256 *sha1, const rustsecp256k1zkp_v0_10_0_sha256 *sha2) { +static void ecdsa_adaptor_test_sha256_eq(const rustsecp256k1zkp_v0_10_1_sha256 *sha1, const rustsecp256k1zkp_v0_10_1_sha256 *sha2) { /* Is buffer fully consumed? */ CHECK((sha1->bytes & 0x3F) == 0); CHECK(sha1->bytes == sha2->bytes); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sha1->s, sha2->s, sizeof(sha1->s)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sha1->s, sha2->s, sizeof(sha1->s)) == 0); } static void run_nonce_function_ecdsa_adaptor_tests(void) { @@ -735,8 +731,8 @@ static void run_nonce_function_ecdsa_adaptor_tests(void) { unsigned char algo[16] = "ECDSAadaptor/non"; size_t algolen = sizeof(algo); unsigned char dleq_tag[4] = "DLEQ"; - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_optimized; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; unsigned char nonce[32]; unsigned char msg[32]; unsigned char key[32]; @@ -746,30 +742,30 @@ static void run_nonce_function_ecdsa_adaptor_tests(void) { int i; /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged has the expected + * rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged has the expected * state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag, sizeof(tag)); - rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag, sizeof(tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged(&sha_optimized); ecdsa_adaptor_test_sha256_eq(&sha, &sha_optimized); /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged_aux has the expected + * rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged_aux has the expected * state. */ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, aux_tag, sizeof(aux_tag)); - rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, aux_tag, sizeof(aux_tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor_sha256_tagged_aux(&sha_optimized); ecdsa_adaptor_test_sha256_eq(&sha, &sha_optimized); /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged_aux has the expected + * rustsecp256k1zkp_v0_10_1_nonce_function_dleq_sha256_tagged_aux has the expected * state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, dleq_tag, sizeof(dleq_tag)); - rustsecp256k1zkp_v0_10_0_nonce_function_dleq_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, dleq_tag, sizeof(dleq_tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_dleq_sha256_tagged(&sha_optimized); ecdsa_adaptor_test_sha256_eq(&sha, &sha_optimized); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(msg, sizeof(msg)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(key, sizeof(key)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(pk, sizeof(pk)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(aux_rand, sizeof(aux_rand)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(msg, sizeof(msg)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(key, sizeof(key)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(pk, sizeof(pk)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(aux_rand, sizeof(aux_rand)); /* Check that a bitflip in an argument results in different nonces. */ args[0] = msg; @@ -802,11 +798,11 @@ static void run_nonce_function_ecdsa_adaptor_tests(void) { /* Different algolen gives different nonce */ for (i = 0; i < COUNT; i++) { unsigned char nonce2[32]; - uint32_t offset = rustsecp256k1zkp_v0_10_0_testrand_int(algolen - 1); + uint32_t offset = rustsecp256k1zkp_v0_10_1_testrand_int(algolen - 1); size_t algolen_tmp = (algolen + offset) % algolen; CHECK(nonce_function_ecdsa_adaptor(nonce2, msg, key, pk, algo, algolen_tmp, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce2, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce2, 32) != 0); } /* NULL aux_rand argument is allowed. 
*/ @@ -814,170 +810,170 @@ static void run_nonce_function_ecdsa_adaptor_tests(void) { } static void test_ecdsa_adaptor_api(void) { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey enckey; - rustsecp256k1zkp_v0_10_0_pubkey zero_pk; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey enckey; + rustsecp256k1zkp_v0_10_1_pubkey zero_pk; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; unsigned char sk[32]; unsigned char msg[32]; unsigned char asig[162]; unsigned char deckey[32]; /** setup **/ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - rustsecp256k1zkp_v0_10_0_testrand256(msg); - rustsecp256k1zkp_v0_10_0_testrand256(deckey); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &enckey, deckey) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(deckey); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &enckey, deckey) == 1); memset(&zero_pk, 0, sizeof(zero_pk)); /** main test body **/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, msg, NULL, NULL) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(STATIC_CTX, asig, sk, &enckey, msg, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, NULL, sk, &enckey, msg, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, NULL, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, NULL, &enckey, msg, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, sk, NULL, msg, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, sk, &zero_pk, 
msg, NULL, NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, msg, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, &enckey) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, NULL, &pubkey, msg, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, &pubkey, NULL, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, NULL, msg, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, &zero_pk, msg, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, &zero_pk)); - - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, NULL, deckey, asig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, NULL, asig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, deckey, NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, &enckey) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(STATIC_CTX, deckey, &sig, asig, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, NULL, &sig, asig, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey, NULL, asig, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey, &sig, NULL, &enckey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, 
NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, &zero_pk)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, msg, NULL, NULL) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(STATIC_CTX, asig, sk, &enckey, msg, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, NULL, sk, &enckey, msg, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, NULL, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, NULL, &enckey, msg, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, sk, NULL, msg, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, sk, &zero_pk, msg, NULL, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig, sk, &enckey, msg, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, &enckey) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, NULL, &pubkey, msg, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, &pubkey, NULL, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, NULL, msg, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, &zero_pk, msg, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig, &pubkey, msg, &zero_pk)); + + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, NULL, deckey, asig)); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, NULL, asig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, deckey, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, deckey, asig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, &enckey) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(STATIC_CTX, deckey, &sig, asig, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, NULL, &sig, asig, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey, NULL, asig, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey, &sig, NULL, &enckey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, deckey, &sig, asig, &zero_pk)); } static void adaptor_tests(void) { unsigned char seckey[32]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; unsigned char msg[32]; unsigned char deckey[32]; - rustsecp256k1zkp_v0_10_0_pubkey enckey; + rustsecp256k1zkp_v0_10_1_pubkey enckey; unsigned char adaptor_sig[162]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; unsigned char zeros162[162] = { 0 }; unsigned char zeros64[64] = { 0 }; unsigned char big[32]; - rustsecp256k1zkp_v0_10_0_testrand256(seckey); - rustsecp256k1zkp_v0_10_0_testrand256(msg); - rustsecp256k1zkp_v0_10_0_testrand256(deckey); + rustsecp256k1zkp_v0_10_1_testrand256(seckey); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(deckey); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, seckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &enckey, deckey) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, seckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &enckey, deckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, NULL, NULL) == 1); { /* Test overflowing seckey */ memset(big, 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, adaptor_sig, big, &enckey, msg, NULL, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, adaptor_sig, big, &enckey, msg, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); /* Test different nonce functions */ memset(adaptor_sig, 1, sizeof(adaptor_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_failing, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_failing, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); memset(&adaptor_sig, 1, sizeof(adaptor_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_0, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_overflowing, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) != 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_0, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, adaptor_sig, seckey, &enckey, msg, ecdsa_adaptor_nonce_function_overflowing, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(adaptor_sig, zeros162, sizeof(adaptor_sig)) != 0); } { /* Test adaptor_sig_serialize roundtrip */ - rustsecp256k1zkp_v0_10_0_ge r, rp; - rustsecp256k1zkp_v0_10_0_scalar sigr; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_s, dleq_proof_e; - rustsecp256k1zkp_v0_10_0_ge p_inf; + rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_scalar sigr; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_s, dleq_proof_e; + rustsecp256k1zkp_v0_10_1_ge p_inf; unsigned char adaptor_sig_tmp[162]; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &r, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)) == 0); /* Test adaptor_sig_serialize points at infinity */ - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&p_inf); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &p_inf, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 0); - 
CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &r, &p_inf, &sp, &dleq_proof_e, &dleq_proof_s) == 0); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&p_inf); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &p_inf, &rp, &sp, &dleq_proof_e, &dleq_proof_s) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_serialize(adaptor_sig_tmp, &r, &p_inf, &sp, &dleq_proof_e, &dleq_proof_s) == 0); } { /* Test adaptor_sig_deserialize */ - rustsecp256k1zkp_v0_10_0_ge r, rp; - rustsecp256k1zkp_v0_10_0_scalar sigr; - rustsecp256k1zkp_v0_10_0_scalar sp; - rustsecp256k1zkp_v0_10_0_scalar dleq_proof_s, dleq_proof_e; + rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_scalar sigr; + rustsecp256k1zkp_v0_10_1_scalar sp; + rustsecp256k1zkp_v0_10_1_scalar dleq_proof_s, dleq_proof_e; unsigned char adaptor_sig_tmp[162]; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, &rp, &sp, &dleq_proof_e, &dleq_proof_s, adaptor_sig) == 1); /* r */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[0], 0xFF, 33); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(&r, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(&r, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 0); /* sigr */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); 
memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[1], 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 1); memset(&adaptor_sig_tmp[1], 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig_tmp) == 0); /* rp */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, &rp, NULL, NULL, NULL, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, &rp, NULL, NULL, NULL, adaptor_sig) == 1); memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[33], 0xFF, 33); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, &rp, NULL, NULL, NULL, adaptor_sig_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, &rp, NULL, NULL, NULL, adaptor_sig_tmp) == 0); /* sp */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, &sp, NULL, NULL, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, &sp, NULL, NULL, adaptor_sig) == 1); memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[66], 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, &sp, NULL, NULL, adaptor_sig_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, &sp, NULL, NULL, adaptor_sig_tmp) == 0); /* dleq_proof_e */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, &dleq_proof_e, NULL, adaptor_sig) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, &dleq_proof_e, NULL, adaptor_sig) == 1); memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[98], 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, &dleq_proof_e, NULL, adaptor_sig_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, &dleq_proof_e, NULL, adaptor_sig_tmp) == 1); /* dleq_proof_s */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, NULL, &dleq_proof_s, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, NULL, &dleq_proof_s, adaptor_sig) == 1); memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[130], 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, NULL, &dleq_proof_s, adaptor_sig_tmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, NULL, NULL, NULL, NULL, &dleq_proof_s, adaptor_sig_tmp) == 0); } /* Test adaptor_sig_verify */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg, &enckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig, &enckey, msg, &enckey) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg, &pubkey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg, &enckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig, &enckey, msg, &enckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg, &pubkey) == 0); { /* Test failed adaptor sig deserialization */ unsigned char adaptor_sig_tmp[162]; memset(&adaptor_sig_tmp, 0xFF, 162); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey, msg, &enckey) 
== 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey, msg, &enckey) == 0); } { /* Test that any flipped bit in the adaptor signature will make @@ -985,13 +981,13 @@ static void adaptor_tests(void) { unsigned char adaptor_sig_tmp[162]; memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); rand_flip_bit(&adaptor_sig_tmp[1], sizeof(adaptor_sig_tmp) - 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey, msg, &enckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey, msg, &enckey) == 0); } { unsigned char msg_tmp[32]; memcpy(msg_tmp, msg, sizeof(msg_tmp)); rand_flip_bit(msg_tmp, sizeof(msg_tmp)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg_tmp, &enckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig, &pubkey, msg_tmp, &enckey) == 0); } { /* Verification must check that the derived R' is not equal to the point at @@ -1019,49 +1015,49 @@ static void adaptor_tests(void) { unsigned char seckey_tmp[32] = { 0 }; unsigned char msg_tmp[32]; unsigned char adaptor_sig_tmp[162]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_tmp; - rustsecp256k1zkp_v0_10_0_scalar sigr, t, m; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_tmp; + rustsecp256k1zkp_v0_10_1_scalar sigr, t, m; /* m := t * sigr */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&t, target, NULL); - rustsecp256k1zkp_v0_10_0_scalar_mul(&m, &t, &sigr); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msg_tmp, &m); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_sig_deserialize(NULL, &sigr, NULL, NULL, NULL, NULL, adaptor_sig) == 1); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&t, target, NULL); + rustsecp256k1zkp_v0_10_1_scalar_mul(&m, &t, &sigr); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(msg_tmp, &m); /* X := G */ seckey_tmp[31] = 1; - 
CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey_tmp, seckey_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey_tmp, seckey_tmp) == 1); /* sp := sigr */ memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memcpy(&adaptor_sig_tmp[66], &adaptor_sig_tmp[1], 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey_tmp, msg_tmp, &enckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, adaptor_sig_tmp, &pubkey_tmp, msg_tmp, &enckey) == 0); } /* Test decryption */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig, deckey, adaptor_sig) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig, deckey, adaptor_sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg, &pubkey) == 1); { /* Test overflowing decryption key */ - rustsecp256k1zkp_v0_10_0_ecdsa_signature s; + rustsecp256k1zkp_v0_10_1_ecdsa_signature s; memset(big, 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &s, big, adaptor_sig) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&s.data[0], zeros64, sizeof(&s.data[0])) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &s, big, adaptor_sig) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&s.data[0], zeros64, sizeof(&s.data[0])) == 0); } { /* Test key recover */ unsigned char decryption_key_tmp[32]; unsigned char adaptor_sig_tmp[162]; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, decryption_key_tmp, &sig, adaptor_sig, &enckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(deckey, decryption_key_tmp, sizeof(deckey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, decryption_key_tmp, &sig, adaptor_sig, &enckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(deckey, decryption_key_tmp, sizeof(deckey)) == 0); /* Test failed sp deserialization */ 
memcpy(adaptor_sig_tmp, adaptor_sig, sizeof(adaptor_sig_tmp)); memset(&adaptor_sig_tmp[66], 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, decryption_key_tmp, &sig, adaptor_sig_tmp, &enckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, decryption_key_tmp, &sig, adaptor_sig_tmp, &enckey) == 0); } } @@ -1074,69 +1070,69 @@ static void multi_hop_lock_tests(void) { unsigned char buf[32]; unsigned char asig_ab[162]; unsigned char asig_bc[162]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_pop; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_a, pubkey_b; - rustsecp256k1zkp_v0_10_0_pubkey l, r; - rustsecp256k1zkp_v0_10_0_ge l_ge, r_ge; - rustsecp256k1zkp_v0_10_0_scalar t1, t2, tp; - rustsecp256k1zkp_v0_10_0_scalar deckey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig_ab, sig_bc; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_pop; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_a, pubkey_b; + rustsecp256k1zkp_v0_10_1_pubkey l, r; + rustsecp256k1zkp_v0_10_1_ge l_ge, r_ge; + rustsecp256k1zkp_v0_10_1_scalar t1, t2, tp; + rustsecp256k1zkp_v0_10_1_scalar deckey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig_ab, sig_bc; - rustsecp256k1zkp_v0_10_0_testrand256(seckey_a); - rustsecp256k1zkp_v0_10_0_testrand256(seckey_b); + rustsecp256k1zkp_v0_10_1_testrand256(seckey_a); + rustsecp256k1zkp_v0_10_1_testrand256(seckey_b); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey_a, seckey_a)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey_b, seckey_b)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey_a, seckey_a)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey_b, seckey_b)); /* Carol setup */ /* Proof of payment */ - rustsecp256k1zkp_v0_10_0_testrand256(pop); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey_pop, pop)); + rustsecp256k1zkp_v0_10_1_testrand256(pop); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey_pop, pop)); /* Alice setup */ - 
rustsecp256k1zkp_v0_10_0_testrand256(tx_ab); + rustsecp256k1zkp_v0_10_1_testrand256(tx_ab); rand_scalar(&t1); rand_scalar(&t2); - rustsecp256k1zkp_v0_10_0_scalar_add(&tp, &t1, &t2); + rustsecp256k1zkp_v0_10_1_scalar_add(&tp, &t1, &t2); /* Left lock */ - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &l_ge, &pubkey_pop); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(&l_ge, &t1)); - rustsecp256k1zkp_v0_10_0_pubkey_save(&l, &l_ge); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &l_ge, &pubkey_pop); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(&l_ge, &t1)); + rustsecp256k1zkp_v0_10_1_pubkey_save(&l, &l_ge); /* Right lock */ - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &r_ge, &pubkey_pop); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(&r_ge, &tp)); - rustsecp256k1zkp_v0_10_0_pubkey_save(&r, &r_ge); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig_ab, seckey_a, &l, tx_ab, NULL, NULL)); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &r_ge, &pubkey_pop); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(&r_ge, &tp)); + rustsecp256k1zkp_v0_10_1_pubkey_save(&r, &r_ge); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig_ab, seckey_a, &l, tx_ab, NULL, NULL)); /* Bob setup */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig_ab, &pubkey_a, tx_ab, &l)); - rustsecp256k1zkp_v0_10_0_testrand256(tx_bc); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt(CTX, asig_bc, seckey_b, &r, tx_bc, NULL, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig_ab, &pubkey_a, tx_ab, &l)); + rustsecp256k1zkp_v0_10_1_testrand256(tx_bc); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt(CTX, asig_bc, seckey_b, &r, tx_bc, NULL, NULL)); /* Carol decrypt */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify(CTX, asig_bc, &pubkey_b, tx_bc, &r)); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&deckey, pop, NULL); - rustsecp256k1zkp_v0_10_0_scalar_add(&deckey, &deckey, &tp); - 
rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf, &deckey); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig_bc, buf, asig_bc)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig_bc, tx_bc, &pubkey_b)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify(CTX, asig_bc, &pubkey_b, tx_bc, &r)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&deckey, pop, NULL); + rustsecp256k1zkp_v0_10_1_scalar_add(&deckey, &deckey, &tp); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf, &deckey); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig_bc, buf, asig_bc)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig_bc, tx_bc, &pubkey_b)); /* Bob recover and decrypt */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, buf, &sig_bc, asig_bc, &r)); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&deckey, buf, NULL); - rustsecp256k1zkp_v0_10_0_scalar_negate(&t2, &t2); - rustsecp256k1zkp_v0_10_0_scalar_add(&deckey, &deckey, &t2); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf, &deckey); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt(CTX, &sig_ab, buf, asig_ab)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig_ab, tx_ab, &pubkey_a)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, buf, &sig_bc, asig_bc, &r)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&deckey, buf, NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t2, &t2); + rustsecp256k1zkp_v0_10_1_scalar_add(&deckey, &deckey, &t2); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf, &deckey); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt(CTX, &sig_ab, buf, asig_ab)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig_ab, tx_ab, &pubkey_a)); /* Alice recover and derive proof of payment */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover(CTX, buf, &sig_ab, asig_ab, &l)); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&deckey, buf, NULL); - rustsecp256k1zkp_v0_10_0_scalar_negate(&t1, &t1); - rustsecp256k1zkp_v0_10_0_scalar_add(&deckey, &deckey, &t1); - 
rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf, &deckey); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf, pop, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover(CTX, buf, &sig_ab, asig_ab, &l)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&deckey, buf, NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t1, &t1); + rustsecp256k1zkp_v0_10_1_scalar_add(&deckey, &deckey, &t1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf, &deckey); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf, pop, 32) == 0); } static void run_ecdsa_adaptor_tests(void) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/Makefile.am.include index 23b345af..f0b406d6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/Makefile.am.include @@ -1,3 +1,3 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_ecdsa_s2c.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_ecdsa_s2c.h noinst_HEADERS += src/modules/ecdsa_s2c/main_impl.h noinst_HEADERS += src/modules/ecdsa_s2c/tests_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/main_impl.h index 1399d58b..84e22062 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/main_impl.h @@ -10,33 +10,33 @@ #include "../../../include/secp256k1.h" #include "../../../include/secp256k1_ecdsa_s2c.h" -static void rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_0_ge* ge) { - rustsecp256k1zkp_v0_10_0_pubkey_save((rustsecp256k1zkp_v0_10_0_pubkey*) opening, ge); +static void rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_1_ge* 
ge) { + rustsecp256k1zkp_v0_10_1_pubkey_save((rustsecp256k1zkp_v0_10_1_pubkey*) opening, ge); } -static int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening) { - return rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, ge, (const rustsecp256k1zkp_v0_10_0_pubkey*) opening); +static int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening) { + return rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, ge, (const rustsecp256k1zkp_v0_10_1_pubkey*) opening); } -int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening, const unsigned char* input33) { +int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening, const unsigned char* input33) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(opening != NULL); ARG_CHECK(input33 != NULL); - return rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(ctx, (rustsecp256k1zkp_v0_10_0_pubkey*) opening, input33, 33); + return rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(ctx, (rustsecp256k1zkp_v0_10_1_pubkey*) opening, input33, 33); } -int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char* output33, const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening) { +int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char* output33, const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening) { size_t out_len = 33; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output33 != NULL); ARG_CHECK(opening != NULL); - return rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, output33, &out_len, (const rustsecp256k1zkp_v0_10_0_pubkey*) opening, 
SECP256K1_EC_COMPRESSED); + return rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, output33, &out_len, (const rustsecp256k1zkp_v0_10_1_pubkey*) opening, SECP256K1_EC_COMPRESSED); } /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("s2c/ecdsa/point")||SHA256("s2c/ecdsa/point"). */ -static void rustsecp256k1zkp_v0_10_0_s2c_ecdsa_point_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_s2c_ecdsa_point_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0xa9b21c7bul; sha->s[1] = 0x358c3e3eul; sha->s[2] = 0x0b6863d1ul; @@ -51,8 +51,8 @@ static void rustsecp256k1zkp_v0_10_0_s2c_ecdsa_point_sha256_tagged(rustsecp256k1 /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("s2c/ecdsa/data")||SHA256("s2c/ecdsa/data"). */ -static void rustsecp256k1zkp_v0_10_0_s2c_ecdsa_data_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_s2c_ecdsa_data_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0xfeefd675ul; sha->s[1] = 0x73166c99ul; sha->s[2] = 0xe2309cb8ul; @@ -65,15 +65,15 @@ static void rustsecp256k1zkp_v0_10_0_s2c_ecdsa_data_sha256_tagged(rustsecp256k1z sha->bytes = 64; } -int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* signature, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* s2c_opening, const unsigned char +int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* signature, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* s2c_opening, const unsigned char *msg32, const unsigned char *seckey, const unsigned char* 
s2c_data32) { - rustsecp256k1zkp_v0_10_0_scalar r, s; + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret; unsigned char ndata[32]; - rustsecp256k1zkp_v0_10_0_sha256 s2c_sha; + rustsecp256k1zkp_v0_10_1_sha256 s2c_sha; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(msg32 != NULL); ARG_CHECK(signature != NULL); ARG_CHECK(seckey != NULL); @@ -83,35 +83,35 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(const rustsecp256k1zkp_v0_10_0_conte * derive the nonce. It is first hashed because it should be possible * to derive nonces even if only a SHA256 commitment to the data is * known. This is important in the ECDSA anti-exfil protocol. */ - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_data_sha256_tagged(&s2c_sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&s2c_sha, s2c_data32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&s2c_sha, ndata); - - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_point_sha256_tagged(&s2c_sha); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign_inner(ctx, &r, &s, NULL, &s2c_sha, s2c_opening, s2c_data32, msg32, seckey, NULL, ndata); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&s, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(signature, &r, &s); + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_data_sha256_tagged(&s2c_sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&s2c_sha, s2c_data32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&s2c_sha, ndata); + + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_point_sha256_tagged(&s2c_sha); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sign_inner(ctx, &r, &s, NULL, &s2c_sha, s2c_opening, s2c_data32, msg32, seckey, NULL, ndata); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&s, 
&rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(signature, &r, &s); return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const unsigned char* data32, const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening) { - rustsecp256k1zkp_v0_10_0_ge commitment_ge; - rustsecp256k1zkp_v0_10_0_ge original_pubnonce_ge; +int rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char* data32, const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening) { + rustsecp256k1zkp_v0_10_1_ge commitment_ge; + rustsecp256k1zkp_v0_10_1_ge original_pubnonce_ge; unsigned char x_bytes[32]; - rustsecp256k1zkp_v0_10_0_scalar sigr, sigs, x_scalar; - rustsecp256k1zkp_v0_10_0_sha256 s2c_sha; + rustsecp256k1zkp_v0_10_1_scalar sigr, sigs, x_scalar; + rustsecp256k1zkp_v0_10_1_sha256 s2c_sha; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(data32 != NULL); ARG_CHECK(opening != NULL); - if (!rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_load(ctx, &original_pubnonce_ge, opening)) { + if (!rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_load(ctx, &original_pubnonce_ge, opening)) { return 0; } - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_point_sha256_tagged(&s2c_sha); - if (!rustsecp256k1zkp_v0_10_0_ec_commit(&commitment_ge, &original_pubnonce_ge, &s2c_sha, data32, 32)) { + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_point_sha256_tagged(&s2c_sha); + if (!rustsecp256k1zkp_v0_10_1_ec_commit(&commitment_ge, &original_pubnonce_ge, &s2c_sha, data32, 32)) { return 0; } @@ -122,44 +122,44 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(const rustsecp256k1zkp_v0_1 * Note that we are only checking the x-coordinate -- this is because the y-coordinate * is not part of the ECDSA signature (and therefore not part of the commitment!) 
*/ - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &sigr, &sigs, sig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &sigr, &sigs, sig); - rustsecp256k1zkp_v0_10_0_fe_normalize(&commitment_ge.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(x_bytes, &commitment_ge.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&commitment_ge.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(x_bytes, &commitment_ge.x); /* Do not check overflow; overflowing a scalar does not affect whether * or not the R value is a cryptographic commitment, only whether it * is a valid R value for an ECDSA signature. If users care about that * they should use `ecdsa_verify` or `anti_exfil_host_verify`. In other * words, this check would be (at best) unnecessary, and (at worst) * insufficient. */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x_scalar, x_bytes, NULL); - return rustsecp256k1zkp_v0_10_0_scalar_eq(&sigr, &x_scalar); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x_scalar, x_bytes, NULL); + return rustsecp256k1zkp_v0_10_1_scalar_eq(&sigr, &x_scalar); } /*** anti-exfil ***/ -int rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char* rand_commitment32, const unsigned char* rand32) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +int rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char* rand_commitment32, const unsigned char* rand32) { + rustsecp256k1zkp_v0_10_1_sha256 sha; VERIFY_CHECK(ctx != NULL); ARG_CHECK(rand_commitment32 != NULL); ARG_CHECK(rand32 != NULL); - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_data_sha256_tagged(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, rand32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, rand_commitment32); + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_data_sha256_tagged(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, rand32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, rand_commitment32); return 1; } -int 
rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening, const unsigned char* msg32, const unsigned char* seckey32, const unsigned char* rand_commitment32) { +int rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening, const unsigned char* msg32, const unsigned char* seckey32, const unsigned char* rand_commitment32) { unsigned char nonce32[32]; - rustsecp256k1zkp_v0_10_0_scalar k; - rustsecp256k1zkp_v0_10_0_gej rj; - rustsecp256k1zkp_v0_10_0_ge r; + rustsecp256k1zkp_v0_10_1_scalar k; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_ge r; unsigned int count = 0; int is_nonce_valid = 0; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(opening != NULL); ARG_CHECK(msg32 != NULL); ARG_CHECK(seckey32 != NULL); @@ -167,31 +167,31 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(const rustsecp256k1z memset(nonce32, 0, 32); while (!is_nonce_valid) { - /* cast to void* removes const qualifier, but rustsecp256k1zkp_v0_10_0_nonce_function_default does not modify it */ - if (!rustsecp256k1zkp_v0_10_0_nonce_function_default(nonce32, msg32, seckey32, NULL, (void*)rand_commitment32, count)) { - rustsecp256k1zkp_v0_10_0_callback_call(&ctx->error_callback, "(cryptographically unreachable) generated bad nonce"); + /* cast to void* removes const qualifier, but rustsecp256k1zkp_v0_10_1_nonce_function_default does not modify it */ + if (!rustsecp256k1zkp_v0_10_1_nonce_function_default(nonce32, msg32, seckey32, NULL, (void*)rand_commitment32, count)) { + rustsecp256k1zkp_v0_10_1_callback_call(&ctx->error_callback, "(cryptographically unreachable) generated bad nonce"); } - is_nonce_valid = 
rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&k, nonce32); + is_nonce_valid = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&k, nonce32); /* The nonce is still secret here, but it being invalid is is less likely than 1:2^255. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &is_nonce_valid, sizeof(is_nonce_valid)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &is_nonce_valid, sizeof(is_nonce_valid)); count++; } - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &rj, &k); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r, &rj); - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(opening, &r); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &rj, &k); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rj); + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_save(opening, &r); memset(nonce32, 0, 32); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k); return 1; } -int rustsecp256k1zkp_v0_10_0_anti_exfil_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const unsigned char* msg32, const unsigned char* seckey, const unsigned char* host_data32) { - return rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(ctx, sig, NULL, msg32, seckey, host_data32); +int rustsecp256k1zkp_v0_10_1_anti_exfil_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char* msg32, const unsigned char* seckey, const unsigned char* host_data32) { + return rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(ctx, sig, NULL, msg32, seckey, host_data32); } -int rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *host_data32, const rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *opening) { - return rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(ctx, sig, host_data32, opening) && - 
rustsecp256k1zkp_v0_10_0_ecdsa_verify(ctx, sig, msg32, pubkey); +int rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *host_data32, const rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *opening) { + return rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(ctx, sig, host_data32, opening) && + rustsecp256k1zkp_v0_10_1_ecdsa_verify(ctx, sig, msg32, pubkey); } #endif /* SECP256K1_ECDSA_S2C_MAIN_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/tests_impl.h index 9705ddec..6ae970f1 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ecdsa_s2c/tests_impl.h @@ -12,22 +12,22 @@ static void test_ecdsa_s2c_tagged_hash(void) { unsigned char tag_data[14] = "s2c/ecdsa/data"; unsigned char tag_point[15] = "s2c/ecdsa/point"; - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_optimized; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; unsigned char output[32]; unsigned char output_optimized[32]; - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_data_sha256_tagged(&sha_optimized); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha_optimized, output_optimized); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, output_optimized, 32) == 0); - - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag_point, sizeof(tag_point)); - rustsecp256k1zkp_v0_10_0_s2c_ecdsa_point_sha256_tagged(&sha_optimized); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha_optimized, 
output_optimized); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, output_optimized, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag_data, sizeof(tag_data)); + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_data_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha_optimized, output_optimized); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, output_optimized, 32) == 0); + + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag_point, sizeof(tag_point)); + rustsecp256k1zkp_v0_10_1_s2c_ecdsa_point_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha_optimized, output_optimized); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, output_optimized, 32) == 0); } static void run_s2c_opening_test(void) { @@ -40,100 +40,100 @@ static void run_s2c_opening_test(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02 }; - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening opening; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening opening; /* First parsing, then serializing works */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, output, &opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, output, &opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, NULL, input)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, NULL, input)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, NULL, &opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, output, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, NULL, &opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, output, NULL)); /* Invalid pubkey makes parsing fail but they are not API errors */ input[0] = 0; /* bad oddness bit */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 0); input[0] = 2; input[31] = 1; /* point not on the curve */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 0); /* Try parsing and serializing a bunch of openings */ for (i = 0; i < COUNT; i++) { /* This is expected to fail in about 50% of iterations because the * points' x-coordinates are uniformly random */ - if (rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1) { - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, output, &opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(output, input, sizeof(output)) == 0); + if (rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_parse(CTX, &opening, input) == 1) { + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, output, &opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(output, input, sizeof(output)) == 0); } - rustsecp256k1zkp_v0_10_0_testrand256(&input[1]); + rustsecp256k1zkp_v0_10_1_testrand256(&input[1]); /* Set pubkey oddness tag to first bit of 
input[1] */ input[0] = (input[1] & 1) + 2; } } static void test_ecdsa_s2c_api(void) { - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; const unsigned char msg[32] = "mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm"; const unsigned char sec[32] = "ssssssssssssssssssssssssssssssss"; const unsigned char s2c_data[32] = "dddddddddddddddddddddddddddddddd"; const unsigned char hostrand[32] = "hrhrhrhrhrhrhrhrhrhrhrhrhrhrhrhr"; unsigned char hostrand_commitment[32]; - rustsecp256k1zkp_v0_10_0_pubkey pk; + rustsecp256k1zkp_v0_10_1_pubkey pk; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sec)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sec)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, NULL, &s2c_opening, msg, sec, s2c_data)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, NULL, &s2c_opening, msg, sec, s2c_data)); /* NULL opening is not an API error */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, NULL, msg, sec, s2c_data) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, NULL, sec, s2c_data)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, NULL, s2c_data)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, sec, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, sec, s2c_data) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(STATIC_CTX, &sig, &s2c_opening, msg, sec, s2c_data)); - - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg, &pk) == 1); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, NULL, s2c_data, &s2c_opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &sig, NULL, &s2c_opening)); 
- CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, NULL, msg, sec, s2c_data) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, NULL, sec, s2c_data)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, NULL, s2c_data)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, sec, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, &s2c_opening, msg, sec, s2c_data) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(STATIC_CTX, &sig, &s2c_opening, msg, sec, s2c_data)); + + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg, &pk) == 1); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, NULL, s2c_data, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &sig, NULL, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, &s2c_opening) == 1); /* wrong data is not an API error */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &sig, sec, &s2c_opening) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &sig, sec, &s2c_opening) == 0); /* Signing with NULL s2c_opening gives the same result */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &sig, NULL, msg, sec, s2c_data) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &sig, NULL, msg, sec, s2c_data) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &sig, s2c_data, &s2c_opening) == 1); /* anti-exfil 
*/ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(CTX, NULL, hostrand)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(CTX, hostrand_commitment, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(CTX, hostrand_commitment, hostrand) == 1); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, NULL, msg, sec, hostrand_commitment)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, NULL, sec, hostrand_commitment)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, NULL, hostrand_commitment)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, sec, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, sec, hostrand_commitment) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(STATIC_CTX, &s2c_opening, msg, sec, hostrand_commitment)); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, NULL, msg, sec, hostrand)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, &sig, NULL, sec, hostrand)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, &sig, msg, NULL, hostrand)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, &sig, msg, sec, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, &sig, msg, sec, hostrand) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_sign(STATIC_CTX, &sig, msg, sec, hostrand)); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, NULL, msg, &pk, hostrand, &s2c_opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &sig, NULL, &pk, hostrand, &s2c_opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &sig, msg, NULL, hostrand, 
&s2c_opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &sig, msg, &pk, NULL, &s2c_opening)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &sig, msg, &pk, hostrand, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &sig, msg, &pk, hostrand, &s2c_opening) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(CTX, NULL, hostrand)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(CTX, hostrand_commitment, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(CTX, hostrand_commitment, hostrand) == 1); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, NULL, msg, sec, hostrand_commitment)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, NULL, sec, hostrand_commitment)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, NULL, hostrand_commitment)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, sec, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, msg, sec, hostrand_commitment) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(STATIC_CTX, &s2c_opening, msg, sec, hostrand_commitment)); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, NULL, msg, sec, hostrand)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, &sig, NULL, sec, hostrand)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, &sig, msg, NULL, hostrand)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, &sig, msg, sec, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, &sig, msg, sec, hostrand) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_sign(STATIC_CTX, &sig, msg, sec, hostrand)); + + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, NULL, msg, &pk, hostrand, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &sig, NULL, &pk, hostrand, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &sig, msg, NULL, hostrand, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &sig, msg, &pk, NULL, &s2c_opening)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &sig, msg, &pk, hostrand, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &sig, msg, &pk, hostrand, &s2c_opening) == 1); } /* When using sign-to-contract commitments, the nonce function is fixed, so we can use fixtures to test. */ @@ -171,65 +171,65 @@ static void test_ecdsa_s2c_fixed_vectors(void) { size_t i; for (i = 0; i < sizeof(ecdsa_s2c_tests) / sizeof(ecdsa_s2c_tests[0]); i++) { - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; unsigned char opening_ser[33]; const ecdsa_s2c_test *test = &ecdsa_s2c_tests[i]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, test->s2c_data) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, opening_ser, &s2c_opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(test->expected_s2c_opening, opening_ser, sizeof(opening_ser)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &signature, test->s2c_data, &s2c_opening) == 1); + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, test->s2c_data) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, opening_ser, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(test->expected_s2c_opening, opening_ser, 
sizeof(opening_ser)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &signature, test->s2c_data, &s2c_opening) == 1); } } static void test_ecdsa_s2c_sign_verify(void) { unsigned char privkey[32]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; unsigned char message[32]; unsigned char noncedata[32]; unsigned char s2c_data[32]; unsigned char s2c_data2[32]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; /* Generate a random key, message, noncedata and s2c_data. */ { - rustsecp256k1zkp_v0_10_0_scalar key; + rustsecp256k1zkp_v0_10_1_scalar key; random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(privkey, &key); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, privkey) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(privkey, &key); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, privkey) == 1); - rustsecp256k1zkp_v0_10_0_testrand256_test(message); - rustsecp256k1zkp_v0_10_0_testrand256_test(noncedata); - rustsecp256k1zkp_v0_10_0_testrand256_test(s2c_data); - rustsecp256k1zkp_v0_10_0_testrand256_test(s2c_data2); + rustsecp256k1zkp_v0_10_1_testrand256_test(message); + rustsecp256k1zkp_v0_10_1_testrand256_test(noncedata); + rustsecp256k1zkp_v0_10_1_testrand256_test(s2c_data); + rustsecp256k1zkp_v0_10_1_testrand256_test(s2c_data2); } { /* invalid privkeys */ unsigned char zero_privkey[32] = {0}; unsigned char overflow_privkey[32] = "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, NULL, message, zero_privkey, s2c_data) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, NULL, message, overflow_privkey, s2c_data) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, NULL, message, zero_privkey, s2c_data) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, NULL, message, overflow_privkey, s2c_data) == 0); } /* Check that the sign-to-contract signature is valid, with s2c_data. Also check the commitment. */ { - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, s2c_data) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature, message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &signature, s2c_data, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, s2c_data) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature, message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &signature, s2c_data, &s2c_opening) == 1); } /* Check that an invalid commitment does not verify */ { unsigned char sigbytes[64]; size_t i; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, s2c_data) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature, message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, message, privkey, s2c_data) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature, message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, sigbytes, &signature) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, sigbytes, &signature) == 1); for(i = 0; i < 32; i++) { /* change one byte */ sigbytes[i] = (((int)sigbytes[i]) + 1) % 256; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &signature, s2c_data, &s2c_opening) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &signature, s2c_data, &s2c_opening) == 0); /* revert */ sigbytes[i] = (((int)sigbytes[i]) + 255) % 256; } @@ -248,12 +248,12 @@ static void test_ecdsa_anti_exfil_signer_commit(void) { }; /* Check that original pubnonce is derived from s2c_data */ for (i = 0; i < sizeof(ecdsa_s2c_tests) / sizeof(ecdsa_s2c_tests[0]); i++) { - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; unsigned char buf[33]; const ecdsa_s2c_test *test = &ecdsa_s2c_tests[i]; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, message, privkey, test->s2c_data) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_serialize(CTX, buf, &s2c_opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(test->expected_s2c_exfil_opening, buf, sizeof(buf)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, message, privkey, test->s2c_data) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_serialize(CTX, buf, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(test->expected_s2c_exfil_opening, buf, sizeof(buf)) == 0); } } @@ -263,63 +263,63 @@ static void test_ecdsa_anti_exfil(void) { unsigned char host_msg[32]; unsigned char host_commitment[32]; unsigned char host_nonce_contribution[32]; - rustsecp256k1zkp_v0_10_0_pubkey signer_pubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening s2c_opening; + rustsecp256k1zkp_v0_10_1_pubkey signer_pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening s2c_opening; /* Generate a random key, message. 
*/ { - rustsecp256k1zkp_v0_10_0_scalar key; + rustsecp256k1zkp_v0_10_1_scalar key; random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(signer_privkey, &key); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &signer_pubkey, signer_privkey) == 1); - rustsecp256k1zkp_v0_10_0_testrand256_test(host_msg); - rustsecp256k1zkp_v0_10_0_testrand256_test(host_nonce_contribution); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(signer_privkey, &key); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &signer_pubkey, signer_privkey) == 1); + rustsecp256k1zkp_v0_10_1_testrand256_test(host_msg); + rustsecp256k1zkp_v0_10_1_testrand256_test(host_nonce_contribution); } /* Protocol step 1. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_host_commit(CTX, host_commitment, host_nonce_contribution) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_host_commit(CTX, host_commitment, host_nonce_contribution) == 1); /* Protocol step 2. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, host_msg, signer_privkey, host_commitment) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_anti_exfil_signer_commit(CTX, &s2c_opening, host_msg, signer_privkey, host_commitment) == 1); /* Protocol step 3: host_nonce_contribution send to signer to be used in step 4. */ /* Protocol step 4. */ - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_sign(CTX, &signature, host_msg, signer_privkey, host_nonce_contribution) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_sign(CTX, &signature, host_msg, signer_privkey, host_nonce_contribution) == 1); /* Protocol step 5. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 1); /* Protocol step 5 (explicitly) */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &signature, host_nonce_contribution, &s2c_opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature, host_msg, &signer_pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &signature, host_nonce_contribution, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature, host_msg, &signer_pubkey) == 1); { /* host_verify: commitment does not match */ unsigned char sigbytes[64]; size_t i; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, sigbytes, &signature) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, sigbytes, &signature) == 1); for(i = 0; i < 32; i++) { /* change one byte */ sigbytes[i] += 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_verify_commit(CTX, &signature, host_nonce_contribution, &s2c_opening) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_verify_commit(CTX, &signature, host_nonce_contribution, &s2c_opening) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); /* revert */ sigbytes[i] -= 1; } - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &signature, sigbytes) == 1); } { /* host_verify: message does not match */ unsigned char bad_msg[32]; - rustsecp256k1zkp_v0_10_0_testrand256_test(bad_msg); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, bad_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); + rustsecp256k1zkp_v0_10_1_testrand256_test(bad_msg); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, bad_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); } { /* s2c_sign: host provided data that didn't match commitment */ - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening orig_opening = s2c_opening; + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening orig_opening = s2c_opening; unsigned char bad_nonce_contribution[32] = { 1, 2, 3, 4 }; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, host_msg, signer_privkey, bad_nonce_contribution) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_sign(CTX, &signature, &s2c_opening, host_msg, signer_privkey, bad_nonce_contribution) == 1); /* good signature but the opening (original public nonce does not match the original */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature, host_msg, &signer_pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, bad_nonce_contribution, &s2c_opening) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&s2c_opening, &orig_opening, sizeof(s2c_opening)) != 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature, host_msg, &signer_pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, host_nonce_contribution, &s2c_opening) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_anti_exfil_host_verify(CTX, &signature, host_msg, &signer_pubkey, bad_nonce_contribution, &s2c_opening) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&s2c_opening, &orig_opening, sizeof(s2c_opening)) != 0); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/Makefile.am.include index 04e1c1cf..e3999fbc 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_ellswift.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_ellswift.h noinst_HEADERS += src/modules/ellswift/bench_impl.h noinst_HEADERS += src/modules/ellswift/main_impl.h noinst_HEADERS += src/modules/ellswift/tests_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/bench_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/bench_impl.h index bb2b5689..85039f7c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/bench_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/bench_impl.h @@ -9,8 +9,8 @@ #include "../../../include/secp256k1_ellswift.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context *ctx; - rustsecp256k1zkp_v0_10_0_pubkey point[256]; + rustsecp256k1zkp_v0_10_1_context *ctx; + rustsecp256k1zkp_v0_10_1_pubkey point[256]; unsigned char rnd64[64]; } bench_ellswift_data; @@ -30,12 +30,12 @@ static void bench_ellswift_setup(void *arg) { memcpy(data->rnd64, init, 64); for (i = 0; i < 256; ++i) { int j; - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_decode(data->ctx, &data->point[i], 
data->rnd64)); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_decode(data->ctx, &data->point[i], data->rnd64)); for (j = 0; j < 64; ++j) { data->rnd64[j] += 1; } } - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_encode(data->ctx, data->rnd64, &data->point[255], init + 16)); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_encode(data->ctx, data->rnd64, &data->point[255], init + 16)); } static void bench_ellswift_encode(void *arg, int iters) { @@ -43,7 +43,7 @@ static void bench_ellswift_encode(void *arg, int iters) { bench_ellswift_data *data = (bench_ellswift_data*)arg; for (i = 0; i < iters; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_encode(data->ctx, data->rnd64, &data->point[i & 255], data->rnd64 + 16)); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_encode(data->ctx, data->rnd64, &data->point[i & 255], data->rnd64 + 16)); } } @@ -53,21 +53,21 @@ static void bench_ellswift_create(void *arg, int iters) { for (i = 0; i < iters; i++) { unsigned char buf[64]; - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_create(data->ctx, buf, data->rnd64, data->rnd64 + 32)); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_create(data->ctx, buf, data->rnd64, data->rnd64 + 32)); memcpy(data->rnd64, buf, 64); } } static void bench_ellswift_decode(void *arg, int iters) { int i; - rustsecp256k1zkp_v0_10_0_pubkey out; + rustsecp256k1zkp_v0_10_1_pubkey out; size_t len; bench_ellswift_data *data = (bench_ellswift_data*)arg; for (i = 0; i < iters; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_decode(data->ctx, &out, data->rnd64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_decode(data->ctx, &out, data->rnd64) == 1); len = 33; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(data->ctx, data->rnd64 + (i % 32), &len, &out, SECP256K1_EC_COMPRESSED)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(data->ctx, data->rnd64 + (i % 32), &len, &out, SECP256K1_EC_COMPRESSED)); } } @@ -77,13 +77,13 @@ static void bench_ellswift_xdh(void *arg, int iters) { for (i = 0; i < iters; i++) { int party = i & 1; - 
CHECK(rustsecp256k1zkp_v0_10_0_ellswift_xdh(data->ctx, + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_xdh(data->ctx, data->rnd64 + (i % 33), data->rnd64, data->rnd64, data->rnd64 + ((i + 16) % 33), party, - rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324, + rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324, NULL) == 1); } } @@ -93,14 +93,14 @@ void run_ellswift_bench(int iters, int argc, char **argv) { int d = argc == 1; /* create a context with signing capabilities */ - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (d || have_flag(argc, argv, "ellswift") || have_flag(argc, argv, "encode") || have_flag(argc, argv, "ellswift_encode")) run_benchmark("ellswift_encode", bench_ellswift_encode, bench_ellswift_setup, NULL, &data, 10, iters); if (d || have_flag(argc, argv, "ellswift") || have_flag(argc, argv, "decode") || have_flag(argc, argv, "ellswift_decode")) run_benchmark("ellswift_decode", bench_ellswift_decode, bench_ellswift_setup, NULL, &data, 10, iters); if (d || have_flag(argc, argv, "ellswift") || have_flag(argc, argv, "keygen") || have_flag(argc, argv, "ellswift_keygen")) run_benchmark("ellswift_keygen", bench_ellswift_create, bench_ellswift_setup, NULL, &data, 10, iters); if (d || have_flag(argc, argv, "ellswift") || have_flag(argc, argv, "ecdh") || have_flag(argc, argv, "ellswift_ecdh")) run_benchmark("ellswift_ecdh", bench_ellswift_xdh, bench_ellswift_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/main_impl.h index a55996f4..107c7f2e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/main_impl.h @@ -12,16 
+12,16 @@ #include "../../hash.h" /** c1 = (sqrt(-3)-1)/2 */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ellswift_c1 = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa40); +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ellswift_c1 = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa40); /** c2 = (-sqrt(-3)-1)/2 = -(c1+1) */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ellswift_c2 = SECP256K1_FE_CONST(0x7ae96a2b, 0x657c0710, 0x6e64479e, 0xac3434e9, 0x9cf04975, 0x12f58995, 0xc1396c28, 0x719501ee); +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ellswift_c2 = SECP256K1_FE_CONST(0x7ae96a2b, 0x657c0710, 0x6e64479e, 0xac3434e9, 0x9cf04975, 0x12f58995, 0xc1396c28, 0x719501ee); /** c3 = (-sqrt(-3)+1)/2 = -c1 = c2+1 */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ellswift_c3 = SECP256K1_FE_CONST(0x7ae96a2b, 0x657c0710, 0x6e64479e, 0xac3434e9, 0x9cf04975, 0x12f58995, 0xc1396c28, 0x719501ef); +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ellswift_c3 = SECP256K1_FE_CONST(0x7ae96a2b, 0x657c0710, 0x6e64479e, 0xac3434e9, 0x9cf04975, 0x12f58995, 0xc1396c28, 0x719501ef); /** c4 = (sqrt(-3)+1)/2 = -c2 = c1+1 */ -static const rustsecp256k1zkp_v0_10_0_fe rustsecp256k1zkp_v0_10_0_ellswift_c4 = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa41); +static const rustsecp256k1zkp_v0_10_1_fe rustsecp256k1zkp_v0_10_1_ellswift_c4 = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa41); /** Decode ElligatorSwift encoding (u, t) to a fraction xn/xd representing a curve X coordinate. 
*/ -static void rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_frac_var(rustsecp256k1zkp_v0_10_0_fe *xn, rustsecp256k1zkp_v0_10_0_fe *xd, const rustsecp256k1zkp_v0_10_0_fe *u, const rustsecp256k1zkp_v0_10_0_fe *t) { +static void rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_frac_var(rustsecp256k1zkp_v0_10_1_fe *xn, rustsecp256k1zkp_v0_10_1_fe *xd, const rustsecp256k1zkp_v0_10_1_fe *u, const rustsecp256k1zkp_v0_10_1_fe *t) { /* The implemented algorithm is the following (all operations in GF(p)): * * - Let c0 = sqrt(-3) = 0xa2d2ba93507f1df233770c2a797962cc61f6d15da14ecd47d8d27ae1cd5f852. @@ -83,67 +83,67 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_frac_var(rustsecp256k1zkp * - If x2 = u*(c1*s+c2*g)/(g+s) is a valid x coordinate, return it. * - Return x1 = -(x2+u). */ - rustsecp256k1zkp_v0_10_0_fe u1, s, g, p, d, n, l; + rustsecp256k1zkp_v0_10_1_fe u1, s, g, p, d, n, l; u1 = *u; - if (EXPECT(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&u1), 0)) u1 = rustsecp256k1zkp_v0_10_0_fe_one; - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, t); - if (EXPECT(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(t), 0)) s = rustsecp256k1zkp_v0_10_0_fe_one; - rustsecp256k1zkp_v0_10_0_fe_sqr(&l, &u1); /* l = u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&g, &l, &u1); /* g = u^3 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&g, SECP256K1_B); /* g = u^3 + 7 */ + if (EXPECT(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&u1), 0)) u1 = rustsecp256k1zkp_v0_10_1_fe_one; + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, t); + if (EXPECT(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(t), 0)) s = rustsecp256k1zkp_v0_10_1_fe_one; + rustsecp256k1zkp_v0_10_1_fe_sqr(&l, &u1); /* l = u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&g, &l, &u1); /* g = u^3 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&g, SECP256K1_B); /* g = u^3 + 7 */ p = g; /* p = g */ - rustsecp256k1zkp_v0_10_0_fe_add(&p, &s); /* p = g+s */ - if (EXPECT(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&p), 0)) { - 
rustsecp256k1zkp_v0_10_0_fe_mul_int(&s, 4); + rustsecp256k1zkp_v0_10_1_fe_add(&p, &s); /* p = g+s */ + if (EXPECT(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&p), 0)) { + rustsecp256k1zkp_v0_10_1_fe_mul_int(&s, 4); /* Recompute p = g+s */ p = g; /* p = g */ - rustsecp256k1zkp_v0_10_0_fe_add(&p, &s); /* p = g+s */ + rustsecp256k1zkp_v0_10_1_fe_add(&p, &s); /* p = g+s */ } - rustsecp256k1zkp_v0_10_0_fe_mul(&d, &s, &l); /* d = s*u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&d, 3); /* d = 3*s*u^2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&l, &p); /* l = (g+s)^2 */ - rustsecp256k1zkp_v0_10_0_fe_negate(&l, &l, 1); /* l = -(g+s)^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &d, &u1); /* n = 3*s*u^3 */ - rustsecp256k1zkp_v0_10_0_fe_add(&n, &l); /* n = 3*s*u^3-(g+s)^2 */ - if (rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(&n, &d)) { + rustsecp256k1zkp_v0_10_1_fe_mul(&d, &s, &l); /* d = s*u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&d, 3); /* d = 3*s*u^2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&l, &p); /* l = (g+s)^2 */ + rustsecp256k1zkp_v0_10_1_fe_negate(&l, &l, 1); /* l = -(g+s)^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &d, &u1); /* n = 3*s*u^3 */ + rustsecp256k1zkp_v0_10_1_fe_add(&n, &l); /* n = 3*s*u^3-(g+s)^2 */ + if (rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(&n, &d)) { /* Return x3 = n/d = (3*s*u^3-(g+s)^2)/(3*s*u^2) */ *xn = n; *xd = d; return; } *xd = p; - rustsecp256k1zkp_v0_10_0_fe_mul(&l, &rustsecp256k1zkp_v0_10_0_ellswift_c1, &s); /* l = c1*s */ - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &rustsecp256k1zkp_v0_10_0_ellswift_c2, &g); /* n = c2*g */ - rustsecp256k1zkp_v0_10_0_fe_add(&n, &l); /* n = c1*s+c2*g */ - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &n, &u1); /* n = u*(c1*s+c2*g) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&l, &rustsecp256k1zkp_v0_10_1_ellswift_c1, &s); /* l = c1*s */ + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &rustsecp256k1zkp_v0_10_1_ellswift_c2, &g); /* n = c2*g */ + rustsecp256k1zkp_v0_10_1_fe_add(&n, &l); /* n = c1*s+c2*g */ + 
rustsecp256k1zkp_v0_10_1_fe_mul(&n, &n, &u1); /* n = u*(c1*s+c2*g) */ /* Possible optimization: in the invocation below, p^2 = (g+s)^2 is computed, * which we already have computed above. This could be deduplicated. */ - if (rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(&n, &p)) { + if (rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(&n, &p)) { /* Return x2 = n/p = u*(c1*s+c2*g)/(g+s) */ *xn = n; return; } - rustsecp256k1zkp_v0_10_0_fe_mul(&l, &p, &u1); /* l = u*(g+s) */ - rustsecp256k1zkp_v0_10_0_fe_add(&n, &l); /* n = u*(c1*s+c2*g)+u*(g+s) */ - rustsecp256k1zkp_v0_10_0_fe_negate(xn, &n, 2); /* n = -u*(c1*s+c2*g)-u*(g+s) */ + rustsecp256k1zkp_v0_10_1_fe_mul(&l, &p, &u1); /* l = u*(g+s) */ + rustsecp256k1zkp_v0_10_1_fe_add(&n, &l); /* n = u*(c1*s+c2*g)+u*(g+s) */ + rustsecp256k1zkp_v0_10_1_fe_negate(xn, &n, 2); /* n = -u*(c1*s+c2*g)-u*(g+s) */ - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(xn, &p)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(xn, &p)); /* Return x3 = n/p = -(u*(c1*s+c2*g)/(g+s)+u) */ } /** Decode ElligatorSwift encoding (u, t) to X coordinate. */ -static void rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_var(rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_fe *u, const rustsecp256k1zkp_v0_10_0_fe *t) { - rustsecp256k1zkp_v0_10_0_fe xn, xd; - rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_frac_var(&xn, &xd, u, t); - rustsecp256k1zkp_v0_10_0_fe_inv_var(&xd, &xd); - rustsecp256k1zkp_v0_10_0_fe_mul(x, &xn, &xd); +static void rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_var(rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_fe *u, const rustsecp256k1zkp_v0_10_1_fe *t) { + rustsecp256k1zkp_v0_10_1_fe xn, xd; + rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_frac_var(&xn, &xd, u, t); + rustsecp256k1zkp_v0_10_1_fe_inv_var(&xd, &xd); + rustsecp256k1zkp_v0_10_1_fe_mul(x, &xn, &xd); } /** Decode ElligatorSwift encoding (u, t) to point P. 
*/ -static void rustsecp256k1zkp_v0_10_0_ellswift_swiftec_var(rustsecp256k1zkp_v0_10_0_ge *p, const rustsecp256k1zkp_v0_10_0_fe *u, const rustsecp256k1zkp_v0_10_0_fe *t) { - rustsecp256k1zkp_v0_10_0_fe x; - rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_var(&x, u, t); - rustsecp256k1zkp_v0_10_0_ge_set_xo_var(p, &x, rustsecp256k1zkp_v0_10_0_fe_is_odd(t)); +static void rustsecp256k1zkp_v0_10_1_ellswift_swiftec_var(rustsecp256k1zkp_v0_10_1_ge *p, const rustsecp256k1zkp_v0_10_1_fe *u, const rustsecp256k1zkp_v0_10_1_fe *t) { + rustsecp256k1zkp_v0_10_1_fe x; + rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_var(&x, u, t); + rustsecp256k1zkp_v0_10_1_ge_set_xo_var(p, &x, rustsecp256k1zkp_v0_10_1_fe_is_odd(t)); } /* Try to complete an ElligatorSwift encoding (u, t) for X coordinate x, given u and x. @@ -153,7 +153,7 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_swiftec_var(rustsecp256k1zkp_v0_10 * distinct input argument c (in range 0-7), and some or all of these may return failure. * The following guarantees exist: * - Given (x, u), no two distinct c values give the same successful result t. - * - Every successful result maps back to x through rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_var. + * - Every successful result maps back to x through rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_var. * - Given (x, u), all t values that map back to x can be reached by combining the * successful results from this function over all c values, with the exception of: * - this function cannot be called with u=0 @@ -165,7 +165,7 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_swiftec_var(rustsecp256k1zkp_v0_10 * encoding more closely: c=0 through c=3 match branches 1..4 in the paper, while c=4 through * c=7 are copies of those with an additional negation of sqrt(w). 
*/ -static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v0_10_0_fe *t, const rustsecp256k1zkp_v0_10_0_fe *x_in, const rustsecp256k1zkp_v0_10_0_fe *u_in, int c) { +static int rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v0_10_1_fe *t, const rustsecp256k1zkp_v0_10_1_fe *x_in, const rustsecp256k1zkp_v0_10_1_fe *u_in, int c) { /* The implemented algorithm is this (all arithmetic, except involving c, is mod p): * * - If (c & 2) = 0: @@ -186,14 +186,14 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v * - If (c & 5) = 4: return w*(c3*u + v). * - If (c & 5) = 5: return -w*(c4*u + v). */ - rustsecp256k1zkp_v0_10_0_fe x = *x_in, u = *u_in, g, v, s, m, r, q; + rustsecp256k1zkp_v0_10_1_fe x = *x_in, u = *u_in, g, v, s, m, r, q; int ret; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&x); - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&u); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&x); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&u); VERIFY_CHECK(c >= 0 && c < 8); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&x)); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&x)); if (!(c & 2)) { /* c is in {0, 1, 4, 5}. In this case we look for an inverse under the x1 (if c=0 or @@ -203,16 +203,16 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v * back under the x3 formula instead (which has priority over x1 and x2, so the decoding * would not match x). */ m = x; /* m = x */ - rustsecp256k1zkp_v0_10_0_fe_add(&m, &u); /* m = u+x */ - rustsecp256k1zkp_v0_10_0_fe_negate(&m, &m, 2); /* m = -u-x */ + rustsecp256k1zkp_v0_10_1_fe_add(&m, &u); /* m = u+x */ + rustsecp256k1zkp_v0_10_1_fe_negate(&m, &m, 2); /* m = -u-x */ /* Test if (-u-x) is a valid X coordinate. If so, fail. 
*/ - if (rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&m)) return 0; + if (rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&m)) return 0; /* Let s = -(u^3 + 7)/(u^2 + u*x + x^2) [first part] */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &m); /* s = (u+x)^2 */ - rustsecp256k1zkp_v0_10_0_fe_negate(&s, &s, 1); /* s = -(u+x)^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&m, &u, &x); /* m = u*x */ - rustsecp256k1zkp_v0_10_0_fe_add(&s, &m); /* s = -(u^2 + u*x + x^2) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &m); /* s = (u+x)^2 */ + rustsecp256k1zkp_v0_10_1_fe_negate(&s, &s, 1); /* s = -(u+x)^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&m, &u, &x); /* m = u*x */ + rustsecp256k1zkp_v0_10_1_fe_add(&s, &m); /* s = -(u^2 + u*x + x^2) */ /* Note that at this point, s = 0 is impossible. If it were the case: * s = -(u^2 + u*x + x^2) = 0 @@ -224,23 +224,23 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v * => x^3 + B = (-u - x)^3 + B * * However, we know x^3 + B is square (because x is on the curve) and - * that (-u-x)^3 + B is not square (the rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&m) + * that (-u-x)^3 + B is not square (the rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&m) * test above would have failed). This is a contradiction, and thus the * assumption s=0 is false. */ - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&s)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&s)); /* If s is not square, fail. We have not fully computed s yet, but s is square iff * -(u^3+7)*(u^2+u*x+x^2) is square (because a/b is square iff a*b is square and b is * nonzero). 
*/ - rustsecp256k1zkp_v0_10_0_fe_sqr(&g, &u); /* g = u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&g, &g, &u); /* g = u^3 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&g, SECP256K1_B); /* g = u^3+7 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&m, &s, &g); /* m = -(u^3 + 7)*(u^2 + u*x + x^2) */ - if (!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&m)) return 0; + rustsecp256k1zkp_v0_10_1_fe_sqr(&g, &u); /* g = u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&g, &g, &u); /* g = u^3 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&g, SECP256K1_B); /* g = u^3+7 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&m, &s, &g); /* m = -(u^3 + 7)*(u^2 + u*x + x^2) */ + if (!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&m)) return 0; /* Let s = -(u^3 + 7)/(u^2 + u*x + x^2) [second part] */ - rustsecp256k1zkp_v0_10_0_fe_inv_var(&s, &s); /* s = -1/(u^2 + u*x + x^2) [no div by 0] */ - rustsecp256k1zkp_v0_10_0_fe_mul(&s, &s, &g); /* s = -(u^3 + 7)/(u^2 + u*x + x^2) */ + rustsecp256k1zkp_v0_10_1_fe_inv_var(&s, &s); /* s = -1/(u^2 + u*x + x^2) [no div by 0] */ + rustsecp256k1zkp_v0_10_1_fe_mul(&s, &s, &g); /* s = -(u^3 + 7)/(u^2 + u*x + x^2) */ /* Let v = x. */ v = x; @@ -248,25 +248,25 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v /* c is in {2, 3, 6, 7}. In this case we look for an inverse under the x3 formula. */ /* Let s = x-u. */ - rustsecp256k1zkp_v0_10_0_fe_negate(&m, &u, 1); /* m = -u */ + rustsecp256k1zkp_v0_10_1_fe_negate(&m, &u, 1); /* m = -u */ s = m; /* s = -u */ - rustsecp256k1zkp_v0_10_0_fe_add(&s, &x); /* s = x-u */ + rustsecp256k1zkp_v0_10_1_fe_add(&s, &x); /* s = x-u */ /* If s is not square, fail. */ - if (!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&s)) return 0; + if (!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&s)) return 0; /* Let r = sqrt(-s*(4*(u^3+7)+3*u^2*s)); fail if it doesn't exist. 
*/ - rustsecp256k1zkp_v0_10_0_fe_sqr(&g, &u); /* g = u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&q, &s, &g); /* q = s*u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&q, 3); /* q = 3*s*u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&g, &g, &u); /* g = u^3 */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&g, 4); /* g = 4*u^3 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&g, 4 * SECP256K1_B); /* g = 4*(u^3+7) */ - rustsecp256k1zkp_v0_10_0_fe_add(&q, &g); /* q = 4*(u^3+7)+3*s*u^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&q, &q, &s); /* q = s*(4*(u^3+7)+3*u^2*s) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&q, &q, 1); /* q = -s*(4*(u^3+7)+3*u^2*s) */ - if (!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&q)) return 0; - ret = rustsecp256k1zkp_v0_10_0_fe_sqrt(&r, &q); /* r = sqrt(-s*(4*(u^3+7)+3*u^2*s)) */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&g, &u); /* g = u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&q, &s, &g); /* q = s*u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&q, 3); /* q = 3*s*u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&g, &g, &u); /* g = u^3 */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&g, 4); /* g = 4*u^3 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&g, 4 * SECP256K1_B); /* g = 4*(u^3+7) */ + rustsecp256k1zkp_v0_10_1_fe_add(&q, &g); /* q = 4*(u^3+7)+3*s*u^2 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&q, &q, &s); /* q = s*(4*(u^3+7)+3*u^2*s) */ + rustsecp256k1zkp_v0_10_1_fe_negate(&q, &q, 1); /* q = -s*(4*(u^3+7)+3*u^2*s) */ + if (!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&q)) return 0; + ret = rustsecp256k1zkp_v0_10_1_fe_sqrt(&r, &q); /* r = sqrt(-s*(4*(u^3+7)+3*u^2*s)) */ #ifdef VERIFY VERIFY_CHECK(ret); #else @@ -274,31 +274,31 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v #endif /* If (c & 1) = 1 and r = 0, fail. */ - if (EXPECT((c & 1) && rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&r), 0)) return 0; + if (EXPECT((c & 1) && rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&r), 0)) return 0; /* If s = 0, fail. 
*/ - if (EXPECT(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&s), 0)) return 0; + if (EXPECT(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&s), 0)) return 0; /* Let v = (r/s-u)/2. */ - rustsecp256k1zkp_v0_10_0_fe_inv_var(&v, &s); /* v = 1/s [no div by 0] */ - rustsecp256k1zkp_v0_10_0_fe_mul(&v, &v, &r); /* v = r/s */ - rustsecp256k1zkp_v0_10_0_fe_add(&v, &m); /* v = r/s-u */ - rustsecp256k1zkp_v0_10_0_fe_half(&v); /* v = (r/s-u)/2 */ + rustsecp256k1zkp_v0_10_1_fe_inv_var(&v, &s); /* v = 1/s [no div by 0] */ + rustsecp256k1zkp_v0_10_1_fe_mul(&v, &v, &r); /* v = r/s */ + rustsecp256k1zkp_v0_10_1_fe_add(&v, &m); /* v = r/s-u */ + rustsecp256k1zkp_v0_10_1_fe_half(&v); /* v = (r/s-u)/2 */ } /* Let w = sqrt(s). */ - ret = rustsecp256k1zkp_v0_10_0_fe_sqrt(&m, &s); /* m = sqrt(s) = w */ + ret = rustsecp256k1zkp_v0_10_1_fe_sqrt(&m, &s); /* m = sqrt(s) = w */ VERIFY_CHECK(ret); /* Return logic. */ if ((c & 5) == 0 || (c & 5) == 5) { - rustsecp256k1zkp_v0_10_0_fe_negate(&m, &m, 1); /* m = -w */ + rustsecp256k1zkp_v0_10_1_fe_negate(&m, &m, 1); /* m = -w */ } /* Now m = {-w if c&5=0 or c&5=5; w otherwise}. */ - rustsecp256k1zkp_v0_10_0_fe_mul(&u, &u, c&1 ? &rustsecp256k1zkp_v0_10_0_ellswift_c4 : &rustsecp256k1zkp_v0_10_0_ellswift_c3); + rustsecp256k1zkp_v0_10_1_fe_mul(&u, &u, c&1 ? &rustsecp256k1zkp_v0_10_1_ellswift_c4 : &rustsecp256k1zkp_v0_10_1_ellswift_c3); /* u = {c4 if c&1=1; c3 otherwise}*u */ - rustsecp256k1zkp_v0_10_0_fe_add(&u, &v); /* u = {c4 if c&1=1; c3 otherwise}*u + v */ - rustsecp256k1zkp_v0_10_0_fe_mul(t, &m, &u); + rustsecp256k1zkp_v0_10_1_fe_add(&u, &v); /* u = {c4 if c&1=1; c3 otherwise}*u + v */ + rustsecp256k1zkp_v0_10_1_fe_mul(t, &m, &u); return 1; } @@ -307,8 +307,8 @@ static int rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(rustsecp256k1zkp_v * hasher is a SHA256 object to which an incrementing 4-byte counter is written to generate randomness. 
* Writing 13 bytes (4 bytes for counter, plus 9 bytes for the SHA256 padding) cannot cross a * 64-byte block size boundary (to make sure it only triggers a single SHA256 compression). */ -static void rustsecp256k1zkp_v0_10_0_ellswift_prng(unsigned char* out32, const rustsecp256k1zkp_v0_10_0_sha256 *hasher, uint32_t cnt) { - rustsecp256k1zkp_v0_10_0_sha256 hash = *hasher; +static void rustsecp256k1zkp_v0_10_1_ellswift_prng(unsigned char* out32, const rustsecp256k1zkp_v0_10_1_sha256 *hasher, uint32_t cnt) { + rustsecp256k1zkp_v0_10_1_sha256 hash = *hasher; unsigned char buf4[4]; #ifdef VERIFY size_t blocks = hash.bytes >> 6; @@ -317,8 +317,8 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_prng(unsigned char* out32, const r buf4[1] = cnt >> 8; buf4[2] = cnt >> 16; buf4[3] = cnt >> 24; - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, buf4, 4); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hash, out32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, buf4, 4); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hash, out32); /* Writing and finalizing together should trigger exactly one SHA256 compression. */ VERIFY_CHECK(((hash.bytes) >> 6) == (blocks + 1)); @@ -329,8 +329,8 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_prng(unsigned char* out32, const r * u32 is the 32-byte big endian encoding of u; t is the output field element t that still * needs encoding. * - * hasher is a hasher in the rustsecp256k1zkp_v0_10_0_ellswift_prng sense, with the same restrictions. */ -static void rustsecp256k1zkp_v0_10_0_ellswift_xelligatorswift_var(unsigned char *u32, rustsecp256k1zkp_v0_10_0_fe *t, const rustsecp256k1zkp_v0_10_0_fe *x, const rustsecp256k1zkp_v0_10_0_sha256 *hasher) { + * hasher is a hasher in the rustsecp256k1zkp_v0_10_1_ellswift_prng sense, with the same restrictions. 
*/ +static void rustsecp256k1zkp_v0_10_1_ellswift_xelligatorswift_var(unsigned char *u32, rustsecp256k1zkp_v0_10_1_fe *t, const rustsecp256k1zkp_v0_10_1_fe *x, const rustsecp256k1zkp_v0_10_1_sha256 *hasher) { /* Pool of 3-bit branch values. */ unsigned char branch_hash[32]; /* Number of 3-bit values in branch_hash left. */ @@ -343,47 +343,47 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_xelligatorswift_var(unsigned char uint32_t cnt = 0; while (1) { int branch; - rustsecp256k1zkp_v0_10_0_fe u; + rustsecp256k1zkp_v0_10_1_fe u; /* If the pool of branch values is empty, populate it. */ if (branches_left == 0) { - rustsecp256k1zkp_v0_10_0_ellswift_prng(branch_hash, hasher, cnt++); + rustsecp256k1zkp_v0_10_1_ellswift_prng(branch_hash, hasher, cnt++); branches_left = 64; } /* Take a 3-bit branch value from the branch pool (top bit is discarded). */ --branches_left; branch = (branch_hash[branches_left >> 1] >> ((branches_left & 1) << 2)) & 7; /* Compute a new u value by hashing. */ - rustsecp256k1zkp_v0_10_0_ellswift_prng(u32, hasher, cnt++); + rustsecp256k1zkp_v0_10_1_ellswift_prng(u32, hasher, cnt++); /* overflow is not a problem (we prefer uniform u32 over uniform u). */ - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&u, u32); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&u, u32); /* Since u is the output of a hash, it should practically never be 0. We could apply the * u=0 to u=1 correction here too to deal with that case still, but it's such a low * probability event that we do not bother. */ - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&u)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&u)); /* Find a remainder t, and return it if found. */ - if (EXPECT(rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(t, x, &u, branch), 0)) break; + if (EXPECT(rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_inv_var(t, x, &u, branch), 0)) break; } } /** Find an ElligatorSwift encoding (u, t) for point P. 
* - * This is similar rustsecp256k1zkp_v0_10_0_ellswift_xelligatorswift_var, except it takes a full group element p + * This is similar rustsecp256k1zkp_v0_10_1_ellswift_xelligatorswift_var, except it takes a full group element p * as input, and returns an encoding that matches the provided Y coordinate rather than a random * one. */ -static void rustsecp256k1zkp_v0_10_0_ellswift_elligatorswift_var(unsigned char *u32, rustsecp256k1zkp_v0_10_0_fe *t, const rustsecp256k1zkp_v0_10_0_ge *p, const rustsecp256k1zkp_v0_10_0_sha256 *hasher) { - rustsecp256k1zkp_v0_10_0_ellswift_xelligatorswift_var(u32, t, &p->x, hasher); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(t); - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(t) != rustsecp256k1zkp_v0_10_0_fe_is_odd(&p->y)) { - rustsecp256k1zkp_v0_10_0_fe_negate(t, t, 1); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(t); +static void rustsecp256k1zkp_v0_10_1_ellswift_elligatorswift_var(unsigned char *u32, rustsecp256k1zkp_v0_10_1_fe *t, const rustsecp256k1zkp_v0_10_1_ge *p, const rustsecp256k1zkp_v0_10_1_sha256 *hasher) { + rustsecp256k1zkp_v0_10_1_ellswift_xelligatorswift_var(u32, t, &p->x, hasher); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(t); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(t) != rustsecp256k1zkp_v0_10_1_fe_is_odd(&p->y)) { + rustsecp256k1zkp_v0_10_1_fe_negate(t, t, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(t); } } -/** Set hash state to the BIP340 tagged hash midstate for "rustsecp256k1zkp_v0_10_0_ellswift_encode". */ -static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_encode(rustsecp256k1zkp_v0_10_0_sha256* hash) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(hash); +/** Set hash state to the BIP340 tagged hash midstate for "rustsecp256k1zkp_v0_10_1_ellswift_encode". 
*/ +static void rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_encode(rustsecp256k1zkp_v0_10_1_sha256* hash) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(hash); hash->s[0] = 0xd1a6524bul; hash->s[1] = 0x028594b3ul; hash->s[2] = 0x96e42f4eul; @@ -396,35 +396,35 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_encode(rustsecp256k1zk hash->bytes = 64; } -int rustsecp256k1zkp_v0_10_0_ellswift_encode(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *ell64, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *rnd32) { - rustsecp256k1zkp_v0_10_0_ge p; +int rustsecp256k1zkp_v0_10_1_ellswift_encode(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *ell64, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *rnd32) { + rustsecp256k1zkp_v0_10_1_ge p; VERIFY_CHECK(ctx != NULL); ARG_CHECK(ell64 != NULL); ARG_CHECK(pubkey != NULL); ARG_CHECK(rnd32 != NULL); - if (rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &p, pubkey)) { - rustsecp256k1zkp_v0_10_0_fe t; + if (rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &p, pubkey)) { + rustsecp256k1zkp_v0_10_1_fe t; unsigned char p64[64] = {0}; size_t ser_size; int ser_ret; - rustsecp256k1zkp_v0_10_0_sha256 hash; + rustsecp256k1zkp_v0_10_1_sha256 hash; /* Set up hasher state; the used RNG is H(pubkey || "\x00"*31 || rnd32 || cnt++), using - * BIP340 tagged hash with tag "rustsecp256k1zkp_v0_10_0_ellswift_encode". */ - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_encode(&hash); - ser_ret = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&p, p64, &ser_size, 1); + * BIP340 tagged hash with tag "rustsecp256k1zkp_v0_10_1_ellswift_encode". 
*/ + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_encode(&hash); + ser_ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&p, p64, &ser_size, 1); #ifdef VERIFY VERIFY_CHECK(ser_ret && ser_size == 33); #else (void)ser_ret; #endif - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, p64, sizeof(p64)); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, rnd32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, p64, sizeof(p64)); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, rnd32, 32); /* Compute ElligatorSwift encoding and construct output. */ - rustsecp256k1zkp_v0_10_0_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ - rustsecp256k1zkp_v0_10_0_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ + rustsecp256k1zkp_v0_10_1_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ + rustsecp256k1zkp_v0_10_1_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ return 1; } /* Only reached in case the provided pubkey is invalid. */ @@ -432,9 +432,9 @@ int rustsecp256k1zkp_v0_10_0_ellswift_encode(const rustsecp256k1zkp_v0_10_0_cont return 0; } -/** Set hash state to the BIP340 tagged hash midstate for "rustsecp256k1zkp_v0_10_0_ellswift_create". */ -static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_create(rustsecp256k1zkp_v0_10_0_sha256* hash) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(hash); +/** Set hash state to the BIP340 tagged hash midstate for "rustsecp256k1zkp_v0_10_1_ellswift_create". 
*/ +static void rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_create(rustsecp256k1zkp_v0_10_1_sha256* hash) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(hash); hash->s[0] = 0xd29e1bf5ul; hash->s[1] = 0xf7025f42ul; hash->s[2] = 0x9b024773ul; @@ -447,11 +447,11 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_create(rustsecp256k1zk hash->bytes = 64; } -int rustsecp256k1zkp_v0_10_0_ellswift_create(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *ell64, const unsigned char *seckey32, const unsigned char *auxrnd32) { - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_fe t; - rustsecp256k1zkp_v0_10_0_sha256 hash; - rustsecp256k1zkp_v0_10_0_scalar seckey_scalar; +int rustsecp256k1zkp_v0_10_1_ellswift_create(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *ell64, const unsigned char *seckey32, const unsigned char *auxrnd32) { + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_fe t; + rustsecp256k1zkp_v0_10_1_sha256 hash; + rustsecp256k1zkp_v0_10_1_scalar seckey_scalar; int ret; static const unsigned char zero32[32] = {0}; @@ -459,64 +459,64 @@ int rustsecp256k1zkp_v0_10_0_ellswift_create(const rustsecp256k1zkp_v0_10_0_cont VERIFY_CHECK(ctx != NULL); ARG_CHECK(ell64 != NULL); memset(ell64, 0, 64); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(seckey32 != NULL); /* Compute (affine) public key */ - ret = rustsecp256k1zkp_v0_10_0_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &seckey_scalar, &p, seckey32); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &p, sizeof(p)); /* not constant time in produced pubkey */ - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&p.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&p.y); + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &seckey_scalar, &p, seckey32); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &p, sizeof(p)); /* not 
constant time in produced pubkey */ + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&p.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&p.y); /* Set up hasher state. The used RNG is H(privkey || "\x00"*32 [|| auxrnd32] || cnt++), - * using BIP340 tagged hash with tag "rustsecp256k1zkp_v0_10_0_ellswift_create". */ - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_create(&hash); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, seckey32, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, zero32, sizeof(zero32)); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &hash, sizeof(hash)); /* private key is hashed now */ - if (auxrnd32) rustsecp256k1zkp_v0_10_0_sha256_write(&hash, auxrnd32, 32); + * using BIP340 tagged hash with tag "rustsecp256k1zkp_v0_10_1_ellswift_create". */ + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_create(&hash); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, seckey32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, zero32, sizeof(zero32)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &hash, sizeof(hash)); /* private key is hashed now */ + if (auxrnd32) rustsecp256k1zkp_v0_10_1_sha256_write(&hash, auxrnd32, 32); /* Compute ElligatorSwift encoding and construct output. 
*/ - rustsecp256k1zkp_v0_10_0_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ - rustsecp256k1zkp_v0_10_0_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ + rustsecp256k1zkp_v0_10_1_ellswift_elligatorswift_var(ell64, &t, &p, &hash); /* puts u in ell64[0..32] */ + rustsecp256k1zkp_v0_10_1_fe_get_b32(ell64 + 32, &t); /* puts t in ell64[32..64] */ - rustsecp256k1zkp_v0_10_0_memczero(ell64, 64, !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&seckey_scalar); + rustsecp256k1zkp_v0_10_1_memczero(ell64, 64, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&seckey_scalar); return ret; } -int rustsecp256k1zkp_v0_10_0_ellswift_decode(const rustsecp256k1zkp_v0_10_0_context *ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *ell64) { - rustsecp256k1zkp_v0_10_0_fe u, t; - rustsecp256k1zkp_v0_10_0_ge p; +int rustsecp256k1zkp_v0_10_1_ellswift_decode(const rustsecp256k1zkp_v0_10_1_context *ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *ell64) { + rustsecp256k1zkp_v0_10_1_fe u, t; + rustsecp256k1zkp_v0_10_1_ge p; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); ARG_CHECK(ell64 != NULL); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&u, ell64); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&t, ell64 + 32); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&t); - rustsecp256k1zkp_v0_10_0_ellswift_swiftec_var(&p, &u, &t); - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &p); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&u, ell64); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&t, ell64 + 32); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&t); + rustsecp256k1zkp_v0_10_1_ellswift_swiftec_var(&p, &u, &t); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &p); return 1; } static int ellswift_xdh_hash_function_prefix(unsigned char *output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; - 
rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, data, 64); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, ell_a64, 64); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, ell_b64, 64); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, x32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, data, 64); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, ell_a64, 64); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, ell_b64, 64); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, x32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); return 1; } /** Set hash state to the BIP340 tagged hash midstate for "bip324_ellswift_xonly_ecdh". */ -static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_bip324(rustsecp256k1zkp_v0_10_0_sha256* hash) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(hash); +static void rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_bip324(rustsecp256k1zkp_v0_10_1_sha256* hash) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(hash); hash->s[0] = 0x8c12d730ul; hash->s[1] = 0x827bd392ul; hash->s[2] = 0x9e4fb2eeul; @@ -530,27 +530,27 @@ static void rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_bip324(rustsecp256k1zk } static int ellswift_xdh_hash_function_bip324(unsigned char* output, const unsigned char *x32, const unsigned char *ell_a64, const unsigned char *ell_b64, void *data) { - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; (void)data; - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_bip324(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, ell_a64, 64); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, ell_b64, 64); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, x32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, output); + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_bip324(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, ell_a64, 64); + 
rustsecp256k1zkp_v0_10_1_sha256_write(&sha, ell_b64, 64); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, x32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, output); return 1; } -const rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_prefix = ellswift_xdh_hash_function_prefix; -const rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324 = ellswift_xdh_hash_function_bip324; +const rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_prefix = ellswift_xdh_hash_function_prefix; +const rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324 = ellswift_xdh_hash_function_bip324; -int rustsecp256k1zkp_v0_10_0_ellswift_xdh(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *output, const unsigned char *ell_a64, const unsigned char *ell_b64, const unsigned char *seckey32, int party, rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function hashfp, void *data) { +int rustsecp256k1zkp_v0_10_1_ellswift_xdh(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *output, const unsigned char *ell_a64, const unsigned char *ell_b64, const unsigned char *seckey32, int party, rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function hashfp, void *data) { int ret = 0; int overflow; - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_fe xn, xd, px, u, t; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_fe xn, xd, px, u, t; unsigned char sx[32]; const unsigned char* theirs64; @@ -563,26 +563,26 @@ int rustsecp256k1zkp_v0_10_0_ellswift_xdh(const rustsecp256k1zkp_v0_10_0_context /* Load remote public key (as fraction). */ theirs64 = party ? 
ell_a64 : ell_b64; - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&u, theirs64); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&t, theirs64 + 32); - rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_frac_var(&xn, &xd, &u, &t); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&u, theirs64); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&t, theirs64 + 32); + rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_frac_var(&xn, &xd, &u, &t); /* Load private key (using one if invalid). */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, seckey32, &overflow); - overflow = rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&s, &rustsecp256k1zkp_v0_10_0_scalar_one, overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, seckey32, &overflow); + overflow = rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&s, &rustsecp256k1zkp_v0_10_1_scalar_one, overflow); /* Compute shared X coordinate. */ - rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(&px, &xn, &xd, &s, 1); - rustsecp256k1zkp_v0_10_0_fe_normalize(&px); - rustsecp256k1zkp_v0_10_0_fe_get_b32(sx, &px); + rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(&px, &xn, &xd, &s, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize(&px); + rustsecp256k1zkp_v0_10_1_fe_get_b32(sx, &px); /* Invoke hasher */ ret = hashfp(output, sx, ell_a64, ell_b64, data); memset(sx, 0, 32); - rustsecp256k1zkp_v0_10_0_fe_clear(&px); - rustsecp256k1zkp_v0_10_0_scalar_clear(&s); + rustsecp256k1zkp_v0_10_1_fe_clear(&px); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s); return !!ret & !overflow; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_exhaustive_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_exhaustive_impl.h index 879d1019..a46f5536 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_exhaustive_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_exhaustive_impl.h @@ -9,7 +9,7 @@ #include "../../../include/secp256k1_ellswift.h" #include 
"main_impl.h" -static void test_exhaustive_ellswift(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_ellswift(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { int i; /* Note that SwiftEC/ElligatorSwift are inherently curve operations, not @@ -18,21 +18,21 @@ static void test_exhaustive_ellswift(const rustsecp256k1zkp_v0_10_0_context *ctx * it doesn't (and for computational reasons obviously cannot) test the * entire domain ellswift operates under. */ for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_scalar scalar_i; + rustsecp256k1zkp_v0_10_1_scalar scalar_i; unsigned char sec32[32]; unsigned char ell64[64]; - rustsecp256k1zkp_v0_10_0_pubkey pub_decoded; - rustsecp256k1zkp_v0_10_0_ge ge_decoded; + rustsecp256k1zkp_v0_10_1_pubkey pub_decoded; + rustsecp256k1zkp_v0_10_1_ge ge_decoded; /* Construct ellswift pubkey from exhaustive loop scalar i. */ - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalar_i, i); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec32, &scalar_i); - CHECK(rustsecp256k1zkp_v0_10_0_ellswift_create(ctx, ell64, sec32, NULL)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalar_i, i); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec32, &scalar_i); + CHECK(rustsecp256k1zkp_v0_10_1_ellswift_create(ctx, ell64, sec32, NULL)); /* Decode ellswift pubkey and check that it matches the precomputed group element. 
*/ - rustsecp256k1zkp_v0_10_0_ellswift_decode(ctx, &pub_decoded, ell64); - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &ge_decoded, &pub_decoded); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&ge_decoded, &group[i])); + rustsecp256k1zkp_v0_10_1_ellswift_decode(ctx, &pub_decoded, ell64); + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &ge_decoded, &pub_decoded); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&ge_decoded, &group[i])); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_impl.h index f224ebca..4dc123e7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/ellswift/tests_impl.h @@ -10,14 +10,14 @@ struct ellswift_xswiftec_inv_test { int enc_bitmap; - rustsecp256k1zkp_v0_10_0_fe u; - rustsecp256k1zkp_v0_10_0_fe x; - rustsecp256k1zkp_v0_10_0_fe encs[8]; + rustsecp256k1zkp_v0_10_1_fe u; + rustsecp256k1zkp_v0_10_1_fe x; + rustsecp256k1zkp_v0_10_1_fe encs[8]; }; struct ellswift_decode_test { unsigned char enc[64]; - rustsecp256k1zkp_v0_10_0_fe x; + rustsecp256k1zkp_v0_10_1_fe x; int odd_y; }; @@ -183,28 +183,28 @@ void run_ellswift_tests(void) { const struct ellswift_xswiftec_inv_test *testcase = &ellswift_xswiftec_inv_tests[i]; int c; for (c = 0; c < 8; ++c) { - rustsecp256k1zkp_v0_10_0_fe t; - int ret = rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_inv_var(&t, &testcase->x, &testcase->u, c); + rustsecp256k1zkp_v0_10_1_fe t; + int ret = rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_inv_var(&t, &testcase->x, &testcase->u, c); CHECK(ret == ((testcase->enc_bitmap >> c) & 1)); if (ret) { - rustsecp256k1zkp_v0_10_0_fe x2; + rustsecp256k1zkp_v0_10_1_fe x2; CHECK(check_fe_equal(&t, &testcase->encs[c])); - rustsecp256k1zkp_v0_10_0_ellswift_xswiftec_var(&x2, &testcase->u, &testcase->encs[c]); + rustsecp256k1zkp_v0_10_1_ellswift_xswiftec_var(&x2, &testcase->u, &testcase->encs[c]); 
CHECK(check_fe_equal(&testcase->x, &x2)); } } } for (i = 0; (unsigned)i < sizeof(ellswift_decode_tests) / sizeof(ellswift_decode_tests[0]); ++i) { const struct ellswift_decode_test *testcase = &ellswift_decode_tests[i]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ge ge; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ge ge; int ret; - ret = rustsecp256k1zkp_v0_10_0_ellswift_decode(CTX, &pubkey, testcase->enc); + ret = rustsecp256k1zkp_v0_10_1_ellswift_decode(CTX, &pubkey, testcase->enc); CHECK(ret); - ret = rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey); + ret = rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey); CHECK(ret); CHECK(check_fe_equal(&testcase->x, &ge.x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_odd(&ge.y) == testcase->odd_y); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_odd(&ge.y) == testcase->odd_y); } for (i = 0; (unsigned)i < sizeof(ellswift_xdh_tests_bip324) / sizeof(ellswift_xdh_tests_bip324[0]); ++i) { const struct ellswift_xdh_test *test = &ellswift_xdh_tests_bip324[i]; @@ -213,94 +213,94 @@ void run_ellswift_tests(void) { int party = !test->initiating; const unsigned char* ell_a64 = party ? test->ellswift_theirs : test->ellswift_ours; const unsigned char* ell_b64 = party ? test->ellswift_ours : test->ellswift_theirs; - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, shared_secret, + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, shared_secret, ell_a64, ell_b64, test->priv_ours, party, - rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324, + rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324, NULL); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(shared_secret, test->shared_secret, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(shared_secret, test->shared_secret, 32) == 0); } - /* Verify that rustsecp256k1zkp_v0_10_0_ellswift_encode + decode roundtrips. */ + /* Verify that rustsecp256k1zkp_v0_10_1_ellswift_encode + decode roundtrips. 
*/ for (i = 0; i < 1000 * COUNT; i++) { unsigned char rnd32[32]; unsigned char ell64[64]; - rustsecp256k1zkp_v0_10_0_ge g, g2; - rustsecp256k1zkp_v0_10_0_pubkey pubkey, pubkey2; + rustsecp256k1zkp_v0_10_1_ge g, g2; + rustsecp256k1zkp_v0_10_1_pubkey pubkey, pubkey2; /* Generate random public key and random randomizer. */ random_group_element_test(&g); - rustsecp256k1zkp_v0_10_0_pubkey_save(&pubkey, &g); - rustsecp256k1zkp_v0_10_0_testrand256(rnd32); + rustsecp256k1zkp_v0_10_1_pubkey_save(&pubkey, &g); + rustsecp256k1zkp_v0_10_1_testrand256(rnd32); /* Convert the public key to ElligatorSwift and back. */ - rustsecp256k1zkp_v0_10_0_ellswift_encode(CTX, ell64, &pubkey, rnd32); - rustsecp256k1zkp_v0_10_0_ellswift_decode(CTX, &pubkey2, ell64); - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &g2, &pubkey2); + rustsecp256k1zkp_v0_10_1_ellswift_encode(CTX, ell64, &pubkey, rnd32); + rustsecp256k1zkp_v0_10_1_ellswift_decode(CTX, &pubkey2, ell64); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &g2, &pubkey2); /* Compare with original. */ - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&g, &g2)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&g, &g2)); } - /* Verify the behavior of rustsecp256k1zkp_v0_10_0_ellswift_create */ + /* Verify the behavior of rustsecp256k1zkp_v0_10_1_ellswift_create */ for (i = 0; i < 400 * COUNT; i++) { unsigned char auxrnd32[32], sec32[32]; - rustsecp256k1zkp_v0_10_0_scalar sec; - rustsecp256k1zkp_v0_10_0_gej res; - rustsecp256k1zkp_v0_10_0_ge dec; - rustsecp256k1zkp_v0_10_0_pubkey pub; + rustsecp256k1zkp_v0_10_1_scalar sec; + rustsecp256k1zkp_v0_10_1_gej res; + rustsecp256k1zkp_v0_10_1_ge dec; + rustsecp256k1zkp_v0_10_1_pubkey pub; unsigned char ell64[64]; int ret; /* Generate random secret key and random randomizer. 
*/ - if (i & 1) rustsecp256k1zkp_v0_10_0_testrand256_test(auxrnd32); + if (i & 1) rustsecp256k1zkp_v0_10_1_testrand256_test(auxrnd32); random_scalar_order_test(&sec); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec32, &sec); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec32, &sec); /* Construct ElligatorSwift-encoded public keys for that key. */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(CTX, ell64, sec32, (i & 1) ? auxrnd32 : NULL); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(CTX, ell64, sec32, (i & 1) ? auxrnd32 : NULL); CHECK(ret); /* Decode it, and compare with traditionally-computed public key. */ - rustsecp256k1zkp_v0_10_0_ellswift_decode(CTX, &pub, ell64); - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &dec, &pub); - rustsecp256k1zkp_v0_10_0_ecmult(&res, NULL, &rustsecp256k1zkp_v0_10_0_scalar_zero, &sec); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&res, &dec)); + rustsecp256k1zkp_v0_10_1_ellswift_decode(CTX, &pub, ell64); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &dec, &pub); + rustsecp256k1zkp_v0_10_1_ecmult(&res, NULL, &rustsecp256k1zkp_v0_10_1_scalar_zero, &sec); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&res, &dec)); } - /* Verify that rustsecp256k1zkp_v0_10_0_ellswift_xdh computes the right shared X coordinate. */ + /* Verify that rustsecp256k1zkp_v0_10_1_ellswift_xdh computes the right shared X coordinate. */ for (i = 0; i < 800 * COUNT; i++) { unsigned char ell64[64], sec32[32], share32[32]; - rustsecp256k1zkp_v0_10_0_scalar sec; - rustsecp256k1zkp_v0_10_0_ge dec, res; - rustsecp256k1zkp_v0_10_0_fe share_x; - rustsecp256k1zkp_v0_10_0_gej decj, resj; - rustsecp256k1zkp_v0_10_0_pubkey pub; + rustsecp256k1zkp_v0_10_1_scalar sec; + rustsecp256k1zkp_v0_10_1_ge dec, res; + rustsecp256k1zkp_v0_10_1_fe share_x; + rustsecp256k1zkp_v0_10_1_gej decj, resj; + rustsecp256k1zkp_v0_10_1_pubkey pub; int ret; /* Generate random secret key. 
*/ random_scalar_order_test(&sec); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec32, &sec); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec32, &sec); /* Generate random ElligatorSwift encoding for the remote key and decode it. */ - rustsecp256k1zkp_v0_10_0_testrand256_test(ell64); - rustsecp256k1zkp_v0_10_0_testrand256_test(ell64 + 32); - rustsecp256k1zkp_v0_10_0_ellswift_decode(CTX, &pub, ell64); - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &dec, &pub); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&decj, &dec); + rustsecp256k1zkp_v0_10_1_testrand256_test(ell64); + rustsecp256k1zkp_v0_10_1_testrand256_test(ell64 + 32); + rustsecp256k1zkp_v0_10_1_ellswift_decode(CTX, &pub, ell64); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &dec, &pub); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&decj, &dec); /* Compute the X coordinate of seckey*pubkey using ellswift_xdh. Note that we * pass ell64 as claimed (but incorrect) encoding for sec32 here; this works * because the "hasher" function we use here ignores the ell64 arguments. */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32, ell64, ell64, sec32, i & 1, &ellswift_xdh_hash_x32, NULL); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32, ell64, ell64, sec32, i & 1, &ellswift_xdh_hash_x32, NULL); CHECK(ret); - (void)rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&share_x, share32); /* no overflow is possible */ + (void)rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&share_x, share32); /* no overflow is possible */ SECP256K1_FE_VERIFY(&share_x); /* Compute seckey*pubkey directly. */ - rustsecp256k1zkp_v0_10_0_ecmult(&resj, &decj, &sec, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res, &resj); + rustsecp256k1zkp_v0_10_1_ecmult(&resj, &decj, &sec, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res, &resj); /* Compare. 
*/ CHECK(check_fe_equal(&res.x, &share_x)); } - /* Verify the joint behavior of rustsecp256k1zkp_v0_10_0_ellswift_xdh */ + /* Verify the joint behavior of rustsecp256k1zkp_v0_10_1_ellswift_xdh */ for (i = 0; i < 200 * COUNT; i++) { unsigned char auxrnd32a[32], auxrnd32b[32], auxrnd32a_bad[32], auxrnd32b_bad[32]; unsigned char sec32a[32], sec32b[32], sec32a_bad[32], sec32b_bad[32]; - rustsecp256k1zkp_v0_10_0_scalar seca, secb; + rustsecp256k1zkp_v0_10_1_scalar seca, secb; unsigned char ell64a[64], ell64b[64], ell64a_bad[64], ell64b_bad[64]; unsigned char share32a[32], share32b[32], share32_bad[32]; unsigned char prefix64[64]; - rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function hash_function; + rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function hash_function; void* data; int ret; @@ -309,126 +309,126 @@ void run_ellswift_tests(void) { hash_function = ellswift_xdh_hash_x32; data = NULL; } else if ((i % 3) == 1) { - hash_function = rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_bip324; + hash_function = rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_bip324; data = NULL; } else { - hash_function = rustsecp256k1zkp_v0_10_0_ellswift_xdh_hash_function_prefix; - rustsecp256k1zkp_v0_10_0_testrand256_test(prefix64); - rustsecp256k1zkp_v0_10_0_testrand256_test(prefix64 + 32); + hash_function = rustsecp256k1zkp_v0_10_1_ellswift_xdh_hash_function_prefix; + rustsecp256k1zkp_v0_10_1_testrand256_test(prefix64); + rustsecp256k1zkp_v0_10_1_testrand256_test(prefix64 + 32); data = prefix64; } /* Generate random secret keys and random randomizers. 
*/ - rustsecp256k1zkp_v0_10_0_testrand256_test(auxrnd32a); - rustsecp256k1zkp_v0_10_0_testrand256_test(auxrnd32b); + rustsecp256k1zkp_v0_10_1_testrand256_test(auxrnd32a); + rustsecp256k1zkp_v0_10_1_testrand256_test(auxrnd32b); random_scalar_order_test(&seca); /* Draw secb uniformly at random to make sure that the secret keys * differ */ random_scalar_order(&secb); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec32a, &seca); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec32b, &secb); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec32a, &seca); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec32b, &secb); /* Construct ElligatorSwift-encoded public keys for those keys. */ /* For A: */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(CTX, ell64a, sec32a, auxrnd32a); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(CTX, ell64a, sec32a, auxrnd32a); CHECK(ret); /* For B: */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(CTX, ell64b, sec32b, auxrnd32b); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(CTX, ell64b, sec32b, auxrnd32b); CHECK(ret); /* Compute the shared secret both ways and compare with each other. */ /* For A: */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32a, ell64a, ell64b, sec32a, 0, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32a, ell64a, ell64b, sec32a, 0, hash_function, data); CHECK(ret); /* For B: */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32b, ell64a, ell64b, sec32b, 1, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32b, ell64a, ell64b, sec32b, 1, hash_function, data); CHECK(ret); /* And compare: */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32a, share32b, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32a, share32b, 32) == 0); /* Verify that the shared secret doesn't match if other side's public key is incorrect. 
*/ /* For A (using a bad public key for B): */ memcpy(ell64b_bad, ell64b, sizeof(ell64a_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(ell64b_bad, sizeof(ell64b_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b_bad, sec32a, 0, hash_function, data); - CHECK(ret); /* Mismatching encodings don't get detected by rustsecp256k1zkp_v0_10_0_ellswift_xdh. */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32a, 32) != 0); + rustsecp256k1zkp_v0_10_1_testrand_flip(ell64b_bad, sizeof(ell64b_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b_bad, sec32a, 0, hash_function, data); + CHECK(ret); /* Mismatching encodings don't get detected by rustsecp256k1zkp_v0_10_1_ellswift_xdh. */ + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32a, 32) != 0); /* For B (using a bad public key for A): */ memcpy(ell64a_bad, ell64a, sizeof(ell64a_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(ell64a_bad, sizeof(ell64a_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a_bad, ell64b, sec32b, 1, hash_function, data); + rustsecp256k1zkp_v0_10_1_testrand_flip(ell64a_bad, sizeof(ell64a_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a_bad, ell64b, sec32b, 1, hash_function, data); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32b, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32b, 32) != 0); /* Verify that the shared secret doesn't match if the private key is incorrect. 
*/ /* For A: */ memcpy(sec32a_bad, sec32a, sizeof(sec32a_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(sec32a_bad, sizeof(sec32a_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32a_bad, 0, hash_function, data); - CHECK(!ret || rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32a, 32) != 0); + rustsecp256k1zkp_v0_10_1_testrand_flip(sec32a_bad, sizeof(sec32a_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32a_bad, 0, hash_function, data); + CHECK(!ret || rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32a, 32) != 0); /* For B: */ memcpy(sec32b_bad, sec32b, sizeof(sec32b_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(sec32b_bad, sizeof(sec32b_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32b_bad, 1, hash_function, data); - CHECK(!ret || rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32b, 32) != 0); + rustsecp256k1zkp_v0_10_1_testrand_flip(sec32b_bad, sizeof(sec32b_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32b_bad, 1, hash_function, data); + CHECK(!ret || rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32b, 32) != 0); if (hash_function != ellswift_xdh_hash_x32) { /* Verify that the shared secret doesn't match when a different encoding of the same public key is used. 
*/ /* For A (changing B's public key): */ memcpy(auxrnd32b_bad, auxrnd32b, sizeof(auxrnd32b_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(auxrnd32b_bad, sizeof(auxrnd32b_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(CTX, ell64b_bad, sec32b, auxrnd32b_bad); + rustsecp256k1zkp_v0_10_1_testrand_flip(auxrnd32b_bad, sizeof(auxrnd32b_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(CTX, ell64b_bad, sec32b, auxrnd32b_bad); CHECK(ret); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b_bad, sec32a, 0, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b_bad, sec32a, 0, hash_function, data); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32a, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32a, 32) != 0); /* For B (changing A's public key): */ memcpy(auxrnd32a_bad, auxrnd32a, sizeof(auxrnd32a_bad)); - rustsecp256k1zkp_v0_10_0_testrand_flip(auxrnd32a_bad, sizeof(auxrnd32a_bad)); - ret = rustsecp256k1zkp_v0_10_0_ellswift_create(CTX, ell64a_bad, sec32a, auxrnd32a_bad); + rustsecp256k1zkp_v0_10_1_testrand_flip(auxrnd32a_bad, sizeof(auxrnd32a_bad)); + ret = rustsecp256k1zkp_v0_10_1_ellswift_create(CTX, ell64a_bad, sec32a, auxrnd32a_bad); CHECK(ret); - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a_bad, ell64b, sec32b, 1, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a_bad, ell64b, sec32b, 1, hash_function, data); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32b, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32b, 32) != 0); /* Verify that swapping sides changes the shared secret. 
*/ /* For A (claiming to be B): */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32a, 1, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32a, 1, hash_function, data); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32a, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32a, 32) != 0); /* For B (claiming to be A): */ - ret = rustsecp256k1zkp_v0_10_0_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32b, 0, hash_function, data); + ret = rustsecp256k1zkp_v0_10_1_ellswift_xdh(CTX, share32_bad, ell64a, ell64b, sec32b, 0, hash_function, data); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(share32_bad, share32b, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(share32_bad, share32b, 32) != 0); } } /* Test hash initializers. */ { - rustsecp256k1zkp_v0_10_0_sha256 sha, sha_optimized; - static const unsigned char encode_tag[25] = "rustsecp256k1zkp_v0_10_0_ellswift_encode"; - static const unsigned char create_tag[25] = "rustsecp256k1zkp_v0_10_0_ellswift_create"; + rustsecp256k1zkp_v0_10_1_sha256 sha, sha_optimized; + static const unsigned char encode_tag[25] = "rustsecp256k1zkp_v0_10_1_ellswift_encode"; + static const unsigned char create_tag[25] = "rustsecp256k1zkp_v0_10_1_ellswift_create"; static const unsigned char bip324_tag[26] = "bip324_ellswift_xonly_ecdh"; /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_encode has the expected + * rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_encode has the expected * state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, encode_tag, sizeof(encode_tag)); - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_encode(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, encode_tag, sizeof(encode_tag)); + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_encode(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_create has the expected + * rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_create has the expected * state. */ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, create_tag, sizeof(create_tag)); - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_create(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, create_tag, sizeof(create_tag)); + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_create(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_bip324 has the expected + * rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_bip324 has the expected * state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, bip324_tag, sizeof(bip324_tag)); - rustsecp256k1zkp_v0_10_0_ellswift_sha256_init_bip324(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, bip324_tag, sizeof(bip324_tag)); + rustsecp256k1zkp_v0_10_1_ellswift_sha256_init_bip324(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/Makefile.am.include index 0127d6f3..031b84b2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_extrakeys.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_extrakeys.h noinst_HEADERS += src/modules/extrakeys/tests_impl.h noinst_HEADERS += src/modules/extrakeys/tests_exhaustive_impl.h noinst_HEADERS += src/modules/extrakeys/main_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort.h index 9b0915b5..4d0a24b0 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort.h @@ -16,7 +16,7 @@ * * See the qsort_r manpage for a description of the interface. 
*/ -static void rustsecp256k1zkp_v0_10_0_hsort(void *ptr, size_t count, size_t size, +static void rustsecp256k1zkp_v0_10_1_hsort(void *ptr, size_t count, size_t size, int (*cmp)(const void *, const void *, void *), void *cmp_data); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort_impl.h index 1f55d66e..04394743 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/hsort_impl.h @@ -96,7 +96,7 @@ static SECP256K1_INLINE void heap_down(unsigned char *a, size_t i, size_t heap_s } /* In-place heap sort. */ -static void rustsecp256k1zkp_v0_10_0_hsort(void *ptr, size_t count, size_t size, +static void rustsecp256k1zkp_v0_10_1_hsort(void *ptr, size_t count, size_t size, int (*cmp)(const void *, const void *, void *), void *cmp_data ) { size_t i; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/main_impl.h index 831aba35..d5a293a9 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/main_impl.h @@ -12,54 +12,54 @@ #include "hsort_impl.h" #include "../../util.h" -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ge *ge, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey) { - return rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, ge, (const rustsecp256k1zkp_v0_10_0_pubkey *) pubkey); +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ge *ge, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey) { + return rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, ge, (const rustsecp256k1zkp_v0_10_1_pubkey *) pubkey); } -static SECP256K1_INLINE 
void rustsecp256k1zkp_v0_10_0_xonly_pubkey_save(rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey, rustsecp256k1zkp_v0_10_0_ge *ge) { - rustsecp256k1zkp_v0_10_0_pubkey_save((rustsecp256k1zkp_v0_10_0_pubkey *) pubkey, ge); +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_xonly_pubkey_save(rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey, rustsecp256k1zkp_v0_10_1_ge *ge) { + rustsecp256k1zkp_v0_10_1_pubkey_save((rustsecp256k1zkp_v0_10_1_pubkey *) pubkey, ge); } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey, const unsigned char *input32) { - rustsecp256k1zkp_v0_10_0_ge pk; - rustsecp256k1zkp_v0_10_0_fe x; +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey, const unsigned char *input32) { + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_fe x; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); memset(pubkey, 0, sizeof(*pubkey)); ARG_CHECK(input32 != NULL); - if (!rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&x, input32)) { + if (!rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&x, input32)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&pk, &x, 0)) { + if (!rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&pk, &x, 0)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(&pk)) { + if (!rustsecp256k1zkp_v0_10_1_ge_is_in_correct_subgroup(&pk)) { return 0; } - rustsecp256k1zkp_v0_10_0_xonly_pubkey_save(pubkey, &pk); + rustsecp256k1zkp_v0_10_1_xonly_pubkey_save(pubkey, &pk); return 1; } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output32, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey) { - rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output32, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey) 
{ + rustsecp256k1zkp_v0_10_1_ge pk; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output32 != NULL); memset(output32, 0, 32); ARG_CHECK(pubkey != NULL); - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(ctx, &pk, pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pk, pubkey)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_get_b32(output32, &pk.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(output32, &pk.x); return 1; } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_xonly_pubkey* pk0, const rustsecp256k1zkp_v0_10_0_xonly_pubkey* pk1) { +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_xonly_pubkey* pk0, const rustsecp256k1zkp_v0_10_1_xonly_pubkey* pk1) { unsigned char out[2][32]; - const rustsecp256k1zkp_v0_10_0_xonly_pubkey* pk[2]; + const rustsecp256k1zkp_v0_10_1_xonly_pubkey* pk[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -72,7 +72,7 @@ int rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_con * pubkeys are involved and prevents edge cases such as sorting * algorithms that use this function and do not terminate as a * result. */ - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, out[i], pk[i])) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, out[i], pk[i])) { /* Note that xonly_pubkey_serialize should already set the output to * zero in that case, but it's not guaranteed by the API, we can't * test it and writing a VERIFY_CHECK is more complex than @@ -80,44 +80,44 @@ int rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_con memset(out[i], 0, sizeof(out[i])); } } - return rustsecp256k1zkp_v0_10_0_memcmp_var(out[0], out[1], sizeof(out[1])); + return rustsecp256k1zkp_v0_10_1_memcmp_var(out[0], out[1], sizeof(out[1])); } /** Keeps a group element as is if it has an even Y and otherwise negates it. 
* y_parity is set to 0 in the former case and to 1 in the latter case. * Requires that the coordinates of r are normalized. */ -static int rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(rustsecp256k1zkp_v0_10_0_ge *r) { +static int rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(rustsecp256k1zkp_v0_10_1_ge *r) { int y_parity = 0; - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(r)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(r)); - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&r->y)) { - rustsecp256k1zkp_v0_10_0_fe_negate(&r->y, &r->y, 1); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&r->y)) { + rustsecp256k1zkp_v0_10_1_fe_negate(&r->y, &r->y, 1); y_parity = 1; } return y_parity; } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_xonly_pubkey *xonly_pubkey, int *pk_parity, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey) { - rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_xonly_pubkey *xonly_pubkey, int *pk_parity, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey) { + rustsecp256k1zkp_v0_10_1_ge pk; int tmp; VERIFY_CHECK(ctx != NULL); ARG_CHECK(xonly_pubkey != NULL); ARG_CHECK(pubkey != NULL); - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pk, pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pk, pubkey)) { return 0; } - tmp = rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(&pk); + tmp = rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(&pk); if (pk_parity != NULL) { *pk_parity = tmp; } - rustsecp256k1zkp_v0_10_0_xonly_pubkey_save(xonly_pubkey, &pk); + rustsecp256k1zkp_v0_10_1_xonly_pubkey_save(xonly_pubkey, &pk); return 1; } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *internal_pubkey, const unsigned char *tweak32) { - 
rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *internal_pubkey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_ge pk; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output_pubkey != NULL); @@ -125,16 +125,16 @@ int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(const rustsecp256k1zkp_v0_10 ARG_CHECK(internal_pubkey != NULL); ARG_CHECK(tweak32 != NULL); - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(ctx, &pk, internal_pubkey) - || !rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(&pk, tweak32)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pk, internal_pubkey) + || !rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(&pk, tweak32)) { return 0; } - rustsecp256k1zkp_v0_10_0_pubkey_save(output_pubkey, &pk); + rustsecp256k1zkp_v0_10_1_pubkey_save(output_pubkey, &pk); return 1; } -int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(const rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char *tweaked_pubkey32, int tweaked_pk_parity, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *internal_pubkey, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(const rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char *tweaked_pubkey32, int tweaked_pk_parity, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *internal_pubkey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_ge pk; unsigned char pk_expected32[32]; VERIFY_CHECK(ctx != NULL); @@ -142,31 +142,31 @@ int rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(const rustsecp256k1zkp ARG_CHECK(tweaked_pubkey32 != NULL); ARG_CHECK(tweak32 != NULL); - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(ctx, &pk, internal_pubkey) - || !rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(&pk, tweak32)) { + if 
(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pk, internal_pubkey) + || !rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(&pk, tweak32)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&pk.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&pk.y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(pk_expected32, &pk.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&pk.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&pk.y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(pk_expected32, &pk.x); - return rustsecp256k1zkp_v0_10_0_memcmp_var(&pk_expected32, tweaked_pubkey32, 32) == 0 - && rustsecp256k1zkp_v0_10_0_fe_is_odd(&pk.y) == tweaked_pk_parity; + return rustsecp256k1zkp_v0_10_1_memcmp_var(&pk_expected32, tweaked_pubkey32, 32) == 0 + && rustsecp256k1zkp_v0_10_1_fe_is_odd(&pk.y) == tweaked_pk_parity; } -static void rustsecp256k1zkp_v0_10_0_keypair_save(rustsecp256k1zkp_v0_10_0_keypair *keypair, const rustsecp256k1zkp_v0_10_0_scalar *sk, rustsecp256k1zkp_v0_10_0_ge *pk) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&keypair->data[0], sk); - rustsecp256k1zkp_v0_10_0_pubkey_save((rustsecp256k1zkp_v0_10_0_pubkey *)&keypair->data[32], pk); +static void rustsecp256k1zkp_v0_10_1_keypair_save(rustsecp256k1zkp_v0_10_1_keypair *keypair, const rustsecp256k1zkp_v0_10_1_scalar *sk, rustsecp256k1zkp_v0_10_1_ge *pk) { + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&keypair->data[0], sk); + rustsecp256k1zkp_v0_10_1_pubkey_save((rustsecp256k1zkp_v0_10_1_pubkey *)&keypair->data[32], pk); } -static int rustsecp256k1zkp_v0_10_0_keypair_seckey_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar *sk, const rustsecp256k1zkp_v0_10_0_keypair *keypair) { +static int rustsecp256k1zkp_v0_10_1_keypair_seckey_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar *sk, const rustsecp256k1zkp_v0_10_1_keypair *keypair) { int ret; - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(sk, &keypair->data[0]); + ret = 
rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(sk, &keypair->data[0]); /* We can declassify ret here because sk is only zero if a keypair function * failed (which zeroes the keypair) and its return value is ignored. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &ret, sizeof(ret)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &ret, sizeof(ret)); ARG_CHECK(ret); return ret; } @@ -174,45 +174,45 @@ static int rustsecp256k1zkp_v0_10_0_keypair_seckey_load(const rustsecp256k1zkp_v /* Load a keypair into pk and sk (if non-NULL). This function declassifies pk * and ARG_CHECKs that the keypair is not invalid. It always initializes sk and * pk with dummy values. */ -static int rustsecp256k1zkp_v0_10_0_keypair_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar *sk, rustsecp256k1zkp_v0_10_0_ge *pk, const rustsecp256k1zkp_v0_10_0_keypair *keypair) { +static int rustsecp256k1zkp_v0_10_1_keypair_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar *sk, rustsecp256k1zkp_v0_10_1_ge *pk, const rustsecp256k1zkp_v0_10_1_keypair *keypair) { int ret; - const rustsecp256k1zkp_v0_10_0_pubkey *pubkey = (const rustsecp256k1zkp_v0_10_0_pubkey *)&keypair->data[32]; + const rustsecp256k1zkp_v0_10_1_pubkey *pubkey = (const rustsecp256k1zkp_v0_10_1_pubkey *)&keypair->data[32]; /* Need to declassify the pubkey because pubkey_load ARG_CHECKs if it's * invalid. 
*/ - rustsecp256k1zkp_v0_10_0_declassify(ctx, pubkey, sizeof(*pubkey)); - ret = rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, pk, pubkey); + rustsecp256k1zkp_v0_10_1_declassify(ctx, pubkey, sizeof(*pubkey)); + ret = rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, pk, pubkey); if (sk != NULL) { - ret = ret && rustsecp256k1zkp_v0_10_0_keypair_seckey_load(ctx, sk, keypair); + ret = ret && rustsecp256k1zkp_v0_10_1_keypair_seckey_load(ctx, sk, keypair); } if (!ret) { - *pk = rustsecp256k1zkp_v0_10_0_ge_const_g; + *pk = rustsecp256k1zkp_v0_10_1_ge_const_g; if (sk != NULL) { - *sk = rustsecp256k1zkp_v0_10_0_scalar_one; + *sk = rustsecp256k1zkp_v0_10_1_scalar_one; } } return ret; } -int rustsecp256k1zkp_v0_10_0_keypair_create(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_keypair *keypair, const unsigned char *seckey32) { - rustsecp256k1zkp_v0_10_0_scalar sk; - rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_keypair_create(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *seckey32) { + rustsecp256k1zkp_v0_10_1_scalar sk; + rustsecp256k1zkp_v0_10_1_ge pk; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(keypair != NULL); memset(keypair, 0, sizeof(*keypair)); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(seckey32 != NULL); - ret = rustsecp256k1zkp_v0_10_0_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &sk, &pk, seckey32); - rustsecp256k1zkp_v0_10_0_keypair_save(keypair, &sk, &pk); - rustsecp256k1zkp_v0_10_0_memczero(keypair, sizeof(*keypair), !ret); + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &sk, &pk, seckey32); + rustsecp256k1zkp_v0_10_1_keypair_save(keypair, &sk, &pk); + rustsecp256k1zkp_v0_10_1_memczero(keypair, sizeof(*keypair), !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sk); + 
rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); return ret; } -int rustsecp256k1zkp_v0_10_0_keypair_sec(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey, const rustsecp256k1zkp_v0_10_0_keypair *keypair) { +int rustsecp256k1zkp_v0_10_1_keypair_sec(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const rustsecp256k1zkp_v0_10_1_keypair *keypair) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(seckey != NULL); memset(seckey, 0, 32); @@ -222,7 +222,7 @@ int rustsecp256k1zkp_v0_10_0_keypair_sec(const rustsecp256k1zkp_v0_10_0_context* return 1; } -int rustsecp256k1zkp_v0_10_0_keypair_pub(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const rustsecp256k1zkp_v0_10_0_keypair *keypair) { +int rustsecp256k1zkp_v0_10_1_keypair_pub(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const rustsecp256k1zkp_v0_10_1_keypair *keypair) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); memset(pubkey, 0, sizeof(*pubkey)); @@ -232,8 +232,8 @@ int rustsecp256k1zkp_v0_10_0_keypair_pub(const rustsecp256k1zkp_v0_10_0_context* return 1; } -int rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey, int *pk_parity, const rustsecp256k1zkp_v0_10_0_keypair *keypair) { - rustsecp256k1zkp_v0_10_0_ge pk; +int rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey, int *pk_parity, const rustsecp256k1zkp_v0_10_1_keypair *keypair) { + rustsecp256k1zkp_v0_10_1_ge pk; int tmp; VERIFY_CHECK(ctx != NULL); @@ -241,21 +241,21 @@ int rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(const rustsecp256k1zkp_v0_10_0_co memset(pubkey, 0, sizeof(*pubkey)); ARG_CHECK(keypair != NULL); - if (!rustsecp256k1zkp_v0_10_0_keypair_load(ctx, NULL, &pk, keypair)) { + if (!rustsecp256k1zkp_v0_10_1_keypair_load(ctx, NULL, &pk, keypair)) { return 0; } - tmp = 
rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(&pk); + tmp = rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(&pk); if (pk_parity != NULL) { *pk_parity = tmp; } - rustsecp256k1zkp_v0_10_0_xonly_pubkey_save(pubkey, &pk); + rustsecp256k1zkp_v0_10_1_xonly_pubkey_save(pubkey, &pk); return 1; } -int rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_keypair *keypair, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_ge pk; - rustsecp256k1zkp_v0_10_0_scalar sk; +int rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_scalar sk; int y_parity; int ret; @@ -263,40 +263,40 @@ int rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(const rustsecp256k1zkp_v0_1 ARG_CHECK(keypair != NULL); ARG_CHECK(tweak32 != NULL); - ret = rustsecp256k1zkp_v0_10_0_keypair_load(ctx, &sk, &pk, keypair); + ret = rustsecp256k1zkp_v0_10_1_keypair_load(ctx, &sk, &pk, keypair); memset(keypair, 0, sizeof(*keypair)); - y_parity = rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(&pk); + y_parity = rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(&pk); if (y_parity == 1) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&sk, &sk); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sk, &sk); } - ret &= rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(&sk, tweak32); - ret &= rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(&pk, tweak32); + ret &= rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(&sk, tweak32); + ret &= rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(&pk, tweak32); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &ret, sizeof(ret)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &ret, sizeof(ret)); if (ret) { - rustsecp256k1zkp_v0_10_0_keypair_save(keypair, &sk, &pk); + rustsecp256k1zkp_v0_10_1_keypair_save(keypair, &sk, &pk); } - 
rustsecp256k1zkp_v0_10_0_scalar_clear(&sk); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); return ret; } -/* This struct wraps a const context pointer to satisfy the rustsecp256k1zkp_v0_10_0_hsort api +/* This struct wraps a const context pointer to satisfy the rustsecp256k1zkp_v0_10_1_hsort api * which expects a non-const cmp_data pointer. */ typedef struct { - const rustsecp256k1zkp_v0_10_0_context *ctx; -} rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp_data; + const rustsecp256k1zkp_v0_10_1_context *ctx; +} rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp_data; -static int rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp(const void* pk1, const void* pk2, void *cmp_data) { - return rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(((rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp_data*)cmp_data)->ctx, - *(rustsecp256k1zkp_v0_10_0_pubkey **)pk1, - *(rustsecp256k1zkp_v0_10_0_pubkey **)pk2); +static int rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp(const void* pk1, const void* pk2, void *cmp_data) { + return rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(((rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp_data*)cmp_data)->ctx, + *(rustsecp256k1zkp_v0_10_1_pubkey **)pk1, + *(rustsecp256k1zkp_v0_10_1_pubkey **)pk2); } -int rustsecp256k1zkp_v0_10_0_pubkey_sort(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_pubkey **pubkeys, size_t n_pubkeys) { - rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp_data cmp_data; +int rustsecp256k1zkp_v0_10_1_pubkey_sort(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_pubkey **pubkeys, size_t n_pubkeys) { + rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp_data cmp_data; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkeys != NULL); @@ -308,7 +308,7 @@ int rustsecp256k1zkp_v0_10_0_pubkey_sort(const rustsecp256k1zkp_v0_10_0_context* #pragma warning(disable: 4090) #endif - rustsecp256k1zkp_v0_10_0_hsort(pubkeys, n_pubkeys, sizeof(*pubkeys), rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp, &cmp_data); + rustsecp256k1zkp_v0_10_1_hsort(pubkeys, n_pubkeys, sizeof(*pubkeys), 
rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp, &cmp_data); #if defined(_MSC_VER) && (_MSC_VER < 1933) #pragma warning(pop) diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_exhaustive_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_exhaustive_impl.h index 4225762a..8d756b54 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_exhaustive_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_exhaustive_impl.h @@ -10,54 +10,54 @@ #include "../../../include/secp256k1_extrakeys.h" #include "main_impl.h" -static void test_exhaustive_extrakeys(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge* group) { - rustsecp256k1zkp_v0_10_0_keypair keypair[EXHAUSTIVE_TEST_ORDER - 1]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey[EXHAUSTIVE_TEST_ORDER - 1]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pubkey[EXHAUSTIVE_TEST_ORDER - 1]; +static void test_exhaustive_extrakeys(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge* group) { + rustsecp256k1zkp_v0_10_1_keypair keypair[EXHAUSTIVE_TEST_ORDER - 1]; + rustsecp256k1zkp_v0_10_1_pubkey pubkey[EXHAUSTIVE_TEST_ORDER - 1]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pubkey[EXHAUSTIVE_TEST_ORDER - 1]; int parities[EXHAUSTIVE_TEST_ORDER - 1]; unsigned char xonly_pubkey_bytes[EXHAUSTIVE_TEST_ORDER - 1][32]; int i; for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_fe fe; - rustsecp256k1zkp_v0_10_0_scalar scalar_i; + rustsecp256k1zkp_v0_10_1_fe fe; + rustsecp256k1zkp_v0_10_1_scalar scalar_i; unsigned char buf[33]; int parity; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalar_i, i); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf, &scalar_i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalar_i, i); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf, &scalar_i); /* Construct pubkey and keypair. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair[i - 1], buf)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(ctx, &pubkey[i - 1], buf)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair[i - 1], buf)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(ctx, &pubkey[i - 1], buf)); /* Construct serialized xonly_pubkey from keypair. */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(ctx, &xonly_pubkey[i - 1], &parities[i - 1], &keypair[i - 1])); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, xonly_pubkey_bytes[i - 1], &xonly_pubkey[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(ctx, &xonly_pubkey[i - 1], &parities[i - 1], &keypair[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, xonly_pubkey_bytes[i - 1], &xonly_pubkey[i - 1])); /* Parse the xonly_pubkey back and verify it matches the previously serialized value. */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(ctx, &xonly_pubkey[i - 1], xonly_pubkey_bytes[i - 1])); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, buf, &xonly_pubkey[i - 1])); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(xonly_pubkey_bytes[i - 1], buf, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(ctx, &xonly_pubkey[i - 1], xonly_pubkey_bytes[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, buf, &xonly_pubkey[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(xonly_pubkey_bytes[i - 1], buf, 32) == 0); /* Construct the xonly_pubkey from the pubkey, and verify it matches the same. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(ctx, &xonly_pubkey[i - 1], &parity, &pubkey[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(ctx, &xonly_pubkey[i - 1], &parity, &pubkey[i - 1])); CHECK(parity == parities[i - 1]); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, buf, &xonly_pubkey[i - 1])); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(xonly_pubkey_bytes[i - 1], buf, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, buf, &xonly_pubkey[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(xonly_pubkey_bytes[i - 1], buf, 32) == 0); /* Compare the xonly_pubkey bytes against the precomputed group. */ - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&fe, xonly_pubkey_bytes[i - 1]); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&fe, &group[i].x)); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&fe, xonly_pubkey_bytes[i - 1]); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&fe, &group[i].x)); /* Check the parity against the precomputed group. */ fe = group[i].y; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&fe); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_odd(&fe) == parities[i - 1]); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&fe); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_odd(&fe) == parities[i - 1]); /* Verify that the higher half is identical to the lower half mirrored. 
*/ if (i > EXHAUSTIVE_TEST_ORDER / 2) { - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(xonly_pubkey_bytes[i - 1], xonly_pubkey_bytes[EXHAUSTIVE_TEST_ORDER - i - 1], 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(xonly_pubkey_bytes[i - 1], xonly_pubkey_bytes[EXHAUSTIVE_TEST_ORDER - i - 1], 32) == 0); CHECK(parities[i - 1] == 1 - parities[EXHAUSTIVE_TEST_ORDER - i - 1]); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_impl.h index 0a267db0..81b805ac 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/extrakeys/tests_impl.h @@ -10,11 +10,11 @@ #include "../../../include/secp256k1_extrakeys.h" static void test_xonly_pubkey(void) { - rustsecp256k1zkp_v0_10_0_pubkey pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pk, xonly_pk_tmp; - rustsecp256k1zkp_v0_10_0_ge pk1; - rustsecp256k1zkp_v0_10_0_ge pk2; - rustsecp256k1zkp_v0_10_0_fe y; + rustsecp256k1zkp_v0_10_1_pubkey pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pk, xonly_pk_tmp; + rustsecp256k1zkp_v0_10_1_ge pk1; + rustsecp256k1zkp_v0_10_1_ge pk2; + rustsecp256k1zkp_v0_10_1_fe y; unsigned char sk[32]; unsigned char xy_sk[32]; unsigned char buf32[32]; @@ -23,86 +23,86 @@ static void test_xonly_pubkey(void) { int pk_parity; int i; - rustsecp256k1zkp_v0_10_0_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(sk); memset(ones32, 0xFF, 32); - rustsecp256k1zkp_v0_10_0_testrand256(xy_sk); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(xy_sk); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); /* Test xonly_pubkey_from_pubkey */ - 
CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, NULL, &pk_parity, &pk)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, NULL, &pk_parity, &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, NULL)); memset(&pk, 0, sizeof(pk)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk)); /* Choose a secret key such that the resulting pubkey and xonly_pubkey match. */ memset(sk, 0, sizeof(sk)); sk[0] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pk, &xonly_pk, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pk, &xonly_pk, sizeof(pk)) == 0); CHECK(pk_parity == 0); /* Choose a secret key such that pubkey and xonly_pubkey are each others * negation. 
*/ sk[0] = 2; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, &pk, sizeof(xonly_pk)) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, &pk, sizeof(xonly_pk)) != 0); CHECK(pk_parity == 1); - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &pk1, &pk); - rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &pk2, (rustsecp256k1zkp_v0_10_0_pubkey *) &xonly_pk); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&pk1.x, &pk2.x) == 1); - rustsecp256k1zkp_v0_10_0_fe_negate(&y, &pk2.y, 1); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&pk1.y, &y) == 1); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &pk1, &pk); + rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &pk2, (rustsecp256k1zkp_v0_10_1_pubkey *) &xonly_pk); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&pk1.x, &pk2.x) == 1); + rustsecp256k1zkp_v0_10_1_fe_negate(&y, &pk2.y, 1); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&pk1.y, &y) == 1); /* Test xonly_pubkey_serialize and xonly_pubkey_parse */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, NULL, &xonly_pk)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf32, zeros64, 32) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, NULL, &xonly_pk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf32, zeros64, 32) == 0); { /* A pubkey filled with 0s will fail to serialize due to pubkey_load * special casing. 
*/ - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk_tmp; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk_tmp; memset(&pk_tmp, 0, sizeof(pk_tmp)); /* pubkey_load calls illegal callback */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, &pk_tmp)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, &pk_tmp)); } - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, &xonly_pk) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, NULL, buf32)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, &xonly_pk) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, NULL, buf32)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk, NULL)); /* Serialization and parse roundtrip */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, &xonly_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk_tmp, buf32) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, &xonly_pk_tmp, sizeof(xonly_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, &xonly_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk_tmp, buf32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, &xonly_pk_tmp, sizeof(xonly_pk)) == 0); /* Test parsing invalid field elements */ memset(&xonly_pk, 1, sizeof(xonly_pk)); /* Overflowing field element */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk, ones32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk, 
ones32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); memset(&xonly_pk, 1, sizeof(xonly_pk)); /* There's no point with x-coordinate 0 on secp256k1 */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk, zeros64) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk, zeros64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); /* If a random 32-byte string can not be parsed with ec_pubkey_parse * (because interpreted as X coordinate it does not correspond to a point on * the curve) then xonly_pubkey_parse should fail as well. */ for (i = 0; i < COUNT; i++) { unsigned char rand33[33]; - rustsecp256k1zkp_v0_10_0_testrand256(&rand33[1]); + rustsecp256k1zkp_v0_10_1_testrand256(&rand33[1]); rand33[0] = SECP256K1_TAG_PUBKEY_EVEN; - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk, rand33, 33)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk, rand33, 33)) { memset(&xonly_pk, 1, sizeof(xonly_pk)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk, &rand33[1]) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk, &rand33[1]) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, zeros64, sizeof(xonly_pk)) == 0); } else { - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &xonly_pk, &rand33[1]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &xonly_pk, &rand33[1]) == 1); } } } @@ -116,133 +116,133 @@ static void test_xonly_pubkey_comparison(void) { 0xde, 0x36, 0x0e, 0x87, 0x59, 0x8f, 0x3c, 0x01, 0x36, 0x2a, 0x2a, 0xb8, 0xc6, 0xf4, 0x5e, 0x4d, 0xb2, 0xc2, 0xd5, 0x03, 0xa7, 0xf9, 0xf1, 0x4f, 0xa8, 0xfa, 0x95, 0xa8, 0xe9, 0x69, 0x76, 0x1c }; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk1; - 
rustsecp256k1zkp_v0_10_0_xonly_pubkey pk2; - - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk1, pk1_ser) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk2, pk2_ser) == 1); - - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, NULL, &pk2) < 0)); - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk1, NULL) > 0)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk1, &pk2) < 0); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk2, &pk1) > 0); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk1, &pk1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk2, &pk2) == 0); + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk1; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk2; + + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk1, pk1_ser) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk2, pk2_ser) == 1); + + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, NULL, &pk2) < 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk1, NULL) > 0)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk1, &pk2) < 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk2, &pk1) > 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk1, &pk1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk2, &pk2) == 0); memset(&pk1, 0, sizeof(pk1)); /* illegal pubkey */ - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk1, &pk2) < 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk1, &pk2) < 0)); { int32_t ecount = 0; - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, counting_callback_fn, &ecount); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk1, &pk1) == 0); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, counting_callback_fn, &ecount); + 
CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk1, &pk1) == 0); CHECK(ecount == 2); - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, NULL, NULL); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, NULL, NULL); } - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_cmp(CTX, &pk2, &pk1) > 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_cmp(CTX, &pk2, &pk1) > 0)); } static void test_xonly_pubkey_tweak(void) { unsigned char zeros64[64] = { 0 }; unsigned char overflows[32]; unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_pubkey internal_pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey internal_xonly_pk; - rustsecp256k1zkp_v0_10_0_pubkey output_pk; + rustsecp256k1zkp_v0_10_1_pubkey internal_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey internal_xonly_pk; + rustsecp256k1zkp_v0_10_1_pubkey output_pk; int pk_parity; unsigned char tweak[32]; int i; memset(overflows, 0xff, sizeof(overflows)); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &internal_pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, &pk_parity, &internal_pk) == 1); - - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, NULL, &internal_xonly_pk, tweak)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, NULL, tweak)); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &internal_pk, sk) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, &pk_parity, &internal_pk) == 1); + + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, NULL, &internal_xonly_pk, tweak)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, NULL, tweak)); /* NULL internal_xonly_pk zeroes the output_pk */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, NULL)); /* NULL tweak zeroes the output_pk */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); /* Invalid tweak zeroes the output_pk */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, overflows) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, overflows) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); /* A zero tweak is fine */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, zeros64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, 
&internal_xonly_pk, zeros64) == 1); /* Fails if the resulting key was infinity */ for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_scalar scalar_tweak; + rustsecp256k1zkp_v0_10_1_scalar scalar_tweak; /* Because sk may be negated before adding, we need to try with tweak = * sk as well as tweak = -sk. */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&scalar_tweak, sk, NULL); - rustsecp256k1zkp_v0_10_0_scalar_negate(&scalar_tweak, &scalar_tweak); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tweak, &scalar_tweak); - CHECK((rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, sk) == 0) - || (rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 0)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&scalar_tweak, sk, NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&scalar_tweak, &scalar_tweak); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tweak, &scalar_tweak); + CHECK((rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, sk) == 0) + || (rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 0)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); } /* Invalid pk with a valid tweak */ memset(&internal_xonly_pk, 0, sizeof(internal_xonly_pk)); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); } static void test_xonly_pubkey_tweak_check(void) { unsigned char 
zeros64[64] = { 0 }; unsigned char overflows[32]; unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_pubkey internal_pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey internal_xonly_pk; - rustsecp256k1zkp_v0_10_0_pubkey output_pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey output_xonly_pk; + rustsecp256k1zkp_v0_10_1_pubkey internal_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey internal_xonly_pk; + rustsecp256k1zkp_v0_10_1_pubkey output_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey output_xonly_pk; unsigned char output_pk32[32]; unsigned char buf32[32]; int pk_parity; unsigned char tweak[32]; memset(overflows, 0xff, sizeof(overflows)); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &internal_pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, &pk_parity, &internal_pk) == 1); - - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &output_xonly_pk, &pk_parity, &output_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, &output_xonly_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, NULL, pk_parity, &internal_xonly_pk, tweak)); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &internal_pk, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, &pk_parity, &internal_pk) == 1); 
+ + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &output_xonly_pk, &pk_parity, &output_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, &output_xonly_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, NULL, pk_parity, &internal_xonly_pk, tweak)); /* invalid pk_parity value */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, 2, &internal_xonly_pk, tweak) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, NULL, tweak)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, 2, &internal_xonly_pk, tweak) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, NULL, tweak)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, NULL)); memset(tweak, 1, sizeof(tweak)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, NULL, &internal_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &output_xonly_pk, &pk_parity, &output_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, output_pk32, &output_xonly_pk) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, output_pk32, pk_parity, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &internal_xonly_pk, NULL, &internal_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &output_xonly_pk, &pk_parity, &output_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, output_pk32, &output_xonly_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, output_pk32, pk_parity, &internal_xonly_pk, tweak) == 1); /* Wrong pk_parity */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, output_pk32, !pk_parity, &internal_xonly_pk, tweak) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, output_pk32, !pk_parity, &internal_xonly_pk, tweak) == 0); /* Wrong public key */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, buf32, &internal_xonly_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, buf32, &internal_xonly_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, buf32, pk_parity, &internal_xonly_pk, tweak) == 0); /* Overflowing tweak not allowed */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, output_pk32, pk_parity, &internal_xonly_pk, overflows) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, overflows) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, output_pk32, pk_parity, &internal_xonly_pk, overflows) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk, &internal_xonly_pk, 
overflows) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk, zeros64, sizeof(output_pk)) == 0); } /* Starts with an initial pubkey and recursively creates N_PUBKEYS - 1 @@ -251,29 +251,29 @@ static void test_xonly_pubkey_tweak_check(void) { #define N_PUBKEYS 32 static void test_xonly_pubkey_tweak_recursive(void) { unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_pubkey pk[N_PUBKEYS]; + rustsecp256k1zkp_v0_10_1_pubkey pk[N_PUBKEYS]; unsigned char pk_serialized[32]; unsigned char tweak[N_PUBKEYS - 1][32]; int i; - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk[0], sk) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk[0], sk) == 1); /* Add tweaks */ for (i = 0; i < N_PUBKEYS - 1; i++) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pk; memset(tweak[i], i + 1, sizeof(tweak[i])); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &pk[i + 1], &xonly_pk, tweak[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &pk[i + 1], &xonly_pk, tweak[i]) == 1); } /* Verify tweaks */ for (i = N_PUBKEYS - 1; i > 0; i--) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pk; int pk_parity; - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, pk_serialized, &xonly_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk[i - 1]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, pk_serialized, pk_parity, &xonly_pk, tweak[i - 1]) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, pk_serialized, &xonly_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, NULL, &pk[i - 1]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, pk_serialized, pk_parity, &xonly_pk, tweak[i - 1]) == 1); } } #undef N_PUBKEYS @@ -283,187 +283,187 @@ static void test_keypair(void) { unsigned char sk_tmp[32]; unsigned char zeros96[96] = { 0 }; unsigned char overflows[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_pubkey pk, pk_tmp; - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pk, xonly_pk_tmp; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_pubkey pk, pk_tmp; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pk, xonly_pk_tmp; int pk_parity, pk_parity_tmp; CHECK(sizeof(zeros96) == sizeof(keypair)); memset(overflows, 0xFF, sizeof(overflows)); /* Test keypair_create */ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_create(CTX, NULL, sk)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_keypair_create(STATIC_CTX, &keypair, sk)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + 
CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_create(CTX, NULL, sk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_keypair_create(STATIC_CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); /* Invalid secret key */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, zeros96) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, overflows) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, zeros96) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, overflows) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &keypair, sizeof(keypair)) == 0); /* Test keypair_pub */ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, &pk, &keypair) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, NULL, &keypair)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, &pk, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &pk, sizeof(pk)) == 0); + 
rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, &pk, &keypair) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, NULL, &keypair)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, &pk, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &pk, sizeof(pk)) == 0); /* Using an invalid keypair is fine for keypair_pub */ memset(&keypair, 0, sizeof(keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, &pk, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &pk, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, &pk, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &pk, sizeof(pk)) == 0); /* keypair holds the same pubkey as pubkey_create */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, &pk_tmp, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pk, &pk_tmp, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, &pk_tmp, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pk, &pk_tmp, sizeof(pk)) == 0); /** Test keypair_xonly_pub **/ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, NULL, &pk_parity, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk, NULL, &keypair) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, NULL)); - 
CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &xonly_pk, sizeof(xonly_pk)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, NULL, &pk_parity, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk, NULL, &keypair) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &xonly_pk, sizeof(xonly_pk)) == 0); /* Using an invalid keypair will set the xonly_pk to 0 (first reset * xonly_pk). */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair) == 1); memset(&keypair, 0, sizeof(keypair)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, &xonly_pk, sizeof(xonly_pk)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk, &pk_parity, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, &xonly_pk, sizeof(xonly_pk)) == 0); /** keypair holds the same xonly pubkey as pubkey_create **/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pk, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &xonly_pk_tmp, &pk_parity_tmp, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xonly_pk, &xonly_pk_tmp, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pk, sk) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &xonly_pk, &pk_parity, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &xonly_pk_tmp, &pk_parity_tmp, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xonly_pk, &xonly_pk_tmp, sizeof(pk)) == 0); CHECK(pk_parity == pk_parity_tmp); /* Test keypair_seckey */ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, sk_tmp, &keypair) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, NULL, &keypair)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, sk_tmp, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, sk_tmp, sizeof(sk_tmp)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, sk_tmp, &keypair) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, NULL, &keypair)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, sk_tmp, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, sk_tmp, sizeof(sk_tmp)) == 0); /* keypair returns the same seckey it got */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, sk_tmp, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sk, sk_tmp, sizeof(sk_tmp)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, sk_tmp, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sk, sk_tmp, sizeof(sk_tmp)) == 0); /* Using an invalid keypair is fine for keypair_seckey */ memset(&keypair, 0, sizeof(keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, sk_tmp, &keypair) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros96, sk_tmp, sizeof(sk_tmp)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, sk_tmp, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros96, sk_tmp, sizeof(sk_tmp)) == 0); } static void test_keypair_add(void) { unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_keypair keypair; unsigned char overflows[32]; unsigned char zeros96[96] = { 0 }; unsigned char tweak[32]; int i; CHECK(sizeof(zeros96) == sizeof(keypair)); - rustsecp256k1zkp_v0_10_0_testrand256(sk); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); memset(overflows, 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, NULL, tweak)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, NULL, tweak)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, NULL)); /* This does not set the keypair to zeroes */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&keypair, zeros96, sizeof(keypair)) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&keypair, zeros96, sizeof(keypair)) != 0); /* 
Invalid tweak zeroes the keypair */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, overflows) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, overflows) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0); /* A zero tweak is fine */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, zeros96) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, zeros96) == 1); /* Fails if the resulting keypair was (sk=0, pk=infinity) */ for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_scalar scalar_tweak; - rustsecp256k1zkp_v0_10_0_keypair keypair_tmp; - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); + rustsecp256k1zkp_v0_10_1_scalar scalar_tweak; + rustsecp256k1zkp_v0_10_1_keypair keypair_tmp; + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); memcpy(&keypair_tmp, &keypair, sizeof(keypair)); /* Because sk may be negated before adding, we need to try with tweak = * sk as well as tweak = -sk. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&scalar_tweak, sk, NULL); - rustsecp256k1zkp_v0_10_0_scalar_negate(&scalar_tweak, &scalar_tweak); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tweak, &scalar_tweak); - CHECK((rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, sk) == 0) - || (rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair_tmp, tweak) == 0)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0 - || rustsecp256k1zkp_v0_10_0_memcmp_var(&keypair_tmp, zeros96, sizeof(keypair_tmp)) == 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&scalar_tweak, sk, NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&scalar_tweak, &scalar_tweak); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tweak, &scalar_tweak); + CHECK((rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, sk) == 0) + || (rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair_tmp, tweak) == 0)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0 + || rustsecp256k1zkp_v0_10_1_memcmp_var(&keypair_tmp, zeros96, sizeof(keypair_tmp)) == 0); } /* Invalid keypair with a valid tweak */ memset(&keypair, 0, sizeof(keypair)); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&keypair, zeros96, sizeof(keypair)) == 0); /* Only seckey part of keypair invalid */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); memset(&keypair, 0, 32); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak)); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak)); /* Only pubkey part of keypair invalid */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); memset(&keypair.data[32], 0, 64); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak)); /* Check that the keypair_tweak_add implementation is correct */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey internal_pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey output_pk; - rustsecp256k1zkp_v0_10_0_pubkey output_pk_xy; - rustsecp256k1zkp_v0_10_0_pubkey output_pk_expected; + rustsecp256k1zkp_v0_10_1_xonly_pubkey internal_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey output_pk; + rustsecp256k1zkp_v0_10_1_pubkey output_pk_xy; + rustsecp256k1zkp_v0_10_1_pubkey output_pk_expected; unsigned char pk32[32]; unsigned char sk32[32]; int pk_parity; - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &internal_pk, NULL, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &output_pk, &pk_parity, &keypair) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &internal_pk, NULL, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &output_pk, &pk_parity, &keypair) == 1); /* Check that it passes xonly_pubkey_tweak_add_check */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, pk32, &output_pk) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, pk32, pk_parity, &internal_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, pk32, &output_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, pk32, pk_parity, &internal_pk, tweak) == 1); /* Check that the resulting pubkey matches xonly_pubkey_tweak_add */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, &output_pk_xy, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add(CTX, &output_pk_expected, &internal_pk, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk_xy, &output_pk_expected, sizeof(output_pk_xy)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, &output_pk_xy, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add(CTX, &output_pk_expected, &internal_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk_xy, &output_pk_expected, sizeof(output_pk_xy)) == 0); /* Check that the secret key in the keypair is tweaked correctly */ - CHECK(rustsecp256k1zkp_v0_10_0_keypair_sec(CTX, sk32, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &output_pk_expected, sk32) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&output_pk_xy, &output_pk_expected, sizeof(output_pk_xy)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_sec(CTX, sk32, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &output_pk_expected, sk32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&output_pk_xy, &output_pk_expected, sizeof(output_pk_xy)) == 0); } } @@ -485,38 +485,38 @@ static void test_hsort(void) { size_t counter = 0; int i, j; - rustsecp256k1zkp_v0_10_0_hsort(ints, 0, sizeof(ints[0]), test_hsort_cmp, &counter); + rustsecp256k1zkp_v0_10_1_hsort(ints, 0, sizeof(ints[0]), test_hsort_cmp, &counter); CHECK(counter == 0); - rustsecp256k1zkp_v0_10_0_hsort(ints, 1, sizeof(ints[0]), test_hsort_cmp, &counter); + rustsecp256k1zkp_v0_10_1_hsort(ints, 1, 
sizeof(ints[0]), test_hsort_cmp, &counter); CHECK(counter == 0); - rustsecp256k1zkp_v0_10_0_hsort(ints, NUM, sizeof(ints[0]), test_hsort_cmp, &counter); + rustsecp256k1zkp_v0_10_1_hsort(ints, NUM, sizeof(ints[0]), test_hsort_cmp, &counter); CHECK(counter > 0); test_hsort_is_sorted(ints, NUM); /* Test hsort with length n array and random elements in * [-interval/2, interval/2] */ for (i = 0; i < COUNT; i++) { - int n = rustsecp256k1zkp_v0_10_0_testrand_int(NUM); - int interval = rustsecp256k1zkp_v0_10_0_testrand_int(63) + 1; + int n = rustsecp256k1zkp_v0_10_1_testrand_int(NUM); + int interval = rustsecp256k1zkp_v0_10_1_testrand_int(63) + 1; for (j = 0; j < n; j++) { - ints[j] = rustsecp256k1zkp_v0_10_0_testrand_int(interval) - interval/2; + ints[j] = rustsecp256k1zkp_v0_10_1_testrand_int(interval) - interval/2; } - rustsecp256k1zkp_v0_10_0_hsort(ints, n, sizeof(ints[0]), test_hsort_cmp, &counter); + rustsecp256k1zkp_v0_10_1_hsort(ints, n, sizeof(ints[0]), test_hsort_cmp, &counter); test_hsort_is_sorted(ints, n); } } #undef NUM -static void test_sort_helper(rustsecp256k1zkp_v0_10_0_pubkey *pk, size_t *pk_order, size_t n_pk) { +static void test_sort_helper(rustsecp256k1zkp_v0_10_1_pubkey *pk, size_t *pk_order, size_t n_pk) { size_t i; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_test[5]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_test[5]; for (i = 0; i < n_pk; i++) { pk_test[i] = &pk[pk_order[i]]; } - rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pk_test, n_pk); + rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pk_test, n_pk); for (i = 0; i < n_pk; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(pk_test[i], &pk[i], sizeof(*pk_test[i])) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(pk_test[i], &pk[i], sizeof(*pk_test[i])) == 0); } } @@ -524,24 +524,24 @@ static void permute(size_t *arr, size_t n) { size_t i; for (i = n - 1; i >= 1; i--) { size_t tmp, j; - j = rustsecp256k1zkp_v0_10_0_testrand_int(i + 1); + j = rustsecp256k1zkp_v0_10_1_testrand_int(i + 1); tmp = arr[i]; 
arr[i] = arr[j]; arr[j] = tmp; } } -static void rand_pk(rustsecp256k1zkp_v0_10_0_pubkey *pk) { +static void rand_pk(rustsecp256k1zkp_v0_10_1_pubkey *pk) { unsigned char seckey[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_testrand256(seckey); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, seckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, pk, &keypair) == 1); + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_testrand256(seckey); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, seckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, pk, &keypair) == 1); } static void test_sort_api(void) { - rustsecp256k1zkp_v0_10_0_pubkey pks[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pks_ptr[2]; + rustsecp256k1zkp_v0_10_1_pubkey pks[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pks_ptr[2]; pks_ptr[0] = &pks[0]; pks_ptr[1] = &pks[1]; @@ -549,24 +549,24 @@ static void test_sort_api(void) { rand_pk(&pks[0]); rand_pk(&pks[1]); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pks_ptr, 2) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, NULL, 2)); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pks_ptr, 0) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pks_ptr, 2) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, NULL, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pks_ptr, 0) == 1); /* Test illegal public keys */ memset(&pks[0], 0, sizeof(pks[0])); - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pks_ptr, 2) == 1)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pks_ptr, 2) == 1)); memset(&pks[1], 0, sizeof(pks[1])); { int32_t ecount = 0; - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, counting_callback_fn, &ecount); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pks_ptr, 2) == 1); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, 
counting_callback_fn, &ecount); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pks_ptr, 2) == 1); CHECK(ecount == 2); - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, NULL, NULL); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, NULL, NULL); } } static void test_sort(void) { - rustsecp256k1zkp_v0_10_0_pubkey pk[5]; + rustsecp256k1zkp_v0_10_1_pubkey pk[5]; unsigned char pk_ser[5][33] = { { 0x02, 0x08 }, { 0x02, 0x0b }, @@ -578,7 +578,7 @@ static void test_sort(void) { size_t pk_order[5] = { 0, 1, 2, 3, 4 }; for (i = 0; i < 5; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk[i], pk_ser[i], sizeof(pk_ser[i]))); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk[i], pk_ser[i], sizeof(pk_ser[i]))); } permute(pk_order, 1); @@ -598,14 +598,14 @@ static void test_sort(void) { /* Check that sorting also works for random pubkeys */ for (i = 0; i < COUNT; i++) { int j; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[5]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[5]; for (j = 0; j < 5; j++) { rand_pk(&pk[j]); pk_ptr[j] = &pk[j]; } - rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pk_ptr, 5); + rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pk_ptr, 5); for (j = 1; j < 5; j++) { - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort_cmp(&pk_ptr[j - 1], &pk_ptr[j], CTX) <= 0); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort_cmp(&pk_ptr[j - 1], &pk_ptr[j], CTX) <= 0); } } } @@ -633,9 +633,9 @@ static void test_sort_vectors(void) { 0xA7, 0x2B, 0x9C, 0xC1, 0xB7, 0xCC, 0x01, 0x39, 0x71, 0x53, 0x09, 0xB0, 0x86, 0xC9, 0x60, 0xE1, 0x8F, 0xD9, 0x69, 0x77, 0x4E, 0xB8 } }; - rustsecp256k1zkp_v0_10_0_pubkey pubkeys[N_PUBKEYS]; - rustsecp256k1zkp_v0_10_0_pubkey *sorted[N_PUBKEYS]; - const rustsecp256k1zkp_v0_10_0_pubkey *pks_ptr[N_PUBKEYS]; + rustsecp256k1zkp_v0_10_1_pubkey pubkeys[N_PUBKEYS]; + rustsecp256k1zkp_v0_10_1_pubkey *sorted[N_PUBKEYS]; + const rustsecp256k1zkp_v0_10_1_pubkey *pks_ptr[N_PUBKEYS]; int i; sorted[0] = &pubkeys[3]; @@ -646,12 +646,12 @@ 
static void test_sort_vectors(void) { sorted[5] = &pubkeys[2]; for (i = 0; i < N_PUBKEYS; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkeys[i], pk_ser[i], sizeof(pk_ser[i]))); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkeys[i], pk_ser[i], sizeof(pk_ser[i]))); pks_ptr[i] = &pubkeys[i]; } - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_sort(CTX, pks_ptr, N_PUBKEYS) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_sort(CTX, pks_ptr, N_PUBKEYS) == 1); for (i = 0; i < N_PUBKEYS; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(pks_ptr[i], sorted[i], sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(pks_ptr[i], sorted[i], sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/Makefile.am.include index 2ad6fb3d..0bf9b69c 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_generator.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_generator.h noinst_HEADERS += src/modules/generator/pedersen.h noinst_HEADERS += src/modules/generator/pedersen_impl.h noinst_HEADERS += src/modules/generator/main_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/main_impl.h index 69635392..71dcf048 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/main_impl.h @@ -27,70 +27,70 @@ H = EllipticCurve ([F (0), F (7)]).lift_x(F(int(hashlib.sha256(bytes.fromhex(G)).hexdigest(),16))) print('%x %x' % H.xy()) */ -static const rustsecp256k1zkp_v0_10_0_generator rustsecp256k1zkp_v0_10_0_generator_h_internal = {{ 
+static const rustsecp256k1zkp_v0_10_1_generator rustsecp256k1zkp_v0_10_1_generator_h_internal = {{ 0x50, 0x92, 0x9b, 0x74, 0xc1, 0xa0, 0x49, 0x54, 0xb7, 0x8b, 0x4b, 0x60, 0x35, 0xe9, 0x7a, 0x5e, 0x07, 0x8a, 0x5a, 0x0f, 0x28, 0xec, 0x96, 0xd5, 0x47, 0xbf, 0xee, 0x9a, 0xce, 0x80, 0x3a, 0xc0, 0x31, 0xd3, 0xc6, 0x86, 0x39, 0x73, 0x92, 0x6e, 0x04, 0x9e, 0x63, 0x7c, 0xb1, 0xb5, 0xf4, 0x0a, 0x36, 0xda, 0xc2, 0x8a, 0xf1, 0x76, 0x69, 0x68, 0xc3, 0x0c, 0x23, 0x13, 0xf3, 0xa3, 0x89, 0x04 }}; -const rustsecp256k1zkp_v0_10_0_generator *rustsecp256k1zkp_v0_10_0_generator_h = &rustsecp256k1zkp_v0_10_0_generator_h_internal; +const rustsecp256k1zkp_v0_10_1_generator *rustsecp256k1zkp_v0_10_1_generator_h = &rustsecp256k1zkp_v0_10_1_generator_h_internal; -static void rustsecp256k1zkp_v0_10_0_generator_load(rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_generator* gen) { +static void rustsecp256k1zkp_v0_10_1_generator_load(rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_generator* gen) { int succeed; - succeed = rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&ge->x, &gen->data[0]); + succeed = rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&ge->x, &gen->data[0]); VERIFY_CHECK(succeed != 0); - succeed = rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&ge->y, &gen->data[32]); + succeed = rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&ge->y, &gen->data[32]); VERIFY_CHECK(succeed != 0); ge->infinity = 0; (void) succeed; } -static void rustsecp256k1zkp_v0_10_0_generator_save(rustsecp256k1zkp_v0_10_0_generator *gen, rustsecp256k1zkp_v0_10_0_ge* ge) { - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(ge)); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge->x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge->y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&gen->data[0], &ge->x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&gen->data[32], &ge->y); +static void rustsecp256k1zkp_v0_10_1_generator_save(rustsecp256k1zkp_v0_10_1_generator *gen, rustsecp256k1zkp_v0_10_1_ge* ge) { + 
VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(ge)); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge->x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge->y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&gen->data[0], &ge->x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&gen->data[32], &ge->y); } -int rustsecp256k1zkp_v0_10_0_generator_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_generator* gen, const unsigned char *input) { - rustsecp256k1zkp_v0_10_0_fe x; - rustsecp256k1zkp_v0_10_0_ge ge; +int rustsecp256k1zkp_v0_10_1_generator_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_generator* gen, const unsigned char *input) { + rustsecp256k1zkp_v0_10_1_fe x; + rustsecp256k1zkp_v0_10_1_ge ge; VERIFY_CHECK(ctx != NULL); ARG_CHECK(gen != NULL); ARG_CHECK(input != NULL); if ((input[0] & 0xFE) != 10 || - !rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&x, &input[1]) || - !rustsecp256k1zkp_v0_10_0_ge_set_xquad(&ge, &x)) { + !rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&x, &input[1]) || + !rustsecp256k1zkp_v0_10_1_ge_set_xquad(&ge, &x)) { return 0; } if (input[0] & 1) { - rustsecp256k1zkp_v0_10_0_ge_neg(&ge, &ge); + rustsecp256k1zkp_v0_10_1_ge_neg(&ge, &ge); } - rustsecp256k1zkp_v0_10_0_generator_save(gen, &ge); + rustsecp256k1zkp_v0_10_1_generator_save(gen, &ge); return 1; } -int rustsecp256k1zkp_v0_10_0_generator_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_0_generator* gen) { - rustsecp256k1zkp_v0_10_0_ge ge; +int rustsecp256k1zkp_v0_10_1_generator_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_1_generator* gen) { + rustsecp256k1zkp_v0_10_1_ge ge; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output != NULL); ARG_CHECK(gen != NULL); - rustsecp256k1zkp_v0_10_0_generator_load(&ge, gen); + rustsecp256k1zkp_v0_10_1_generator_load(&ge, gen); - output[0] = 11 ^ rustsecp256k1zkp_v0_10_0_fe_is_square_var(&ge.y); - 
rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&output[1], &ge.x); + output[0] = 11 ^ rustsecp256k1zkp_v0_10_1_fe_is_square_var(&ge.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&output[1], &ge.x); return 1; } -static void shallue_van_de_woestijne(rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_fe* t) { +static void shallue_van_de_woestijne(rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_fe* t) { /* Implements the algorithm from: * Indifferentiable Hashing to Barreto-Naehrig Curves * Pierre-Alain Fouque and Mehdi Tibouchi @@ -121,161 +121,161 @@ static void shallue_van_de_woestijne(rustsecp256k1zkp_v0_10_0_ge* ge, const rust If j = 0, the function outputs the point (d, f(d)). This point is equal to (x1, f(x1)) as defined above if division by 0 is defined to be 0. In - below code this is not special-cased because rustsecp256k1zkp_v0_10_0_fe_inv returns 0 + below code this is not special-cased because rustsecp256k1zkp_v0_10_1_fe_inv returns 0 on input 0. j = 0 happens only when t = 0 (since wd != 0 as -8 is not a square). 
*/ - static const rustsecp256k1zkp_v0_10_0_fe negc = SECP256K1_FE_CONST(0xf5d2d456, 0xcaf80e20, 0xdcc88f3d, 0x586869d3, 0x39e092ea, 0x25eb132b, 0x8272d850, 0xe32a03dd); - static const rustsecp256k1zkp_v0_10_0_fe d = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa40); + static const rustsecp256k1zkp_v0_10_1_fe negc = SECP256K1_FE_CONST(0xf5d2d456, 0xcaf80e20, 0xdcc88f3d, 0x586869d3, 0x39e092ea, 0x25eb132b, 0x8272d850, 0xe32a03dd); + static const rustsecp256k1zkp_v0_10_1_fe d = SECP256K1_FE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa40); - rustsecp256k1zkp_v0_10_0_fe wd, x3d, jinv, tmp, x1, x2, x3, alphain, betain, gammain, y1, y2, y3; + rustsecp256k1zkp_v0_10_1_fe wd, x3d, jinv, tmp, x1, x2, x3, alphain, betain, gammain, y1, y2, y3; int alphaquad, betaquad; /* wd = t^2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&wd, t); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&wd, t); /* mag 1 */ /* x1 = -c * t^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&x1, &negc, &wd); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&x1, &negc, &wd); /* mag 1 */ /* x3d = t^2 */ x3d = wd; /* mag 1 */ /* x3d = 3 * t^2 */ - rustsecp256k1zkp_v0_10_0_fe_mul_int(&x3d, 3); /* mag 3 */ + rustsecp256k1zkp_v0_10_1_fe_mul_int(&x3d, 3); /* mag 3 */ /* x3d = -3 * t^2 */ - rustsecp256k1zkp_v0_10_0_fe_negate(&x3d, &x3d, 3); /* mag 4 */ + rustsecp256k1zkp_v0_10_1_fe_negate(&x3d, &x3d, 3); /* mag 4 */ /* wd = 1 + b + t^2 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&wd, SECP256K1_B + 1); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&wd, SECP256K1_B + 1); /* mag 2 */ /* jinv = wd * x3d */ - rustsecp256k1zkp_v0_10_0_fe_mul(&jinv, &wd, &x3d); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&jinv, &wd, &x3d); /* mag 1 */ /* jinv = 1/(wd * x3d) */ - rustsecp256k1zkp_v0_10_0_fe_inv(&jinv, &jinv); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_inv(&jinv, &jinv); /* mag 1 */ /* x1 = -c * t^2 * x3d */ - 
rustsecp256k1zkp_v0_10_0_fe_mul(&x1, &x1, &x3d); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&x1, &x1, &x3d); /* mag 1 */ /* x1 = -c * t^2 * x3d * 1/j */ - rustsecp256k1zkp_v0_10_0_fe_mul(&x1, &x1, &jinv); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&x1, &x1, &jinv); /* mag 1 */ /* x1 = d + -c * t^2 * x3d * 1/j */ - rustsecp256k1zkp_v0_10_0_fe_add(&x1, &d); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_add(&x1, &d); /* mag 2 */ /* x2 = x1 */ x2 = x1; /* mag 2 */ /* x2 = x1 + 1 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&x2, 1); /* mag 3 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&x2, 1); /* mag 3 */ /* x2 = - (x1 + 1) */ - rustsecp256k1zkp_v0_10_0_fe_negate(&x2, &x2, 3); /* mag 4 */ + rustsecp256k1zkp_v0_10_1_fe_negate(&x2, &x2, 3); /* mag 4 */ /* x3 = wd^2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&x3, &wd); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&x3, &wd); /* mag 1 */ /* x3 = wd^3 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&x3, &x3, &wd); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&x3, &x3, &wd); /* mag 1 */ /* x3 = wd^3 * 1/j */ - rustsecp256k1zkp_v0_10_0_fe_mul(&x3, &x3, &jinv); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&x3, &x3, &jinv); /* mag 1 */ /* x3 = 1 + (wd^3 * 1/j) */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&x3, 1); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&x3, 1); /* mag 2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&alphain, &x1); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&alphain, &alphain, &x1); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&alphain, SECP256K1_B); /* mag 2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&betain, &x2); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&betain, &betain, &x2); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&betain, SECP256K1_B); /* mag 2 */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&gammain, &x3); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_mul(&gammain, &gammain, &x3); /* mag 1 */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&gammain, SECP256K1_B); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&alphain, &x1); /* mag 1 */ 
+ rustsecp256k1zkp_v0_10_1_fe_mul(&alphain, &alphain, &x1); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&alphain, SECP256K1_B); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&betain, &x2); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&betain, &betain, &x2); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&betain, SECP256K1_B); /* mag 2 */ + rustsecp256k1zkp_v0_10_1_fe_sqr(&gammain, &x3); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&gammain, &gammain, &x3); /* mag 1 */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&gammain, SECP256K1_B); /* mag 2 */ - alphaquad = rustsecp256k1zkp_v0_10_0_fe_sqrt(&y1, &alphain); - betaquad = rustsecp256k1zkp_v0_10_0_fe_sqrt(&y2, &betain); - rustsecp256k1zkp_v0_10_0_fe_sqrt(&y3, &gammain); + alphaquad = rustsecp256k1zkp_v0_10_1_fe_sqrt(&y1, &alphain); + betaquad = rustsecp256k1zkp_v0_10_1_fe_sqrt(&y2, &betain); + rustsecp256k1zkp_v0_10_1_fe_sqrt(&y3, &gammain); - rustsecp256k1zkp_v0_10_0_fe_cmov(&x1, &x2, (!alphaquad) & betaquad); - rustsecp256k1zkp_v0_10_0_fe_cmov(&y1, &y2, (!alphaquad) & betaquad); - rustsecp256k1zkp_v0_10_0_fe_cmov(&x1, &x3, (!alphaquad) & !betaquad); - rustsecp256k1zkp_v0_10_0_fe_cmov(&y1, &y3, (!alphaquad) & !betaquad); + rustsecp256k1zkp_v0_10_1_fe_cmov(&x1, &x2, (!alphaquad) & betaquad); + rustsecp256k1zkp_v0_10_1_fe_cmov(&y1, &y2, (!alphaquad) & betaquad); + rustsecp256k1zkp_v0_10_1_fe_cmov(&x1, &x3, (!alphaquad) & !betaquad); + rustsecp256k1zkp_v0_10_1_fe_cmov(&y1, &y3, (!alphaquad) & !betaquad); - rustsecp256k1zkp_v0_10_0_ge_set_xy(ge, &x1, &y1); + rustsecp256k1zkp_v0_10_1_ge_set_xy(ge, &x1, &y1); /* The linked algorithm from the paper uses the Jacobi symbol of t to * determine the Jacobi symbol of the produced y coordinate. Since the * rest of the algorithm only uses t^2, we can safely use another criterion * as long as negation of t results in negation of the y coordinate. Here * we choose to use t's oddness, as it is faster to determine. 
*/ - rustsecp256k1zkp_v0_10_0_fe_negate(&tmp, &ge->y, 1); - rustsecp256k1zkp_v0_10_0_fe_cmov(&ge->y, &tmp, rustsecp256k1zkp_v0_10_0_fe_is_odd(t)); + rustsecp256k1zkp_v0_10_1_fe_negate(&tmp, &ge->y, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&ge->y, &tmp, rustsecp256k1zkp_v0_10_1_fe_is_odd(t)); } -static int rustsecp256k1zkp_v0_10_0_generator_generate_internal(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_generator* gen, const unsigned char *key32, const unsigned char *blind32) { +static int rustsecp256k1zkp_v0_10_1_generator_generate_internal(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_generator* gen, const unsigned char *key32, const unsigned char *blind32) { static const unsigned char prefix1[17] = "1st generation: "; static const unsigned char prefix2[17] = "2nd generation: "; - rustsecp256k1zkp_v0_10_0_fe t = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 4); - rustsecp256k1zkp_v0_10_0_ge add; - rustsecp256k1zkp_v0_10_0_gej accum; + rustsecp256k1zkp_v0_10_1_fe t = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 4); + rustsecp256k1zkp_v0_10_1_ge add; + rustsecp256k1zkp_v0_10_1_gej accum; int overflow; - rustsecp256k1zkp_v0_10_0_sha256 sha256; + rustsecp256k1zkp_v0_10_1_sha256 sha256; unsigned char b32[32]; int ret = 1; if (blind32) { - rustsecp256k1zkp_v0_10_0_scalar blind; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&blind, blind32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar blind; + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&blind, blind32, &overflow); ret = !overflow; - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &accum, &blind); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &accum, &blind); } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, prefix1, 16); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, key32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256, b32); - ret &= rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&t, b32); + 
rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, prefix1, 16); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, key32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256, b32); + ret &= rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&t, b32); shallue_van_de_woestijne(&add, &t); if (blind32) { - rustsecp256k1zkp_v0_10_0_gej_add_ge(&accum, &accum, &add); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&accum, &accum, &add); } else { - rustsecp256k1zkp_v0_10_0_gej_set_ge(&accum, &add); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&accum, &add); } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, prefix2, 16); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256, key32, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256, b32); - ret &= rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&t, b32); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, prefix2, 16); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256, key32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256, b32); + ret &= rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&t, b32); shallue_van_de_woestijne(&add, &t); - rustsecp256k1zkp_v0_10_0_gej_add_ge(&accum, &accum, &add); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&accum, &accum, &add); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&add, &accum); - rustsecp256k1zkp_v0_10_0_generator_save(gen, &add); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&add, &accum); + rustsecp256k1zkp_v0_10_1_generator_save(gen, &add); return ret; } -int rustsecp256k1zkp_v0_10_0_generator_generate(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_generator* gen, const unsigned char *key32) { +int rustsecp256k1zkp_v0_10_1_generator_generate(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_generator* gen, const unsigned char *key32) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(gen != NULL); ARG_CHECK(key32 != NULL); - return 
rustsecp256k1zkp_v0_10_0_generator_generate_internal(ctx, gen, key32, NULL); + return rustsecp256k1zkp_v0_10_1_generator_generate_internal(ctx, gen, key32, NULL); } -int rustsecp256k1zkp_v0_10_0_generator_generate_blinded(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_generator* gen, const unsigned char *key32, const unsigned char *blind32) { +int rustsecp256k1zkp_v0_10_1_generator_generate_blinded(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_generator* gen, const unsigned char *key32, const unsigned char *blind32) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(gen != NULL); ARG_CHECK(key32 != NULL); ARG_CHECK(blind32 != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); - return rustsecp256k1zkp_v0_10_0_generator_generate_internal(ctx, gen, key32, blind32); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + return rustsecp256k1zkp_v0_10_1_generator_generate_internal(ctx, gen, key32, blind32); } -static void rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_pedersen_commitment* commit) { - rustsecp256k1zkp_v0_10_0_fe fe; - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&fe, &commit->data[1]); - rustsecp256k1zkp_v0_10_0_ge_set_xquad(ge, &fe); +static void rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_pedersen_commitment* commit) { + rustsecp256k1zkp_v0_10_1_fe fe; + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&fe, &commit->data[1]); + rustsecp256k1zkp_v0_10_1_ge_set_xquad(ge, &fe); if (commit->data[0] & 1) { - rustsecp256k1zkp_v0_10_0_ge_neg(ge, ge); + rustsecp256k1zkp_v0_10_1_ge_neg(ge, ge); } } -static void rustsecp256k1zkp_v0_10_0_pedersen_commitment_save(rustsecp256k1zkp_v0_10_0_pedersen_commitment* commit, rustsecp256k1zkp_v0_10_0_ge* ge) { - rustsecp256k1zkp_v0_10_0_fe_normalize(&ge->x); - 
rustsecp256k1zkp_v0_10_0_fe_get_b32(&commit->data[1], &ge->x); - commit->data[0] = 9 ^ rustsecp256k1zkp_v0_10_0_fe_is_square_var(&ge->y); +static void rustsecp256k1zkp_v0_10_1_pedersen_commitment_save(rustsecp256k1zkp_v0_10_1_pedersen_commitment* commit, rustsecp256k1zkp_v0_10_1_ge* ge) { + rustsecp256k1zkp_v0_10_1_fe_normalize(&ge->x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&commit->data[1], &ge->x); + commit->data[0] = 9 ^ rustsecp256k1zkp_v0_10_1_fe_is_square_var(&ge->y); } -int rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pedersen_commitment* commit, const unsigned char *input) { - rustsecp256k1zkp_v0_10_0_fe x; +int rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pedersen_commitment* commit, const unsigned char *input) { + rustsecp256k1zkp_v0_10_1_fe x; VERIFY_CHECK(ctx != NULL); ARG_CHECK(commit != NULL); @@ -283,8 +283,8 @@ int rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(const rustsecp256k1zkp_v0 (void) ctx; if ((input[0] & 0xFE) != 8 || - !rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&x, &input[1]) || - !rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&x)) { + !rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&x, &input[1]) || + !rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&x)) { return 0; } @@ -292,7 +292,7 @@ int rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(const rustsecp256k1zkp_v0 return 1; } -int rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_0_pedersen_commitment* commit) { +int rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, const rustsecp256k1zkp_v0_10_1_pedersen_commitment* commit) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(output != NULL); ARG_CHECK(commit != NULL); @@ -302,40 +302,40 @@ int 
rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize(const rustsecp256k1zk } /* Generates a pedersen commitment: *commit = blind * G + value * G2. The blinding factor is 32 bytes.*/ -int rustsecp256k1zkp_v0_10_0_pedersen_commit(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, const unsigned char *blind, uint64_t value, const rustsecp256k1zkp_v0_10_0_generator* gen) { - rustsecp256k1zkp_v0_10_0_ge genp; - rustsecp256k1zkp_v0_10_0_gej rj; - rustsecp256k1zkp_v0_10_0_ge r; - rustsecp256k1zkp_v0_10_0_scalar sec; +int rustsecp256k1zkp_v0_10_1_pedersen_commit(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *blind, uint64_t value, const rustsecp256k1zkp_v0_10_1_generator* gen) { + rustsecp256k1zkp_v0_10_1_ge genp; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_ge r; + rustsecp256k1zkp_v0_10_1_scalar sec; int overflow; int ret = 0; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(commit != NULL); ARG_CHECK(blind != NULL); ARG_CHECK(gen != NULL); - rustsecp256k1zkp_v0_10_0_generator_load(&genp, gen); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sec, blind, &overflow); + rustsecp256k1zkp_v0_10_1_generator_load(&genp, gen); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sec, blind, &overflow); if (!overflow) { - rustsecp256k1zkp_v0_10_0_pedersen_ecmult(&ctx->ecmult_gen_ctx, &rj, &sec, value, &genp); - if (!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rj)) { - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r, &rj); - rustsecp256k1zkp_v0_10_0_pedersen_commitment_save(commit, &r); + rustsecp256k1zkp_v0_10_1_pedersen_ecmult(&ctx->ecmult_gen_ctx, &rj, &sec, value, &genp); + if (!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rj)) { + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rj); + 
rustsecp256k1zkp_v0_10_1_pedersen_commitment_save(commit, &r); ret = 1; } - rustsecp256k1zkp_v0_10_0_gej_clear(&rj); - rustsecp256k1zkp_v0_10_0_ge_clear(&r); + rustsecp256k1zkp_v0_10_1_gej_clear(&rj); + rustsecp256k1zkp_v0_10_1_ge_clear(&r); } - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); return ret; } /** Takes a list of n pointers to 32 byte blinding values, the first negs of which are treated with positive sign and the rest * negative, then calculates an additional blinding value that adds to zero. */ -int rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *blind_out, const unsigned char * const *blinds, size_t n, size_t npositive) { - rustsecp256k1zkp_v0_10_0_scalar acc; - rustsecp256k1zkp_v0_10_0_scalar x; +int rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *blind_out, const unsigned char * const *blinds, size_t n, size_t npositive) { + rustsecp256k1zkp_v0_10_1_scalar acc; + rustsecp256k1zkp_v0_10_1_scalar x; size_t i; int overflow; VERIFY_CHECK(ctx != NULL); @@ -343,48 +343,48 @@ int rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(const rustsecp256k1zkp_v0_10_0_c ARG_CHECK(blinds != NULL); ARG_CHECK(npositive <= n); (void) ctx; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&acc, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&acc, 0); for (i = 0; i < n; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x, blinds[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x, blinds[i], &overflow); if (overflow) { return 0; } if (i >= npositive) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&x, &x); + rustsecp256k1zkp_v0_10_1_scalar_negate(&x, &x); } - rustsecp256k1zkp_v0_10_0_scalar_add(&acc, &acc, &x); + rustsecp256k1zkp_v0_10_1_scalar_add(&acc, &acc, &x); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(blind_out, &acc); - rustsecp256k1zkp_v0_10_0_scalar_clear(&acc); - rustsecp256k1zkp_v0_10_0_scalar_clear(&x); + 
rustsecp256k1zkp_v0_10_1_scalar_get_b32(blind_out, &acc); + rustsecp256k1zkp_v0_10_1_scalar_clear(&acc); + rustsecp256k1zkp_v0_10_1_scalar_clear(&x); return 1; } /* Takes two lists of commitments and sums the first set and subtracts the second and verifies that they sum to excess. */ -int rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_pedersen_commitment * const* commits, size_t pcnt, const rustsecp256k1zkp_v0_10_0_pedersen_commitment * const* ncommits, size_t ncnt) { - rustsecp256k1zkp_v0_10_0_gej accj; - rustsecp256k1zkp_v0_10_0_ge add; +int rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_pedersen_commitment * const* commits, size_t pcnt, const rustsecp256k1zkp_v0_10_1_pedersen_commitment * const* ncommits, size_t ncnt) { + rustsecp256k1zkp_v0_10_1_gej accj; + rustsecp256k1zkp_v0_10_1_ge add; size_t i; VERIFY_CHECK(ctx != NULL); ARG_CHECK(!pcnt || (commits != NULL)); ARG_CHECK(!ncnt || (ncommits != NULL)); (void) ctx; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&accj); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&accj); for (i = 0; i < ncnt; i++) { - rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(&add, ncommits[i]); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&accj, &accj, &add, NULL); + rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(&add, ncommits[i]); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&accj, &accj, &add, NULL); } - rustsecp256k1zkp_v0_10_0_gej_neg(&accj, &accj); + rustsecp256k1zkp_v0_10_1_gej_neg(&accj, &accj); for (i = 0; i < pcnt; i++) { - rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(&add, commits[i]); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&accj, &accj, &add, NULL); + rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(&add, commits[i]); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&accj, &accj, &add, NULL); } - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&accj); + return 
rustsecp256k1zkp_v0_10_1_gej_is_infinity(&accj); } -int rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(const rustsecp256k1zkp_v0_10_0_context* ctx, const uint64_t *value, const unsigned char* const* generator_blind, unsigned char* const* blinding_factor, size_t n_total, size_t n_inputs) { - rustsecp256k1zkp_v0_10_0_scalar sum; - rustsecp256k1zkp_v0_10_0_scalar tmp; +int rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(const rustsecp256k1zkp_v0_10_1_context* ctx, const uint64_t *value, const unsigned char* const* generator_blind, unsigned char* const* blinding_factor, size_t n_total, size_t n_inputs) { + rustsecp256k1zkp_v0_10_1_scalar sum; + rustsecp256k1zkp_v0_10_1_scalar tmp; size_t i; VERIFY_CHECK(ctx != NULL); @@ -398,8 +398,8 @@ int rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(const rustsecp25 return 1; } - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sum, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&tmp, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sum, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&tmp, 0); /* Here, n_total > 0. Thus the loop runs at least once. 
Thus we may use a do-while loop, which checks the loop @@ -410,40 +410,40 @@ int rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(const rustsecp25 i = 0; do { int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar addend; - rustsecp256k1zkp_v0_10_0_scalar_set_u64(&addend, value[i]); /* s = v */ + rustsecp256k1zkp_v0_10_1_scalar addend; + rustsecp256k1zkp_v0_10_1_scalar_set_u64(&addend, value[i]); /* s = v */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&tmp, generator_blind[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&tmp, generator_blind[i], &overflow); if (overflow == 1) { - rustsecp256k1zkp_v0_10_0_scalar_clear(&tmp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&addend); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sum); + rustsecp256k1zkp_v0_10_1_scalar_clear(&tmp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&addend); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sum); return 0; } - rustsecp256k1zkp_v0_10_0_scalar_mul(&addend, &addend, &tmp); /* s = vr */ + rustsecp256k1zkp_v0_10_1_scalar_mul(&addend, &addend, &tmp); /* s = vr */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&tmp, blinding_factor[i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&tmp, blinding_factor[i], &overflow); if (overflow == 1) { - rustsecp256k1zkp_v0_10_0_scalar_clear(&tmp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&addend); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sum); + rustsecp256k1zkp_v0_10_1_scalar_clear(&tmp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&addend); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sum); return 0; } - rustsecp256k1zkp_v0_10_0_scalar_add(&addend, &addend, &tmp); /* s = vr + r' */ - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&addend, i < n_inputs); /* s is negated if it's an input */ - rustsecp256k1zkp_v0_10_0_scalar_add(&sum, &sum, &addend); /* sum += s */ - rustsecp256k1zkp_v0_10_0_scalar_clear(&addend); + rustsecp256k1zkp_v0_10_1_scalar_add(&addend, &addend, &tmp); /* s = vr + r' */ + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&addend, i < n_inputs); /* 
s is negated if it's an input */ + rustsecp256k1zkp_v0_10_1_scalar_add(&sum, &sum, &addend); /* sum += s */ + rustsecp256k1zkp_v0_10_1_scalar_clear(&addend); i++; } while (i < n_total); /* Right now tmp has the last pedersen blinding factor. Subtract the sum from it. */ - rustsecp256k1zkp_v0_10_0_scalar_negate(&sum, &sum); - rustsecp256k1zkp_v0_10_0_scalar_add(&tmp, &tmp, &sum); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(blinding_factor[n_total - 1], &tmp); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sum, &sum); + rustsecp256k1zkp_v0_10_1_scalar_add(&tmp, &tmp, &sum); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(blinding_factor[n_total - 1], &tmp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&tmp); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sum); + rustsecp256k1zkp_v0_10_1_scalar_clear(&tmp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sum); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen.h index 23fd98b9..de63c037 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen.h @@ -14,9 +14,9 @@ #include /** Multiply a small number with the generator: r = gn*G2 */ -static void rustsecp256k1zkp_v0_10_0_pedersen_ecmult_small(rustsecp256k1zkp_v0_10_0_gej *r, uint64_t gn, const rustsecp256k1zkp_v0_10_0_ge* genp); +static void rustsecp256k1zkp_v0_10_1_pedersen_ecmult_small(rustsecp256k1zkp_v0_10_1_gej *r, uint64_t gn, const rustsecp256k1zkp_v0_10_1_ge* genp); /* sec * G + value * G2. 
*/ -static void rustsecp256k1zkp_v0_10_0_pedersen_ecmult(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_0_gej *rj, const rustsecp256k1zkp_v0_10_0_scalar *sec, uint64_t value, const rustsecp256k1zkp_v0_10_0_ge* genp); +static void rustsecp256k1zkp_v0_10_1_pedersen_ecmult(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_1_gej *rj, const rustsecp256k1zkp_v0_10_1_scalar *sec, uint64_t value, const rustsecp256k1zkp_v0_10_1_ge* genp); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen_impl.h index f4158901..b592b0ee 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/pedersen_impl.h @@ -17,7 +17,7 @@ #include "../../scalar.h" #include "../../util.h" -static void rustsecp256k1zkp_v0_10_0_pedersen_scalar_set_u64(rustsecp256k1zkp_v0_10_0_scalar *sec, uint64_t value) { +static void rustsecp256k1zkp_v0_10_1_pedersen_scalar_set_u64(rustsecp256k1zkp_v0_10_1_scalar *sec, uint64_t value) { unsigned char data[32]; int i; for (i = 0; i < 24; i++) { @@ -27,25 +27,25 @@ static void rustsecp256k1zkp_v0_10_0_pedersen_scalar_set_u64(rustsecp256k1zkp_v0 data[i] = value >> 56; value <<= 8; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(sec, data, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(sec, data, NULL); memset(data, 0, 32); } -static void rustsecp256k1zkp_v0_10_0_pedersen_ecmult_small(rustsecp256k1zkp_v0_10_0_gej *r, uint64_t gn, const rustsecp256k1zkp_v0_10_0_ge* genp) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_pedersen_scalar_set_u64(&s, gn); - rustsecp256k1zkp_v0_10_0_ecmult_const(r, genp, &s); - rustsecp256k1zkp_v0_10_0_scalar_clear(&s); +static void rustsecp256k1zkp_v0_10_1_pedersen_ecmult_small(rustsecp256k1zkp_v0_10_1_gej *r, uint64_t gn, const 
rustsecp256k1zkp_v0_10_1_ge* genp) { + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_pedersen_scalar_set_u64(&s, gn); + rustsecp256k1zkp_v0_10_1_ecmult_const(r, genp, &s); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s); } /* sec * G + value * G2. */ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_pedersen_ecmult(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_0_gej *rj, const rustsecp256k1zkp_v0_10_0_scalar *sec, uint64_t value, const rustsecp256k1zkp_v0_10_0_ge* genp) { - rustsecp256k1zkp_v0_10_0_gej vj; - rustsecp256k1zkp_v0_10_0_ecmult_gen(ecmult_gen_ctx, rj, sec); - rustsecp256k1zkp_v0_10_0_pedersen_ecmult_small(&vj, value, genp); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_pedersen_ecmult(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_1_gej *rj, const rustsecp256k1zkp_v0_10_1_scalar *sec, uint64_t value, const rustsecp256k1zkp_v0_10_1_ge* genp) { + rustsecp256k1zkp_v0_10_1_gej vj; + rustsecp256k1zkp_v0_10_1_ecmult_gen(ecmult_gen_ctx, rj, sec); + rustsecp256k1zkp_v0_10_1_pedersen_ecmult_small(&vj, value, genp); /* FIXME: constant time. 
*/ - rustsecp256k1zkp_v0_10_0_gej_add_var(rj, rj, &vj, NULL); - rustsecp256k1zkp_v0_10_0_gej_clear(&vj); + rustsecp256k1zkp_v0_10_1_gej_add_var(rj, rj, &vj, NULL); + rustsecp256k1zkp_v0_10_1_gej_clear(&vj); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/tests_impl.h index 3b48d098..17ebbcaa 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/generator/tests_impl.h @@ -21,34 +21,34 @@ static void test_generator_api(void) { unsigned char key[32]; unsigned char blind[32]; unsigned char sergen[33]; - rustsecp256k1zkp_v0_10_0_generator gen; + rustsecp256k1zkp_v0_10_1_generator gen; - rustsecp256k1zkp_v0_10_0_testrand256(key); - rustsecp256k1zkp_v0_10_0_testrand256(blind); + rustsecp256k1zkp_v0_10_1_testrand256(key); + rustsecp256k1zkp_v0_10_1_testrand256(blind); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate(CTX, &gen, key) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_generate(CTX, NULL, key)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_generate(CTX, &gen, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate(CTX, &gen, key) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_generate(CTX, NULL, key)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_generate(CTX, &gen, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &gen, key, blind) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_generator_generate_blinded(STATIC_CTX, &gen, key, blind)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, NULL, key, blind)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &gen, NULL, blind)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &gen, key, NULL)); + 
CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &gen, key, blind) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_generator_generate_blinded(STATIC_CTX, &gen, key, blind)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, NULL, key, blind)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &gen, NULL, blind)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &gen, key, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_serialize(CTX, sergen, &gen) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_serialize(CTX, NULL, &gen)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_serialize(CTX, sergen, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_serialize(CTX, sergen, &gen) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_serialize(CTX, NULL, &gen)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_serialize(CTX, sergen, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_serialize(CTX, sergen, &gen) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &gen, sergen) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_parse(CTX, NULL, sergen)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &gen, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_serialize(CTX, sergen, &gen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &gen, sergen) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_parse(CTX, NULL, sergen)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &gen, NULL)); } static void test_shallue_van_de_woestijne(void) { /* Matches with the output of the shallue_van_de_woestijne.sage SAGE program */ - static const rustsecp256k1zkp_v0_10_0_ge_storage results[34] = { + static const rustsecp256k1zkp_v0_10_1_ge_storage results[34] = { SECP256K1_GE_STORAGE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 
0x3ec693d6, 0x8e6afa40, 0x4218f20a, 0xe6c646b3, 0x63db6860, 0x5822fb14, 0x264ca8d2, 0x587fdd6f, 0xbc750d58, 0x7e76a7ee), SECP256K1_GE_STORAGE_CONST(0x851695d4, 0x9a83f8ef, 0x919bb861, 0x53cbcb16, 0x630fb68a, 0xed0a766a, 0x3ec693d6, 0x8e6afa40, 0x4218f20a, 0xe6c646b3, 0x63db6860, 0x5822fb14, 0x264ca8d2, 0x587fdd6f, 0xbc750d58, 0x7e76a7ee), SECP256K1_GE_STORAGE_CONST(0xedd1fd3e, 0x327ce90c, 0xc7a35426, 0x14289aee, 0x9682003e, 0x9cf7dcc9, 0xcf2ca974, 0x3be5aa0c, 0x0225f529, 0xee75acaf, 0xccfc4560, 0x26c5e46b, 0xf80237a3, 0x3924655a, 0x16f90e88, 0x085ed52a), @@ -85,28 +85,28 @@ static void test_shallue_van_de_woestijne(void) { SECP256K1_GE_STORAGE_CONST(0xf75763bc, 0x2907e79b, 0x125e33c3, 0x9a027f48, 0x0f8c6409, 0x2153432f, 0x967bc2b1, 0x1d1f5cf0, 0x4b571239, 0xc9c6e4c6, 0x643de63f, 0xc2fcced7, 0x2432b9c1, 0x2daf9c6b, 0xf9e47859, 0x61aef9fa), }; - rustsecp256k1zkp_v0_10_0_ge ge; - rustsecp256k1zkp_v0_10_0_fe fe; - rustsecp256k1zkp_v0_10_0_ge_storage ges; + rustsecp256k1zkp_v0_10_1_ge ge; + rustsecp256k1zkp_v0_10_1_fe fe; + rustsecp256k1zkp_v0_10_1_ge_storage ges; int i, s; for (i = 0; i <= 16; i++) { - rustsecp256k1zkp_v0_10_0_fe_set_int(&fe, i); + rustsecp256k1zkp_v0_10_1_fe_set_int(&fe, i); for (s = 0; s < 2; s++) { if (s) { - rustsecp256k1zkp_v0_10_0_fe_negate(&fe, &fe, 1); - rustsecp256k1zkp_v0_10_0_fe_normalize(&fe); + rustsecp256k1zkp_v0_10_1_fe_negate(&fe, &fe, 1); + rustsecp256k1zkp_v0_10_1_fe_normalize(&fe); } shallue_van_de_woestijne(&ge, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&ge)); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&ges, &ge); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&ges, &results[i * 2 + s], sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&ge)); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&ges, &ge); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&ges, &results[i * 2 + s], sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)) == 0); } } } static void test_generator_generate(void) { - static 
const rustsecp256k1zkp_v0_10_0_ge_storage results[32] = { + static const rustsecp256k1zkp_v0_10_1_ge_storage results[32] = { SECP256K1_GE_STORAGE_CONST(0x806cd8ed, 0xd6c153e3, 0x4aa9b9a0, 0x8755c4be, 0x4718b1ef, 0xb26cb93f, 0xfdd99e1b, 0x21f2af8e, 0xc7062208, 0xcc649a03, 0x1bdc1a33, 0x9d01f115, 0x4bcd0dca, 0xfe0b875d, 0x62f35f73, 0x28673006), SECP256K1_GE_STORAGE_CONST(0xd91b15ec, 0x47a811f4, 0xaa189561, 0xd13f5c4d, 0x4e81f10d, 0xc7dc551f, 0x4fea9b84, 0x610314c4, 0x9b0ada1e, 0xb38efd67, 0x8bff0b6c, 0x7d7315f7, 0xb49b8cc5, 0xa679fad4, 0xc94f9dc6, 0x9da66382), SECP256K1_GE_STORAGE_CONST(0x11c00de6, 0xf885035e, 0x76051430, 0xa3c38b2a, 0x5f86ab8c, 0xf66dae58, 0x04ea7307, 0x348b19bf, 0xe0858ae7, 0x61dcb1ba, 0xff247e37, 0xd38fcd88, 0xf3bd7911, 0xaa4ed6e0, 0x28d792dd, 0x3ee1ac09), @@ -140,34 +140,34 @@ static void test_generator_generate(void) { SECP256K1_GE_STORAGE_CONST(0x150df593, 0x5b6956a0, 0x0cfed843, 0xb9d6ffce, 0x4f790022, 0xea18730f, 0xc495111d, 0x91568e55, 0x6700a2ca, 0x9ff4ed32, 0xc1697312, 0x4eb51ce3, 0x5656344b, 0x65a1e3d5, 0xd6c1f7ce, 0x29233f82), SECP256K1_GE_STORAGE_CONST(0x38e02eaf, 0x2c8774fd, 0x58b8b373, 0x732457f1, 0x16dbe53b, 0xea5683d9, 0xada20dd7, 0x14ce20a6, 0x6ac5362e, 0xbb425416, 0x8250f43f, 0xa4ee2b63, 0x0406324f, 0x1c876d60, 0xebe5be2c, 0x6eb1515b), }; - rustsecp256k1zkp_v0_10_0_generator gen; - rustsecp256k1zkp_v0_10_0_ge ge; - rustsecp256k1zkp_v0_10_0_ge_storage ges; + rustsecp256k1zkp_v0_10_1_generator gen; + rustsecp256k1zkp_v0_10_1_ge ge; + rustsecp256k1zkp_v0_10_1_ge_storage ges; int i; unsigned char v[32]; unsigned char s[32] = {0}; - rustsecp256k1zkp_v0_10_0_scalar sc; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sc, s, NULL); + rustsecp256k1zkp_v0_10_1_scalar sc; + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sc, s, NULL); for (i = 1; i <= 32; i++) { memset(v, 0, 31); v[31] = i; - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &gen, v, s)); - rustsecp256k1zkp_v0_10_0_generator_load(&ge, &gen); - 
rustsecp256k1zkp_v0_10_0_ge_to_storage(&ges, &ge); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&ges, &results[i - 1], sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate(CTX, &gen, v)); - rustsecp256k1zkp_v0_10_0_generator_load(&ge, &gen); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&ges, &ge); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&ges, &results[i - 1], sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &gen, v, s)); + rustsecp256k1zkp_v0_10_1_generator_load(&ge, &gen); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&ges, &ge); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&ges, &results[i - 1], sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate(CTX, &gen, v)); + rustsecp256k1zkp_v0_10_1_generator_load(&ge, &gen); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&ges, &ge); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&ges, &results[i - 1], sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)) == 0); } /* There is no range restriction on the value, but the blinder must be a * valid scalar. Check that an invalid blinder causes the call to fail * but not crash. 
*/ memset(v, 0xff, 32); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate(CTX, &gen, v)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate(CTX, &gen, v)); memset(s, 0xff, 32); - CHECK(!rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &gen, v, s)); + CHECK(!rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &gen, v, s)); } static void test_generator_fixed_vector(void) { @@ -177,71 +177,71 @@ static void test_generator_fixed_vector(void) { 0x5c, 0x77, 0x8e, 0x4b, 0x8c, 0xef, 0x3c, 0xa7, 0xab, 0xac, 0x09, 0xb9, 0x5c, 0x70, 0x9e, 0xe5 }; unsigned char result[33]; - rustsecp256k1zkp_v0_10_0_generator parse; + rustsecp256k1zkp_v0_10_1_generator parse; - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &parse, two_g)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_serialize(CTX, result, &parse)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(two_g, result, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &parse, two_g)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_serialize(CTX, result, &parse)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(two_g, result, 33) == 0); result[0] = 0x0a; - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &parse, result)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &parse, result)); result[0] = 0x08; - CHECK(!rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &parse, result)); + CHECK(!rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &parse, result)); } static void test_pedersen_api(void) { - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit_ptr = &commit; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit_ptr = &commit; unsigned char blind[32]; unsigned char blind_out[32]; const unsigned char *blind_ptr = blind; unsigned char *blind_out_ptr = blind_out; - uint64_t val = rustsecp256k1zkp_v0_10_0_testrand32(); - - rustsecp256k1zkp_v0_10_0_testrand256(blind); - 
CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_pedersen_commit(STATIC_CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_0_generator_h)); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, NULL, blind, val, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, NULL, val, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, val, NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 1, 1) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, NULL, &blind_ptr, 1, 1)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, blind_out, NULL, 1, 1)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 0, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 0, 0) != 0); - - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &commit_ptr, 1, &commit_ptr, 1) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, NULL, 0, &commit_ptr, 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &commit_ptr, 1, NULL, 0) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, NULL, 0, NULL, 0) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, NULL, 1, &commit_ptr, 1)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &commit_ptr, 1, NULL, 1)); - - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, &blind_out_ptr, 1, 0) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, &val, 
&blind_ptr, &blind_out_ptr, 1, 1)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, &blind_out_ptr, 0, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, NULL, &blind_ptr, &blind_out_ptr, 1, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, &val, NULL, &blind_out_ptr, 1, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, NULL, 1, 0)); + uint64_t val = rustsecp256k1zkp_v0_10_1_testrand32(); + + rustsecp256k1zkp_v0_10_1_testrand256(blind); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_pedersen_commit(STATIC_CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_1_generator_h)); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, NULL, blind, val, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, NULL, val, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, val, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 1, 1) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, NULL, &blind_ptr, 1, 1)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, blind_out, NULL, 1, 1)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 0, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, blind_out, &blind_ptr, 0, 0) != 0); + + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &commit_ptr, 1, &commit_ptr, 1) != 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, NULL, 0, &commit_ptr, 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &commit_ptr, 1, NULL, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, NULL, 0, NULL, 0) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, NULL, 1, &commit_ptr, 1)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &commit_ptr, 1, NULL, 1)); + + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, &blind_out_ptr, 1, 0) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, &blind_out_ptr, 1, 1)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, &blind_out_ptr, 0, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, NULL, &blind_ptr, &blind_out_ptr, 1, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, &val, NULL, &blind_out_ptr, 1, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, &val, &blind_ptr, NULL, 1, 0)); } static void test_pedersen(void) { - rustsecp256k1zkp_v0_10_0_pedersen_commitment commits[19]; - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *cptr[19]; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commits[19]; + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *cptr[19]; unsigned char blinds[32*19]; const unsigned char *bptr[19]; - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; uint64_t values[19]; int64_t totalv; int i; int inputs; int outputs; int total; - inputs = (rustsecp256k1zkp_v0_10_0_testrand32() & 7) + 1; - outputs = (rustsecp256k1zkp_v0_10_0_testrand32() & 7) + 2; + inputs = (rustsecp256k1zkp_v0_10_1_testrand32() & 7) + 1; + outputs = (rustsecp256k1zkp_v0_10_1_testrand32() & 7) + 2; total = inputs + outputs; for (i = 0; i < 19; 
i++) { cptr[i] = &commits[i]; @@ -249,46 +249,46 @@ static void test_pedersen(void) { } totalv = 0; for (i = 0; i < inputs; i++) { - values[i] = rustsecp256k1zkp_v0_10_0_testrandi64(0, INT64_MAX - totalv); + values[i] = rustsecp256k1zkp_v0_10_1_testrandi64(0, INT64_MAX - totalv); totalv += values[i]; } for (i = 0; i < outputs - 1; i++) { - values[i + inputs] = rustsecp256k1zkp_v0_10_0_testrandi64(0, totalv); + values[i + inputs] = rustsecp256k1zkp_v0_10_1_testrandi64(0, totalv); totalv -= values[i + inputs]; } values[total - 1] = totalv; for (i = 0; i < total - 1; i++) { random_scalar_order(&s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&blinds[i * 32], &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&blinds[i * 32], &s); } - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_blind_sum(CTX, &blinds[(total - 1) * 32], bptr, total - 1, inputs)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_blind_sum(CTX, &blinds[(total - 1) * 32], bptr, total - 1, inputs)); for (i = 0; i < total; i++) { unsigned char result[33]; - rustsecp256k1zkp_v0_10_0_pedersen_commitment parse; + rustsecp256k1zkp_v0_10_1_pedersen_commitment parse; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commits[i], &blinds[i * 32], values[i], rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize(CTX, result, &commits[i])); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &parse, result)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&commits[i], &parse, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commits[i], &blinds[i * 32], values[i], rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize(CTX, result, &commits[i])); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &parse, result)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&commits[i], &parse, 33) == 0); } - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, cptr, inputs, &cptr[inputs], outputs)); - 
CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &cptr[inputs], outputs, cptr, inputs)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, cptr, inputs, &cptr[inputs], outputs)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &cptr[inputs], outputs, cptr, inputs)); if (inputs > 0 && values[0] > 0) { - CHECK(!rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, cptr, inputs - 1, &cptr[inputs], outputs)); + CHECK(!rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, cptr, inputs - 1, &cptr[inputs], outputs)); } random_scalar_order(&s); for (i = 0; i < 4; i++) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&blinds[i * 32], &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&blinds[i * 32], &s); } values[0] = INT64_MAX; values[1] = 0; values[2] = 1; for (i = 0; i < 3; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commits[i], &blinds[i * 32], values[i], rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commits[i], &blinds[i * 32], values[i], rustsecp256k1zkp_v0_10_1_generator_h)); } - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &cptr[0], 1, &cptr[0], 1)); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &cptr[1], 1, &cptr[1], 1)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &cptr[0], 1, &cptr[0], 1)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &cptr[1], 1, &cptr[1], 1)); } static void test_pedersen_commitment_fixed_vector(void) { @@ -298,16 +298,16 @@ static void test_pedersen_commitment_fixed_vector(void) { 0x5c, 0x77, 0x8e, 0x4b, 0x8c, 0xef, 0x3c, 0xa7, 0xab, 0xac, 0x09, 0xb9, 0x5c, 0x70, 0x9e, 0xe5 }; unsigned char result[33]; - rustsecp256k1zkp_v0_10_0_pedersen_commitment parse; + rustsecp256k1zkp_v0_10_1_pedersen_commitment parse; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &parse, two_g)); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize(CTX, result, &parse)); - 
CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(two_g, result, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &parse, two_g)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize(CTX, result, &parse)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(two_g, result, 33) == 0); result[0] = 0x08; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &parse, result)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &parse, result)); result[0] = 0x0c; - CHECK(!rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &parse, result)); + CHECK(!rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &parse, result)); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/Makefile.am.include index c5e8a922..7a19d9b6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_musig.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_musig.h noinst_HEADERS += src/modules/musig/main_impl.h noinst_HEADERS += src/modules/musig/keyagg.h noinst_HEADERS += src/modules/musig/keyagg_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/adaptor_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/adaptor_impl.h index 660cf543..c9191dde 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/adaptor_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/adaptor_impl.h @@ -15,22 +15,22 @@ #include "session.h" #include "../../scalar.h" -int rustsecp256k1zkp_v0_10_0_musig_nonce_parity(const rustsecp256k1zkp_v0_10_0_context* ctx, int *nonce_parity, const rustsecp256k1zkp_v0_10_0_musig_session *session) { - rustsecp256k1zkp_v0_10_0_musig_session_internal session_i; +int rustsecp256k1zkp_v0_10_1_musig_nonce_parity(const 
rustsecp256k1zkp_v0_10_1_context* ctx, int *nonce_parity, const rustsecp256k1zkp_v0_10_1_musig_session *session) { + rustsecp256k1zkp_v0_10_1_musig_session_internal session_i; VERIFY_CHECK(ctx != NULL); ARG_CHECK(nonce_parity != NULL); ARG_CHECK(session != NULL); - if (!rustsecp256k1zkp_v0_10_0_musig_session_load(ctx, &session_i, session)) { + if (!rustsecp256k1zkp_v0_10_1_musig_session_load(ctx, &session_i, session)) { return 0; } *nonce_parity = session_i.fin_nonce_parity; return 1; } -int rustsecp256k1zkp_v0_10_0_musig_adapt(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const unsigned char *pre_sig64, const unsigned char *sec_adaptor32, int nonce_parity) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar t; +int rustsecp256k1zkp_v0_10_1_musig_adapt(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const unsigned char *pre_sig64, const unsigned char *sec_adaptor32, int nonce_parity) { + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar t; int overflow; int ret = 1; @@ -40,11 +40,11 @@ int rustsecp256k1zkp_v0_10_0_musig_adapt(const rustsecp256k1zkp_v0_10_0_context* ARG_CHECK(sec_adaptor32 != NULL); ARG_CHECK(nonce_parity == 0 || nonce_parity == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &pre_sig64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig64[32], &overflow); if (overflow) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&t, sec_adaptor32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&t, sec_adaptor32, &overflow); ret &= !overflow; /* Determine if the secret adaptor should be negated. @@ -58,19 +58,19 @@ int rustsecp256k1zkp_v0_10_0_musig_adapt(const rustsecp256k1zkp_v0_10_0_context* * pre-signature requires negating t in this case. 
*/ if (nonce_parity) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&t, &t); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); } - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &t); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig64[32], &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &t); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig64[32], &s); memmove(sig64, pre_sig64, 32); - rustsecp256k1zkp_v0_10_0_scalar_clear(&t); + rustsecp256k1zkp_v0_10_1_scalar_clear(&t); return ret; } -int rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sec_adaptor32, const unsigned char *sig64, const unsigned char *pre_sig64, int nonce_parity) { - rustsecp256k1zkp_v0_10_0_scalar t; - rustsecp256k1zkp_v0_10_0_scalar s; +int rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sec_adaptor32, const unsigned char *sig64, const unsigned char *pre_sig64, int nonce_parity) { + rustsecp256k1zkp_v0_10_1_scalar t; + rustsecp256k1zkp_v0_10_1_scalar s; int overflow; int ret = 1; @@ -80,21 +80,21 @@ int rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(const rustsecp256k1zkp_v0_10_ ARG_CHECK(pre_sig64 != NULL); ARG_CHECK(nonce_parity == 0 || nonce_parity == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&t, &sig64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&t, &sig64[32], &overflow); ret &= !overflow; - rustsecp256k1zkp_v0_10_0_scalar_negate(&t, &t); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &pre_sig64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig64[32], &overflow); if (overflow) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_add(&t, &t, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&t, &t, &s); if (!nonce_parity) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&t, &t); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sec_adaptor32, &t); - 
rustsecp256k1zkp_v0_10_0_scalar_clear(&t); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec_adaptor32, &t); + rustsecp256k1zkp_v0_10_1_scalar_clear(&t); return ret; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg.h index ee9df295..b1150b6d 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg.h @@ -15,26 +15,26 @@ #include "../../scalar.h" typedef struct { - rustsecp256k1zkp_v0_10_0_ge pk; + rustsecp256k1zkp_v0_10_1_ge pk; /* If there is no "second" public key, second_pk is set to the point at * infinity */ - rustsecp256k1zkp_v0_10_0_ge second_pk; + rustsecp256k1zkp_v0_10_1_ge second_pk; unsigned char pk_hash[32]; /* tweak is identical to value tacc[v] in the specification. */ - rustsecp256k1zkp_v0_10_0_scalar tweak; + rustsecp256k1zkp_v0_10_1_scalar tweak; /* parity_acc corresponds to gacc[v] in the spec. If gacc[v] is -1, * parity_acc is 1. Otherwise, parity_acc is 0. 
*/ int parity_acc; -} rustsecp256k1zkp_v0_10_0_keyagg_cache_internal; +} rustsecp256k1zkp_v0_10_1_keyagg_cache_internal; /* point_save_ext and point_load_ext are identical to point_save and point_load * except that they allow saving and loading the point at infinity */ -static void rustsecp256k1zkp_v0_10_0_point_save_ext(unsigned char *data, rustsecp256k1zkp_v0_10_0_ge *ge); +static void rustsecp256k1zkp_v0_10_1_point_save_ext(unsigned char *data, rustsecp256k1zkp_v0_10_1_ge *ge); -static void rustsecp256k1zkp_v0_10_0_point_load_ext(rustsecp256k1zkp_v0_10_0_ge *ge, const unsigned char *data); +static void rustsecp256k1zkp_v0_10_1_point_load_ext(rustsecp256k1zkp_v0_10_1_ge *ge, const unsigned char *data); -static int rustsecp256k1zkp_v0_10_0_keyagg_cache_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_keyagg_cache_internal *cache_i, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *cache); +static int rustsecp256k1zkp_v0_10_1_keyagg_cache_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_keyagg_cache_internal *cache_i, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *cache); -static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_keyagg_cache_internal *cache_i, rustsecp256k1zkp_v0_10_0_ge *pk); +static void rustsecp256k1zkp_v0_10_1_musig_keyaggcoef(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_keyagg_cache_internal *cache_i, rustsecp256k1zkp_v0_10_1_ge *pk); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg_impl.h index d849af70..e810d6e1 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/keyagg_impl.h @@ -17,24 +17,24 @@ #include "../../hash.h" #include "../../util.h" -static void rustsecp256k1zkp_v0_10_0_point_save_ext(unsigned char *data, 
rustsecp256k1zkp_v0_10_0_ge *ge) { - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(ge)) { +static void rustsecp256k1zkp_v0_10_1_point_save_ext(unsigned char *data, rustsecp256k1zkp_v0_10_1_ge *ge) { + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(ge)) { memset(data, 0, 64); } else { - rustsecp256k1zkp_v0_10_0_ge_to_bytes(data, ge); + rustsecp256k1zkp_v0_10_1_ge_to_bytes(data, ge); } } -static void rustsecp256k1zkp_v0_10_0_point_load_ext(rustsecp256k1zkp_v0_10_0_ge *ge, const unsigned char *data) { +static void rustsecp256k1zkp_v0_10_1_point_load_ext(rustsecp256k1zkp_v0_10_1_ge *ge, const unsigned char *data) { unsigned char zeros[64] = { 0 }; - if (rustsecp256k1zkp_v0_10_0_memcmp_var(data, zeros, sizeof(zeros)) == 0) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(ge); + if (rustsecp256k1zkp_v0_10_1_memcmp_var(data, zeros, sizeof(zeros)) == 0) { + rustsecp256k1zkp_v0_10_1_ge_set_infinity(ge); } else { - rustsecp256k1zkp_v0_10_0_ge_from_bytes(ge, data); + rustsecp256k1zkp_v0_10_1_ge_from_bytes(ge, data); } } -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_keyagg_cache_magic[4] = { 0xf4, 0xad, 0xbb, 0xdf }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_keyagg_cache_magic[4] = { 0xf4, 0xad, 0xbb, 0xdf }; /* A keyagg cache consists of * - 4 byte magic set during initialization to allow detecting an uninitialized @@ -46,41 +46,41 @@ static const unsigned char rustsecp256k1zkp_v0_10_0_musig_keyagg_cache_magic[4] * - 32 byte tweak */ /* Requires that cache_i->pk is not infinity and cache_i->second_pk_x to be normalized. 
*/ -static void rustsecp256k1zkp_v0_10_0_keyagg_cache_save(rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *cache, rustsecp256k1zkp_v0_10_0_keyagg_cache_internal *cache_i) { +static void rustsecp256k1zkp_v0_10_1_keyagg_cache_save(rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *cache, rustsecp256k1zkp_v0_10_1_keyagg_cache_internal *cache_i) { unsigned char *ptr = cache->data; - memcpy(ptr, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache_magic, 4); + memcpy(ptr, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache_magic, 4); ptr += 4; - rustsecp256k1zkp_v0_10_0_ge_to_bytes(ptr, &cache_i->pk); + rustsecp256k1zkp_v0_10_1_ge_to_bytes(ptr, &cache_i->pk); ptr += 64; - rustsecp256k1zkp_v0_10_0_point_save_ext(ptr, &cache_i->second_pk); + rustsecp256k1zkp_v0_10_1_point_save_ext(ptr, &cache_i->second_pk); ptr += 64; memcpy(ptr, cache_i->pk_hash, 32); ptr += 32; *ptr = cache_i->parity_acc; ptr += 1; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ptr, &cache_i->tweak); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ptr, &cache_i->tweak); } -static int rustsecp256k1zkp_v0_10_0_keyagg_cache_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_keyagg_cache_internal *cache_i, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *cache) { +static int rustsecp256k1zkp_v0_10_1_keyagg_cache_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_keyagg_cache_internal *cache_i, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *cache) { const unsigned char *ptr = cache->data; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(ptr, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache_magic, 4) == 0); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(ptr, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache_magic, 4) == 0); ptr += 4; - rustsecp256k1zkp_v0_10_0_ge_from_bytes(&cache_i->pk, ptr); + rustsecp256k1zkp_v0_10_1_ge_from_bytes(&cache_i->pk, ptr); ptr += 64; - rustsecp256k1zkp_v0_10_0_point_load_ext(&cache_i->second_pk, ptr); + rustsecp256k1zkp_v0_10_1_point_load_ext(&cache_i->second_pk, ptr); ptr 
+= 64; memcpy(cache_i->pk_hash, ptr, 32); ptr += 32; cache_i->parity_acc = *ptr & 1; ptr += 1; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&cache_i->tweak, ptr, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&cache_i->tweak, ptr, NULL); return 1; } /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("KeyAgg list")||SHA256("KeyAgg list"). */ -static void rustsecp256k1zkp_v0_10_0_musig_keyagglist_sha256(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_musig_keyagglist_sha256(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0xb399d5e0ul; sha->s[1] = 0xc8fff302ul; @@ -94,28 +94,28 @@ static void rustsecp256k1zkp_v0_10_0_musig_keyagglist_sha256(rustsecp256k1zkp_v0 } /* Computes pk_hash = tagged_hash(pk[0], ..., pk[np-1]) */ -static int rustsecp256k1zkp_v0_10_0_musig_compute_pk_hash(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *pk_hash, const rustsecp256k1zkp_v0_10_0_pubkey * const* pk, size_t np) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +static int rustsecp256k1zkp_v0_10_1_musig_compute_pk_hash(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *pk_hash, const rustsecp256k1zkp_v0_10_1_pubkey * const* pk, size_t np) { + rustsecp256k1zkp_v0_10_1_sha256 sha; size_t i; - rustsecp256k1zkp_v0_10_0_musig_keyagglist_sha256(&sha); + rustsecp256k1zkp_v0_10_1_musig_keyagglist_sha256(&sha); for (i = 0; i < np; i++) { unsigned char ser[33]; size_t ser_len = sizeof(ser); - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, ser, &ser_len, pk[i], SECP256K1_EC_COMPRESSED)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, ser, &ser_len, pk[i], SECP256K1_EC_COMPRESSED)) { return 0; } VERIFY_CHECK(ser_len == sizeof(ser)); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, ser, sizeof(ser)); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, ser, sizeof(ser)); } - 
rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, pk_hash); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, pk_hash); return 1; } /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("KeyAgg coefficient")||SHA256("KeyAgg coefficient"). */ -static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_sha256(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_sha256(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x6ef02c5aul; sha->s[1] = 0x06a480deul; @@ -133,22 +133,22 @@ static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_sha256(rustsecp256k1zkp_v0 * second_pk is the point at infinity in case there is no second_pk. Assumes * that pk is not the point at infinity and that the Y-coordinates of pk and * second_pk are normalized. */ -static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_internal(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *pk_hash, rustsecp256k1zkp_v0_10_0_ge *pk, const rustsecp256k1zkp_v0_10_0_ge *second_pk) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +static void rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_internal(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *pk_hash, rustsecp256k1zkp_v0_10_1_ge *pk, const rustsecp256k1zkp_v0_10_1_ge *second_pk) { + rustsecp256k1zkp_v0_10_1_sha256 sha; - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(pk)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(pk)); - if (!rustsecp256k1zkp_v0_10_0_ge_is_infinity(second_pk) - && rustsecp256k1zkp_v0_10_0_fe_equal(&pk->x, &second_pk->x) - && rustsecp256k1zkp_v0_10_0_fe_is_odd(&pk->y) == rustsecp256k1zkp_v0_10_0_fe_is_odd(&second_pk->y)) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(r, 1); + if (!rustsecp256k1zkp_v0_10_1_ge_is_infinity(second_pk) + && rustsecp256k1zkp_v0_10_1_fe_equal(&pk->x, &second_pk->x) + && rustsecp256k1zkp_v0_10_1_fe_is_odd(&pk->y) == 
rustsecp256k1zkp_v0_10_1_fe_is_odd(&second_pk->y)) { + rustsecp256k1zkp_v0_10_1_scalar_set_int(r, 1); } else { unsigned char buf[33]; size_t buflen = sizeof(buf); int ret; - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_sha256(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, pk_hash, 32); - ret = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(pk, buf, &buflen, 1); + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_sha256(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, pk_hash, 32); + ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(pk, buf, &buflen, 1); #ifdef VERIFY /* Serialization does not fail since the pk is not the point at infinity * (according to this function's precondition). */ @@ -156,30 +156,30 @@ static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_internal(rustsecp256k1zkp_ #else (void) ret; #endif - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, sizeof(buf)); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, buf, NULL); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, sizeof(buf)); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, buf, NULL); } } /* Assumes both field elements x and second_pk_x are normalized. 
*/ -static void rustsecp256k1zkp_v0_10_0_musig_keyaggcoef(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_keyagg_cache_internal *cache_i, rustsecp256k1zkp_v0_10_0_ge *pk) { - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_internal(r, cache_i->pk_hash, pk, &cache_i->second_pk); +static void rustsecp256k1zkp_v0_10_1_musig_keyaggcoef(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_keyagg_cache_internal *cache_i, rustsecp256k1zkp_v0_10_1_ge *pk) { + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_internal(r, cache_i->pk_hash, pk, &cache_i->second_pk); } typedef struct { - const rustsecp256k1zkp_v0_10_0_context *ctx; + const rustsecp256k1zkp_v0_10_1_context *ctx; /* pk_hash is the hash of the public keys */ unsigned char pk_hash[32]; - const rustsecp256k1zkp_v0_10_0_pubkey * const* pks; - rustsecp256k1zkp_v0_10_0_ge second_pk; -} rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_ecmult_data; + const rustsecp256k1zkp_v0_10_1_pubkey * const* pks; + rustsecp256k1zkp_v0_10_1_ge second_pk; +} rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_ecmult_data; /* Callback for batch EC multiplication to compute keyaggcoef_0*P0 + keyaggcoef_1*P1 + ... 
*/ -static int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_callback(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *data) { - rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_ecmult_data *ctx = (rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_ecmult_data *) data; +static int rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_callback(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *data) { + rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_ecmult_data *ctx = (rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_ecmult_data *) data; int ret; - ret = rustsecp256k1zkp_v0_10_0_pubkey_load(ctx->ctx, pt, ctx->pks[idx]); + ret = rustsecp256k1zkp_v0_10_1_pubkey_load(ctx->ctx, pt, ctx->pks[idx]); #ifdef VERIFY /* pubkey_load can't fail because the same pks have already been loaded in * `musig_compute_pk_hash` (and we test this). */ @@ -187,14 +187,14 @@ static int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_callback(rustsecp256k1zkp_v #else (void) ret; #endif - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_internal(sc, ctx->pk_hash, pt, &ctx->second_pk); + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_internal(sc, ctx->pk_hash, pt, &ctx->second_pk); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scratch_space *scratch, rustsecp256k1zkp_v0_10_0_xonly_pubkey *agg_pk, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_0_pubkey * const* pubkeys, size_t n_pubkeys) { - rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_ecmult_data ecmult_data; - rustsecp256k1zkp_v0_10_0_gej pkj; - rustsecp256k1zkp_v0_10_0_ge pkp; +int rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scratch_space *scratch, rustsecp256k1zkp_v0_10_1_xonly_pubkey *agg_pk, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_1_pubkey * const* pubkeys, size_t n_pubkeys) { + 
rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_ecmult_data ecmult_data; + rustsecp256k1zkp_v0_10_1_gej pkj; + rustsecp256k1zkp_v0_10_1_ge pkp; size_t i; (void) scratch; @@ -208,11 +208,11 @@ int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(const rustsecp256k1zkp_v0_10_0_con ecmult_data.ctx = ctx; ecmult_data.pks = pubkeys; - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&ecmult_data.second_pk); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&ecmult_data.second_pk); for (i = 1; i < n_pubkeys; i++) { - if (rustsecp256k1zkp_v0_10_0_memcmp_var(pubkeys[0], pubkeys[i], sizeof(*pubkeys[0])) != 0) { - rustsecp256k1zkp_v0_10_0_ge pk; - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pk, pubkeys[i])) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(pubkeys[0], pubkeys[i], sizeof(*pubkeys[0])) != 0) { + rustsecp256k1zkp_v0_10_1_ge pk; + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pk, pubkeys[i])) { return 0; } ecmult_data.second_pk = pk; @@ -220,54 +220,54 @@ int rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(const rustsecp256k1zkp_v0_10_0_con } } - if (!rustsecp256k1zkp_v0_10_0_musig_compute_pk_hash(ctx, ecmult_data.pk_hash, pubkeys, n_pubkeys)) { + if (!rustsecp256k1zkp_v0_10_1_musig_compute_pk_hash(ctx, ecmult_data.pk_hash, pubkeys, n_pubkeys)) { return 0; } /* TODO: actually use optimized ecmult_multi algorithms by providing a * scratch space */ - if (!rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, NULL, &pkj, NULL, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg_callback, (void *) &ecmult_data, n_pubkeys)) { + if (!rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, NULL, &pkj, NULL, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg_callback, (void *) &ecmult_data, n_pubkeys)) { /* In order to reach this line with the current implementation of * ecmult_multi_var one would need to provide a callback that can * fail. 
*/ return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pkp, &pkj); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&pkp.y); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pkp, &pkj); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&pkp.y); /* The resulting public key is infinity with negligible probability */ - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(&pkp)); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(&pkp)); if (keyagg_cache != NULL) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i = { 0 }; + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i = { 0 }; cache_i.pk = pkp; cache_i.second_pk = ecmult_data.second_pk; memcpy(cache_i.pk_hash, ecmult_data.pk_hash, sizeof(cache_i.pk_hash)); - rustsecp256k1zkp_v0_10_0_keyagg_cache_save(keyagg_cache, &cache_i); + rustsecp256k1zkp_v0_10_1_keyagg_cache_save(keyagg_cache, &cache_i); } - rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(&pkp); + rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(&pkp); if (agg_pk != NULL) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey_save(agg_pk, &pkp); + rustsecp256k1zkp_v0_10_1_xonly_pubkey_save(agg_pk, &pkp); } return 1; } -int rustsecp256k1zkp_v0_10_0_musig_pubkey_get(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *agg_pk, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; +int rustsecp256k1zkp_v0_10_1_musig_pubkey_get(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *agg_pk, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache) { + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; VERIFY_CHECK(ctx != NULL); ARG_CHECK(agg_pk != NULL); memset(agg_pk, 0, sizeof(*agg_pk)); ARG_CHECK(keyagg_cache != NULL); - if(!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + if(!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { return 0; } - rustsecp256k1zkp_v0_10_0_pubkey_save(agg_pk, &cache_i.pk); + 
rustsecp256k1zkp_v0_10_1_pubkey_save(agg_pk, &cache_i.pk); return 1; } -static int rustsecp256k1zkp_v0_10_0_musig_pubkey_tweak_add_internal(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32, int xonly) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; +static int rustsecp256k1zkp_v0_10_1_musig_pubkey_tweak_add_internal(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32, int xonly) { + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar tweak; + rustsecp256k1zkp_v0_10_1_scalar tweak; VERIFY_CHECK(ctx != NULL); if (output_pubkey != NULL) { @@ -276,36 +276,36 @@ static int rustsecp256k1zkp_v0_10_0_musig_pubkey_tweak_add_internal(const rustse ARG_CHECK(keyagg_cache != NULL); ARG_CHECK(tweak32 != NULL); - if (!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + if (!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&tweak, tweak32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&tweak, tweak32, &overflow); if (overflow) { return 0; } - if (xonly && rustsecp256k1zkp_v0_10_0_extrakeys_ge_even_y(&cache_i.pk)) { + if (xonly && rustsecp256k1zkp_v0_10_1_extrakeys_ge_even_y(&cache_i.pk)) { cache_i.parity_acc ^= 1; - rustsecp256k1zkp_v0_10_0_scalar_negate(&cache_i.tweak, &cache_i.tweak); + rustsecp256k1zkp_v0_10_1_scalar_negate(&cache_i.tweak, &cache_i.tweak); } - rustsecp256k1zkp_v0_10_0_scalar_add(&cache_i.tweak, &cache_i.tweak, &tweak); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(&cache_i.pk, &tweak)) { + rustsecp256k1zkp_v0_10_1_scalar_add(&cache_i.tweak, &cache_i.tweak, &tweak); + if 
(!rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(&cache_i.pk, &tweak)) { return 0; } /* eckey_pubkey_tweak_add fails if cache_i.pk is infinity */ - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(&cache_i.pk)); - rustsecp256k1zkp_v0_10_0_keyagg_cache_save(keyagg_cache, &cache_i); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(&cache_i.pk)); + rustsecp256k1zkp_v0_10_1_keyagg_cache_save(keyagg_cache, &cache_i); if (output_pubkey != NULL) { - rustsecp256k1zkp_v0_10_0_pubkey_save(output_pubkey, &cache_i.pk); + rustsecp256k1zkp_v0_10_1_pubkey_save(output_pubkey, &cache_i.pk); } return 1; } -int rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32) { - return rustsecp256k1zkp_v0_10_0_musig_pubkey_tweak_add_internal(ctx, output_pubkey, keyagg_cache, tweak32, 0); +int rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32) { + return rustsecp256k1zkp_v0_10_1_musig_pubkey_tweak_add_internal(ctx, output_pubkey, keyagg_cache, tweak32, 0); } -int rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32) { - return rustsecp256k1zkp_v0_10_0_musig_pubkey_tweak_add_internal(ctx, output_pubkey, keyagg_cache, tweak32, 1); +int rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32) { + return rustsecp256k1zkp_v0_10_1_musig_pubkey_tweak_add_internal(ctx, 
output_pubkey, keyagg_cache, tweak32, 1); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/musig.md b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/musig.md index 4f043b6e..c1d56212 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/musig.md +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/musig.md @@ -1,7 +1,7 @@ Notes on the musig module API =========================== -The following sections contain additional notes on the API of the musig module (`include/rustsecp256k1zkp_v0_10_0_musig.h`). +The following sections contain additional notes on the API of the musig module (`include/rustsecp256k1zkp_v0_10_1_musig.h`). A usage example can be found in `examples/musig.c`. # API misuse @@ -12,35 +12,35 @@ While the results can be catastrophic (e.g. leaking of the secret key), it is un Therefore, users of the musig module must take great care to make sure of the following: -1. A unique nonce per signing session is generated in `rustsecp256k1zkp_v0_10_0_musig_nonce_gen`. - See the corresponding comment in `include/rustsecp256k1zkp_v0_10_0_musig.h` for how to ensure that. -2. The `rustsecp256k1zkp_v0_10_0_musig_secnonce` structure is never copied or serialized. - See also the comment on `rustsecp256k1zkp_v0_10_0_musig_secnonce` in `include/rustsecp256k1zkp_v0_10_0_musig.h`. +1. A unique nonce per signing session is generated in `rustsecp256k1zkp_v0_10_1_musig_nonce_gen`. + See the corresponding comment in `include/rustsecp256k1zkp_v0_10_1_musig.h` for how to ensure that. +2. The `rustsecp256k1zkp_v0_10_1_musig_secnonce` structure is never copied or serialized. + See also the comment on `rustsecp256k1zkp_v0_10_1_musig_secnonce` in `include/rustsecp256k1zkp_v0_10_1_musig.h`. 3. Opaque data structures are never written to or read from directly. Instead, only the provided accessor functions are used. 4. If adaptor signatures are used, all partial signatures are verified. 
# Key Aggregation and (Taproot) Tweaking -Given a set of public keys, the aggregate public key is computed with `rustsecp256k1zkp_v0_10_0_musig_pubkey_agg`. -A (Taproot) tweak can be added to the resulting public key with `rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add` and a plain tweak can be added with `rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add`. +Given a set of public keys, the aggregate public key is computed with `rustsecp256k1zkp_v0_10_1_musig_pubkey_agg`. +A (Taproot) tweak can be added to the resulting public key with `rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add` and a plain tweak can be added with `rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add`. # Signing This is covered by `examples/musig.c`. Essentially, the protocol proceeds in the following steps: -1. Generate a keypair with `rustsecp256k1zkp_v0_10_0_keypair_create` and obtain the public key with `rustsecp256k1zkp_v0_10_0_keypair_pub`. -2. Call `rustsecp256k1zkp_v0_10_0_musig_pubkey_agg` with the pubkeys of all participants. -3. Optionally add a (Taproot) tweak with `rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add` and a plain tweak with `rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add`. -4. Generate a pair of secret and public nonce with `rustsecp256k1zkp_v0_10_0_musig_nonce_gen` and send the public nonce to the other signers. -5. Someone (not necessarily the signer) aggregates the public nonce with `rustsecp256k1zkp_v0_10_0_musig_nonce_agg` and sends it to the signers. -6. Process the aggregate nonce with `rustsecp256k1zkp_v0_10_0_musig_nonce_process`. -7. Create a partial signature with `rustsecp256k1zkp_v0_10_0_musig_partial_sign`. -8. Verify the partial signatures (optional in some scenarios) with `rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify`. -9. Someone (not necessarily the signer) obtains all partial signatures and aggregates them into the final Schnorr signature using `rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg`. +1. 
Generate a keypair with `rustsecp256k1zkp_v0_10_1_keypair_create` and obtain the public key with `rustsecp256k1zkp_v0_10_1_keypair_pub`. +2. Call `rustsecp256k1zkp_v0_10_1_musig_pubkey_agg` with the pubkeys of all participants. +3. Optionally add a (Taproot) tweak with `rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add` and a plain tweak with `rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add`. +4. Generate a pair of secret and public nonce with `rustsecp256k1zkp_v0_10_1_musig_nonce_gen` and send the public nonce to the other signers. +5. Someone (not necessarily the signer) aggregates the public nonce with `rustsecp256k1zkp_v0_10_1_musig_nonce_agg` and sends it to the signers. +6. Process the aggregate nonce with `rustsecp256k1zkp_v0_10_1_musig_nonce_process`. +7. Create a partial signature with `rustsecp256k1zkp_v0_10_1_musig_partial_sign`. +8. Verify the partial signatures (optional in some scenarios) with `rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify`. +9. Someone (not necessarily the signer) obtains all partial signatures and aggregates them into the final Schnorr signature using `rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg`. -The aggregate signature can be verified with `rustsecp256k1zkp_v0_10_0_schnorrsig_verify`. +The aggregate signature can be verified with `rustsecp256k1zkp_v0_10_1_schnorrsig_verify`. Note that steps 1 to 5 can happen before the message to be signed is known to the signers. Therefore, the communication round to exchange nonces can be viewed as a pre-processing step that is run whenever convenient to the signers. @@ -56,8 +56,8 @@ A participant who wants to verify the partial signatures, but does not sign itse The signing API supports the production of "adaptor signatures", modified partial signatures which are offset by an auxiliary secret known to one party. That is, 1. One party generates a (secret) adaptor `t` with corresponding (public) adaptor `T = t*G`. -2. 
When calling `rustsecp256k1zkp_v0_10_0_musig_nonce_process`, the public adaptor `T` is provided as the `adaptor` argument. +2. When calling `rustsecp256k1zkp_v0_10_1_musig_nonce_process`, the public adaptor `T` is provided as the `adaptor` argument. 3. The party who is going to extract the secret adaptor `t` later must verify all partial signatures. -4. Due to step 2, the signature output of `rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg` is a pre-signature and not a valid Schnorr signature. All parties involved extract this session's `nonce_parity` with `rustsecp256k1zkp_v0_10_0_musig_nonce_parity`. -5. The party who knows `t` must "adapt" the pre-signature with `t` (and the `nonce_parity` using `rustsecp256k1zkp_v0_10_0_musig_adapt` to complete the signature. -6. Any party who sees both the final signature and the pre-signature (and has the `nonce_parity`) can extract `t` with `rustsecp256k1zkp_v0_10_0_musig_extract_adaptor`. +4. Due to step 2, the signature output of `rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg` is a pre-signature and not a valid Schnorr signature. All parties involved extract this session's `nonce_parity` with `rustsecp256k1zkp_v0_10_1_musig_nonce_parity`. +5. The party who knows `t` must "adapt" the pre-signature with `t` (and the `nonce_parity` using `rustsecp256k1zkp_v0_10_1_musig_adapt` to complete the signature. +6. Any party who sees both the final signature and the pre-signature (and has the `nonce_parity`) can extract `t` with `rustsecp256k1zkp_v0_10_1_musig_extract_adaptor`. 
diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session.h index 2efa0c20..b4310cc4 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session.h @@ -15,11 +15,11 @@ typedef struct { int fin_nonce_parity; unsigned char fin_nonce[32]; - rustsecp256k1zkp_v0_10_0_scalar noncecoef; - rustsecp256k1zkp_v0_10_0_scalar challenge; - rustsecp256k1zkp_v0_10_0_scalar s_part; -} rustsecp256k1zkp_v0_10_0_musig_session_internal; + rustsecp256k1zkp_v0_10_1_scalar noncecoef; + rustsecp256k1zkp_v0_10_1_scalar challenge; + rustsecp256k1zkp_v0_10_1_scalar s_part; +} rustsecp256k1zkp_v0_10_1_musig_session_internal; -static int rustsecp256k1zkp_v0_10_0_musig_session_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_session_internal *session_i, const rustsecp256k1zkp_v0_10_0_musig_session *session); +static int rustsecp256k1zkp_v0_10_1_musig_session_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_session_internal *session_i, const rustsecp256k1zkp_v0_10_1_musig_session *session); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session_impl.h index 489c1ea4..8d052172 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/session_impl.h @@ -20,85 +20,85 @@ #include "../../scalar.h" #include "../../util.h" -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_secnonce_magic[4] = { 0x22, 0x0e, 0xdc, 0xf1 }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_secnonce_magic[4] = { 0x22, 0x0e, 0xdc, 0xf1 }; -static void rustsecp256k1zkp_v0_10_0_musig_secnonce_save(rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, const rustsecp256k1zkp_v0_10_0_scalar *k, 
rustsecp256k1zkp_v0_10_0_ge *pk) { - memcpy(&secnonce->data[0], rustsecp256k1zkp_v0_10_0_musig_secnonce_magic, 4); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&secnonce->data[4], &k[0]); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&secnonce->data[36], &k[1]); - rustsecp256k1zkp_v0_10_0_ge_to_bytes(&secnonce->data[68], pk); +static void rustsecp256k1zkp_v0_10_1_musig_secnonce_save(rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, const rustsecp256k1zkp_v0_10_1_scalar *k, rustsecp256k1zkp_v0_10_1_ge *pk) { + memcpy(&secnonce->data[0], rustsecp256k1zkp_v0_10_1_musig_secnonce_magic, 4); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&secnonce->data[4], &k[0]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&secnonce->data[36], &k[1]); + rustsecp256k1zkp_v0_10_1_ge_to_bytes(&secnonce->data[68], pk); } -static int rustsecp256k1zkp_v0_10_0_musig_secnonce_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar *k, rustsecp256k1zkp_v0_10_0_ge *pk, rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce) { +static int rustsecp256k1zkp_v0_10_1_musig_secnonce_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar *k, rustsecp256k1zkp_v0_10_1_ge *pk, rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce) { int is_zero; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&secnonce->data[0], rustsecp256k1zkp_v0_10_0_musig_secnonce_magic, 4) == 0); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k[0], &secnonce->data[4], NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k[1], &secnonce->data[36], NULL); - rustsecp256k1zkp_v0_10_0_ge_from_bytes(pk, &secnonce->data[68]); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&secnonce->data[0], rustsecp256k1zkp_v0_10_1_musig_secnonce_magic, 4) == 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k[0], &secnonce->data[4], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k[1], &secnonce->data[36], NULL); + rustsecp256k1zkp_v0_10_1_ge_from_bytes(pk, &secnonce->data[68]); /* We make very sure that the nonce isn't invalidated 
by checking the values * in addition to the magic. */ - is_zero = rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k[0]) & rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k[1]); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &is_zero, sizeof(is_zero)); + is_zero = rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k[0]) & rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k[1]); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &is_zero, sizeof(is_zero)); ARG_CHECK(!is_zero); return 1; } /* If flag is true, invalidate the secnonce; otherwise leave it. Constant-time. */ -static void rustsecp256k1zkp_v0_10_0_musig_secnonce_invalidate(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, int flag) { - rustsecp256k1zkp_v0_10_0_memczero(secnonce->data, sizeof(secnonce->data), flag); +static void rustsecp256k1zkp_v0_10_1_musig_secnonce_invalidate(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, int flag) { + rustsecp256k1zkp_v0_10_1_memczero(secnonce->data, sizeof(secnonce->data), flag); /* The flag argument is usually classified. So, the line above makes the * magic and public key classified. However, we need both to be * declassified. Note that we don't declassify the entire object, because if * flag is 0, then k[0] and k[1] have not been zeroed. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, secnonce->data, sizeof(rustsecp256k1zkp_v0_10_0_musig_secnonce_magic)); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &secnonce->data[68], 64); + rustsecp256k1zkp_v0_10_1_declassify(ctx, secnonce->data, sizeof(rustsecp256k1zkp_v0_10_1_musig_secnonce_magic)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &secnonce->data[68], 64); } -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_pubnonce_magic[4] = { 0xf5, 0x7a, 0x3d, 0xa0 }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_pubnonce_magic[4] = { 0xf5, 0x7a, 0x3d, 0xa0 }; /* Saves two group elements into a pubnonce. 
Requires that none of the provided * group elements is infinity. */ -static void rustsecp256k1zkp_v0_10_0_musig_pubnonce_save(rustsecp256k1zkp_v0_10_0_musig_pubnonce* nonce, rustsecp256k1zkp_v0_10_0_ge* ge) { +static void rustsecp256k1zkp_v0_10_1_musig_pubnonce_save(rustsecp256k1zkp_v0_10_1_musig_pubnonce* nonce, rustsecp256k1zkp_v0_10_1_ge* ge) { int i; - memcpy(&nonce->data[0], rustsecp256k1zkp_v0_10_0_musig_pubnonce_magic, 4); + memcpy(&nonce->data[0], rustsecp256k1zkp_v0_10_1_musig_pubnonce_magic, 4); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_to_bytes(nonce->data + 4+64*i, &ge[i]); + rustsecp256k1zkp_v0_10_1_ge_to_bytes(nonce->data + 4+64*i, &ge[i]); } } /* Loads two group elements from a pubnonce. Returns 1 unless the nonce wasn't * properly initialized */ -static int rustsecp256k1zkp_v0_10_0_musig_pubnonce_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_musig_pubnonce* nonce) { +static int rustsecp256k1zkp_v0_10_1_musig_pubnonce_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_musig_pubnonce* nonce) { int i; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&nonce->data[0], rustsecp256k1zkp_v0_10_0_musig_pubnonce_magic, 4) == 0); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&nonce->data[0], rustsecp256k1zkp_v0_10_1_musig_pubnonce_magic, 4) == 0); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_from_bytes(&ge[i], nonce->data + 4 + 64*i); + rustsecp256k1zkp_v0_10_1_ge_from_bytes(&ge[i], nonce->data + 4 + 64*i); } return 1; } -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_aggnonce_magic[4] = { 0xa8, 0xb7, 0xe4, 0x67 }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_aggnonce_magic[4] = { 0xa8, 0xb7, 0xe4, 0x67 }; -static void rustsecp256k1zkp_v0_10_0_musig_aggnonce_save(rustsecp256k1zkp_v0_10_0_musig_aggnonce* nonce, rustsecp256k1zkp_v0_10_0_ge* ge) { +static void 
rustsecp256k1zkp_v0_10_1_musig_aggnonce_save(rustsecp256k1zkp_v0_10_1_musig_aggnonce* nonce, rustsecp256k1zkp_v0_10_1_ge* ge) { int i; - memcpy(&nonce->data[0], rustsecp256k1zkp_v0_10_0_musig_aggnonce_magic, 4); + memcpy(&nonce->data[0], rustsecp256k1zkp_v0_10_1_musig_aggnonce_magic, 4); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_point_save_ext(&nonce->data[4 + 64*i], &ge[i]); + rustsecp256k1zkp_v0_10_1_point_save_ext(&nonce->data[4 + 64*i], &ge[i]); } } -static int rustsecp256k1zkp_v0_10_0_musig_aggnonce_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_musig_aggnonce* nonce) { +static int rustsecp256k1zkp_v0_10_1_musig_aggnonce_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_musig_aggnonce* nonce) { int i; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&nonce->data[0], rustsecp256k1zkp_v0_10_0_musig_aggnonce_magic, 4) == 0); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&nonce->data[0], rustsecp256k1zkp_v0_10_1_musig_aggnonce_magic, 4) == 0); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_point_load_ext(&ge[i], &nonce->data[4 + 64*i]); + rustsecp256k1zkp_v0_10_1_point_load_ext(&ge[i], &nonce->data[4 + 64*i]); } return 1; } -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_session_cache_magic[4] = { 0x9d, 0xed, 0xe9, 0x17 }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_session_cache_magic[4] = { 0x9d, 0xed, 0xe9, 0x17 }; /* A session consists of * - 4 byte session cache magic @@ -108,58 +108,58 @@ static const unsigned char rustsecp256k1zkp_v0_10_0_musig_session_cache_magic[4] * - 32 byte signature challenge hash e * - 32 byte scalar s that is added to the partial signatures of the signers */ -static void rustsecp256k1zkp_v0_10_0_musig_session_save(rustsecp256k1zkp_v0_10_0_musig_session *session, const rustsecp256k1zkp_v0_10_0_musig_session_internal *session_i) { +static void 
rustsecp256k1zkp_v0_10_1_musig_session_save(rustsecp256k1zkp_v0_10_1_musig_session *session, const rustsecp256k1zkp_v0_10_1_musig_session_internal *session_i) { unsigned char *ptr = session->data; - memcpy(ptr, rustsecp256k1zkp_v0_10_0_musig_session_cache_magic, 4); + memcpy(ptr, rustsecp256k1zkp_v0_10_1_musig_session_cache_magic, 4); ptr += 4; *ptr = session_i->fin_nonce_parity; ptr += 1; memcpy(ptr, session_i->fin_nonce, 32); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ptr, &session_i->noncecoef); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ptr, &session_i->noncecoef); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ptr, &session_i->challenge); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ptr, &session_i->challenge); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(ptr, &session_i->s_part); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(ptr, &session_i->s_part); } -static int rustsecp256k1zkp_v0_10_0_musig_session_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_session_internal *session_i, const rustsecp256k1zkp_v0_10_0_musig_session *session) { +static int rustsecp256k1zkp_v0_10_1_musig_session_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_session_internal *session_i, const rustsecp256k1zkp_v0_10_1_musig_session *session) { const unsigned char *ptr = session->data; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(ptr, rustsecp256k1zkp_v0_10_0_musig_session_cache_magic, 4) == 0); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(ptr, rustsecp256k1zkp_v0_10_1_musig_session_cache_magic, 4) == 0); ptr += 4; session_i->fin_nonce_parity = *ptr; ptr += 1; memcpy(session_i->fin_nonce, ptr, 32); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&session_i->noncecoef, ptr, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&session_i->noncecoef, ptr, NULL); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&session_i->challenge, ptr, NULL); + 
rustsecp256k1zkp_v0_10_1_scalar_set_b32(&session_i->challenge, ptr, NULL); ptr += 32; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&session_i->s_part, ptr, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&session_i->s_part, ptr, NULL); return 1; } -static const unsigned char rustsecp256k1zkp_v0_10_0_musig_partial_sig_magic[4] = { 0xeb, 0xfb, 0x1a, 0x32 }; +static const unsigned char rustsecp256k1zkp_v0_10_1_musig_partial_sig_magic[4] = { 0xeb, 0xfb, 0x1a, 0x32 }; -static void rustsecp256k1zkp_v0_10_0_musig_partial_sig_save(rustsecp256k1zkp_v0_10_0_musig_partial_sig* sig, rustsecp256k1zkp_v0_10_0_scalar *s) { - memcpy(&sig->data[0], rustsecp256k1zkp_v0_10_0_musig_partial_sig_magic, 4); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[4], s); +static void rustsecp256k1zkp_v0_10_1_musig_partial_sig_save(rustsecp256k1zkp_v0_10_1_musig_partial_sig* sig, rustsecp256k1zkp_v0_10_1_scalar *s) { + memcpy(&sig->data[0], rustsecp256k1zkp_v0_10_1_musig_partial_sig_magic, 4); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[4], s); } -static int rustsecp256k1zkp_v0_10_0_musig_partial_sig_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar *s, const rustsecp256k1zkp_v0_10_0_musig_partial_sig* sig) { +static int rustsecp256k1zkp_v0_10_1_musig_partial_sig_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar *s, const rustsecp256k1zkp_v0_10_1_musig_partial_sig* sig) { int overflow; - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&sig->data[0], rustsecp256k1zkp_v0_10_0_musig_partial_sig_magic, 4) == 0); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(s, &sig->data[4], &overflow); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&sig->data[0], rustsecp256k1zkp_v0_10_1_musig_partial_sig_magic, 4) == 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(s, &sig->data[4], &overflow); /* Parsed signatures can not overflow */ VERIFY_CHECK(!overflow); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(const 
rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *out66, const rustsecp256k1zkp_v0_10_0_musig_pubnonce* nonce) { - rustsecp256k1zkp_v0_10_0_ge ge[2]; +int rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *out66, const rustsecp256k1zkp_v0_10_1_musig_pubnonce* nonce) { + rustsecp256k1zkp_v0_10_1_ge ge[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -167,13 +167,13 @@ int rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(const rustsecp256k1zkp_v0_ memset(out66, 0, 66); ARG_CHECK(nonce != NULL); - if (!rustsecp256k1zkp_v0_10_0_musig_pubnonce_load(ctx, ge, nonce)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubnonce_load(ctx, ge, nonce)) { return 0; } for (i = 0; i < 2; i++) { int ret; size_t size = 33; - ret = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&ge[i], &out66[33*i], &size, 1); + ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&ge[i], &out66[33*i], &size, 1); #ifdef VERIFY /* serialize must succeed because the point was just loaded */ VERIFY_CHECK(ret && size == 33); @@ -184,8 +184,8 @@ int rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(const rustsecp256k1zkp_v0_ return 1; } -int rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_pubnonce* nonce, const unsigned char *in66) { - rustsecp256k1zkp_v0_10_0_ge ge[2]; +int rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_pubnonce* nonce, const unsigned char *in66) { + rustsecp256k1zkp_v0_10_1_ge ge[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -193,19 +193,19 @@ int rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(const rustsecp256k1zkp_v0_10_0 ARG_CHECK(in66 != NULL); for (i = 0; i < 2; i++) { - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&ge[i], &in66[33*i], 33)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&ge[i], &in66[33*i], 33)) { return 0; } - if 
(!rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(&ge[i])) { + if (!rustsecp256k1zkp_v0_10_1_ge_is_in_correct_subgroup(&ge[i])) { return 0; } } - rustsecp256k1zkp_v0_10_0_musig_pubnonce_save(nonce, ge); + rustsecp256k1zkp_v0_10_1_musig_pubnonce_save(nonce, ge); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *out66, const rustsecp256k1zkp_v0_10_0_musig_aggnonce* nonce) { - rustsecp256k1zkp_v0_10_0_ge ge[2]; +int rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *out66, const rustsecp256k1zkp_v0_10_1_musig_aggnonce* nonce) { + rustsecp256k1zkp_v0_10_1_ge ge[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -213,17 +213,17 @@ int rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(const rustsecp256k1zkp_v0_ memset(out66, 0, 66); ARG_CHECK(nonce != NULL); - if (!rustsecp256k1zkp_v0_10_0_musig_aggnonce_load(ctx, ge, nonce)) { + if (!rustsecp256k1zkp_v0_10_1_musig_aggnonce_load(ctx, ge, nonce)) { return 0; } for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_serialize_ext(&out66[33*i], &ge[i]); + rustsecp256k1zkp_v0_10_1_ge_serialize_ext(&out66[33*i], &ge[i]); } return 1; } -int rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_aggnonce* nonce, const unsigned char *in66) { - rustsecp256k1zkp_v0_10_0_ge ge[2]; +int rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_aggnonce* nonce, const unsigned char *in66) { + rustsecp256k1zkp_v0_10_1_ge ge[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -231,15 +231,15 @@ int rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(const rustsecp256k1zkp_v0_10_0 ARG_CHECK(in66 != NULL); for (i = 0; i < 2; i++) { - if (!rustsecp256k1zkp_v0_10_0_ge_parse_ext(&ge[i], &in66[33*i])) { + if (!rustsecp256k1zkp_v0_10_1_ge_parse_ext(&ge[i], &in66[33*i])) { return 0; } } - 
rustsecp256k1zkp_v0_10_0_musig_aggnonce_save(nonce, ge); + rustsecp256k1zkp_v0_10_1_musig_aggnonce_save(nonce, ge); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *out32, const rustsecp256k1zkp_v0_10_0_musig_partial_sig* sig) { +int rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *out32, const rustsecp256k1zkp_v0_10_1_musig_partial_sig* sig) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(out32 != NULL); ARG_CHECK(sig != NULL); @@ -247,49 +247,49 @@ int rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(const rustsecp256k1zkp_ return 1; } -int rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_partial_sig* sig, const unsigned char *in32) { - rustsecp256k1zkp_v0_10_0_scalar tmp; +int rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_partial_sig* sig, const unsigned char *in32) { + rustsecp256k1zkp_v0_10_1_scalar tmp; int overflow; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(in32 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&tmp, in32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&tmp, in32, &overflow); if (overflow) { return 0; } - rustsecp256k1zkp_v0_10_0_musig_partial_sig_save(sig, &tmp); + rustsecp256k1zkp_v0_10_1_musig_partial_sig_save(sig, &tmp); return 1; } /* Write optional inputs into the hash */ -static void rustsecp256k1zkp_v0_10_0_nonce_function_musig_helper(rustsecp256k1zkp_v0_10_0_sha256 *sha, unsigned int prefix_size, const unsigned char *data, unsigned char len) { +static void rustsecp256k1zkp_v0_10_1_nonce_function_musig_helper(rustsecp256k1zkp_v0_10_1_sha256 *sha, unsigned int prefix_size, const unsigned char *data, unsigned char len) { unsigned char zero[7] = { 0 }; /* The spec requires length prefixes to be between 1 and 8 bytes * 
(inclusive) */ VERIFY_CHECK(prefix_size <= 8); /* Since the length of all input data fits in a byte, we can always pad the * length prefix with prefix_size - 1 zero bytes. */ - rustsecp256k1zkp_v0_10_0_sha256_write(sha, zero, prefix_size - 1); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, zero, prefix_size - 1); if (data != NULL) { - rustsecp256k1zkp_v0_10_0_sha256_write(sha, &len, 1); - rustsecp256k1zkp_v0_10_0_sha256_write(sha, data, len); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, &len, 1); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, data, len); } else { len = 0; - rustsecp256k1zkp_v0_10_0_sha256_write(sha, &len, 1); + rustsecp256k1zkp_v0_10_1_sha256_write(sha, &len, 1); } } -static void rustsecp256k1zkp_v0_10_0_nonce_function_musig(rustsecp256k1zkp_v0_10_0_scalar *k, const unsigned char *session_id, const unsigned char *msg32, const unsigned char *seckey32, const unsigned char *pk33, const unsigned char *agg_pk32, const unsigned char *extra_input32) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +static void rustsecp256k1zkp_v0_10_1_nonce_function_musig(rustsecp256k1zkp_v0_10_1_scalar *k, const unsigned char *session_id, const unsigned char *msg32, const unsigned char *seckey32, const unsigned char *pk33, const unsigned char *agg_pk32, const unsigned char *extra_input32) { + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char rand[32]; unsigned char i; unsigned char msg_present; if (seckey32 != NULL) { - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/aux", sizeof("MuSig/aux") - 1); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, session_id, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, rand); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/aux", sizeof("MuSig/aux") - 1); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, session_id, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, rand); for (i = 0; i < 32; i++) { rand[i] ^= seckey32[i]; } @@ -298,36 +298,36 @@ static void 
rustsecp256k1zkp_v0_10_0_nonce_function_musig(rustsecp256k1zkp_v0_10 } /* Subtract one from `sizeof` to avoid hashing the implicit null byte */ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/nonce", sizeof("MuSig/nonce") - 1); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, rand, sizeof(rand)); - rustsecp256k1zkp_v0_10_0_nonce_function_musig_helper(&sha, 1, pk33, 33); - rustsecp256k1zkp_v0_10_0_nonce_function_musig_helper(&sha, 1, agg_pk32, 32); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/nonce", sizeof("MuSig/nonce") - 1); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, rand, sizeof(rand)); + rustsecp256k1zkp_v0_10_1_nonce_function_musig_helper(&sha, 1, pk33, 33); + rustsecp256k1zkp_v0_10_1_nonce_function_musig_helper(&sha, 1, agg_pk32, 32); msg_present = msg32 != NULL; - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, &msg_present, 1); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, &msg_present, 1); if (msg_present) { - rustsecp256k1zkp_v0_10_0_nonce_function_musig_helper(&sha, 8, msg32, 32); + rustsecp256k1zkp_v0_10_1_nonce_function_musig_helper(&sha, 8, msg32, 32); } - rustsecp256k1zkp_v0_10_0_nonce_function_musig_helper(&sha, 4, extra_input32, 32); + rustsecp256k1zkp_v0_10_1_nonce_function_musig_helper(&sha, 4, extra_input32, 32); for (i = 0; i < 2; i++) { unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha_tmp = sha; - rustsecp256k1zkp_v0_10_0_sha256_write(&sha_tmp, &i, 1); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha_tmp, buf); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k[i], buf, NULL); + rustsecp256k1zkp_v0_10_1_sha256 sha_tmp = sha; + rustsecp256k1zkp_v0_10_1_sha256_write(&sha_tmp, &i, 1); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha_tmp, buf); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k[i], buf, NULL); } } -int rustsecp256k1zkp_v0_10_0_musig_nonce_gen(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, 
rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce, const unsigned char *session_id32, const unsigned char *seckey, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const unsigned char *extra_input32) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; - rustsecp256k1zkp_v0_10_0_scalar k[2]; - rustsecp256k1zkp_v0_10_0_ge nonce_pt[2]; +int rustsecp256k1zkp_v0_10_1_musig_nonce_gen(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce, const unsigned char *session_id32, const unsigned char *seckey, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *extra_input32) { + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; + rustsecp256k1zkp_v0_10_1_scalar k[2]; + rustsecp256k1zkp_v0_10_1_ge nonce_pt[2]; int i; unsigned char pk_ser[33]; size_t pk_ser_len = sizeof(pk_ser); unsigned char aggpk_ser[32]; unsigned char *aggpk_ser_ptr = NULL; - rustsecp256k1zkp_v0_10_0_ge pk; + rustsecp256k1zkp_v0_10_1_ge pk; int pk_serialize_success; int ret = 1; @@ -338,7 +338,7 @@ int rustsecp256k1zkp_v0_10_0_musig_nonce_gen(const rustsecp256k1zkp_v0_10_0_cont memset(pubnonce, 0, sizeof(*pubnonce)); ARG_CHECK(session_id32 != NULL); ARG_CHECK(pubkey != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); if (seckey == NULL) { /* Check in constant time that the session_id is not 0 as a * defense-in-depth measure that may protect against a faulty RNG. */ @@ -352,23 +352,23 @@ int rustsecp256k1zkp_v0_10_0_musig_nonce_gen(const rustsecp256k1zkp_v0_10_0_cont /* Check that the seckey is valid to be able to sign for it later. 
*/ if (seckey != NULL) { - rustsecp256k1zkp_v0_10_0_scalar sk; - ret &= rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sk, seckey); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sk); + rustsecp256k1zkp_v0_10_1_scalar sk; + ret &= rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sk, seckey); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); } if (keyagg_cache != NULL) { - if (!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + if (!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { return 0; } /* The loaded point cache_i.pk can not be the point at infinity. */ - rustsecp256k1zkp_v0_10_0_fe_get_b32(aggpk_ser, &cache_i.pk.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(aggpk_ser, &cache_i.pk.x); aggpk_ser_ptr = aggpk_ser; } - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pk, pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pk, pubkey)) { return 0; } - pk_serialize_success = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&pk, pk_ser, &pk_ser_len, SECP256K1_EC_COMPRESSED); + pk_serialize_success = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&pk, pk_ser, &pk_ser_len, SECP256K1_EC_COMPRESSED); #ifdef VERIFY /* A pubkey cannot be the point at infinity */ @@ -378,114 +378,114 @@ int rustsecp256k1zkp_v0_10_0_musig_nonce_gen(const rustsecp256k1zkp_v0_10_0_cont (void) pk_serialize_success; #endif - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k, session_id32, msg32, seckey, pk_ser, aggpk_ser_ptr, extra_input32); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k[0])); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k[1])); - VERIFY_CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&k[0], &k[1])); - rustsecp256k1zkp_v0_10_0_musig_secnonce_save(secnonce, k, &pk); - rustsecp256k1zkp_v0_10_0_musig_secnonce_invalidate(ctx, secnonce, !ret); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k, session_id32, msg32, seckey, pk_ser, aggpk_ser_ptr, extra_input32); + 
VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k[0])); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k[1])); + VERIFY_CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&k[0], &k[1])); + rustsecp256k1zkp_v0_10_1_musig_secnonce_save(secnonce, k, &pk); + rustsecp256k1zkp_v0_10_1_musig_secnonce_invalidate(ctx, secnonce, !ret); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_gej nonce_ptj; - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &nonce_ptj, &k[i]); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&nonce_pt[i], &nonce_ptj); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &nonce_pt[i], sizeof(nonce_pt)); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k[i]); + rustsecp256k1zkp_v0_10_1_gej nonce_ptj; + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &nonce_ptj, &k[i]); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&nonce_pt[i], &nonce_ptj); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &nonce_pt[i], sizeof(nonce_pt)); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k[i]); } /* nonce_pt won't be infinity because k != 0 with overwhelming probability */ - rustsecp256k1zkp_v0_10_0_musig_pubnonce_save(pubnonce, nonce_pt); + rustsecp256k1zkp_v0_10_1_musig_pubnonce_save(pubnonce, nonce_pt); return ret; } -static int rustsecp256k1zkp_v0_10_0_musig_sum_nonces(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_gej *summed_nonces, const rustsecp256k1zkp_v0_10_0_musig_pubnonce * const* pubnonces, size_t n_pubnonces) { +static int rustsecp256k1zkp_v0_10_1_musig_sum_nonces(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_gej *summed_nonces, const rustsecp256k1zkp_v0_10_1_musig_pubnonce * const* pubnonces, size_t n_pubnonces) { size_t i; int j; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&summed_nonces[0]); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&summed_nonces[1]); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&summed_nonces[0]); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&summed_nonces[1]); for (i = 0; i < n_pubnonces; i++) { - 
rustsecp256k1zkp_v0_10_0_ge nonce_pt[2]; - if (!rustsecp256k1zkp_v0_10_0_musig_pubnonce_load(ctx, nonce_pt, pubnonces[i])) { + rustsecp256k1zkp_v0_10_1_ge nonce_pt[2]; + if (!rustsecp256k1zkp_v0_10_1_musig_pubnonce_load(ctx, nonce_pt, pubnonces[i])) { return 0; } for (j = 0; j < 2; j++) { - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&summed_nonces[j], &summed_nonces[j], &nonce_pt[j], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&summed_nonces[j], &summed_nonces[j], &nonce_pt[j], NULL); } } return 1; } -int rustsecp256k1zkp_v0_10_0_musig_nonce_agg(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_aggnonce *aggnonce, const rustsecp256k1zkp_v0_10_0_musig_pubnonce * const* pubnonces, size_t n_pubnonces) { - rustsecp256k1zkp_v0_10_0_gej aggnonce_ptj[2]; - rustsecp256k1zkp_v0_10_0_ge aggnonce_pt[2]; +int rustsecp256k1zkp_v0_10_1_musig_nonce_agg(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_aggnonce *aggnonce, const rustsecp256k1zkp_v0_10_1_musig_pubnonce * const* pubnonces, size_t n_pubnonces) { + rustsecp256k1zkp_v0_10_1_gej aggnonce_ptj[2]; + rustsecp256k1zkp_v0_10_1_ge aggnonce_pt[2]; int i; VERIFY_CHECK(ctx != NULL); ARG_CHECK(aggnonce != NULL); ARG_CHECK(pubnonces != NULL); ARG_CHECK(n_pubnonces > 0); - if (!rustsecp256k1zkp_v0_10_0_musig_sum_nonces(ctx, aggnonce_ptj, pubnonces, n_pubnonces)) { + if (!rustsecp256k1zkp_v0_10_1_musig_sum_nonces(ctx, aggnonce_ptj, pubnonces, n_pubnonces)) { return 0; } for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_set_gej(&aggnonce_pt[i], &aggnonce_ptj[i]); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&aggnonce_pt[i], &aggnonce_ptj[i]); } - rustsecp256k1zkp_v0_10_0_musig_aggnonce_save(aggnonce, aggnonce_pt); + rustsecp256k1zkp_v0_10_1_musig_aggnonce_save(aggnonce, aggnonce_pt); return 1; } /* tagged_hash(aggnonce[0], aggnonce[1], agg_pk, msg) */ -static int rustsecp256k1zkp_v0_10_0_musig_compute_noncehash(unsigned char *noncehash, rustsecp256k1zkp_v0_10_0_ge *aggnonce, const 
unsigned char *agg_pk32, const unsigned char *msg) { +static int rustsecp256k1zkp_v0_10_1_musig_compute_noncehash(unsigned char *noncehash, rustsecp256k1zkp_v0_10_1_ge *aggnonce, const unsigned char *agg_pk32, const unsigned char *msg) { unsigned char buf[33]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; int i; - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/noncecoef", sizeof("MuSig/noncecoef") - 1); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, (unsigned char*)"MuSig/noncecoef", sizeof("MuSig/noncecoef") - 1); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_serialize_ext(buf, &aggnonce[i]); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, sizeof(buf)); + rustsecp256k1zkp_v0_10_1_ge_serialize_ext(buf, &aggnonce[i]); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, sizeof(buf)); } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, agg_pk32, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, msg, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, noncehash); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, agg_pk32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, noncehash); return 1; } -static int rustsecp256k1zkp_v0_10_0_musig_nonce_process_internal(int *fin_nonce_parity, unsigned char *fin_nonce, rustsecp256k1zkp_v0_10_0_scalar *b, rustsecp256k1zkp_v0_10_0_gej *aggnoncej, const unsigned char *agg_pk32, const unsigned char *msg) { +static int rustsecp256k1zkp_v0_10_1_musig_nonce_process_internal(int *fin_nonce_parity, unsigned char *fin_nonce, rustsecp256k1zkp_v0_10_1_scalar *b, rustsecp256k1zkp_v0_10_1_gej *aggnoncej, const unsigned char *agg_pk32, const unsigned char *msg) { unsigned char noncehash[32]; - rustsecp256k1zkp_v0_10_0_ge fin_nonce_pt; - rustsecp256k1zkp_v0_10_0_gej fin_nonce_ptj; - rustsecp256k1zkp_v0_10_0_ge aggnonce[2]; + rustsecp256k1zkp_v0_10_1_ge fin_nonce_pt; + rustsecp256k1zkp_v0_10_1_gej 
fin_nonce_ptj; + rustsecp256k1zkp_v0_10_1_ge aggnonce[2]; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&aggnonce[0], &aggnoncej[0]); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&aggnonce[1], &aggnoncej[1]); - if (!rustsecp256k1zkp_v0_10_0_musig_compute_noncehash(noncehash, aggnonce, agg_pk32, msg)) { + rustsecp256k1zkp_v0_10_1_ge_set_gej(&aggnonce[0], &aggnoncej[0]); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&aggnonce[1], &aggnoncej[1]); + if (!rustsecp256k1zkp_v0_10_1_musig_compute_noncehash(noncehash, aggnonce, agg_pk32, msg)) { return 0; } /* fin_nonce = aggnonce[0] + b*aggnonce[1] */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(b, noncehash, NULL); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&fin_nonce_ptj); - rustsecp256k1zkp_v0_10_0_ecmult(&fin_nonce_ptj, &aggnoncej[1], b, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&fin_nonce_ptj, &fin_nonce_ptj, &aggnonce[0], NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&fin_nonce_pt, &fin_nonce_ptj); - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(&fin_nonce_pt)) { - fin_nonce_pt = rustsecp256k1zkp_v0_10_0_ge_const_g; + rustsecp256k1zkp_v0_10_1_scalar_set_b32(b, noncehash, NULL); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&fin_nonce_ptj); + rustsecp256k1zkp_v0_10_1_ecmult(&fin_nonce_ptj, &aggnoncej[1], b, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&fin_nonce_ptj, &fin_nonce_ptj, &aggnonce[0], NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&fin_nonce_pt, &fin_nonce_ptj); + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(&fin_nonce_pt)) { + fin_nonce_pt = rustsecp256k1zkp_v0_10_1_ge_const_g; } /* fin_nonce_pt is not the point at infinity */ - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&fin_nonce_pt.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(fin_nonce, &fin_nonce_pt.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&fin_nonce_pt.y); - *fin_nonce_parity = rustsecp256k1zkp_v0_10_0_fe_is_odd(&fin_nonce_pt.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&fin_nonce_pt.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(fin_nonce, &fin_nonce_pt.x); + 
rustsecp256k1zkp_v0_10_1_fe_normalize_var(&fin_nonce_pt.y); + *fin_nonce_parity = rustsecp256k1zkp_v0_10_1_fe_is_odd(&fin_nonce_pt.y); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_nonce_process(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_session *session, const rustsecp256k1zkp_v0_10_0_musig_aggnonce *aggnonce, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_0_pubkey *adaptor) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; - rustsecp256k1zkp_v0_10_0_ge aggnonce_pt[2]; - rustsecp256k1zkp_v0_10_0_gej aggnonce_ptj[2]; +int rustsecp256k1zkp_v0_10_1_musig_nonce_process(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_session *session, const rustsecp256k1zkp_v0_10_1_musig_aggnonce *aggnonce, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_1_pubkey *adaptor) { + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; + rustsecp256k1zkp_v0_10_1_ge aggnonce_pt[2]; + rustsecp256k1zkp_v0_10_1_gej aggnonce_ptj[2]; unsigned char fin_nonce[32]; - rustsecp256k1zkp_v0_10_0_musig_session_internal session_i; + rustsecp256k1zkp_v0_10_1_musig_session_internal session_i; unsigned char agg_pk32[32]; VERIFY_CHECK(ctx != NULL); @@ -494,70 +494,70 @@ int rustsecp256k1zkp_v0_10_0_musig_nonce_process(const rustsecp256k1zkp_v0_10_0_ ARG_CHECK(msg32 != NULL); ARG_CHECK(keyagg_cache != NULL); - if (!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + if (!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_get_b32(agg_pk32, &cache_i.pk.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(agg_pk32, &cache_i.pk.x); - if (!rustsecp256k1zkp_v0_10_0_musig_aggnonce_load(ctx, aggnonce_pt, aggnonce)) { + if (!rustsecp256k1zkp_v0_10_1_musig_aggnonce_load(ctx, aggnonce_pt, aggnonce)) { return 0; } - 
rustsecp256k1zkp_v0_10_0_gej_set_ge(&aggnonce_ptj[0], &aggnonce_pt[0]); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&aggnonce_ptj[1], &aggnonce_pt[1]); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&aggnonce_ptj[0], &aggnonce_pt[0]); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&aggnonce_ptj[1], &aggnonce_pt[1]); /* Add public adaptor to nonce */ if (adaptor != NULL) { - rustsecp256k1zkp_v0_10_0_ge adaptorp; - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &adaptorp, adaptor)) { + rustsecp256k1zkp_v0_10_1_ge adaptorp; + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &adaptorp, adaptor)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&aggnonce_ptj[0], &aggnonce_ptj[0], &adaptorp, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&aggnonce_ptj[0], &aggnonce_ptj[0], &adaptorp, NULL); } - if (!rustsecp256k1zkp_v0_10_0_musig_nonce_process_internal(&session_i.fin_nonce_parity, fin_nonce, &session_i.noncecoef, aggnonce_ptj, agg_pk32, msg32)) { + if (!rustsecp256k1zkp_v0_10_1_musig_nonce_process_internal(&session_i.fin_nonce_parity, fin_nonce, &session_i.noncecoef, aggnonce_ptj, agg_pk32, msg32)) { return 0; } - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&session_i.challenge, fin_nonce, msg32, 32, agg_pk32); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&session_i.challenge, fin_nonce, msg32, 32, agg_pk32); /* If there is a tweak then set `challenge` times `tweak` to the `s`-part.*/ - rustsecp256k1zkp_v0_10_0_scalar_set_int(&session_i.s_part, 0); - if (!rustsecp256k1zkp_v0_10_0_scalar_is_zero(&cache_i.tweak)) { - rustsecp256k1zkp_v0_10_0_scalar e_tmp; - rustsecp256k1zkp_v0_10_0_scalar_mul(&e_tmp, &session_i.challenge, &cache_i.tweak); - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&cache_i.pk.y)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&e_tmp, &e_tmp); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&session_i.s_part, 0); + if (!rustsecp256k1zkp_v0_10_1_scalar_is_zero(&cache_i.tweak)) { + rustsecp256k1zkp_v0_10_1_scalar e_tmp; + rustsecp256k1zkp_v0_10_1_scalar_mul(&e_tmp, 
&session_i.challenge, &cache_i.tweak); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&cache_i.pk.y)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&e_tmp, &e_tmp); } - rustsecp256k1zkp_v0_10_0_scalar_add(&session_i.s_part, &session_i.s_part, &e_tmp); + rustsecp256k1zkp_v0_10_1_scalar_add(&session_i.s_part, &session_i.s_part, &e_tmp); } memcpy(session_i.fin_nonce, fin_nonce, sizeof(session_i.fin_nonce)); - rustsecp256k1zkp_v0_10_0_musig_session_save(session, &session_i); + rustsecp256k1zkp_v0_10_1_musig_session_save(session, &session_i); return 1; } -static void rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(rustsecp256k1zkp_v0_10_0_scalar *sk, rustsecp256k1zkp_v0_10_0_scalar *k) { - rustsecp256k1zkp_v0_10_0_scalar_clear(sk); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k[0]); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k[1]); +static void rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(rustsecp256k1zkp_v0_10_1_scalar *sk, rustsecp256k1zkp_v0_10_1_scalar *k) { + rustsecp256k1zkp_v0_10_1_scalar_clear(sk); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k[0]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k[1]); } -int rustsecp256k1zkp_v0_10_0_musig_partial_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig, rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, const rustsecp256k1zkp_v0_10_0_keypair *keypair, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_0_musig_session *session) { - rustsecp256k1zkp_v0_10_0_scalar sk; - rustsecp256k1zkp_v0_10_0_ge pk, keypair_pk; - rustsecp256k1zkp_v0_10_0_scalar k[2]; - rustsecp256k1zkp_v0_10_0_scalar mu, s; - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; - rustsecp256k1zkp_v0_10_0_musig_session_internal session_i; +int rustsecp256k1zkp_v0_10_1_musig_partial_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig, rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, const 
rustsecp256k1zkp_v0_10_1_keypair *keypair, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_1_musig_session *session) { + rustsecp256k1zkp_v0_10_1_scalar sk; + rustsecp256k1zkp_v0_10_1_ge pk, keypair_pk; + rustsecp256k1zkp_v0_10_1_scalar k[2]; + rustsecp256k1zkp_v0_10_1_scalar mu, s; + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; + rustsecp256k1zkp_v0_10_1_musig_session_internal session_i; int ret; VERIFY_CHECK(ctx != NULL); ARG_CHECK(secnonce != NULL); /* Fails if the magic doesn't match */ - ret = rustsecp256k1zkp_v0_10_0_musig_secnonce_load(ctx, k, &pk, secnonce); + ret = rustsecp256k1zkp_v0_10_1_musig_secnonce_load(ctx, k, &pk, secnonce); /* Set nonce to zero to avoid nonce reuse. This will cause subsequent calls * of this function to fail */ memset(secnonce, 0, sizeof(*secnonce)); if (!ret) { - rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(&sk, k); + rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(&sk, k); return 0; } @@ -566,61 +566,61 @@ int rustsecp256k1zkp_v0_10_0_musig_partial_sign(const rustsecp256k1zkp_v0_10_0_c ARG_CHECK(keyagg_cache != NULL); ARG_CHECK(session != NULL); - if (!rustsecp256k1zkp_v0_10_0_keypair_load(ctx, &sk, &keypair_pk, keypair)) { - rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(&sk, k); + if (!rustsecp256k1zkp_v0_10_1_keypair_load(ctx, &sk, &keypair_pk, keypair)) { + rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(&sk, k); return 0; } - ARG_CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&pk.x, &keypair_pk.x) - && rustsecp256k1zkp_v0_10_0_fe_equal(&pk.y, &keypair_pk.y)); - if (!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { - rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(&sk, k); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&pk.x, &keypair_pk.x) + && rustsecp256k1zkp_v0_10_1_fe_equal(&pk.y, &keypair_pk.y)); + if (!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + 
rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(&sk, k); return 0; } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&pk.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&pk.y); - /* Negate sk if rustsecp256k1zkp_v0_10_0_fe_is_odd(&cache_i.pk.y)) XOR cache_i.parity_acc. + /* Negate sk if rustsecp256k1zkp_v0_10_1_fe_is_odd(&cache_i.pk.y)) XOR cache_i.parity_acc. * This corresponds to the line "Let d = g⋅gacc⋅d' mod n" in the * specification. */ - if ((rustsecp256k1zkp_v0_10_0_fe_is_odd(&cache_i.pk.y) + if ((rustsecp256k1zkp_v0_10_1_fe_is_odd(&cache_i.pk.y) != cache_i.parity_acc)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&sk, &sk); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sk, &sk); } /* Multiply KeyAgg coefficient */ - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&pk.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&pk.x); /* TODO Cache mu */ - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef(&mu, &cache_i, &pk); - rustsecp256k1zkp_v0_10_0_scalar_mul(&sk, &sk, &mu); + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef(&mu, &cache_i, &pk); + rustsecp256k1zkp_v0_10_1_scalar_mul(&sk, &sk, &mu); - if (!rustsecp256k1zkp_v0_10_0_musig_session_load(ctx, &session_i, session)) { - rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(&sk, k); + if (!rustsecp256k1zkp_v0_10_1_musig_session_load(ctx, &session_i, session)) { + rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(&sk, k); return 0; } if (session_i.fin_nonce_parity) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&k[0], &k[0]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&k[1], &k[1]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&k[0], &k[0]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&k[1], &k[1]); } /* Sign */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&s, &session_i.challenge, &sk); - rustsecp256k1zkp_v0_10_0_scalar_mul(&k[1], &session_i.noncecoef, &k[1]); - rustsecp256k1zkp_v0_10_0_scalar_add(&k[0], &k[0], &k[1]); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &k[0]); - rustsecp256k1zkp_v0_10_0_musig_partial_sig_save(partial_sig, &s); - 
rustsecp256k1zkp_v0_10_0_musig_partial_sign_clear(&sk, k); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s, &session_i.challenge, &sk); + rustsecp256k1zkp_v0_10_1_scalar_mul(&k[1], &session_i.noncecoef, &k[1]); + rustsecp256k1zkp_v0_10_1_scalar_add(&k[0], &k[0], &k[1]); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &k[0]); + rustsecp256k1zkp_v0_10_1_musig_partial_sig_save(partial_sig, &s); + rustsecp256k1zkp_v0_10_1_musig_partial_sign_clear(&sk, k); return 1; } -int rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig, const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_0_musig_session *session) { - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; - rustsecp256k1zkp_v0_10_0_musig_session_internal session_i; - rustsecp256k1zkp_v0_10_0_scalar mu, e, s; - rustsecp256k1zkp_v0_10_0_gej pkj; - rustsecp256k1zkp_v0_10_0_ge nonce_pt[2]; - rustsecp256k1zkp_v0_10_0_gej rj; - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_ge pkp; +int rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig, const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const rustsecp256k1zkp_v0_10_1_musig_session *session) { + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; + rustsecp256k1zkp_v0_10_1_musig_session_internal session_i; + rustsecp256k1zkp_v0_10_1_scalar mu, e, s; + rustsecp256k1zkp_v0_10_1_gej pkj; + rustsecp256k1zkp_v0_10_1_ge nonce_pt[2]; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_ge pkp; VERIFY_CHECK(ctx != NULL); ARG_CHECK(partial_sig != NULL); @@ 
-629,57 +629,57 @@ int rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(const rustsecp256k1zkp_v0_ ARG_CHECK(keyagg_cache != NULL); ARG_CHECK(session != NULL); - if (!rustsecp256k1zkp_v0_10_0_musig_session_load(ctx, &session_i, session)) { + if (!rustsecp256k1zkp_v0_10_1_musig_session_load(ctx, &session_i, session)) { return 0; } /* Compute "effective" nonce rj = aggnonce[0] + b*aggnonce[1] */ /* TODO: use multiexp to compute -s*G + e*mu*pubkey + aggnonce[0] + b*aggnonce[1] */ - if (!rustsecp256k1zkp_v0_10_0_musig_pubnonce_load(ctx, nonce_pt, pubnonce)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubnonce_load(ctx, nonce_pt, pubnonce)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&rj, &nonce_pt[1]); - rustsecp256k1zkp_v0_10_0_ecmult(&rj, &rj, &session_i.noncecoef, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&rj, &rj, &nonce_pt[0], NULL); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&rj, &nonce_pt[1]); + rustsecp256k1zkp_v0_10_1_ecmult(&rj, &rj, &session_i.noncecoef, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&rj, &rj, &nonce_pt[0], NULL); - if (!rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &pkp, pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &pkp, pubkey)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { + if (!rustsecp256k1zkp_v0_10_1_keyagg_cache_load(ctx, &cache_i, keyagg_cache)) { return 0; } /* Multiplying the challenge by the KeyAgg coefficient is equivalent * to multiplying the signer's public key by the coefficient, except * much easier to do. */ - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef(&mu, &cache_i, &pkp); - rustsecp256k1zkp_v0_10_0_scalar_mul(&e, &session_i.challenge, &mu); + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef(&mu, &cache_i, &pkp); + rustsecp256k1zkp_v0_10_1_scalar_mul(&e, &session_i.challenge, &mu); - /* Negate e if rustsecp256k1zkp_v0_10_0_fe_is_odd(&cache_i.pk.y)) XOR cache_i.parity_acc. + /* Negate e if rustsecp256k1zkp_v0_10_1_fe_is_odd(&cache_i.pk.y)) XOR cache_i.parity_acc. 
* This corresponds to the line "Let g' = g⋅gacc mod n" and the multiplication "g'⋅e" * in the specification. */ - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&cache_i.pk.y) + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&cache_i.pk.y) != cache_i.parity_acc) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&e, &e); + rustsecp256k1zkp_v0_10_1_scalar_negate(&e, &e); } - if (!rustsecp256k1zkp_v0_10_0_musig_partial_sig_load(ctx, &s, partial_sig)) { + if (!rustsecp256k1zkp_v0_10_1_musig_partial_sig_load(ctx, &s, partial_sig)) { return 0; } /* Compute -s*G + e*pkj + rj (e already includes the keyagg coefficient mu) */ - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pkj, &pkp); - rustsecp256k1zkp_v0_10_0_ecmult(&tmp, &pkj, &e, &s); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pkj, &pkp); + rustsecp256k1zkp_v0_10_1_ecmult(&tmp, &pkj, &e, &s); if (session_i.fin_nonce_parity) { - rustsecp256k1zkp_v0_10_0_gej_neg(&rj, &rj); + rustsecp256k1zkp_v0_10_1_gej_neg(&rj, &rj); } - rustsecp256k1zkp_v0_10_0_gej_add_var(&tmp, &tmp, &rj, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&tmp, &tmp, &rj, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&tmp); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&tmp); } -int rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const rustsecp256k1zkp_v0_10_0_musig_session *session, const rustsecp256k1zkp_v0_10_0_musig_partial_sig * const* partial_sigs, size_t n_sigs) { +int rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const rustsecp256k1zkp_v0_10_1_musig_session *session, const rustsecp256k1zkp_v0_10_1_musig_partial_sig * const* partial_sigs, size_t n_sigs) { size_t i; - rustsecp256k1zkp_v0_10_0_musig_session_internal session_i; + rustsecp256k1zkp_v0_10_1_musig_session_internal session_i; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig64 != NULL); @@ 
-687,17 +687,17 @@ int rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(const rustsecp256k1zkp_v0_10_ ARG_CHECK(partial_sigs != NULL); ARG_CHECK(n_sigs > 0); - if (!rustsecp256k1zkp_v0_10_0_musig_session_load(ctx, &session_i, session)) { + if (!rustsecp256k1zkp_v0_10_1_musig_session_load(ctx, &session_i, session)) { return 0; } for (i = 0; i < n_sigs; i++) { - rustsecp256k1zkp_v0_10_0_scalar term; - if (!rustsecp256k1zkp_v0_10_0_musig_partial_sig_load(ctx, &term, partial_sigs[i])) { + rustsecp256k1zkp_v0_10_1_scalar term; + if (!rustsecp256k1zkp_v0_10_1_musig_partial_sig_load(ctx, &term, partial_sigs[i])) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_add(&session_i.s_part, &session_i.s_part, &term); + rustsecp256k1zkp_v0_10_1_scalar_add(&session_i.s_part, &session_i.s_part, &term); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig64[32], &session_i.s_part); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig64[32], &session_i.s_part); memcpy(&sig64[0], session_i.fin_nonce, 32); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/tests_impl.h index 9ff93cde..2fb6cee5 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/musig/tests_impl.h @@ -25,11 +25,11 @@ #include "vectors.h" -static int create_keypair_and_pk(rustsecp256k1zkp_v0_10_0_keypair *keypair, rustsecp256k1zkp_v0_10_0_pubkey *pk, const unsigned char *sk) { +static int create_keypair_and_pk(rustsecp256k1zkp_v0_10_1_keypair *keypair, rustsecp256k1zkp_v0_10_1_pubkey *pk, const unsigned char *sk) { int ret; - rustsecp256k1zkp_v0_10_0_keypair keypair_tmp; - ret = rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair_tmp, sk); - ret &= rustsecp256k1zkp_v0_10_0_keypair_pub(CTX, pk, &keypair_tmp); + rustsecp256k1zkp_v0_10_1_keypair keypair_tmp; + ret = rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair_tmp, sk); + ret &= 
rustsecp256k1zkp_v0_10_1_keypair_pub(CTX, pk, &keypair_tmp); if (keypair != NULL) { *keypair = keypair_tmp; } @@ -38,125 +38,125 @@ static int create_keypair_and_pk(rustsecp256k1zkp_v0_10_0_keypair *keypair, rust /* Just a simple (non-adaptor, non-tweaked) 2-of-2 MuSig aggregate, sign, verify * test. */ -static void musig_simple_test(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { +static void musig_simple_test(rustsecp256k1zkp_v0_10_1_scratch_space *scratch) { unsigned char sk[2][32]; - rustsecp256k1zkp_v0_10_0_keypair keypair[2]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_keypair keypair[2]; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; unsigned char msg[32]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; unsigned char session_id[2][32]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce[2]; - rustsecp256k1zkp_v0_10_0_pubkey pk[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig[2]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce[2]; + rustsecp256k1zkp_v0_10_1_pubkey pk[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig[2]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_ptr[2]; unsigned char final_sig[64]; - rustsecp256k1zkp_v0_10_0_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_session session; int i; - rustsecp256k1zkp_v0_10_0_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(msg); for 
(i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(session_id[i]); - rustsecp256k1zkp_v0_10_0_testrand256(sk[i]); + rustsecp256k1zkp_v0_10_1_testrand256(session_id[i]); + rustsecp256k1zkp_v0_10_1_testrand256(sk[i]); pk_ptr[i] = &pk[i]; pubnonce_ptr[i] = &pubnonce[i]; partial_sig_ptr[i] = &partial_sig[i]; CHECK(create_keypair_and_pk(&keypair[i], &pk[i], sk[i])); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[i], &pubnonce[i], session_id[i], sk[i], &pk[i], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[i], &pubnonce[i], session_id[i], sk[i], &pk[i], NULL, NULL, NULL) == 1); } - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, NULL) == 1); for (i = 0; i < 2; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[i], &secnonce[i], &keypair[i], &keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[i], &pubnonce[i], &pk[i], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[i], &secnonce[i], &keypair[i], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[i], &pubnonce[i], &pk[i], &keyagg_cache, &session) == 1); } - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, 
final_sig, msg, sizeof(msg), &agg_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), &agg_pk) == 1); } -static void pubnonce_summing_to_inf(rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce) { - rustsecp256k1zkp_v0_10_0_ge ge[2]; +static void pubnonce_summing_to_inf(rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce) { + rustsecp256k1zkp_v0_10_1_ge ge[2]; int i; - rustsecp256k1zkp_v0_10_0_gej summed_nonces[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_gej summed_nonces[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[2]; - ge[0] = rustsecp256k1zkp_v0_10_0_ge_const_g; - ge[1] = rustsecp256k1zkp_v0_10_0_ge_const_g; + ge[0] = rustsecp256k1zkp_v0_10_1_ge_const_g; + ge[1] = rustsecp256k1zkp_v0_10_1_ge_const_g; for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_musig_pubnonce_save(&pubnonce[i], ge); + rustsecp256k1zkp_v0_10_1_musig_pubnonce_save(&pubnonce[i], ge); pubnonce_ptr[i] = &pubnonce[i]; - rustsecp256k1zkp_v0_10_0_ge_neg(&ge[0], &ge[0]); - rustsecp256k1zkp_v0_10_0_ge_neg(&ge[1], &ge[1]); + rustsecp256k1zkp_v0_10_1_ge_neg(&ge[0], &ge[0]); + rustsecp256k1zkp_v0_10_1_ge_neg(&ge[1], &ge[1]); } - rustsecp256k1zkp_v0_10_0_musig_sum_nonces(CTX, summed_nonces, pubnonce_ptr, 2); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&summed_nonces[0])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&summed_nonces[1])); + rustsecp256k1zkp_v0_10_1_musig_sum_nonces(CTX, summed_nonces, pubnonce_ptr, 2); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&summed_nonces[0])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&summed_nonces[1])); } int memcmp_and_randomize(unsigned char *value, const unsigned char *expected, size_t len) { int ret; size_t i; - ret = rustsecp256k1zkp_v0_10_0_memcmp_var(value, expected, len); + ret = 
rustsecp256k1zkp_v0_10_1_memcmp_var(value, expected, len); for (i = 0; i < len; i++) { - value[i] = rustsecp256k1zkp_v0_10_0_testrand_bits(8); + value[i] = rustsecp256k1zkp_v0_10_1_testrand_bits(8); } return ret; } -static void musig_api_tests(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { - rustsecp256k1zkp_v0_10_0_scratch_space *scratch_small; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig[2]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_partial_sig invalid_partial_sig; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *invalid_partial_sig_ptr[2]; +static void musig_api_tests(rustsecp256k1zkp_v0_10_1_scratch_space *scratch) { + rustsecp256k1zkp_v0_10_1_scratch_space *scratch_small; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig[2]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_partial_sig invalid_partial_sig; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *invalid_partial_sig_ptr[2]; unsigned char final_sig[64]; unsigned char pre_sig[64]; unsigned char buf[32]; unsigned char sk[2][32]; - rustsecp256k1zkp_v0_10_0_keypair keypair[2]; - rustsecp256k1zkp_v0_10_0_keypair invalid_keypair; + rustsecp256k1zkp_v0_10_1_keypair keypair[2]; + rustsecp256k1zkp_v0_10_1_keypair invalid_keypair; unsigned char max64[64]; unsigned char zeros132[132] = { 0 }; unsigned char session_id[2][32]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce[2]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce_tmp; - rustsecp256k1zkp_v0_10_0_musig_secnonce invalid_secnonce; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce[2]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce_tmp; + rustsecp256k1zkp_v0_10_1_musig_secnonce invalid_secnonce; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[2]; + const 
rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[2]; unsigned char pubnonce_ser[66]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce inf_pubnonce[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *inf_pubnonce_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce invalid_pubnonce; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *invalid_pubnonce_ptr[1]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_pubnonce inf_pubnonce[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *inf_pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_pubnonce invalid_pubnonce; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *invalid_pubnonce_ptr[1]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; unsigned char aggnonce_ser[66]; unsigned char msg[32]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk; - rustsecp256k1zkp_v0_10_0_pubkey full_agg_pk; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache invalid_keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_session invalid_session; - rustsecp256k1zkp_v0_10_0_pubkey pk[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[2]; - rustsecp256k1zkp_v0_10_0_pubkey invalid_pk; - const rustsecp256k1zkp_v0_10_0_pubkey *invalid_pk_ptr2[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *invalid_pk_ptr3[3]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk; + rustsecp256k1zkp_v0_10_1_pubkey full_agg_pk; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache invalid_keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_session invalid_session; + rustsecp256k1zkp_v0_10_1_pubkey pk[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[2]; + rustsecp256k1zkp_v0_10_1_pubkey invalid_pk; + const rustsecp256k1zkp_v0_10_1_pubkey *invalid_pk_ptr2[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *invalid_pk_ptr3[3]; unsigned char tweak[32]; int 
nonce_parity; unsigned char sec_adaptor[32]; unsigned char sec_adaptor1[32]; - rustsecp256k1zkp_v0_10_0_pubkey adaptor; + rustsecp256k1zkp_v0_10_1_pubkey adaptor; int i; /** setup **/ @@ -174,10 +174,10 @@ static void musig_api_tests(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { memset(&invalid_pubnonce, 0, sizeof(invalid_pubnonce)); memset(&invalid_session, 0, sizeof(invalid_session)); - rustsecp256k1zkp_v0_10_0_testrand256(sec_adaptor); - rustsecp256k1zkp_v0_10_0_testrand256(msg); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &adaptor, sec_adaptor) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(sec_adaptor); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &adaptor, sec_adaptor) == 1); for (i = 0; i < 2; i++) { pk_ptr[i] = &pk[i]; invalid_pk_ptr2[i] = &invalid_pk; @@ -186,8 +186,8 @@ static void musig_api_tests(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { inf_pubnonce_ptr[i] = &inf_pubnonce[i]; partial_sig_ptr[i] = &partial_sig[i]; invalid_partial_sig_ptr[i] = &partial_sig[i]; - rustsecp256k1zkp_v0_10_0_testrand256(session_id[i]); - rustsecp256k1zkp_v0_10_0_testrand256(sk[i]); + rustsecp256k1zkp_v0_10_1_testrand256(session_id[i]); + rustsecp256k1zkp_v0_10_1_testrand256(sk[i]); CHECK(create_keypair_and_pk(&keypair[i], &pk[i], sk[i])); } invalid_pubnonce_ptr[0] = &invalid_pubnonce; @@ -199,42 +199,42 @@ static void musig_api_tests(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { /** main test body **/ /** Key aggregation **/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); /* pubkey_agg does not require a scratch space */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, NULL, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, NULL, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); /* A small scratch space works too, but will result in using an ineffecient algorithm */ - scratch_small = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch_small, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch_small); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, NULL, &keyagg_cache, pk_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, NULL, pk_ptr, 2) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, NULL, 2)); + scratch_small = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch_small, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch_small); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, NULL, &keyagg_cache, pk_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, NULL, pk_ptr, 2) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, NULL, 2)); CHECK(memcmp_and_randomize(agg_pk.data, zeros132, sizeof(agg_pk.data)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, invalid_pk_ptr2, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, invalid_pk_ptr2, 2)); CHECK(memcmp_and_randomize(agg_pk.data, zeros132, sizeof(agg_pk.data)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, invalid_pk_ptr3, 3)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, invalid_pk_ptr3, 3)); 
CHECK(memcmp_and_randomize(agg_pk.data, zeros132, sizeof(agg_pk.data)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 0)); CHECK(memcmp_and_randomize(agg_pk.data, zeros132, sizeof(agg_pk.data)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, NULL, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, NULL, 0)); CHECK(memcmp_and_randomize(agg_pk.data, zeros132, sizeof(agg_pk.data)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk, &keyagg_cache, pk_ptr, 2) == 1); /* pubkey_get */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_get(CTX, &full_agg_pk, &keyagg_cache) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_get(CTX, NULL, &keyagg_cache)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubkey_get(CTX, &full_agg_pk, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&full_agg_pk, zeros132, sizeof(full_agg_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_get(CTX, &full_agg_pk, &keyagg_cache) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_get(CTX, NULL, &keyagg_cache)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubkey_get(CTX, &full_agg_pk, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&full_agg_pk, zeros132, sizeof(full_agg_pk)) == 0); /** Tweaking **/ { - int (*tweak_func[2]) (const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32); - tweak_func[0] = rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add; - tweak_func[1] = 
rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add; + int (*tweak_func[2]) (const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *output_pubkey, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, const unsigned char *tweak32); + tweak_func[0] = rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add; + tweak_func[1] = rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add; for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_pubkey tmp_output_pk; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache tmp_keyagg_cache = keyagg_cache; + rustsecp256k1zkp_v0_10_1_pubkey tmp_output_pk; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache tmp_keyagg_cache = keyagg_cache; CHECK((*tweak_func[i])(CTX, &tmp_output_pk, &tmp_keyagg_cache, tweak) == 1); /* Reset keyagg_cache */ tmp_keyagg_cache = keyagg_cache; @@ -258,229 +258,229 @@ static void musig_api_tests(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { } /** Session creation **/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(STATIC_CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, NULL, &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], NULL, session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], NULL, sk[0], &pk[0], msg, &keyagg_cache, max64)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(STATIC_CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, 
&keyagg_cache, max64)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, NULL, &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], NULL, session_id[0], sk[0], &pk[0], msg, &keyagg_cache, max64)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], NULL, sk[0], &pk[0], msg, &keyagg_cache, max64)); CHECK(memcmp_and_randomize(secnonce[0].data, zeros132, sizeof(secnonce[0].data)) == 0); /* no seckey and session_id is 0 */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], zeros132, NULL, &pk[0], msg, &keyagg_cache, max64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], zeros132, NULL, &pk[0], msg, &keyagg_cache, max64) == 0); CHECK(memcmp_and_randomize(secnonce[0].data, zeros132, sizeof(secnonce[0].data)) == 0); /* session_id 0 is fine when a seckey is provided */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], zeros132, sk[0], &pk[0], msg, &keyagg_cache, max64) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], NULL, &pk[0], msg, &keyagg_cache, max64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], zeros132, sk[0], &pk[0], msg, &keyagg_cache, max64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], NULL, &pk[0], msg, &keyagg_cache, max64) == 1); /* invalid seckey */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], max64, &pk[0], msg, &keyagg_cache, max64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], max64, &pk[0], msg, &keyagg_cache, max64) == 0); CHECK(memcmp_and_randomize(secnonce[0].data, zeros132, sizeof(secnonce[0].data)) == 0); - CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], NULL, msg, &keyagg_cache, max64)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &invalid_pk, msg, &keyagg_cache, max64)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], NULL, &keyagg_cache, max64) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, NULL, max64) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &invalid_keyagg_cache, max64)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], NULL, msg, &keyagg_cache, max64)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &invalid_pk, msg, &keyagg_cache, max64)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], NULL, &keyagg_cache, max64) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, NULL, max64) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &invalid_keyagg_cache, max64)); CHECK(memcmp_and_randomize(secnonce[0].data, zeros132, sizeof(secnonce[0].data)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk[0], &pk[0], msg, &keyagg_cache, NULL) == 1); /* Every in-argument except session_id and pubkey can be NULL */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, 
&secnonce[0], &pubnonce[0], session_id[0], NULL, &pk[0], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[1], &pubnonce[1], session_id[1], sk[1], &pk[1], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], NULL, &pk[0], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[1], &pubnonce[1], session_id[1], sk[1], &pk[1], NULL, NULL, NULL) == 1); /** Serialize and parse public nonces **/ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, NULL, &pubnonce[0])); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, pubnonce_ser, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, NULL, &pubnonce[0])); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, pubnonce_ser, NULL)); CHECK(memcmp_and_randomize(pubnonce_ser, zeros132, sizeof(pubnonce_ser)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, pubnonce_ser, &invalid_pubnonce)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, pubnonce_ser, &invalid_pubnonce)); CHECK(memcmp_and_randomize(pubnonce_ser, zeros132, sizeof(pubnonce_ser)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, pubnonce_ser, &pubnonce[0]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, pubnonce_ser, &pubnonce[0]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[0], pubnonce_ser) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, NULL, pubnonce_ser)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[0], NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[0], zeros132) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[0], pubnonce_ser) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[0], pubnonce_ser) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, NULL, pubnonce_ser)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[0], NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[0], zeros132) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[0], pubnonce_ser) == 1); { /* Check that serialize and parse results in the same value */ - rustsecp256k1zkp_v0_10_0_musig_pubnonce tmp; - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, pubnonce_ser, &pubnonce[0]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &tmp, pubnonce_ser) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp, &pubnonce[0], sizeof(tmp)) == 0); + rustsecp256k1zkp_v0_10_1_musig_pubnonce tmp; + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, pubnonce_ser, &pubnonce[0]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &tmp, pubnonce_ser) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp, &pubnonce[0], sizeof(tmp)) == 0); } /** Receive nonces and aggregate **/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, NULL, pubnonce_ptr, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, NULL, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, invalid_pubnonce_ptr, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, inf_pubnonce_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, NULL, pubnonce_ptr, 2)); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, NULL, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, invalid_pubnonce_ptr, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, inf_pubnonce_ptr, 2) == 1); { /* Check that the aggnonce encodes two points at infinity */ - rustsecp256k1zkp_v0_10_0_ge aggnonce_pt[2]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce_load(CTX, aggnonce_pt, &aggnonce); + rustsecp256k1zkp_v0_10_1_ge aggnonce_pt[2]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce_load(CTX, aggnonce_pt, &aggnonce); for (i = 0; i < 2; i++) { - rustsecp256k1zkp_v0_10_0_ge_is_infinity(&aggnonce_pt[i]); + rustsecp256k1zkp_v0_10_1_ge_is_infinity(&aggnonce_pt[i]); } } - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); /** Serialize and parse aggregate nonces **/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, NULL, &aggnonce)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce_ser, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, NULL, &aggnonce)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce_ser, NULL)); CHECK(memcmp_and_randomize(aggnonce_ser, zeros132, sizeof(aggnonce_ser)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce_ser, (rustsecp256k1zkp_v0_10_0_musig_aggnonce*) &invalid_pubnonce)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce_ser, (rustsecp256k1zkp_v0_10_1_musig_aggnonce*) 
&invalid_pubnonce)); CHECK(memcmp_and_randomize(aggnonce_ser, zeros132, sizeof(aggnonce_ser)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, aggnonce_ser) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, NULL, aggnonce_ser)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, zeros132) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, aggnonce_ser) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, aggnonce_ser) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, NULL, aggnonce_ser)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, zeros132) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, aggnonce_ser) == 1); { /* Check that serialize and parse results in the same value */ - rustsecp256k1zkp_v0_10_0_musig_aggnonce tmp; - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &tmp, aggnonce_ser) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp, &aggnonce, sizeof(tmp)) == 0); + rustsecp256k1zkp_v0_10_1_musig_aggnonce tmp; + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce_ser, &aggnonce) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &tmp, aggnonce_ser) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp, &aggnonce, sizeof(tmp)) == 0); } /** Process nonces **/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, 
&keyagg_cache, &adaptor) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, NULL, &aggnonce, msg, &keyagg_cache, &adaptor)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, NULL, msg, &keyagg_cache, &adaptor)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, (rustsecp256k1zkp_v0_10_0_musig_aggnonce*) &invalid_pubnonce, msg, &keyagg_cache, &adaptor)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, NULL, &keyagg_cache, &adaptor)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, NULL, &adaptor)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, &invalid_keyagg_cache, &adaptor)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, (rustsecp256k1zkp_v0_10_0_pubkey *)&invalid_pk)); - - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, &adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, &adaptor) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, NULL, &aggnonce, msg, &keyagg_cache, &adaptor)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, NULL, msg, &keyagg_cache, &adaptor)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, (rustsecp256k1zkp_v0_10_1_musig_aggnonce*) &invalid_pubnonce, msg, &keyagg_cache, &adaptor)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, NULL, &keyagg_cache, &adaptor)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, NULL, &adaptor)); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &invalid_keyagg_cache, &adaptor)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, (rustsecp256k1zkp_v0_10_1_pubkey *)&invalid_pk)); + + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, &keyagg_cache, &adaptor) == 1); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &session) == 1); /* The secnonce is set to 0 and subsequent signing attempts fail */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&secnonce_tmp, zeros132, sizeof(secnonce_tmp)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&secnonce_tmp, zeros132, sizeof(secnonce_tmp)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, NULL, &secnonce_tmp, &keypair[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, NULL, &secnonce_tmp, &keypair[0], &keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], NULL, &keypair[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], 
&invalid_secnonce, &keypair[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, NULL, &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], NULL, &keypair[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &invalid_secnonce, &keypair[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, NULL, &keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &invalid_keypair, &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &invalid_keypair, &keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); { unsigned char sk_tmp[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair_tmp; - rustsecp256k1zkp_v0_10_0_testrand256(sk_tmp); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair_tmp, sk_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair_tmp, &keyagg_cache, &session)); + rustsecp256k1zkp_v0_10_1_keypair keypair_tmp; + rustsecp256k1zkp_v0_10_1_testrand256(sk_tmp); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair_tmp, sk_tmp)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair_tmp, &keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); } - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], NULL, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], NULL, 
&session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &invalid_keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &invalid_keyagg_cache, &session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, NULL)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &invalid_session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce_tmp, &keypair[0], &keyagg_cache, &invalid_session)); memcpy(&secnonce_tmp, &secnonce[0], sizeof(secnonce_tmp)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce[0], &keypair[0], &keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[1], &secnonce[1], &keypair[1], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce[0], &keypair[0], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[1], &secnonce[1], &keypair[1], &keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, buf, &partial_sig[0]) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, NULL, &partial_sig[0])); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, buf, NULL)); - 
CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig[0], buf) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, NULL, buf)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig[0], max64) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig[0], NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, buf, &partial_sig[0]) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, NULL, &partial_sig[0])); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, buf, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig[0], buf) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, NULL, buf)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig[0], max64) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig[0], NULL)); { /* Check that serialize and parse results in the same value */ - rustsecp256k1zkp_v0_10_0_musig_partial_sig tmp; - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, buf, &partial_sig[0]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &tmp, buf) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp, &partial_sig[0], sizeof(tmp)) == 0); + rustsecp256k1zkp_v0_10_1_musig_partial_sig tmp; + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, buf, &partial_sig[0]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &tmp, buf) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp, &partial_sig[0], sizeof(tmp)) == 0); } /** Partial signature verification */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[0], &pk[0], 
&keyagg_cache, &session) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, NULL, &pubnonce[0], &pk[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &invalid_partial_sig, &pubnonce[0], &pk[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], NULL, &pk[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &invalid_pubnonce, &pk[0], &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], NULL, &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &invalid_pk, &keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], NULL, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &invalid_keyagg_cache, &session)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &invalid_session)); - - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[1], &pk[1], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[0], &pk[0], &keyagg_cache, &session) == 0); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, NULL, &pubnonce[0], &pk[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &invalid_partial_sig, &pubnonce[0], &pk[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], NULL, &pk[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &invalid_pubnonce, &pk[0], &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], NULL, &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &invalid_pk, &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], NULL, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &invalid_keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &invalid_session)); + + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], &keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[1], &pk[1], &keyagg_cache, &session) == 1); /** Signature aggregation and verification */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 2) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, NULL, &session, partial_sig_ptr, 2)); - CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, NULL, partial_sig_ptr, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &invalid_session, partial_sig_ptr, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, NULL, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, invalid_partial_sig_ptr, 2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 1) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 2) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, NULL, &session, partial_sig_ptr, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, NULL, partial_sig_ptr, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &invalid_session, partial_sig_ptr, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, NULL, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, invalid_partial_sig_ptr, 2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 1) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig, &session, partial_sig_ptr, 2) == 1); /** Adaptor signature verification */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, &nonce_parity, &session) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, NULL, &session)); - CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, &nonce_parity, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, &nonce_parity, &invalid_session)); - - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, nonce_parity) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, NULL, pre_sig, sec_adaptor, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, NULL, sec_adaptor, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, max64, sec_adaptor, 0) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, pre_sig, NULL, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, pre_sig, max64, 0) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, &nonce_parity, &session) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, NULL, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, &nonce_parity, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, &nonce_parity, &invalid_session)); + + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, nonce_parity) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, NULL, pre_sig, sec_adaptor, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, NULL, sec_adaptor, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, max64, sec_adaptor, 0) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, pre_sig, NULL, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, pre_sig, max64, 0) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, 2)); /* sig and pre_sig argument point to the same location */ memcpy(final_sig, pre_sig, 
sizeof(final_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, final_sig, sec_adaptor, nonce_parity) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), &agg_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, final_sig, sec_adaptor, nonce_parity) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), &agg_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, nonce_parity) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), &agg_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig, pre_sig, sec_adaptor, nonce_parity) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), &agg_pk) == 1); /** Secret adaptor can be extracted from signature */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, nonce_parity) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sec_adaptor, sec_adaptor1, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, nonce_parity) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sec_adaptor, sec_adaptor1, 32) == 0); /* wrong nonce parity */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, !nonce_parity) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sec_adaptor, sec_adaptor1, 32) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, NULL, final_sig, pre_sig, 0)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, NULL, pre_sig, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, max64, pre_sig, 0) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, NULL, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, 
sec_adaptor1, final_sig, max64, 0) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, !nonce_parity) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sec_adaptor, sec_adaptor1, 32) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, NULL, final_sig, pre_sig, 0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, NULL, pre_sig, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, max64, pre_sig, 0) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, NULL, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, max64, 0) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor1, final_sig, pre_sig, 2)); } static void musig_nonce_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes) { - rustsecp256k1zkp_v0_10_0_scalar k1[2], k2[2]; + rustsecp256k1zkp_v0_10_1_scalar k1[2], k2[2]; - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k1, args[0], args[1], args[2], args[3], args[4], args[5]); - rustsecp256k1zkp_v0_10_0_testrand_flip(args[n_flip], n_bytes); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k2, args[0], args[1], args[2], args[3], args[4], args[5]); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&k1[0], &k2[0]) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&k1[1], &k2[1]) == 0); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k1, args[0], args[1], args[2], args[3], args[4], args[5]); + rustsecp256k1zkp_v0_10_1_testrand_flip(args[n_flip], n_bytes); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k2, args[0], args[1], args[2], args[3], args[4], args[5]); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&k1[0], &k2[0]) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&k1[1], &k2[1]) == 0); } 
static void musig_nonce_test(void) { @@ -492,14 +492,14 @@ static void musig_nonce_test(void) { unsigned char agg_pk[32]; unsigned char extra_input[32]; int i, j; - rustsecp256k1zkp_v0_10_0_scalar k[6][2]; + rustsecp256k1zkp_v0_10_1_scalar k[6][2]; - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(session_id, sizeof(session_id)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(sk, sizeof(sk)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(pk, sizeof(pk)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(msg, sizeof(msg)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(agg_pk, sizeof(agg_pk)); - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(extra_input, sizeof(extra_input)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(session_id, sizeof(session_id)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(sk, sizeof(sk)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(pk, sizeof(pk)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(msg, sizeof(msg)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(agg_pk, sizeof(agg_pk)); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(extra_input, sizeof(extra_input)); /* Check that a bitflip in an argument results in different nonces. 
*/ args[0] = session_id; @@ -523,22 +523,22 @@ static void musig_nonce_test(void) { memcpy(pk, session_id, sizeof(session_id)); memcpy(agg_pk, session_id, sizeof(agg_pk)); memcpy(extra_input, session_id, sizeof(extra_input)); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[0], args[0], args[1], args[2], args[3], args[4], args[5]); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[1], args[0], NULL, args[2], args[3], args[4], args[5]); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[2], args[0], args[1], NULL, args[3], args[4], args[5]); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[3], args[0], args[1], args[2], NULL, args[4], args[5]); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[4], args[0], args[1], args[2], args[3], NULL, args[5]); - rustsecp256k1zkp_v0_10_0_nonce_function_musig(k[5], args[0], args[1], args[2], args[3], args[4], NULL); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[0], args[0], args[1], args[2], args[3], args[4], args[5]); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[1], args[0], NULL, args[2], args[3], args[4], args[5]); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[2], args[0], args[1], NULL, args[3], args[4], args[5]); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[3], args[0], args[1], args[2], NULL, args[4], args[5]); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[4], args[0], args[1], args[2], args[3], NULL, args[5]); + rustsecp256k1zkp_v0_10_1_nonce_function_musig(k[5], args[0], args[1], args[2], args[3], args[4], NULL); for (i = 0; i < 6; i++) { - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&k[i][0], &k[i][1])); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&k[i][0], &k[i][1])); for (j = i+1; j < 6; j++) { - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&k[i][0], &k[j][0])); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&k[i][1], &k[j][1])); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&k[i][0], &k[j][0])); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&k[i][1], &k[j][1])); } } } -static void 
scriptless_atomic_swap(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { +static void scriptless_atomic_swap(rustsecp256k1zkp_v0_10_1_scratch_space *scratch) { /* Throughout this test "a" and "b" refer to two hypothetical blockchains, * while the indices 0 and 1 refer to the two signers. Here signer 0 is * sending a-coins to signer 1, while signer 1 is sending b-coins to signer @@ -547,34 +547,34 @@ static void scriptless_atomic_swap(rustsecp256k1zkp_v0_10_0_scratch_space *scrat unsigned char final_sig_a[64]; unsigned char pre_sig_b[64]; unsigned char final_sig_b[64]; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig_a[2]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_a_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig_b[2]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_b_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig_a[2]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_a_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig_b[2]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_b_ptr[2]; unsigned char sec_adaptor[32]; unsigned char sec_adaptor_extracted[32]; - rustsecp256k1zkp_v0_10_0_pubkey pub_adaptor; + rustsecp256k1zkp_v0_10_1_pubkey pub_adaptor; unsigned char sk_a[2][32]; unsigned char sk_b[2][32]; - rustsecp256k1zkp_v0_10_0_keypair keypair_a[2]; - rustsecp256k1zkp_v0_10_0_keypair keypair_b[2]; - rustsecp256k1zkp_v0_10_0_pubkey pk_a[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_a_ptr[2]; - rustsecp256k1zkp_v0_10_0_pubkey pk_b[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_b_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache_a; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache_b; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk_a; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk_b; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce_a[2]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce_b[2]; - 
rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce_a[2]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce_b[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr_a[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr_b[2]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce_a; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce_b; - rustsecp256k1zkp_v0_10_0_musig_session session_a, session_b; + rustsecp256k1zkp_v0_10_1_keypair keypair_a[2]; + rustsecp256k1zkp_v0_10_1_keypair keypair_b[2]; + rustsecp256k1zkp_v0_10_1_pubkey pk_a[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_a_ptr[2]; + rustsecp256k1zkp_v0_10_1_pubkey pk_b[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_b_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache_a; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache_b; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk_a; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk_b; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce_a[2]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce_b[2]; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce_a[2]; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce_b[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr_a[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr_b[2]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce_a; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce_b; + rustsecp256k1zkp_v0_10_1_musig_session session_a, session_b; int nonce_parity_a; int nonce_parity_b; unsigned char seed_a[2][32] = { "a0", "a1" }; @@ -592,71 +592,71 @@ static void scriptless_atomic_swap(rustsecp256k1zkp_v0_10_0_scratch_space *scrat partial_sig_a_ptr[i] = &partial_sig_a[i]; partial_sig_b_ptr[i] = &partial_sig_b[i]; - rustsecp256k1zkp_v0_10_0_testrand256(sk_a[i]); - rustsecp256k1zkp_v0_10_0_testrand256(sk_b[i]); + rustsecp256k1zkp_v0_10_1_testrand256(sk_a[i]); + rustsecp256k1zkp_v0_10_1_testrand256(sk_b[i]); CHECK(create_keypair_and_pk(&keypair_a[i], &pk_a[i], 
sk_a[i]) == 1); CHECK(create_keypair_and_pk(&keypair_b[i], &pk_b[i], sk_b[i]) == 1); } - rustsecp256k1zkp_v0_10_0_testrand256(sec_adaptor); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pub_adaptor, sec_adaptor) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(sec_adaptor); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pub_adaptor, sec_adaptor) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk_a, &keyagg_cache_a, pk_a_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &agg_pk_b, &keyagg_cache_b, pk_b_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk_a, &keyagg_cache_a, pk_a_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &agg_pk_b, &keyagg_cache_b, pk_b_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce_a[0], &pubnonce_a[0], seed_a[0], sk_a[0], &pk_a[0], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce_a[1], &pubnonce_a[1], seed_a[1], sk_a[1], &pk_a[1], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce_b[0], &pubnonce_b[0], seed_b[0], sk_b[0], &pk_b[0], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce_b[1], &pubnonce_b[1], seed_b[1], sk_b[1], &pk_b[1], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce_a[0], &pubnonce_a[0], seed_a[0], sk_a[0], &pk_a[0], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce_a[1], &pubnonce_a[1], seed_a[1], sk_a[1], &pk_a[1], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce_b[0], &pubnonce_b[0], seed_b[0], sk_b[0], &pk_b[0], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce_b[1], &pubnonce_b[1], seed_b[1], sk_b[1], &pk_b[1], NULL, NULL, NULL) == 1); /* Step 2: Exchange nonces */ - 
CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce_a, pubnonce_ptr_a, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session_a, &aggnonce_a, msg32_a, &keyagg_cache_a, &pub_adaptor) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, &nonce_parity_a, &session_a) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce_b, pubnonce_ptr_b, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session_b, &aggnonce_b, msg32_b, &keyagg_cache_b, &pub_adaptor) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_parity(CTX, &nonce_parity_b, &session_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce_a, pubnonce_ptr_a, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session_a, &aggnonce_a, msg32_a, &keyagg_cache_a, &pub_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, &nonce_parity_a, &session_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce_b, pubnonce_ptr_b, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session_b, &aggnonce_b, msg32_b, &keyagg_cache_b, &pub_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_parity(CTX, &nonce_parity_b, &session_b) == 1); /* Step 3: Signer 0 produces partial signatures for both chains. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig_a[0], &secnonce_a[0], &keypair_a[0], &keyagg_cache_a, &session_a) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig_b[0], &secnonce_b[0], &keypair_b[0], &keyagg_cache_b, &session_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig_a[0], &secnonce_a[0], &keypair_a[0], &keyagg_cache_a, &session_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig_b[0], &secnonce_b[0], &keypair_b[0], &keyagg_cache_b, &session_b) == 1); /* Step 4: Signer 1 receives partial signatures, verifies them and creates a * partial signature to send B-coins to signer 0. */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig_a[0], &pubnonce_a[0], &pk_a[0], &keyagg_cache_a, &session_a) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig_b[0], &pubnonce_b[0], &pk_b[0], &keyagg_cache_b, &session_b) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig_b[1], &secnonce_b[1], &keypair_b[1], &keyagg_cache_b, &session_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig_a[0], &pubnonce_a[0], &pk_a[0], &keyagg_cache_a, &session_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig_b[0], &pubnonce_b[0], &pk_b[0], &keyagg_cache_b, &session_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig_b[1], &secnonce_b[1], &keypair_b[1], &keyagg_cache_b, &session_b) == 1); /* Step 5: Signer 0 aggregates its own partial signature with the partial * signature from signer 1 and adapts it. This results in a complete * signature which is broadcasted by signer 0 to take B-coins. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig_b, &session_b, partial_sig_b_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig_b, pre_sig_b, sec_adaptor, nonce_parity_b) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig_b, msg32_b, sizeof(msg32_b), &agg_pk_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig_b, &session_b, partial_sig_b_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig_b, pre_sig_b, sec_adaptor, nonce_parity_b) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig_b, msg32_b, sizeof(msg32_b), &agg_pk_b) == 1); /* Step 6: Signer 1 signs, extracts adaptor from the published signature, * and adapts the signature to take A-coins. */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig_a[1], &secnonce_a[1], &keypair_a[1], &keyagg_cache_a, &session_a) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, pre_sig_a, &session_a, partial_sig_a_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_extract_adaptor(CTX, sec_adaptor_extracted, final_sig_b, pre_sig_b, nonce_parity_b) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sec_adaptor_extracted, sec_adaptor, sizeof(sec_adaptor)) == 0); /* in real life we couldn't check this, of course */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_adapt(CTX, final_sig_a, pre_sig_a, sec_adaptor_extracted, nonce_parity_a) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig_a, msg32_a, sizeof(msg32_a), &agg_pk_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig_a[1], &secnonce_a[1], &keypair_a[1], &keyagg_cache_a, &session_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, pre_sig_a, &session_a, partial_sig_a_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_extract_adaptor(CTX, sec_adaptor_extracted, final_sig_b, pre_sig_b, nonce_parity_b) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sec_adaptor_extracted, sec_adaptor, sizeof(sec_adaptor)) == 0); /* in real life we couldn't check this, of course */ + CHECK(rustsecp256k1zkp_v0_10_1_musig_adapt(CTX, final_sig_a, pre_sig_a, sec_adaptor_extracted, nonce_parity_a) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig_a, msg32_a, sizeof(msg32_a), &agg_pk_a) == 1); } -static void sha256_tag_test_internal(rustsecp256k1zkp_v0_10_0_sha256 *sha_tagged, unsigned char *tag, size_t taglen) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +static void sha256_tag_test_internal(rustsecp256k1zkp_v0_10_1_sha256 *sha_tagged, unsigned char *tag, size_t taglen) { + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char buf[32]; unsigned char buf2[32]; size_t i; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, tag, taglen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, tag, taglen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); /* buf = SHA256(tag) */ - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, 32); /* Is buffer fully consumed? 
*/ CHECK((sha.bytes & 0x3F) == 0); @@ -664,43 +664,43 @@ static void sha256_tag_test_internal(rustsecp256k1zkp_v0_10_0_sha256 *sha_tagged for (i = 0; i < 8; i++) { CHECK(sha_tagged->s[i] == sha.s[i]); } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, buf, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(sha_tagged, buf, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); - rustsecp256k1zkp_v0_10_0_sha256_finalize(sha_tagged, buf2); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf, buf2, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(sha_tagged, buf, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_sha256_finalize(sha_tagged, buf2); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf, buf2, 32) == 0); } /* Checks that the initialized tagged hashes initialized have the expected * state. */ static void sha256_tag_test(void) { - rustsecp256k1zkp_v0_10_0_sha256 sha_tagged; + rustsecp256k1zkp_v0_10_1_sha256 sha_tagged; { char tag[11] = "KeyAgg list"; - rustsecp256k1zkp_v0_10_0_musig_keyagglist_sha256(&sha_tagged); + rustsecp256k1zkp_v0_10_1_musig_keyagglist_sha256(&sha_tagged); sha256_tag_test_internal(&sha_tagged, (unsigned char*)tag, sizeof(tag)); } { char tag[18] = "KeyAgg coefficient"; - rustsecp256k1zkp_v0_10_0_musig_keyaggcoef_sha256(&sha_tagged); + rustsecp256k1zkp_v0_10_1_musig_keyaggcoef_sha256(&sha_tagged); sha256_tag_test_internal(&sha_tagged, (unsigned char*)tag, sizeof(tag)); } } /* Attempts to create a signature for the aggregate public key using given secret * keys and keyagg_cache. 
*/ -static void musig_tweak_test_helper(const rustsecp256k1zkp_v0_10_0_xonly_pubkey* agg_pk, const unsigned char *sk0, const unsigned char *sk1, rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache) { - rustsecp256k1zkp_v0_10_0_pubkey pk[2]; +static void musig_tweak_test_helper(const rustsecp256k1zkp_v0_10_1_xonly_pubkey* agg_pk, const unsigned char *sk0, const unsigned char *sk1, rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache) { + rustsecp256k1zkp_v0_10_1_pubkey pk[2]; unsigned char session_id[2][32]; unsigned char msg[32]; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce[2]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_keypair keypair[2]; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig[2]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce[2]; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_keypair keypair[2]; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig[2]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_ptr[2]; unsigned char final_sig[64]; int i; @@ -708,50 +708,50 @@ static void musig_tweak_test_helper(const rustsecp256k1zkp_v0_10_0_xonly_pubkey* pubnonce_ptr[i] = &pubnonce[i]; partial_sig_ptr[i] = &partial_sig[i]; - rustsecp256k1zkp_v0_10_0_testrand256(session_id[i]); + rustsecp256k1zkp_v0_10_1_testrand256(session_id[i]); } CHECK(create_keypair_and_pk(&keypair[0], &pk[0], sk0) == 1); CHECK(create_keypair_and_pk(&keypair[1], &pk[1], sk1) == 1); - rustsecp256k1zkp_v0_10_0_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(msg); - 
CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk0, &pk[0], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce[1], &pubnonce[1], session_id[1], sk1, &pk[1], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[0], &pubnonce[0], session_id[0], sk0, &pk[0], NULL, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce[1], &pubnonce[1], session_id[1], sk1, &pk[1], NULL, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, msg, keyagg_cache, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, msg, keyagg_cache, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[0], &secnonce[0], &keypair[0], keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig[1], &secnonce[1], &keypair[1], keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[0], &secnonce[0], &keypair[0], keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig[1], &secnonce[1], &keypair[1], keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], keyagg_cache, &session) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[1], &pk[1], keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[0], &pubnonce[0], &pk[0], keyagg_cache, &session) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig[1], &pubnonce[1], &pk[1], keyagg_cache, &session) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, 2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), agg_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig, msg, sizeof(msg), agg_pk) == 1); } /* Create aggregate public key P[0], tweak multiple times (using xonly and * plain tweaking) and test signing. */ -static void musig_tweak_test(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { +static void musig_tweak_test(rustsecp256k1zkp_v0_10_1_scratch_space *scratch) { unsigned char sk[2][32]; - rustsecp256k1zkp_v0_10_0_pubkey pk[2]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_pubkey pk[2]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; enum { N_TWEAKS = 8 }; - rustsecp256k1zkp_v0_10_0_pubkey P[N_TWEAKS + 1]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey P_xonly[N_TWEAKS + 1]; + rustsecp256k1zkp_v0_10_1_pubkey P[N_TWEAKS + 1]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey P_xonly[N_TWEAKS + 1]; int i; /* Key Setup */ for (i = 0; i < 2; i++) { pk_ptr[i] = &pk[i]; - rustsecp256k1zkp_v0_10_0_testrand256(sk[i]); + rustsecp256k1zkp_v0_10_1_testrand256(sk[i]); CHECK(create_keypair_and_pk(NULL, &pk[i], sk[i]) == 1); } /* Compute P0 = keyagg(pk0, pk1) and test signing for it */ - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, scratch, &P_xonly[0], &keyagg_cache, pk_ptr, 2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, scratch, &P_xonly[0], &keyagg_cache, pk_ptr, 2) == 1); musig_tweak_test_helper(&P_xonly[0], sk[0], sk[1], &keyagg_cache); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_get(CTX, &P[0], &keyagg_cache)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_get(CTX, &P[0], &keyagg_cache)); 
/* Compute Pi = f(Pj) + tweaki*G where where j = i-1 and try signing for * that key. If xonly is set to true, the function f is normalizes the input @@ -760,25 +760,25 @@ static void musig_tweak_test(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { for (i = 1; i <= N_TWEAKS; i++) { unsigned char tweak[32]; int P_parity; - int xonly = rustsecp256k1zkp_v0_10_0_testrand_bits(1); + int xonly = rustsecp256k1zkp_v0_10_1_testrand_bits(1); - rustsecp256k1zkp_v0_10_0_testrand256(tweak); + rustsecp256k1zkp_v0_10_1_testrand256(tweak); if (xonly) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add(CTX, &P[i], &keyagg_cache, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add(CTX, &P[i], &keyagg_cache, tweak) == 1); } else { - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add(CTX, &P[i], &keyagg_cache, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add(CTX, &P[i], &keyagg_cache, tweak) == 1); } - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &P_xonly[i], &P_parity, &P[i])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &P_xonly[i], &P_parity, &P[i])); /* Check that musig_pubkey_tweak_add produces same result as * xonly_pubkey_tweak_add or ec_pubkey_tweak_add. 
*/ if (xonly) { unsigned char P_serialized[32]; - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, P_serialized, &P_xonly[i])); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, P_serialized, P_parity, &P_xonly[i-1], tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, P_serialized, &P_xonly[i])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, P_serialized, P_parity, &P_xonly[i-1], tweak) == 1); } else { - rustsecp256k1zkp_v0_10_0_pubkey tmp_key = P[i-1]; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &tmp_key, tweak)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp_key, &P[i], sizeof(tmp_key)) == 0); + rustsecp256k1zkp_v0_10_1_pubkey tmp_key = P[i-1]; + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &tmp_key, tweak)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp_key, &P[i], sizeof(tmp_key)) == 0); } /* Test signing for P[i] */ musig_tweak_test_helper(&P_xonly[i], sk[0], sk[1], &keyagg_cache); @@ -786,7 +786,7 @@ static void musig_tweak_test(rustsecp256k1zkp_v0_10_0_scratch_space *scratch) { } int musig_vectors_keyagg_and_tweak(enum MUSIG_ERROR *error, - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache, + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache, unsigned char *agg_pk_ser, const unsigned char pubkeys33[][33], const unsigned char tweaks32[][32], @@ -795,49 +795,49 @@ int musig_vectors_keyagg_and_tweak(enum MUSIG_ERROR *error, size_t tweak_indices_len, const size_t *tweak_indices, const int *is_xonly) { - rustsecp256k1zkp_v0_10_0_pubkey pubkeys[MUSIG_VECTORS_MAX_PUBKEYS]; - const rustsecp256k1zkp_v0_10_0_pubkey *pk_ptr[MUSIG_VECTORS_MAX_PUBKEYS]; + rustsecp256k1zkp_v0_10_1_pubkey pubkeys[MUSIG_VECTORS_MAX_PUBKEYS]; + const rustsecp256k1zkp_v0_10_1_pubkey *pk_ptr[MUSIG_VECTORS_MAX_PUBKEYS]; int i; - rustsecp256k1zkp_v0_10_0_pubkey agg_pk; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk_xonly; + rustsecp256k1zkp_v0_10_1_pubkey agg_pk; + 
rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk_xonly; for (i = 0; i < (int)key_indices_len; i++) { - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkeys[i], pubkeys33[key_indices[i]], 33)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkeys[i], pubkeys33[key_indices[i]], 33)) { *error = MUSIG_PUBKEY; return 0; } pk_ptr[i] = &pubkeys[i]; } - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_agg(CTX, NULL, NULL, keyagg_cache, pk_ptr, key_indices_len)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_agg(CTX, NULL, NULL, keyagg_cache, pk_ptr, key_indices_len)) { *error = MUSIG_OTHER; return 0; } for (i = 0; i < (int)tweak_indices_len; i++) { if (is_xonly[i]) { - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_xonly_tweak_add(CTX, NULL, keyagg_cache, tweaks32[tweak_indices[i]])) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_xonly_tweak_add(CTX, NULL, keyagg_cache, tweaks32[tweak_indices[i]])) { *error = MUSIG_TWEAK; return 0; } } else { - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_ec_tweak_add(CTX, NULL, keyagg_cache, tweaks32[tweak_indices[i]])) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_ec_tweak_add(CTX, NULL, keyagg_cache, tweaks32[tweak_indices[i]])) { *error = MUSIG_TWEAK; return 0; } } } - if (!rustsecp256k1zkp_v0_10_0_musig_pubkey_get(CTX, &agg_pk, keyagg_cache)) { + if (!rustsecp256k1zkp_v0_10_1_musig_pubkey_get(CTX, &agg_pk, keyagg_cache)) { *error = MUSIG_OTHER; return 0; } - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_from_pubkey(CTX, &agg_pk_xonly, NULL, &agg_pk)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_from_pubkey(CTX, &agg_pk_xonly, NULL, &agg_pk)) { *error = MUSIG_OTHER; return 0; } if (agg_pk_ser != NULL) { - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, agg_pk_ser, &agg_pk_xonly)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, agg_pk_ser, &agg_pk_xonly)) { *error = MUSIG_OTHER; return 0; } @@ -853,17 +853,17 @@ static void musig_test_vectors_keyagg(void) { for (i = 0; i < 
sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { const struct musig_key_agg_valid_test_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; unsigned char agg_pk[32]; CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, agg_pk, vector->pubkeys, vector->tweaks, c->key_indices_len, c->key_indices, 0, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(agg_pk, c->expected, sizeof(agg_pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(agg_pk, c->expected, sizeof(agg_pk)) == 0); } for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { const struct musig_key_agg_error_test_case *c = &vector->error_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; CHECK(!musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, NULL, vector->pubkeys, vector->tweaks, c->key_indices_len, c->key_indices, c->tweak_indices_len, c->tweak_indices, c->is_xonly)); CHECK(c->error == error); @@ -876,14 +876,14 @@ static void musig_test_vectors_noncegen(void) { for (i = 0; i < sizeof(vector->test_case)/sizeof(vector->test_case[0]); i++) { const struct musig_nonce_gen_test_case *c = &vector->test_case[i]; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache *keyagg_cache_ptr = NULL; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache *keyagg_cache_ptr = NULL; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; const unsigned char *sk = NULL; const unsigned char *msg = NULL; const unsigned char *extra_in = NULL; - rustsecp256k1zkp_v0_10_0_pubkey pk; + 
rustsecp256k1zkp_v0_10_1_pubkey pk; unsigned char pubnonce66[66]; if (c->has_sk) { @@ -891,12 +891,12 @@ static void musig_test_vectors_noncegen(void) { } if (c->has_aggpk) { /* Create keyagg_cache from aggpk */ - rustsecp256k1zkp_v0_10_0_keyagg_cache_internal cache_i; - rustsecp256k1zkp_v0_10_0_xonly_pubkey aggpk; + rustsecp256k1zkp_v0_10_1_keyagg_cache_internal cache_i; + rustsecp256k1zkp_v0_10_1_xonly_pubkey aggpk; memset(&cache_i, 0, sizeof(cache_i)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &aggpk, c->aggpk)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(CTX, &cache_i.pk, &aggpk)); - rustsecp256k1zkp_v0_10_0_keyagg_cache_save(&keyagg_cache, &cache_i); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &aggpk, c->aggpk)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(CTX, &cache_i.pk, &aggpk)); + rustsecp256k1zkp_v0_10_1_keyagg_cache_save(&keyagg_cache, &cache_i); keyagg_cache_ptr = &keyagg_cache; } if (c->has_msg) { @@ -906,14 +906,14 @@ static void musig_test_vectors_noncegen(void) { extra_in = c->extra_in; } - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk, c->pk, sizeof(c->pk))); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_gen(CTX, &secnonce, &pubnonce, c->rand_, sk, &pk, msg, keyagg_cache_ptr, extra_in) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&secnonce.data[4], c->expected_secnonce, 2*32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&secnonce.data[4+2*32], &pk, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk, c->pk, sizeof(c->pk))); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_gen(CTX, &secnonce, &pubnonce, c->rand_, sk, &pk, msg, keyagg_cache_ptr, extra_in) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&secnonce.data[4], c->expected_secnonce, 2*32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&secnonce.data[4+2*32], &pk, sizeof(pk)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_serialize(CTX, pubnonce66, &pubnonce) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_serialize(CTX, pubnonce66, &pubnonce) == 1); CHECK(sizeof(c->expected_pubnonce) == sizeof(pubnonce66)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(pubnonce66, c->expected_pubnonce, sizeof(pubnonce66)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(pubnonce66, c->expected_pubnonce, sizeof(pubnonce66)) == 0); } } @@ -925,37 +925,37 @@ static void musig_test_vectors_nonceagg(void) { for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { const struct musig_nonce_agg_test_case *c = &vector->valid_case[i]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[2]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce *pubnonce_ptr[2]; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[2]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[2]; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; unsigned char aggnonce66[66]; for (j = 0; j < 2; j++) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pnonces[c->pnonce_indices[j]]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pnonces[c->pnonce_indices[j]]) == 1); pubnonce_ptr[j] = &pubnonce[j]; } - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_serialize(CTX, aggnonce66, &aggnonce)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(aggnonce66, c->expected, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_serialize(CTX, aggnonce66, &aggnonce)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(aggnonce66, c->expected, 33) == 0); } for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { const struct musig_nonce_agg_test_case *c = &vector->error_case[i]; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[2]; + 
rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[2]; for (j = 0; j < 2; j++) { int expected = c->invalid_nonce_idx != j; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pnonces[c->pnonce_indices[j]])); + CHECK(expected == rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pnonces[c->pnonce_indices[j]])); } } } -static void musig_test_set_secnonce(rustsecp256k1zkp_v0_10_0_musig_secnonce *secnonce, const unsigned char *secnonce64, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey) { - rustsecp256k1zkp_v0_10_0_ge pk; - rustsecp256k1zkp_v0_10_0_scalar k[2]; +static void musig_test_set_secnonce(rustsecp256k1zkp_v0_10_1_musig_secnonce *secnonce, const unsigned char *secnonce64, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey) { + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_scalar k[2]; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k[0], &secnonce64[0], NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k[1], &secnonce64[32], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &pk, pubkey)); - rustsecp256k1zkp_v0_10_0_musig_secnonce_save(secnonce, k, &pk); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k[0], &secnonce64[0], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k[1], &secnonce64[32], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &pk, pubkey)); + rustsecp256k1zkp_v0_10_1_musig_secnonce_save(secnonce, k, &pk); } static void musig_test_vectors_signverify(void) { @@ -965,41 +965,41 @@ static void musig_test_vectors_signverify(void) { for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { const struct musig_valid_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_session session; - 
rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_keypair keypair; unsigned char partial_sig32[32]; - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, vector->sk)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, vector->sk)); CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, NULL, vector->pubkeys, NULL, c->key_indices_len, c->key_indices, 0, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonces[c->aggnonce_index])); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonces[c->aggnonce_index])); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); musig_test_set_secnonce(&secnonce, vector->secnonces[0], &pubkey); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, partial_sig32, &partial_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(partial_sig32, c->expected, sizeof(partial_sig32)) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, partial_sig32, &partial_sig)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(partial_sig32, c->expected, sizeof(partial_sig32)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[0])); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[0])); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); } for (i = 0; i < sizeof(vector->sign_error_case)/sizeof(vector->sign_error_case[0]); i++) { const struct musig_sign_error_case *c = &vector->sign_error_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_keypair keypair; int expected; if (i == 0) { @@ -1015,60 +1015,60 @@ static void musig_test_vectors_signverify(void) { } expected = c->error != MUSIG_AGGNONCE; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonces[c->aggnonce_index])); + CHECK(expected == rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, 
&aggnonce, vector->aggnonces[c->aggnonce_index])); if (!expected) { continue; } - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); musig_test_set_secnonce(&secnonce, vector->secnonces[c->secnonce_index], &pubkey); expected = c->error != MUSIG_SECNONCE; if (expected) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); } else { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); } } for (i = 0; i < sizeof(vector->verify_fail_case)/sizeof(vector->verify_fail_case[0]); i++) { const struct musig_verify_fail_error_case *c = &vector->verify_fail_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; enum { NUM_PUBNONCES = 3 }; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce[NUM_PUBNONCES]; - const rustsecp256k1zkp_v0_10_0_musig_pubnonce 
*pubnonce_ptr[NUM_PUBNONCES]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce[NUM_PUBNONCES]; + const rustsecp256k1zkp_v0_10_1_musig_pubnonce *pubnonce_ptr[NUM_PUBNONCES]; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; int expected; size_t j; CHECK(NUM_PUBNONCES <= c->nonce_indices_len); for (j = 0; j < c->nonce_indices_len; j++) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pubnonces[c->nonce_indices[j]])); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce[j], vector->pubnonces[c->nonce_indices[j]])); pubnonce_ptr[j] = &pubnonce[j]; } CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, NULL, vector->pubkeys, NULL, c->key_indices_len, c->key_indices, 0, NULL, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, c->nonce_indices_len) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_agg(CTX, &aggnonce, pubnonce_ptr, c->nonce_indices_len) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, vector->msgs[c->msg_index], &keyagg_cache, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[c->signer_index], sizeof(vector->pubkeys[0]))); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[c->signer_index], sizeof(vector->pubkeys[0]))); expected = c->error != MUSIG_SIG; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig, c->sig)); + CHECK(expected == rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig, c->sig)); if (!expected) { continue; } expected = c->error != MUSIG_SIG_VERIFY; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig, pubnonce, &pubkey, &keyagg_cache, &session)); + CHECK(expected == 
rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig, pubnonce, &pubkey, &keyagg_cache, &session)); } for (i = 0; i < sizeof(vector->verify_error_case)/sizeof(vector->verify_error_case[0]); i++) { const struct musig_verify_fail_error_case *c = &vector->verify_error_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; int expected; expected = c->error != MUSIG_PUBKEY; @@ -1078,48 +1078,48 @@ static void musig_test_vectors_signverify(void) { continue; } expected = c->error != MUSIG_PUBNONCE; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[c->nonce_indices[c->signer_index]])); + CHECK(expected == rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[c->nonce_indices[c->signer_index]])); } } static void musig_test_vectors_tweak(void) { size_t i; const struct musig_tweak_vector *vector = &musig_tweak_vector; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_secnonce secnonce; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_secnonce secnonce; - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonce)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, vector->aggnonce)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, vector->pubkeys[0], sizeof(vector->pubkeys[0]))); for (i = 0; i < sizeof(vector->valid_case)/sizeof(vector->valid_case[0]); i++) { const struct musig_tweak_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; - 
rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; - rustsecp256k1zkp_v0_10_0_musig_pubnonce pubnonce; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_pubnonce pubnonce; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig; + rustsecp256k1zkp_v0_10_1_keypair keypair; unsigned char partial_sig32[32]; musig_test_set_secnonce(&secnonce, vector->secnonce, &pubkey); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, vector->sk)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, vector->sk)); CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, NULL, vector->pubkeys, vector->tweaks, c->key_indices_len, c->key_indices, c->tweak_indices_len, c->tweak_indices, c->is_xonly)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, vector->msg, &keyagg_cache, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, vector->msg, &keyagg_cache, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_serialize(CTX, partial_sig32, &partial_sig)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(partial_sig32, c->expected, sizeof(partial_sig32)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sign(CTX, &partial_sig, &secnonce, &keypair, &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_serialize(CTX, partial_sig32, &partial_sig)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(partial_sig32, c->expected, sizeof(partial_sig32)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[c->nonce_indices[c->signer_index]])); - 
CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_pubnonce_parse(CTX, &pubnonce, vector->pubnonces[c->nonce_indices[c->signer_index]])); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_verify(CTX, &partial_sig, &pubnonce, &pubkey, &keyagg_cache, &session)); } for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { const struct musig_tweak_case *c = &vector->error_case[i]; enum MUSIG_ERROR error; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; CHECK(!musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, NULL, vector->pubkeys, vector->tweaks, c->key_indices_len, c->key_indices, c->tweak_indices_len, c->tweak_indices, c->is_xonly)); CHECK(error == MUSIG_TWEAK); } @@ -1133,41 +1133,41 @@ static void musig_test_vectors_sigagg(void) { const struct musig_sig_agg_case *c = &vector->valid_case[i]; enum MUSIG_ERROR error; unsigned char final_sig[64]; - rustsecp256k1zkp_v0_10_0_musig_keyagg_cache keyagg_cache; + rustsecp256k1zkp_v0_10_1_musig_keyagg_cache keyagg_cache; unsigned char agg_pk32[32]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey agg_pk; - rustsecp256k1zkp_v0_10_0_musig_aggnonce aggnonce; - rustsecp256k1zkp_v0_10_0_musig_session session; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; - const rustsecp256k1zkp_v0_10_0_musig_partial_sig *partial_sig_ptr[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey agg_pk; + rustsecp256k1zkp_v0_10_1_musig_aggnonce aggnonce; + rustsecp256k1zkp_v0_10_1_musig_session session; + rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; + const rustsecp256k1zkp_v0_10_1_musig_partial_sig *partial_sig_ptr[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; 
CHECK(musig_vectors_keyagg_and_tweak(&error, &keyagg_cache, agg_pk32, vector->pubkeys, vector->tweaks, c->key_indices_len, c->key_indices, c->tweak_indices_len, c->tweak_indices, c->is_xonly)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_aggnonce_parse(CTX, &aggnonce, c->aggnonce)); - CHECK(rustsecp256k1zkp_v0_10_0_musig_nonce_process(CTX, &session, &aggnonce, vector->msg, &keyagg_cache, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_aggnonce_parse(CTX, &aggnonce, c->aggnonce)); + CHECK(rustsecp256k1zkp_v0_10_1_musig_nonce_process(CTX, &session, &aggnonce, vector->msg, &keyagg_cache, NULL)); for (j = 0; j < c->psig_indices_len; j++) { - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig[j], vector->psigs[c->psig_indices[j]])); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig[j], vector->psigs[c->psig_indices[j]])); partial_sig_ptr[j] = &partial_sig[j]; } - CHECK(rustsecp256k1zkp_v0_10_0_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, c->psig_indices_len) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(final_sig, c->expected, sizeof(final_sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_musig_partial_sig_agg(CTX, final_sig, &session, partial_sig_ptr, c->psig_indices_len) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(final_sig, c->expected, sizeof(final_sig)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &agg_pk, agg_pk32)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, final_sig, vector->msg, sizeof(vector->msg), &agg_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &agg_pk, agg_pk32)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, final_sig, vector->msg, sizeof(vector->msg), &agg_pk) == 1); } for (i = 0; i < sizeof(vector->error_case)/sizeof(vector->error_case[0]); i++) { const struct musig_sig_agg_case *c = &vector->error_case[i]; - rustsecp256k1zkp_v0_10_0_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; + 
rustsecp256k1zkp_v0_10_1_musig_partial_sig partial_sig[(sizeof(vector->psigs)/sizeof(vector->psigs[0]))]; for (j = 0; j < c->psig_indices_len; j++) { int expected = c->invalid_sig_idx != (int)j; - CHECK(expected == rustsecp256k1zkp_v0_10_0_musig_partial_sig_parse(CTX, &partial_sig[j], vector->psigs[c->psig_indices[j]])); + CHECK(expected == rustsecp256k1zkp_v0_10_1_musig_partial_sig_parse(CTX, &partial_sig[j], vector->psigs[c->psig_indices[j]])); } } } static void run_musig_tests(void) { int i; - rustsecp256k1zkp_v0_10_0_scratch_space *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1024 * 1024); + rustsecp256k1zkp_v0_10_1_scratch_space *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1024 * 1024); for (i = 0; i < COUNT; i++) { musig_simple_test(scratch); @@ -1188,7 +1188,7 @@ static void run_musig_tests(void) { musig_test_vectors_tweak(); musig_test_vectors_sigagg(); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/Makefile.am.include index d40232db..ec6713f6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_rangeproof.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_rangeproof.h noinst_HEADERS += src/modules/rangeproof/main_impl.h noinst_HEADERS += src/modules/rangeproof/borromean.h noinst_HEADERS += src/modules/rangeproof/borromean_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean.h index 3228b0b6..9d0485c2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean.h +++ 
b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean.h @@ -14,11 +14,11 @@ #include "../../ecmult.h" #include "../../ecmult_gen.h" -static int rustsecp256k1zkp_v0_10_0_borromean_verify(rustsecp256k1zkp_v0_10_0_scalar *evalues, const unsigned char *e0, const rustsecp256k1zkp_v0_10_0_scalar *s, - const rustsecp256k1zkp_v0_10_0_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen); +static int rustsecp256k1zkp_v0_10_1_borromean_verify(rustsecp256k1zkp_v0_10_1_scalar *evalues, const unsigned char *e0, const rustsecp256k1zkp_v0_10_1_scalar *s, + const rustsecp256k1zkp_v0_10_1_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen); -static int rustsecp256k1zkp_v0_10_0_borromean_sign(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, - unsigned char *e0, rustsecp256k1zkp_v0_10_0_scalar *s, const rustsecp256k1zkp_v0_10_0_gej *pubs, const rustsecp256k1zkp_v0_10_0_scalar *k, const rustsecp256k1zkp_v0_10_0_scalar *sec, +static int rustsecp256k1zkp_v0_10_1_borromean_sign(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, + unsigned char *e0, rustsecp256k1zkp_v0_10_1_scalar *s, const rustsecp256k1zkp_v0_10_1_gej *pubs, const rustsecp256k1zkp_v0_10_1_scalar *k, const rustsecp256k1zkp_v0_10_1_scalar *sec, const size_t *rsizes, const size_t *secidx, size_t nrings, const unsigned char *m, size_t mlen); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean_impl.h index 6e1d36ad..7ebdb8c7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/borromean_impl.h @@ -20,19 +20,19 @@ #include #include -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_borromean_hash(unsigned char *hash, const unsigned char *m, size_t mlen, const unsigned char *e, size_t elen, 
+SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_borromean_hash(unsigned char *hash, const unsigned char *m, size_t mlen, const unsigned char *e, size_t elen, size_t ridx, size_t eidx) { unsigned char ring[4]; unsigned char epos[4]; - rustsecp256k1zkp_v0_10_0_sha256 sha256_en; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_en); - rustsecp256k1zkp_v0_10_0_write_be32(ring, (uint32_t)ridx); - rustsecp256k1zkp_v0_10_0_write_be32(epos, (uint32_t)eidx); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, e, elen); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, m, mlen); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, ring, 4); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, epos, 4); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_en, hash); + rustsecp256k1zkp_v0_10_1_sha256 sha256_en; + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_en); + rustsecp256k1zkp_v0_10_1_write_be32(ring, (uint32_t)ridx); + rustsecp256k1zkp_v0_10_1_write_be32(epos, (uint32_t)eidx); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, e, elen); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, m, mlen); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, ring, 4); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, epos, 4); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_en, hash); } /** "Borromean" ring signature. 
@@ -49,12 +49,12 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_borromean_hash(unsigned ch * | | r_i = r * | return e_0 ==== H(r_{0..i}||m) */ -int rustsecp256k1zkp_v0_10_0_borromean_verify(rustsecp256k1zkp_v0_10_0_scalar *evalues, const unsigned char *e0, - const rustsecp256k1zkp_v0_10_0_scalar *s, const rustsecp256k1zkp_v0_10_0_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen) { - rustsecp256k1zkp_v0_10_0_gej rgej; - rustsecp256k1zkp_v0_10_0_ge rge; - rustsecp256k1zkp_v0_10_0_scalar ens; - rustsecp256k1zkp_v0_10_0_sha256 sha256_e0; +int rustsecp256k1zkp_v0_10_1_borromean_verify(rustsecp256k1zkp_v0_10_1_scalar *evalues, const unsigned char *e0, + const rustsecp256k1zkp_v0_10_1_scalar *s, const rustsecp256k1zkp_v0_10_1_gej *pubs, const size_t *rsizes, size_t nrings, const unsigned char *m, size_t mlen) { + rustsecp256k1zkp_v0_10_1_gej rgej; + rustsecp256k1zkp_v0_10_1_ge rge; + rustsecp256k1zkp_v0_10_1_scalar ens; + rustsecp256k1zkp_v0_10_1_sha256 sha256_e0; unsigned char tmp[33]; size_t i; size_t j; @@ -68,47 +68,47 @@ int rustsecp256k1zkp_v0_10_0_borromean_verify(rustsecp256k1zkp_v0_10_0_scalar *e VERIFY_CHECK(nrings > 0); VERIFY_CHECK(m != NULL); count = 0; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_e0); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_e0); for (i = 0; i < nrings; i++) { VERIFY_CHECK(INT_MAX - count > rsizes[i]); - rustsecp256k1zkp_v0_10_0_borromean_hash(tmp, m, mlen, e0, 32, i, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&ens, tmp, &overflow); + rustsecp256k1zkp_v0_10_1_borromean_hash(tmp, m, mlen, e0, 32, i, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&ens, tmp, &overflow); for (j = 0; j < rsizes[i]; j++) { - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s[count]) || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&ens) || rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pubs[count])) { + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s[count]) || 
rustsecp256k1zkp_v0_10_1_scalar_is_zero(&ens) || rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pubs[count])) { return 0; } if (evalues) { /*If requested, save the challenges for proof rewind.*/ evalues[count] = ens; } - rustsecp256k1zkp_v0_10_0_ecmult(&rgej, &pubs[count], &ens, &s[count]); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rgej)) { + rustsecp256k1zkp_v0_10_1_ecmult(&rgej, &pubs[count], &ens, &s[count]); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rgej)) { return 0; } /* OPT: loop can be hoisted and split to use batch inversion across all the rings; this would make it much faster. */ - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&rge, &rgej); - rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&rge, tmp, &size, 1); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&rge, &rgej); + rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&rge, tmp, &size, 1); if (j != rsizes[i] - 1) { - rustsecp256k1zkp_v0_10_0_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&ens, tmp, &overflow); + rustsecp256k1zkp_v0_10_1_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&ens, tmp, &overflow); } else { - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_e0, tmp, size); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_e0, tmp, size); } count++; } } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_e0, m, mlen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_e0, tmp); - return rustsecp256k1zkp_v0_10_0_memcmp_var(e0, tmp, 32) == 0; + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_e0, m, mlen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_e0, tmp); + return rustsecp256k1zkp_v0_10_1_memcmp_var(e0, tmp, 32) == 0; } -int rustsecp256k1zkp_v0_10_0_borromean_sign(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, - unsigned char *e0, rustsecp256k1zkp_v0_10_0_scalar *s, const rustsecp256k1zkp_v0_10_0_gej *pubs, const rustsecp256k1zkp_v0_10_0_scalar *k, const rustsecp256k1zkp_v0_10_0_scalar *sec, +int 
rustsecp256k1zkp_v0_10_1_borromean_sign(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, + unsigned char *e0, rustsecp256k1zkp_v0_10_1_scalar *s, const rustsecp256k1zkp_v0_10_1_gej *pubs, const rustsecp256k1zkp_v0_10_1_scalar *k, const rustsecp256k1zkp_v0_10_1_scalar *sec, const size_t *rsizes, const size_t *secidx, size_t nrings, const unsigned char *m, size_t mlen) { - rustsecp256k1zkp_v0_10_0_gej rgej; - rustsecp256k1zkp_v0_10_0_ge rge; - rustsecp256k1zkp_v0_10_0_scalar ens; - rustsecp256k1zkp_v0_10_0_sha256 sha256_e0; + rustsecp256k1zkp_v0_10_1_gej rgej; + rustsecp256k1zkp_v0_10_1_ge rge; + rustsecp256k1zkp_v0_10_1_scalar ens; + rustsecp256k1zkp_v0_10_1_sha256 sha256_e0; unsigned char tmp[33]; size_t i; size_t j; @@ -125,70 +125,70 @@ int rustsecp256k1zkp_v0_10_0_borromean_sign(const rustsecp256k1zkp_v0_10_0_ecmul VERIFY_CHECK(secidx != NULL); VERIFY_CHECK(nrings > 0); VERIFY_CHECK(m != NULL); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_e0); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_e0); count = 0; for (i = 0; i < nrings; i++) { VERIFY_CHECK(INT_MAX - count > rsizes[i]); - rustsecp256k1zkp_v0_10_0_ecmult_gen(ecmult_gen_ctx, &rgej, &k[i]); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&rge, &rgej); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rgej)) { + rustsecp256k1zkp_v0_10_1_ecmult_gen(ecmult_gen_ctx, &rgej, &k[i]); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&rge, &rgej); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rgej)) { return 0; } - rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&rge, tmp, &size, 1); + rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&rge, tmp, &size, 1); for (j = secidx[i] + 1; j < rsizes[i]; j++) { - rustsecp256k1zkp_v0_10_0_borromean_hash(tmp, m, mlen, tmp, 33, i, j); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&ens, tmp, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&ens)) { + rustsecp256k1zkp_v0_10_1_borromean_hash(tmp, m, mlen, tmp, 33, i, j); + 
rustsecp256k1zkp_v0_10_1_scalar_set_b32(&ens, tmp, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&ens)) { return 0; } /** The signing algorithm as a whole is not memory uniform so there is likely a cache sidechannel that * leaks which members are non-forgeries. That the forgeries themselves are variable time may leave * an additional privacy impacting timing side-channel, but not a key loss one. */ - rustsecp256k1zkp_v0_10_0_ecmult(&rgej, &pubs[count + j], &ens, &s[count + j]); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rgej)) { + rustsecp256k1zkp_v0_10_1_ecmult(&rgej, &pubs[count + j], &ens, &s[count + j]); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rgej)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&rge, &rgej); - rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&rge, tmp, &size, 1); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&rge, &rgej); + rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&rge, tmp, &size, 1); } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_e0, tmp, size); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_e0, tmp, size); count += rsizes[i]; } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_e0, m, mlen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_e0, e0); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_e0, m, mlen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_e0, e0); count = 0; for (i = 0; i < nrings; i++) { VERIFY_CHECK(INT_MAX - count > rsizes[i]); - rustsecp256k1zkp_v0_10_0_borromean_hash(tmp, m, mlen, e0, 32, i, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&ens, tmp, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&ens)) { + rustsecp256k1zkp_v0_10_1_borromean_hash(tmp, m, mlen, e0, 32, i, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&ens, tmp, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&ens)) { return 0; } for (j = 0; j < secidx[i]; j++) { - rustsecp256k1zkp_v0_10_0_ecmult(&rgej, &pubs[count + j], &ens, &s[count + j]); - if 
(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&rgej)) { + rustsecp256k1zkp_v0_10_1_ecmult(&rgej, &pubs[count + j], &ens, &s[count + j]); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rgej)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&rge, &rgej); - rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&rge, tmp, &size, 1); - rustsecp256k1zkp_v0_10_0_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&ens, tmp, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&ens)) { + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&rge, &rgej); + rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&rge, tmp, &size, 1); + rustsecp256k1zkp_v0_10_1_borromean_hash(tmp, m, mlen, tmp, 33, i, j + 1); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&ens, tmp, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&ens)) { return 0; } } - rustsecp256k1zkp_v0_10_0_scalar_mul(&s[count + j], &ens, &sec[i]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s[count + j], &s[count + j]); - rustsecp256k1zkp_v0_10_0_scalar_add(&s[count + j], &s[count + j], &k[i]); - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s[count + j])) { + rustsecp256k1zkp_v0_10_1_scalar_mul(&s[count + j], &ens, &sec[i]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s[count + j], &s[count + j]); + rustsecp256k1zkp_v0_10_1_scalar_add(&s[count + j], &s[count + j], &k[i]); + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s[count + j])) { return 0; } count += rsizes[i]; } - rustsecp256k1zkp_v0_10_0_scalar_clear(&ens); - rustsecp256k1zkp_v0_10_0_ge_clear(&rge); - rustsecp256k1zkp_v0_10_0_gej_clear(&rgej); + rustsecp256k1zkp_v0_10_1_scalar_clear(&ens); + rustsecp256k1zkp_v0_10_1_ge_clear(&rge); + rustsecp256k1zkp_v0_10_1_gej_clear(&rgej); memset(tmp, 0, 33); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/main_impl.h index 4264c9bc..ac287203 100644 --- 
a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/main_impl.h @@ -13,7 +13,7 @@ #include "../rangeproof/borromean_impl.h" #include "../rangeproof/rangeproof_impl.h" -int rustsecp256k1zkp_v0_10_0_rangeproof_info(const rustsecp256k1zkp_v0_10_0_context* ctx, int *exp, int *mantissa, +int rustsecp256k1zkp_v0_10_1_rangeproof_info(const rustsecp256k1zkp_v0_10_1_context* ctx, int *exp, int *mantissa, uint64_t *min_value, uint64_t *max_value, const unsigned char *proof, size_t plen) { size_t offset; uint64_t scale; @@ -25,15 +25,15 @@ int rustsecp256k1zkp_v0_10_0_rangeproof_info(const rustsecp256k1zkp_v0_10_0_cont offset = 0; scale = 1; (void)ctx; - return rustsecp256k1zkp_v0_10_0_rangeproof_getheader_impl(&offset, exp, mantissa, &scale, min_value, max_value, proof, plen); + return rustsecp256k1zkp_v0_10_1_rangeproof_getheader_impl(&offset, exp, mantissa, &scale, min_value, max_value, proof, plen); } -int rustsecp256k1zkp_v0_10_0_rangeproof_rewind(const rustsecp256k1zkp_v0_10_0_context* ctx, +int rustsecp256k1zkp_v0_10_1_rangeproof_rewind(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *blind_out, uint64_t *value_out, unsigned char *message_out, size_t *outlen, const unsigned char *nonce, uint64_t *min_value, uint64_t *max_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_generator* gen) { - rustsecp256k1zkp_v0_10_0_ge commitp; - rustsecp256k1zkp_v0_10_0_ge genp; + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_generator* gen) { + rustsecp256k1zkp_v0_10_1_ge commitp; + rustsecp256k1zkp_v0_10_1_ge genp; VERIFY_CHECK(ctx != NULL); ARG_CHECK(commit != NULL); ARG_CHECK(proof != NULL); @@ 
-43,17 +43,17 @@ int rustsecp256k1zkp_v0_10_0_rangeproof_rewind(const rustsecp256k1zkp_v0_10_0_co ARG_CHECK(nonce != NULL); ARG_CHECK(extra_commit != NULL || extra_commit_len == 0); ARG_CHECK(gen != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); - rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(&commitp, commit); - rustsecp256k1zkp_v0_10_0_generator_load(&genp, gen); - return rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(&ctx->ecmult_gen_ctx, + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(&commitp, commit); + rustsecp256k1zkp_v0_10_1_generator_load(&genp, gen); + return rustsecp256k1zkp_v0_10_1_rangeproof_verify_impl(&ctx->ecmult_gen_ctx, blind_out, value_out, message_out, outlen, nonce, min_value, max_value, &commitp, proof, plen, extra_commit, extra_commit_len, &genp); } -int rustsecp256k1zkp_v0_10_0_rangeproof_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, uint64_t *min_value, uint64_t *max_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_generator* gen) { - rustsecp256k1zkp_v0_10_0_ge commitp; - rustsecp256k1zkp_v0_10_0_ge genp; +int rustsecp256k1zkp_v0_10_1_rangeproof_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, uint64_t *min_value, uint64_t *max_value, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_generator* gen) { + rustsecp256k1zkp_v0_10_1_ge commitp; + rustsecp256k1zkp_v0_10_1_ge genp; VERIFY_CHECK(ctx != NULL); ARG_CHECK(commit != NULL); ARG_CHECK(proof != NULL); @@ -61,17 +61,17 @@ int rustsecp256k1zkp_v0_10_0_rangeproof_verify(const rustsecp256k1zkp_v0_10_0_co ARG_CHECK(max_value != NULL); 
ARG_CHECK(extra_commit != NULL || extra_commit_len == 0); ARG_CHECK(gen != NULL); - rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(&commitp, commit); - rustsecp256k1zkp_v0_10_0_generator_load(&genp, gen); - return rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(NULL, + rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(&commitp, commit); + rustsecp256k1zkp_v0_10_1_generator_load(&genp, gen); + return rustsecp256k1zkp_v0_10_1_rangeproof_verify_impl(NULL, NULL, NULL, NULL, NULL, NULL, min_value, max_value, &commitp, proof, plen, extra_commit, extra_commit_len, &genp); } -int rustsecp256k1zkp_v0_10_0_rangeproof_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *proof, size_t *plen, uint64_t min_value, - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, - const unsigned char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_generator* gen){ - rustsecp256k1zkp_v0_10_0_ge commitp; - rustsecp256k1zkp_v0_10_0_ge genp; +int rustsecp256k1zkp_v0_10_1_rangeproof_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *proof, size_t *plen, uint64_t min_value, + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, + const unsigned char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_generator* gen){ + rustsecp256k1zkp_v0_10_1_ge commitp; + rustsecp256k1zkp_v0_10_1_ge genp; VERIFY_CHECK(ctx != NULL); ARG_CHECK(proof != NULL); ARG_CHECK(plen != NULL); @@ -81,15 +81,15 @@ int rustsecp256k1zkp_v0_10_0_rangeproof_sign(const rustsecp256k1zkp_v0_10_0_cont ARG_CHECK(message != NULL || msg_len == 0); ARG_CHECK(extra_commit != NULL || extra_commit_len == 0); ARG_CHECK(gen != NULL); - 
ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); - rustsecp256k1zkp_v0_10_0_pedersen_commitment_load(&commitp, commit); - rustsecp256k1zkp_v0_10_0_generator_load(&genp, gen); - return rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(&ctx->ecmult_gen_ctx, + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + rustsecp256k1zkp_v0_10_1_pedersen_commitment_load(&commitp, commit); + rustsecp256k1zkp_v0_10_1_generator_load(&genp, gen); + return rustsecp256k1zkp_v0_10_1_rangeproof_sign_impl(&ctx->ecmult_gen_ctx, proof, plen, min_value, &commitp, blind, nonce, exp, min_bits, value, message, msg_len, extra_commit, extra_commit_len, &genp); } -size_t rustsecp256k1zkp_v0_10_0_rangeproof_max_size(const rustsecp256k1zkp_v0_10_0_context* ctx, uint64_t max_value, int min_bits) { - const int val_mantissa = max_value > 0 ? 64 - rustsecp256k1zkp_v0_10_0_clz64_var(max_value) : 1; +size_t rustsecp256k1zkp_v0_10_1_rangeproof_max_size(const rustsecp256k1zkp_v0_10_1_context* ctx, uint64_t max_value, int min_bits) { + const int val_mantissa = max_value > 0 ? 64 - rustsecp256k1zkp_v0_10_1_clz64_var(max_value) : 1; const int mantissa = min_bits > val_mantissa ? 
min_bits : val_mantissa; const size_t rings = (mantissa + 1) / 2; const size_t npubs = rings * 4 - 2 * (mantissa % 2); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof.h index d2bdbd32..5995f4fa 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof.h @@ -12,9 +12,9 @@ #include "../../ecmult.h" #include "../../ecmult_gen.h" -static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ecmult_gen_ctx, +static int rustsecp256k1zkp_v0_10_1_rangeproof_verify_impl(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ecmult_gen_ctx, unsigned char *blindout, uint64_t *value_out, unsigned char *message_out, size_t *outlen, const unsigned char *nonce, - uint64_t *min_value, uint64_t *max_value, const rustsecp256k1zkp_v0_10_0_ge *commit, const unsigned char *proof, size_t plen, - const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_ge* genp); + uint64_t *min_value, uint64_t *max_value, const rustsecp256k1zkp_v0_10_1_ge *commit, const unsigned char *proof, size_t plen, + const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_ge* genp); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof_impl.h index 08a07992..31a8a240 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/rangeproof_impl.h @@ -17,9 +17,9 @@ #include "../rangeproof/borromean.h" #include "../rangeproof/rangeproof.h" -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_pub_expand(rustsecp256k1zkp_v0_10_0_gej *pubs, - int exp, size_t *rsizes, size_t rings, const 
rustsecp256k1zkp_v0_10_0_ge* genp) { - rustsecp256k1zkp_v0_10_0_gej base; +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_rangeproof_pub_expand(rustsecp256k1zkp_v0_10_1_gej *pubs, + int exp, size_t *rsizes, size_t rings, const rustsecp256k1zkp_v0_10_1_ge* genp) { + rustsecp256k1zkp_v0_10_1_gej base; size_t i; size_t j; size_t npub; @@ -27,43 +27,43 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_pub_expand(rust if (exp < 0) { exp = 0; } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&base, genp); - rustsecp256k1zkp_v0_10_0_gej_neg(&base, &base); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&base, genp); + rustsecp256k1zkp_v0_10_1_gej_neg(&base, &base); while (exp--) { /* Multiplication by 10 */ - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_gej_double_var(&tmp, &base, NULL); - rustsecp256k1zkp_v0_10_0_gej_double_var(&base, &tmp, NULL); - rustsecp256k1zkp_v0_10_0_gej_double_var(&base, &base, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(&base, &base, &tmp, NULL); + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_gej_double_var(&tmp, &base, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&base, &tmp, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&base, &base, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&base, &base, &tmp, NULL); } npub = 0; for (i = 0; i < rings; i++) { for (j = 1; j < rsizes[i]; j++) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&pubs[npub + j], &pubs[npub + j - 1], &base, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&pubs[npub + j], &pubs[npub + j - 1], &base, NULL); } if (i < rings - 1) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&base, &base, NULL); - rustsecp256k1zkp_v0_10_0_gej_double_var(&base, &base, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&base, &base, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&base, &base, NULL); } npub += rsizes[i]; } } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(unsigned char* data, const rustsecp256k1zkp_v0_10_0_ge *point) { - 
rustsecp256k1zkp_v0_10_0_fe pointx; +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(unsigned char* data, const rustsecp256k1zkp_v0_10_1_ge *point) { + rustsecp256k1zkp_v0_10_1_fe pointx; pointx = point->x; - rustsecp256k1zkp_v0_10_0_fe_normalize(&pointx); - data[0] = !rustsecp256k1zkp_v0_10_0_fe_is_square_var(&point->y); - rustsecp256k1zkp_v0_10_0_fe_get_b32(data + 1, &pointx); + rustsecp256k1zkp_v0_10_1_fe_normalize(&pointx); + data[0] = !rustsecp256k1zkp_v0_10_1_fe_is_square_var(&point->y); + rustsecp256k1zkp_v0_10_1_fe_get_b32(data + 1, &pointx); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_genrand(rustsecp256k1zkp_v0_10_0_scalar *sec, rustsecp256k1zkp_v0_10_0_scalar *s, unsigned char *message, - size_t *rsizes, size_t rings, const unsigned char *nonce, const rustsecp256k1zkp_v0_10_0_ge *commit, const unsigned char *proof, size_t len, const rustsecp256k1zkp_v0_10_0_ge* genp) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_rangeproof_genrand(rustsecp256k1zkp_v0_10_1_scalar *sec, rustsecp256k1zkp_v0_10_1_scalar *s, unsigned char *message, + size_t *rsizes, size_t rings, const unsigned char *nonce, const rustsecp256k1zkp_v0_10_1_ge *commit, const unsigned char *proof, size_t len, const rustsecp256k1zkp_v0_10_1_ge* genp) { unsigned char tmp[32]; unsigned char rngseed[32 + 33 + 33 + 10]; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; - rustsecp256k1zkp_v0_10_0_scalar acc; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; + rustsecp256k1zkp_v0_10_1_scalar acc; int overflow; int ret; size_t i; @@ -72,45 +72,45 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_genrand(rustsecp size_t npub; VERIFY_CHECK(len <= 10); memcpy(rngseed, nonce, 32); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(rngseed + 32, commit); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(rngseed + 32 + 33, genp); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(rngseed + 32, commit); + 
rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(rngseed + 32 + 33, genp); memcpy(rngseed + 33 + 33 + 32, proof, len); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, rngseed, 32 + 33 + 33 + len); - rustsecp256k1zkp_v0_10_0_scalar_clear(&acc); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, rngseed, 32 + 33 + 33 + len); + rustsecp256k1zkp_v0_10_1_scalar_clear(&acc); npub = 0; ret = 1; for (i = 0; i < rings; i++) { if (i < rings - 1) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); do { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, tmp, 32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sec[i], tmp, &overflow); - } while (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sec[i])); - rustsecp256k1zkp_v0_10_0_scalar_add(&acc, &acc, &sec[i]); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sec[i], tmp, &overflow); + } while (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sec[i])); + rustsecp256k1zkp_v0_10_1_scalar_add(&acc, &acc, &sec[i]); } else { - rustsecp256k1zkp_v0_10_0_scalar_negate(&acc, &acc); + rustsecp256k1zkp_v0_10_1_scalar_negate(&acc, &acc); sec[i] = acc; } for (j = 0; j < rsizes[i]; j++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, tmp, 32); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, tmp, 32); if (message) { for (b = 0; b < 32; b++) { tmp[b] ^= message[(i * 4 + j) * 32 + b]; message[(i * 4 + j) * 32 + b] = tmp[b]; } } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s[npub], tmp, &overflow); - ret &= !(overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s[npub])); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s[npub], tmp, &overflow); + ret &= !(overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s[npub])); npub++; } } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); - 
rustsecp256k1zkp_v0_10_0_scalar_clear(&acc); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_scalar_clear(&acc); memset(tmp, 0, 32); return ret; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_range_proveparams(uint64_t *v, size_t *rings, size_t *rsizes, size_t *npub, size_t *secidx, uint64_t *min_value, +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_range_proveparams(uint64_t *v, size_t *rings, size_t *rsizes, size_t *npub, size_t *secidx, uint64_t *min_value, int *mantissa, uint64_t *scale, int *exp, int *min_bits, uint64_t value) { size_t i; *rings = 1; @@ -130,7 +130,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_range_proveparams(uint64_t /* If either value or min_value is >= 2^63-1 then the other must by zero to avoid overflowing the proven range. */ return 0; } - max_bits = *min_value ? rustsecp256k1zkp_v0_10_0_clz64_var(*min_value) : 64; + max_bits = *min_value ? rustsecp256k1zkp_v0_10_1_clz64_var(*min_value) : 64; if (*min_bits > max_bits) { *min_bits = max_bits; } @@ -159,7 +159,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_range_proveparams(uint64_t /* If the masked number isn't precise, compute the public offset. */ *min_value = value - v2; /* How many bits do we need to represent our value? */ - *mantissa = *v ? 64 - rustsecp256k1zkp_v0_10_0_clz64_var(*v) : 1; + *mantissa = *v ? 64 - rustsecp256k1zkp_v0_10_1_clz64_var(*v) : 1; if (*min_bits > *mantissa) { /* If the user asked for more precision, give it to them. */ *mantissa = *min_bits; @@ -188,16 +188,16 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_range_proveparams(uint64_t } /* strawman interface, writes proof in proof, a buffer of plen, proves with respect to min_value the range for commit which has the provided blinding factor and value. 
*/ -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ecmult_gen_ctx, +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_rangeproof_sign_impl(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ecmult_gen_ctx, unsigned char *proof, size_t *plen, uint64_t min_value, - const rustsecp256k1zkp_v0_10_0_ge *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, - const unsigned char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_ge* genp){ - rustsecp256k1zkp_v0_10_0_gej pubs[128]; /* Candidate digits for our proof, most inferred. */ - rustsecp256k1zkp_v0_10_0_scalar s[128]; /* Signatures in our proof, most forged. */ - rustsecp256k1zkp_v0_10_0_scalar sec[32]; /* Blinding factors for the correct digits. */ - rustsecp256k1zkp_v0_10_0_scalar k[32]; /* Nonces for our non-forged signatures. */ - rustsecp256k1zkp_v0_10_0_scalar stmp; - rustsecp256k1zkp_v0_10_0_sha256 sha256_m; + const rustsecp256k1zkp_v0_10_1_ge *commit, const unsigned char *blind, const unsigned char *nonce, int exp, int min_bits, uint64_t value, + const unsigned char *message, size_t msg_len, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_ge* genp){ + rustsecp256k1zkp_v0_10_1_gej pubs[128]; /* Candidate digits for our proof, most inferred. */ + rustsecp256k1zkp_v0_10_1_scalar s[128]; /* Signatures in our proof, most forged. */ + rustsecp256k1zkp_v0_10_1_scalar sec[32]; /* Blinding factors for the correct digits. */ + rustsecp256k1zkp_v0_10_1_scalar k[32]; /* Nonces for our non-forged signatures. */ + rustsecp256k1zkp_v0_10_1_scalar stmp; + rustsecp256k1zkp_v0_10_1_sha256 sha256_m; unsigned char prep[4096]; unsigned char tmp[33]; unsigned char *signs; /* Location of sign flags in the proof. 
*/ @@ -215,7 +215,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const if (*plen < 65 || min_value > value || min_bits > 64 || min_bits < 0 || exp < -1 || exp > 18) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_range_proveparams(&v, &rings, rsizes, &npub, secidx, &min_value, &mantissa, &scale, &exp, &min_bits, value)) { + if (!rustsecp256k1zkp_v0_10_1_range_proveparams(&v, &rings, rsizes, &npub, secidx, &min_value, &mantissa, &scale, &exp, &min_bits, value)) { return 0; } proof[len] = (rsizes[0] > 1 ? (64 | exp) : 0) | (min_value ? 32 : 0); @@ -242,12 +242,12 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const if (*plen - len < 32 * (npub + rings - 1) + 32 + ((rings+6) >> 3)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_m); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(tmp, commit); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, tmp, 33); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(tmp, genp); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, tmp, 33); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, proof, len); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_m); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(tmp, commit); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, tmp, 33); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(tmp, genp); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, tmp, 33); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, proof, len); memset(prep, 0, 4096); if (message != NULL) { @@ -266,14 +266,14 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const } prep[idx] = 128; } - if (!rustsecp256k1zkp_v0_10_0_rangeproof_genrand(sec, s, prep, rsizes, rings, nonce, commit, proof, len, genp)) { + if (!rustsecp256k1zkp_v0_10_1_rangeproof_genrand(sec, s, prep, rsizes, rings, nonce, commit, proof, len, genp)) { return 0; } memset(prep, 0, 4096); for (i = 0; i < rings; i++) { /* Sign will 
overwrite the non-forged signature, move that random value into the nonce. */ k[i] = s[i * 4 + secidx[i]]; - rustsecp256k1zkp_v0_10_0_scalar_clear(&s[i * 4 + secidx[i]]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s[i * 4 + secidx[i]]); } /** Genrand returns the last blinding factor as -sum(rest), * adding in the blinding factor for our commitment, results in the blinding factor for @@ -281,9 +281,9 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const * all the digits in the proof from the commitment. This lets the prover skip sending the * blinded value for one digit. */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&stmp, blind, &overflow); - rustsecp256k1zkp_v0_10_0_scalar_add(&sec[rings - 1], &sec[rings - 1], &stmp); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sec[rings - 1])) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&stmp, blind, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_add(&sec[rings - 1], &sec[rings - 1], &stmp); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sec[rings - 1])) { return 0; } signs = &proof[len]; @@ -295,37 +295,37 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const npub = 0; for (i = 0; i < rings; i++) { /*OPT: Use the precomputed gen2 basis?*/ - rustsecp256k1zkp_v0_10_0_pedersen_ecmult(ecmult_gen_ctx, &pubs[npub], &sec[i], ((uint64_t)secidx[i] * scale) << (i*2), genp); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pubs[npub])) { + rustsecp256k1zkp_v0_10_1_pedersen_ecmult(ecmult_gen_ctx, &pubs[npub], &sec[i], ((uint64_t)secidx[i] * scale) << (i*2), genp); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pubs[npub])) { return 0; } if (i < rings - 1) { unsigned char tmpc[33]; - rustsecp256k1zkp_v0_10_0_ge c; + rustsecp256k1zkp_v0_10_1_ge c; unsigned char quadness; /*OPT: split loop and batch invert.*/ /*OPT: do not compute full pubs[npub] in ge form; we only need x */ - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&c, &pubs[npub]); - 
rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(tmpc, &c); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&c, &pubs[npub]); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(tmpc, &c); quadness = tmpc[0]; - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, tmpc, 33); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, tmpc, 33); signs[i>>3] |= quadness << (i&7); memcpy(&proof[len], tmpc + 1, 32); len += 32; } npub += rsizes[i]; } - rustsecp256k1zkp_v0_10_0_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp); + rustsecp256k1zkp_v0_10_1_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp); if (extra_commit != NULL) { - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, extra_commit, extra_commit_len); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, extra_commit, extra_commit_len); } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_m, tmp); - if (!rustsecp256k1zkp_v0_10_0_borromean_sign(ecmult_gen_ctx, &proof[len], s, pubs, k, sec, rsizes, secidx, rings, tmp, 32)) { + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_m, tmp); + if (!rustsecp256k1zkp_v0_10_1_borromean_sign(ecmult_gen_ctx, &proof[len], s, pubs, k, sec, rsizes, secidx, rings, tmp, 32)) { return 0; } len += 32; for (i = 0; i < npub; i++) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&proof[len],&s[i]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&proof[len],&s[i]); len += 32; } VERIFY_CHECK(len <= *plen); @@ -335,36 +335,36 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_sign_impl(const } /* Computes blinding factor x given k, s, and the challenge e. 
*/ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_recover_x(rustsecp256k1zkp_v0_10_0_scalar *x, const rustsecp256k1zkp_v0_10_0_scalar *k, const rustsecp256k1zkp_v0_10_0_scalar *e, - const rustsecp256k1zkp_v0_10_0_scalar *s) { - rustsecp256k1zkp_v0_10_0_scalar stmp; - rustsecp256k1zkp_v0_10_0_scalar_negate(x, s); - rustsecp256k1zkp_v0_10_0_scalar_add(x, x, k); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&stmp, e); - rustsecp256k1zkp_v0_10_0_scalar_mul(x, x, &stmp); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_rangeproof_recover_x(rustsecp256k1zkp_v0_10_1_scalar *x, const rustsecp256k1zkp_v0_10_1_scalar *k, const rustsecp256k1zkp_v0_10_1_scalar *e, + const rustsecp256k1zkp_v0_10_1_scalar *s) { + rustsecp256k1zkp_v0_10_1_scalar stmp; + rustsecp256k1zkp_v0_10_1_scalar_negate(x, s); + rustsecp256k1zkp_v0_10_1_scalar_add(x, x, k); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&stmp, e); + rustsecp256k1zkp_v0_10_1_scalar_mul(x, x, &stmp); } /* Computes ring's nonce given the blinding factor x, the challenge e, and the signature s. 
*/ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_recover_k(rustsecp256k1zkp_v0_10_0_scalar *k, const rustsecp256k1zkp_v0_10_0_scalar *x, const rustsecp256k1zkp_v0_10_0_scalar *e, - const rustsecp256k1zkp_v0_10_0_scalar *s) { - rustsecp256k1zkp_v0_10_0_scalar stmp; - rustsecp256k1zkp_v0_10_0_scalar_mul(&stmp, x, e); - rustsecp256k1zkp_v0_10_0_scalar_add(k, s, &stmp); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_rangeproof_recover_k(rustsecp256k1zkp_v0_10_1_scalar *k, const rustsecp256k1zkp_v0_10_1_scalar *x, const rustsecp256k1zkp_v0_10_1_scalar *e, + const rustsecp256k1zkp_v0_10_1_scalar *s) { + rustsecp256k1zkp_v0_10_1_scalar stmp; + rustsecp256k1zkp_v0_10_1_scalar_mul(&stmp, x, e); + rustsecp256k1zkp_v0_10_1_scalar_add(k, s, &stmp); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_rangeproof_ch32xor(unsigned char *x, const unsigned char *y) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_rangeproof_ch32xor(unsigned char *x, const unsigned char *y) { int i; for (i = 0; i < 32; i++) { x[i] ^= y[i]; } } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rustsecp256k1zkp_v0_10_0_scalar *blind, uint64_t *v, - unsigned char *m, size_t *mlen, rustsecp256k1zkp_v0_10_0_scalar *ev, rustsecp256k1zkp_v0_10_0_scalar *s, - size_t *rsizes, size_t rings, const unsigned char *nonce, const rustsecp256k1zkp_v0_10_0_ge *commit, const unsigned char *proof, size_t len, const rustsecp256k1zkp_v0_10_0_ge *genp) { - rustsecp256k1zkp_v0_10_0_scalar s_orig[128]; - rustsecp256k1zkp_v0_10_0_scalar sec[32]; - rustsecp256k1zkp_v0_10_0_scalar stmp; +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_rangeproof_rewind_inner(rustsecp256k1zkp_v0_10_1_scalar *blind, uint64_t *v, + unsigned char *m, size_t *mlen, rustsecp256k1zkp_v0_10_1_scalar *ev, rustsecp256k1zkp_v0_10_1_scalar *s, + size_t *rsizes, size_t rings, const unsigned char *nonce, const rustsecp256k1zkp_v0_10_1_ge *commit, const unsigned char *proof, size_t len, 
const rustsecp256k1zkp_v0_10_1_ge *genp) { + rustsecp256k1zkp_v0_10_1_scalar s_orig[128]; + rustsecp256k1zkp_v0_10_1_scalar sec[32]; + rustsecp256k1zkp_v0_10_1_scalar stmp; unsigned char prep[4096]; unsigned char tmp[32]; uint64_t value = 0; @@ -380,12 +380,12 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rus VERIFY_CHECK(npub >= 1); memset(prep, 0, 4096); /* Reconstruct the provers random values. */ - rustsecp256k1zkp_v0_10_0_rangeproof_genrand(sec, s_orig, prep, rsizes, rings, nonce, commit, proof, len, genp); + rustsecp256k1zkp_v0_10_1_rangeproof_genrand(sec, s_orig, prep, rsizes, rings, nonce, commit, proof, len, genp); *v = UINT64_MAX; - rustsecp256k1zkp_v0_10_0_scalar_clear(blind); + rustsecp256k1zkp_v0_10_1_scalar_clear(blind); if (rings == 1 && rsizes[0] == 1) { /* With only a single proof, we can only recover the blinding factor. */ - rustsecp256k1zkp_v0_10_0_rangeproof_recover_x(blind, &s_orig[0], &ev[0], &s[0]); + rustsecp256k1zkp_v0_10_1_rangeproof_recover_x(blind, &s_orig[0], &ev[0], &s[0]); if (v) { *v = 0; } @@ -399,9 +399,9 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rus size_t idx; /* Look for a value encoding in the last ring. 
*/ idx = npub + rsizes[rings - 1] - 1 - j; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tmp, &s[idx]); - rustsecp256k1zkp_v0_10_0_rangeproof_ch32xor(tmp, &prep[idx * 32]); - if ((tmp[0] & 128) && (rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp[16], &tmp[24], 8) == 0) && (rustsecp256k1zkp_v0_10_0_memcmp_var(&tmp[8], &tmp[16], 8) == 0)) { + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tmp, &s[idx]); + rustsecp256k1zkp_v0_10_1_rangeproof_ch32xor(tmp, &prep[idx * 32]); + if ((tmp[0] & 128) && (rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp[16], &tmp[24], 8) == 0) && (rustsecp256k1zkp_v0_10_1_memcmp_var(&tmp[8], &tmp[16], 8) == 0)) { value = 0; for (i = 0; i < 8; i++) { value = (value << 8) + tmp[24 + i]; @@ -432,9 +432,9 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rus skip1 += (rings - 1) << 2; skip2 += (rings - 1) << 2; /* Like in the rsize[] == 1 case, Having figured out which s is the one which was not forged, we can recover the blinding factor. */ - rustsecp256k1zkp_v0_10_0_rangeproof_recover_x(&stmp, &s_orig[skip2], &ev[skip2], &s[skip2]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&sec[rings - 1], &sec[rings - 1]); - rustsecp256k1zkp_v0_10_0_scalar_add(blind, &stmp, &sec[rings - 1]); + rustsecp256k1zkp_v0_10_1_rangeproof_recover_x(&stmp, &s_orig[skip2], &ev[skip2], &s[skip2]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sec[rings - 1], &sec[rings - 1]); + rustsecp256k1zkp_v0_10_1_scalar_add(blind, &stmp, &sec[rings - 1]); if (!m || !mlen || *mlen == 0) { if (mlen) { *mlen = 0; @@ -457,12 +457,12 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rus * this could just as well recover the blinding factors and messages could be put there as is done for recovering the * blinding factor in the last ring, but it takes an inversion to recover x so it's faster to put the message data in k. 
*/ - rustsecp256k1zkp_v0_10_0_rangeproof_recover_k(&stmp, &sec[i], &ev[npub], &s[npub]); + rustsecp256k1zkp_v0_10_1_rangeproof_recover_k(&stmp, &sec[i], &ev[npub], &s[npub]); } else { stmp = s[npub]; } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tmp, &stmp); - rustsecp256k1zkp_v0_10_0_rangeproof_ch32xor(tmp, &prep[npub * 32]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tmp, &stmp); + rustsecp256k1zkp_v0_10_1_rangeproof_ch32xor(tmp, &prep[npub * 32]); for (b = 0; b < 32 && offset < *mlen; b++) { m[offset] = tmp[b]; offset++; @@ -473,16 +473,16 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(rus *mlen = offset; memset(prep, 0, 4096); for (i = 0; i < 128; i++) { - rustsecp256k1zkp_v0_10_0_scalar_clear(&s_orig[i]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&s_orig[i]); } for (i = 0; i < 32; i++) { - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec[i]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec[i]); } - rustsecp256k1zkp_v0_10_0_scalar_clear(&stmp); + rustsecp256k1zkp_v0_10_1_scalar_clear(&stmp); return 1; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_getheader_impl(size_t *offset, int *exp, int *mantissa, uint64_t *scale, +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_rangeproof_getheader_impl(size_t *offset, int *exp, int *mantissa, uint64_t *scale, uint64_t *min_value, uint64_t *max_value, const unsigned char *proof, size_t plen) { int i; int has_nz_range; @@ -536,15 +536,15 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_getheader_impl(s } /* Verifies range proof (len plen) for commit, the min/max values proven are put in the min/max arguments; returns 0 on failure 1 on success.*/ -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context* ecmult_gen_ctx, +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_rangeproof_verify_impl(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context* ecmult_gen_ctx, unsigned char *blindout, uint64_t 
*value_out, unsigned char *message_out, size_t *outlen, const unsigned char *nonce, - uint64_t *min_value, uint64_t *max_value, const rustsecp256k1zkp_v0_10_0_ge *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_0_ge* genp) { - rustsecp256k1zkp_v0_10_0_gej accj; - rustsecp256k1zkp_v0_10_0_gej pubs[128]; - rustsecp256k1zkp_v0_10_0_ge c; - rustsecp256k1zkp_v0_10_0_scalar s[128]; - rustsecp256k1zkp_v0_10_0_scalar evalues[128]; /* Challenges, only used during proof rewind. */ - rustsecp256k1zkp_v0_10_0_sha256 sha256_m; + uint64_t *min_value, uint64_t *max_value, const rustsecp256k1zkp_v0_10_1_ge *commit, const unsigned char *proof, size_t plen, const unsigned char *extra_commit, size_t extra_commit_len, const rustsecp256k1zkp_v0_10_1_ge* genp) { + rustsecp256k1zkp_v0_10_1_gej accj; + rustsecp256k1zkp_v0_10_1_gej pubs[128]; + rustsecp256k1zkp_v0_10_1_ge c; + rustsecp256k1zkp_v0_10_1_scalar s[128]; + rustsecp256k1zkp_v0_10_1_scalar evalues[128]; /* Challenges, only used during proof rewind. 
*/ + rustsecp256k1zkp_v0_10_1_sha256 sha256_m; size_t rsizes[32]; int ret; size_t i; @@ -560,7 +560,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(cons unsigned char m[33]; const unsigned char *e0; offset = 0; - if (!rustsecp256k1zkp_v0_10_0_rangeproof_getheader_impl(&offset, &exp, &mantissa, &scale, min_value, max_value, proof, plen)) { + if (!rustsecp256k1zkp_v0_10_1_rangeproof_getheader_impl(&offset, &exp, &mantissa, &scale, min_value, max_value, proof, plen)) { return 0; } offset_post_header = offset; @@ -583,12 +583,12 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(cons if (plen - offset < 32 * (npub + rings - 1) + 32 + ((rings+6) >> 3)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_m); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(m, commit); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, m, 33); - rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point(m, genp); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, m, 33); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, proof, offset); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_m); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(m, commit); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, m, 33); + rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point(m, genp); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, m, 33); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, proof, offset); for(i = 0; i < rings - 1; i++) { signs[i] = (proof[offset + ( i>> 3)] & (1 << (i & 7))) != 0; } @@ -600,39 +600,39 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(cons } } npub = 0; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&accj); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&accj); if (*min_value) { - rustsecp256k1zkp_v0_10_0_pedersen_ecmult_small(&accj, *min_value, genp); + rustsecp256k1zkp_v0_10_1_pedersen_ecmult_small(&accj, *min_value, genp); } for(i = 0; i < rings - 1; i++) { - 
rustsecp256k1zkp_v0_10_0_fe fe; - if (!rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fe, &proof[offset]) || - !rustsecp256k1zkp_v0_10_0_ge_set_xquad(&c, &fe)) { + rustsecp256k1zkp_v0_10_1_fe fe; + if (!rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fe, &proof[offset]) || + !rustsecp256k1zkp_v0_10_1_ge_set_xquad(&c, &fe)) { return 0; } if (signs[i]) { - rustsecp256k1zkp_v0_10_0_ge_neg(&c, &c); + rustsecp256k1zkp_v0_10_1_ge_neg(&c, &c); } - /* Not using rustsecp256k1zkp_v0_10_0_rangeproof_serialize_point as we almost have it + /* Not using rustsecp256k1zkp_v0_10_1_rangeproof_serialize_point as we almost have it * serialized form already. */ - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, &signs[i], 1); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, &proof[offset], 32); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pubs[npub], &c); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&accj, &accj, &c, NULL); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, &signs[i], 1); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, &proof[offset], 32); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pubs[npub], &c); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&accj, &accj, &c, NULL); offset += 32; npub += rsizes[i]; } - rustsecp256k1zkp_v0_10_0_gej_neg(&accj, &accj); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&pubs[npub], &accj, commit, NULL); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&pubs[npub])) { + rustsecp256k1zkp_v0_10_1_gej_neg(&accj, &accj); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&pubs[npub], &accj, commit, NULL); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&pubs[npub])) { return 0; } - rustsecp256k1zkp_v0_10_0_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp); + rustsecp256k1zkp_v0_10_1_rangeproof_pub_expand(pubs, exp, rsizes, rings, genp); npub += rsizes[rings - 1]; e0 = &proof[offset]; offset += 32; for (i = 0; i < npub; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s[i], &proof[offset], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s[i], &proof[offset], &overflow); 
if (overflow) { return 0; } @@ -643,34 +643,34 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_rangeproof_verify_impl(cons return 0; } if (extra_commit != NULL) { - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_m, extra_commit, extra_commit_len); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_m, extra_commit, extra_commit_len); } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_m, m); - ret = rustsecp256k1zkp_v0_10_0_borromean_verify(nonce ? evalues : NULL, e0, s, pubs, rsizes, rings, m, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_m, m); + ret = rustsecp256k1zkp_v0_10_1_borromean_verify(nonce ? evalues : NULL, e0, s, pubs, rsizes, rings, m, 32); if (ret && nonce) { /* Given the nonce, try rewinding the witness to recover its initial state. */ - rustsecp256k1zkp_v0_10_0_scalar blind; + rustsecp256k1zkp_v0_10_1_scalar blind; uint64_t vv; if (!ecmult_gen_ctx) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_rangeproof_rewind_inner(&blind, &vv, message_out, outlen, evalues, s, rsizes, rings, nonce, commit, proof, offset_post_header, genp)) { + if (!rustsecp256k1zkp_v0_10_1_rangeproof_rewind_inner(&blind, &vv, message_out, outlen, evalues, s, rsizes, rings, nonce, commit, proof, offset_post_header, genp)) { return 0; } /* Unwind apparently successful, see if the commitment can be reconstructed. */ /* FIXME: should check vv is in the mantissa's range. 
*/ vv = (vv * scale) + *min_value; - rustsecp256k1zkp_v0_10_0_pedersen_ecmult(ecmult_gen_ctx, &accj, &blind, vv, genp); - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&accj)) { + rustsecp256k1zkp_v0_10_1_pedersen_ecmult(ecmult_gen_ctx, &accj, &blind, vv, genp); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&accj)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_neg(&accj, &accj); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&accj, &accj, commit, NULL); - if (!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&accj)) { + rustsecp256k1zkp_v0_10_1_gej_neg(&accj, &accj); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&accj, &accj, commit, NULL); + if (!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&accj)) { return 0; } if (blindout) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(blindout, &blind); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(blindout, &blind); } if (value_out) { *value_out = vv; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/tests_impl.h index 7f2b5384..5d5da6e2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/rangeproof/tests_impl.h @@ -19,9 +19,9 @@ static void test_rangeproof_api(void) { unsigned char proof[5134]; unsigned char blind[32]; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; - uint64_t vmin = rustsecp256k1zkp_v0_10_0_testrand32(); - uint64_t val = vmin + rustsecp256k1zkp_v0_10_0_testrand32(); + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; + uint64_t vmin = rustsecp256k1zkp_v0_10_1_testrand32(); + uint64_t val = vmin + rustsecp256k1zkp_v0_10_1_testrand32(); size_t len = sizeof(proof); /* we'll switch to dylan thomas for this one */ const unsigned char message[68] = "My tears are like the quiet drift / Of petals from some magic rose;"; @@ -29,57 +29,57 @@ static void test_rangeproof_api(void) { const unsigned char ext_commit[72] = "And all my grief flows from the rift / Of 
unremembered skies and snows."; size_t ext_commit_len = sizeof(ext_commit); - rustsecp256k1zkp_v0_10_0_testrand256(blind); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_0_generator_h)); - - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(STATIC_CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, NULL, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, NULL, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, NULL, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, NULL, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, NULL, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, vmin - 1, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, 
blind, commit.data, 0, 0, val, NULL, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, 0, NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) != 0); + rustsecp256k1zkp_v0_10_1_testrand256(blind); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_1_generator_h)); + + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(STATIC_CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, NULL, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, NULL, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, 
ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, NULL, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, NULL, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, NULL, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, vmin - 1, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, NULL, 0, NULL, 0, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, val, message, mlen, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) 
!= 0); { int exp; int mantissa; uint64_t min_value; uint64_t max_value; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, proof, len) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, proof, len) != 0); CHECK(exp == 0); CHECK(((uint64_t) 1 << mantissa) > val - vmin); CHECK(((uint64_t) 1 << (mantissa - 1)) <= val - vmin); CHECK(min_value == vmin); CHECK(max_value >= val); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, NULL, &mantissa, &min_value, &max_value, proof, len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, NULL, &min_value, &max_value, proof, len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, &mantissa, NULL, &max_value, proof, len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, &mantissa, &min_value, NULL, proof, len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, NULL, len)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, proof, 0) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, NULL, &mantissa, &min_value, &max_value, proof, len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, NULL, &min_value, &max_value, proof, len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, &mantissa, NULL, &max_value, proof, len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, &mantissa, &min_value, NULL, proof, len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, NULL, len)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_info(CTX, &exp, &mantissa, &min_value, &max_value, proof, 0) == 0); } { uint64_t min_value; uint64_t max_value; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, 
&max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 1); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, NULL, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, NULL, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, NULL, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, &commit, NULL, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, 0, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 1); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, NULL, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, NULL, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, NULL, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, NULL, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &min_value, &max_value, &commit, proof, len, NULL, 0, NULL)); } { unsigned char blind_out[32]; @@ -89,46 +89,46 @@ static void test_rangeproof_api(void) { uint64_t max_value; size_t message_len = sizeof(message_out); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(STATIC_CTX, blind_out, &value_out, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(STATIC_CTX, blind_out, &value_out, message_out, 
&message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(min_value == vmin); CHECK(max_value >= val); CHECK(value_out == val); CHECK(message_len == sizeof(message_out)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message, message_out, sizeof(message_out)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message, message_out, sizeof(message_out)) == 0); /* blindout may be NULL */ - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, NULL, &value_out, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, NULL, &value_out, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) != 0); /* valueout may be NULL */ - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, NULL, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, NULL, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, NULL, 
&max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, NULL, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, NULL, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, NULL, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, 0, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, NULL, message_out, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, &message_len, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, 
ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, NULL, &min_value, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, NULL, &max_value, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, NULL, &commit, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, NULL, proof, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, NULL, len, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, 0, ext_commit, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, ext_commit_len, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, 0, 
rustsecp256k1zkp_v0_10_1_generator_h) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, NULL, 0, commit.data, &min_value, &max_value, &commit, proof, len, NULL, 0, NULL)); } /* This constant is hardcoded in these tests and elsewhere, so we * consider it to be part of the API and test it here. */ - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, 0, 64) == 5134); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, UINT64_MAX, 0) == 5134); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, 0, 64) == 5134); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, UINT64_MAX, 0) == 5134); } static void test_borromean(void) { unsigned char e0[32]; - rustsecp256k1zkp_v0_10_0_scalar s[64]; - rustsecp256k1zkp_v0_10_0_gej pubs[64]; - rustsecp256k1zkp_v0_10_0_scalar k[8]; - rustsecp256k1zkp_v0_10_0_scalar sec[8]; - rustsecp256k1zkp_v0_10_0_ge ge; - rustsecp256k1zkp_v0_10_0_scalar one; + rustsecp256k1zkp_v0_10_1_scalar s[64]; + rustsecp256k1zkp_v0_10_1_gej pubs[64]; + rustsecp256k1zkp_v0_10_1_scalar k[8]; + rustsecp256k1zkp_v0_10_1_scalar sec[8]; + rustsecp256k1zkp_v0_10_1_ge ge; + rustsecp256k1zkp_v0_10_1_scalar one; unsigned char m[32]; size_t rsizes[8]; size_t secidx[8]; @@ -136,31 +136,31 @@ static void test_borromean(void) { size_t i; size_t j; int c; - rustsecp256k1zkp_v0_10_0_testrand256_test(m); - nrings = 1 + (rustsecp256k1zkp_v0_10_0_testrand32()&7); + rustsecp256k1zkp_v0_10_1_testrand256_test(m); + nrings = 1 + (rustsecp256k1zkp_v0_10_1_testrand32()&7); c = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&one, 1); - if (rustsecp256k1zkp_v0_10_0_testrand32()&1) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&one, &one); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&one, 1); + if (rustsecp256k1zkp_v0_10_1_testrand32()&1) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&one, &one); } for (i = 0; i < nrings; i++) { - rsizes[i] = 1 + (rustsecp256k1zkp_v0_10_0_testrand32()&7); - secidx[i] = 
rustsecp256k1zkp_v0_10_0_testrand32() % rsizes[i]; + rsizes[i] = 1 + (rustsecp256k1zkp_v0_10_1_testrand32()&7); + secidx[i] = rustsecp256k1zkp_v0_10_1_testrand32() % rsizes[i]; random_scalar_order(&sec[i]); random_scalar_order(&k[i]); - if(rustsecp256k1zkp_v0_10_0_testrand32()&7) { + if(rustsecp256k1zkp_v0_10_1_testrand32()&7) { sec[i] = one; } - if(rustsecp256k1zkp_v0_10_0_testrand32()&7) { + if(rustsecp256k1zkp_v0_10_1_testrand32()&7) { k[i] = one; } for (j = 0; j < rsizes[i]; j++) { random_scalar_order(&s[c + j]); - if(rustsecp256k1zkp_v0_10_0_testrand32()&7) { + if(rustsecp256k1zkp_v0_10_1_testrand32()&7) { s[i] = one; } if (j == secidx[i]) { - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pubs[c + j], &sec[i]); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubs[c + j], &sec[i]); } else { random_group_element_test(&ge); random_group_element_jacobian_test(&pubs[c + j],&ge); @@ -168,28 +168,28 @@ static void test_borromean(void) { } c += rsizes[i]; } - CHECK(rustsecp256k1zkp_v0_10_0_borromean_sign(&CTX->ecmult_gen_ctx, e0, s, pubs, k, sec, rsizes, secidx, nrings, m, 32)); - CHECK(rustsecp256k1zkp_v0_10_0_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); - i = rustsecp256k1zkp_v0_10_0_testrand32() % c; - rustsecp256k1zkp_v0_10_0_scalar_negate(&s[i],&s[i]); - CHECK(!rustsecp256k1zkp_v0_10_0_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s[i],&s[i]); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&one, 1); + CHECK(rustsecp256k1zkp_v0_10_1_borromean_sign(&CTX->ecmult_gen_ctx, e0, s, pubs, k, sec, rsizes, secidx, nrings, m, 32)); + CHECK(rustsecp256k1zkp_v0_10_1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + i = rustsecp256k1zkp_v0_10_1_testrand32() % c; + rustsecp256k1zkp_v0_10_1_scalar_negate(&s[i],&s[i]); + CHECK(!rustsecp256k1zkp_v0_10_1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s[i],&s[i]); + 
rustsecp256k1zkp_v0_10_1_scalar_set_int(&one, 1); for(j = 0; j < 4; j++) { - i = rustsecp256k1zkp_v0_10_0_testrand32() % c; - if (rustsecp256k1zkp_v0_10_0_testrand32() & 1) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&pubs[i],&pubs[i], NULL); + i = rustsecp256k1zkp_v0_10_1_testrand32() % c; + if (rustsecp256k1zkp_v0_10_1_testrand32() & 1) { + rustsecp256k1zkp_v0_10_1_gej_double_var(&pubs[i],&pubs[i], NULL); } else { - rustsecp256k1zkp_v0_10_0_scalar_add(&s[i],&s[i],&one); + rustsecp256k1zkp_v0_10_1_scalar_add(&s[i],&s[i],&one); } - CHECK(!rustsecp256k1zkp_v0_10_0_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); + CHECK(!rustsecp256k1zkp_v0_10_1_borromean_verify(NULL, e0, s, pubs, rsizes, nrings, m, 32)); } } static void test_rangeproof(void) { const uint64_t testvs[11] = {0, 1, 5, 11, 65535, 65537, INT32_MAX, UINT32_MAX, INT64_MAX - 1, INT64_MAX, UINT64_MAX}; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit2; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit2; unsigned char proof[5134 + 1]; /* One additional byte to test if trailing bytes are rejected */ unsigned char blind[32]; unsigned char blindout[32]; @@ -213,10 +213,10 @@ static void test_rangeproof(void) { memcpy(&message_long[i], message_short, sizeof(message_short)); } - rustsecp256k1zkp_v0_10_0_testrand256(blind); + rustsecp256k1zkp_v0_10_1_testrand256(blind); for (i = 0; i < 11; i++) { v = testvs[i]; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_1_generator_h)); for (vmin = 0; vmin < (i<9 && i > 0 ? 
2 : 1); vmin++) { const unsigned char *input_message = NULL; size_t input_message_len = 0; @@ -232,78 +232,78 @@ static void test_rangeproof(void) { input_message_len = sizeof(message_long); } len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, v, input_message, input_message_len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, 0, 0, v, input_message, input_message_len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 5134); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, 0)); + CHECK(len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, 0)); mlen = 4096; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, message, &mlen, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, message, &mlen, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); if (input_message != NULL) { - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message, input_message, input_message_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message, input_message, input_message_len) == 0); } for (j = input_message_len; j < mlen; j++) { CHECK(message[j] == 0); } CHECK(mlen <= 4096); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blindout, blind, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blindout, blind, 32) == 0); CHECK(vout == v); CHECK(minv <= v); CHECK(maxv >= v); len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, -1, 64, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, -1, 64, v, NULL, 0, NULL, 0, 
rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 73); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blindout, blind, 32) == 0); + CHECK(len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blindout, blind, 32) == 0); CHECK(vout == v); CHECK(minv == v); CHECK(maxv == v); /* Check with a committed message */ len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, -1, 64, v, NULL, 0, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, -1, 64, v, NULL, 0, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 73); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, 0)); - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, message_long, sizeof(message_long), rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blindout, blind, 32) == 0); + CHECK(len <= 
rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, 0)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, message_long, sizeof(message_long), rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blindout, blind, 32) == 0); CHECK(vout == v); CHECK(minv == v); CHECK(maxv == v); } } - rustsecp256k1zkp_v0_10_0_testrand256(blind); + rustsecp256k1zkp_v0_10_1_testrand256(blind); v = INT64_MAX - 1; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_1_generator_h)); for (i = 0; i < 19; i++) { len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, 0, &commit, blind, commit.data, i, 0, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, 0, &commit, blind, commit.data, i, 0, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 5134); CHECK(minv <= v); CHECK(maxv >= v); /* Make sure it 
fails when validating with a committed message */ - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, message_short, sizeof(message_short), rustsecp256k1zkp_v0_10_1_generator_h)); } - rustsecp256k1zkp_v0_10_0_testrand256(blind); + rustsecp256k1zkp_v0_10_1_testrand256(blind); { /*Malleability test.*/ - v = rustsecp256k1zkp_v0_10_0_testrandi64(0, 255); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_0_generator_h)); + v = rustsecp256k1zkp_v0_10_1_testrandi64(0, 255); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_1_generator_h)); len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, 0, &commit, blind, commit.data, 0, 3, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, 0, &commit, blind, commit.data, 0, 3, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 5134); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, 3)); + CHECK(len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, 3)); /* Test if trailing bytes are rejected. 
*/ proof[len] = v; - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len + 1, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len + 1, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); for (i = 0; i < len*8; i++) { proof[i >> 3] ^= 1 << (i & 7); - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); proof[i >> 3] ^= 1 << (i & 7); } - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(minv <= v); CHECK(maxv >= v); } @@ -311,48 +311,48 @@ static void test_rangeproof(void) { for (i = 0; i < (size_t) COUNT; i++) { int exp; int min_bits; - v = rustsecp256k1zkp_v0_10_0_testrandi64(0, UINT64_MAX >> (rustsecp256k1zkp_v0_10_0_testrand32()&63)); + v = rustsecp256k1zkp_v0_10_1_testrandi64(0, UINT64_MAX >> (rustsecp256k1zkp_v0_10_1_testrand32()&63)); vmin = 0; - if ((v < INT64_MAX) && (rustsecp256k1zkp_v0_10_0_testrand32()&1)) { - vmin = rustsecp256k1zkp_v0_10_0_testrandi64(0, v); + if ((v < INT64_MAX) && (rustsecp256k1zkp_v0_10_1_testrand32()&1)) { + vmin = rustsecp256k1zkp_v0_10_1_testrandi64(0, v); } - rustsecp256k1zkp_v0_10_0_testrand256(blind); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_0_generator_h)); + rustsecp256k1zkp_v0_10_1_testrand256(blind); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_1_generator_h)); len = 5134; - exp = 
(int)rustsecp256k1zkp_v0_10_0_testrandi64(0,18)-(int)rustsecp256k1zkp_v0_10_0_testrandi64(0,18); + exp = (int)rustsecp256k1zkp_v0_10_1_testrandi64(0,18)-(int)rustsecp256k1zkp_v0_10_1_testrandi64(0,18); if (exp < 0) { exp = -exp; } - min_bits = (int)rustsecp256k1zkp_v0_10_0_testrandi64(0,64)-(int)rustsecp256k1zkp_v0_10_0_testrandi64(0,64); + min_bits = (int)rustsecp256k1zkp_v0_10_1_testrandi64(0,64)-(int)rustsecp256k1zkp_v0_10_1_testrandi64(0,64); if (min_bits < 0) { min_bits = -min_bits; } - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, exp, min_bits, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, vmin, &commit, blind, commit.data, exp, min_bits, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(len <= 5134); - CHECK(len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, v, min_bits)); + CHECK(len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, v, min_bits)); mlen = 4096; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, message, &mlen, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, message, &mlen, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); for (j = 0; j < mlen; j++) { CHECK(message[j] == 0); } CHECK(mlen <= 4096); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blindout, blind, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blindout, blind, 32) == 0); CHECK(minv <= v); CHECK(maxv >= v); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blindout, &vout, NULL, NULL, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, 
rustsecp256k1zkp_v0_10_1_generator_h)); memcpy(&commit2, &commit, sizeof(commit)); } for (j = 0; j < 3; j++) { for (i = 0; i < 96; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(&proof[i * 32]); + rustsecp256k1zkp_v0_10_1_testrand256(&proof[i * 32]); } for (k = 0; k < 128; k += 3) { len = k; - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit2, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit2, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); } - len = rustsecp256k1zkp_v0_10_0_testrandi64(0, 3072); - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit2, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + len = rustsecp256k1zkp_v0_10_1_testrandi64(0, 3072); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit2, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); } } @@ -361,29 +361,29 @@ static void test_rangeproof_null_blinder(void) { const unsigned char blind[32] = { 0 }; const uint64_t v = 1111; uint64_t minv, maxv; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; size_t len; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, v, rustsecp256k1zkp_v0_10_1_generator_h)); /* Try a 32-bit proof; should work */ len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, 1, &commit, blind, commit.data, 0, 32, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, 1, &commit, blind, commit.data, 0, 32, v, NULL, 0, NULL, 0, 
rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(minv == 1); CHECK(maxv == 1ULL << 32); /* Try a 3-bit proof; should work */ len = 5134; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, v - 1, &commit, blind, commit.data, 0, 3, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, v - 1, &commit, blind, commit.data, 0, 3, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify(CTX, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); CHECK(minv == 1110); CHECK(maxv == 1117); /* But a 2-bits will not because then it does not have any subcommitments (which rerandomize * the blinding factors that get passed into the borromean logic ... 
passing 0s will fail) */ len = 5134; - CHECK(!rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, v - 1, &commit, blind, commit.data, 0, 2, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); + CHECK(!rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, v - 1, &commit, blind, commit.data, 0, 2, v, NULL, 0, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); /* Rewinding with 3-bits works */ { @@ -394,14 +394,14 @@ static void test_rangeproof_null_blinder(void) { size_t msg_len = sizeof(msg); len = 1000; - rustsecp256k1zkp_v0_10_0_testrand256(msg); - rustsecp256k1zkp_v0_10_0_testrand256(&msg[32]); - rustsecp256k1zkp_v0_10_0_testrand256(&msg[64]); - rustsecp256k1zkp_v0_10_0_testrand256(&msg[96]); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, 0, 3, v, msg, sizeof(msg), NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind(CTX, blind_out, &value_out, msg_out, &msg_len, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind, blind_out, sizeof(blind)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(msg, msg_out, sizeof(msg)) == 0); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(&msg[32]); + rustsecp256k1zkp_v0_10_1_testrand256(&msg[64]); + rustsecp256k1zkp_v0_10_1_testrand256(&msg[96]); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &len, v, &commit, blind, commit.data, 0, 3, v, msg, sizeof(msg), NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind(CTX, blind_out, &value_out, msg_out, &msg_len, commit.data, &minv, &maxv, &commit, proof, len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind, blind_out, sizeof(blind)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(msg, msg_out, sizeof(msg)) == 0); 
CHECK(value_out == v); CHECK(minv == v); CHECK(maxv == v + 7); @@ -410,7 +410,7 @@ static void test_rangeproof_null_blinder(void) { static void test_single_value_proof(uint64_t val) { unsigned char proof[5000]; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit; unsigned char blind[32]; unsigned char blind_out[32]; unsigned char nonce[32]; @@ -424,11 +424,11 @@ static void test_single_value_proof(uint64_t val) { size_t m_len_out = 0; size_t i; - rustsecp256k1zkp_v0_10_0_testrand256(blind); - rustsecp256k1zkp_v0_10_0_testrand256(nonce); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_0_generator_h)); + rustsecp256k1zkp_v0_10_1_testrand256(blind); + rustsecp256k1zkp_v0_10_1_testrand256(nonce); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit, blind, val, rustsecp256k1zkp_v0_10_1_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign( CTX, proof, &plen, val, /* min_val */ @@ -438,11 +438,11 @@ static void test_single_value_proof(uint64_t val) { val, /* val */ message, sizeof(message), /* Will cause this to fail */ NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h ) == 0); plen = sizeof(proof); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign( CTX, proof, &plen, val, /* min_val */ @@ -452,9 +452,9 @@ static void test_single_value_proof(uint64_t val) { val, /* val */ NULL, 0, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h ) == 1); - CHECK(plen <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, val, 0)); + CHECK(plen <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, val, 0)); /* Different proof sizes are unfortunate but is caused by `min_value` of * zero being special-cased and encoded more efficiently. 
*/ @@ -466,42 +466,42 @@ static void test_single_value_proof(uint64_t val) { /* Test if trailing bytes are rejected. */ proof[plen] = 0; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_val_out, &max_val_out, &commit, proof, plen + 1, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h ) == 0); /* Test if single-bit malleation is caught */ for (i = 0; i < plen*8; i++) { proof[i >> 3] ^= 1 << (i & 7); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_val_out, &max_val_out, &commit, proof, plen, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h ) == 0); proof[i >> 3] ^= 1 << (i & 7); } /* Test if unchanged proof is accepted. */ - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_val_out, &max_val_out, &commit, proof, plen, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h ) == 1); CHECK(min_val_out == val); CHECK(max_val_out == val); memset(message_out, 0, sizeof(message_out)); m_len_out = sizeof(message_out); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind( CTX, blind_out, &val_out, message_out, &m_len_out, @@ -510,13 +510,13 @@ static void test_single_value_proof(uint64_t val) { &commit, proof, plen, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); CHECK(val_out == val); CHECK(min_val_out == val); CHECK(max_val_out == val); CHECK(m_len_out == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind, blind_out, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind, blind_out, 32) == 0); for (m_len_out = 0; m_len_out < sizeof(message_out); m_len_out++) { CHECK(message_out[m_len_out] == 0); } @@ -524,34 +524,34 @@ static void test_single_value_proof(uint64_t val) { #define 
MAX_N_GENS 30 static void test_multiple_generators(void) { - const size_t n_inputs = (rustsecp256k1zkp_v0_10_0_testrand32() % (MAX_N_GENS / 2)) + 1; - const size_t n_outputs = (rustsecp256k1zkp_v0_10_0_testrand32() % (MAX_N_GENS / 2)) + 1; + const size_t n_inputs = (rustsecp256k1zkp_v0_10_1_testrand32() % (MAX_N_GENS / 2)) + 1; + const size_t n_outputs = (rustsecp256k1zkp_v0_10_1_testrand32() % (MAX_N_GENS / 2)) + 1; const size_t n_generators = n_inputs + n_outputs; unsigned char *generator_blind[MAX_N_GENS]; unsigned char *pedersen_blind[MAX_N_GENS]; - rustsecp256k1zkp_v0_10_0_generator generator[MAX_N_GENS]; - rustsecp256k1zkp_v0_10_0_pedersen_commitment commit[MAX_N_GENS]; - const rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit_ptr[MAX_N_GENS]; + rustsecp256k1zkp_v0_10_1_generator generator[MAX_N_GENS]; + rustsecp256k1zkp_v0_10_1_pedersen_commitment commit[MAX_N_GENS]; + const rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit_ptr[MAX_N_GENS]; size_t i; int64_t total_value; uint64_t value[MAX_N_GENS]; - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; unsigned char generator_seed[32]; random_scalar_order(&s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(generator_seed, &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(generator_seed, &s); /* Create all the needed generators */ for (i = 0; i < n_generators; i++) { generator_blind[i] = (unsigned char*) malloc(32); pedersen_blind[i] = (unsigned char*) malloc(32); random_scalar_order(&s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(generator_blind[i], &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(generator_blind[i], &s); random_scalar_order(&s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(pedersen_blind[i], &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(pedersen_blind[i], &s); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &generator[i], generator_seed, generator_blind[i])); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &generator[i], generator_seed, 
generator_blind[i])); commit_ptr[i] = &commit[i]; } @@ -559,23 +559,23 @@ static void test_multiple_generators(void) { /* Compute all the values -- can be positive or negative */ total_value = 0; for (i = 0; i < n_outputs; i++) { - value[n_inputs + i] = rustsecp256k1zkp_v0_10_0_testrandi64(0, INT64_MAX - total_value); + value[n_inputs + i] = rustsecp256k1zkp_v0_10_1_testrandi64(0, INT64_MAX - total_value); total_value += value[n_inputs + i]; } for (i = 0; i < n_inputs - 1; i++) { - value[i] = rustsecp256k1zkp_v0_10_0_testrandi64(0, total_value); + value[i] = rustsecp256k1zkp_v0_10_1_testrandi64(0, total_value); total_value -= value[i]; } value[i] = total_value; /* Correct for blinding factors and do the commitments */ - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum(CTX, value, (const unsigned char * const *) generator_blind, pedersen_blind, n_generators, n_inputs)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum(CTX, value, (const unsigned char * const *) generator_blind, pedersen_blind, n_generators, n_inputs)); for (i = 0; i < n_generators; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &commit[i], pedersen_blind[i], value[i], &generator[i])); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &commit[i], pedersen_blind[i], value[i], &generator[i])); } /* Verify */ - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_verify_tally(CTX, &commit_ptr[0], n_inputs, &commit_ptr[n_inputs], n_outputs)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_verify_tally(CTX, &commit_ptr[0], n_inputs, &commit_ptr[n_inputs], n_outputs)); /* Cleanup */ for (i = 0; i < n_generators; i++) { @@ -590,7 +590,7 @@ static void test_rangeproof_fixed_vectors(void) { uint64_t value; uint64_t min_value; uint64_t max_value; - rustsecp256k1zkp_v0_10_0_pedersen_commitment pc; + rustsecp256k1zkp_v0_10_1_pedersen_commitment pc; unsigned char message[4000] = {0}; size_t m_len = sizeof(message); @@ -649,19 +649,19 @@ static void 
test_rangeproof_fixed_vectors(void) { 0x77, 0x47, 0xa4, 0xd3, 0x53, 0x17, 0xc6, 0x44, 0x30, 0x73, 0x84, 0xeb, 0x1f, 0xbe, 0xa1, 0xfb }; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &pc, commit_1)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &pc, commit_1)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_value, &max_value, &pc, vector_1, sizeof(vector_1), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); CHECK(min_value == 86); CHECK(max_value == 25586); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind( CTX, blind, &value, message, &m_len, @@ -670,10 +670,10 @@ static void test_rangeproof_fixed_vectors(void) { &pc, vector_1, sizeof(vector_1), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind, blind_1, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind, blind_1, 32) == 0); CHECK(value == 86); CHECK(min_value == 86); CHECK(max_value == 25586); @@ -720,19 +720,19 @@ static void test_rangeproof_fixed_vectors(void) { }; static const unsigned char message_2[] = "When I see my own likeness in the depths of someone else's consciousness, I always experience a moment of panic."; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &pc, commit_2)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &pc, commit_2)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_value, &max_value, &pc, vector_2, sizeof(vector_2), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); CHECK(min_value == 0); CHECK(max_value == 15); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind( CTX, blind, &value, message, &m_len, @@ 
-741,15 +741,15 @@ static void test_rangeproof_fixed_vectors(void) { &pc, vector_2, sizeof(vector_2), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind, blind_2, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind, blind_2, 32) == 0); CHECK(value == 11); CHECK(min_value == 0); CHECK(max_value == 15); CHECK(m_len == 192); /* length of the sidechannel in the proof */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message, message_2, sizeof(message_2)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message, message_2, sizeof(message_2)) == 0); for (i = sizeof(message_2); i < m_len; i++) { CHECK(message[i] == 0); } @@ -778,19 +778,19 @@ static void test_rangeproof_fixed_vectors(void) { 0xc0, 0x6b, 0x9b, 0x4c, 0x02, 0xa6, 0xc8, 0xf6, 0xc0, 0x34, 0xea, 0x35, 0x57, 0xf4, 0xe1, 0x37 }; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &pc, commit_3)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &pc, commit_3)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, &min_value, &max_value, &pc, vector_3, sizeof(vector_3), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); CHECK(min_value == UINT64_MAX); CHECK(max_value == UINT64_MAX); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind( CTX, blind, &value, message, &m_len, @@ -799,9 +799,9 @@ static void test_rangeproof_fixed_vectors(void) { &pc, vector_3, sizeof(vector_3), NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind, blind_3, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind, blind_3, 32) == 0); CHECK(value == UINT64_MAX); CHECK(min_value == UINT64_MAX); CHECK(max_value == UINT64_MAX); @@ -826,13 +826,13 @@ static void 
print_vector_helper(unsigned char *buf, size_t buf_len) { printf("};\n"); } -static void print_vector(int i, unsigned char *proof, size_t p_len, rustsecp256k1zkp_v0_10_0_pedersen_commitment *commit) { +static void print_vector(int i, unsigned char *proof, size_t p_len, rustsecp256k1zkp_v0_10_1_pedersen_commitment *commit) { unsigned char commit_output[33]; printf("unsigned char vector_%d[] = {\n", i); print_vector_helper(proof, p_len); - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize(CTX, commit_output, commit)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize(CTX, commit_output, commit)); printf("unsigned char commit_%d[] = {\n", i); print_vector_helper(commit_output, sizeof(commit_output)); } @@ -850,21 +850,21 @@ static unsigned char vector_nonce[] = { /* Maximum length of a message that can be embedded into a rangeproof */ static void test_rangeproof_fixed_vectors_reproducible_helper(unsigned char *vector, size_t vector_len, unsigned char *commit, uint64_t *value_r, uint64_t *min_value_r, uint64_t *max_value_r, unsigned char *message_r, size_t *m_len_r) { - rustsecp256k1zkp_v0_10_0_pedersen_commitment pc; + rustsecp256k1zkp_v0_10_1_pedersen_commitment pc; unsigned char blind_r[32]; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse(CTX, &pc, commit)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_verify( + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse(CTX, &pc, commit)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_verify( CTX, min_value_r, max_value_r, &pc, vector, vector_len, NULL, 0, - rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); *m_len_r = SECP256K1_RANGEPROOF_MAX_MESSAGE_LEN; - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_rewind( + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_rewind( CTX, blind_r, value_r, message_r, m_len_r, @@ -873,9 +873,9 @@ static void test_rangeproof_fixed_vectors_reproducible_helper(unsigned char *vec &pc, vector, vector_len, NULL, 0, - 
rustsecp256k1zkp_v0_10_0_generator_h + rustsecp256k1zkp_v0_10_1_generator_h )); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(blind_r, vector_blind, sizeof(vector_blind)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(blind_r, vector_blind, sizeof(vector_blind)) == 0); } static void test_rangeproof_fixed_vectors_reproducible(void) { @@ -896,7 +896,7 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { int exp = 18; unsigned char proof[5126]; size_t p_len = sizeof(proof); - rustsecp256k1zkp_v0_10_0_pedersen_commitment pc; + rustsecp256k1zkp_v0_10_1_pedersen_commitment pc; unsigned char vector_0[] = { 0x40, 0x3f, 0xd1, 0x77, 0x65, 0x05, 0x87, 0x88, 0xd0, 0x3d, 0xb2, 0x24, 0x60, 0x7a, 0x08, 0x76, @@ -1227,19 +1227,19 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { 0xef, }; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(p_len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, value, min_bits)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(p_len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, value, min_bits)); CHECK(p_len == sizeof(proof)); /* Uncomment the next line to print the test vector */ /* print_vector(0, proof, p_len, &pc); */ CHECK(p_len == sizeof(vector_0)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(proof, vector_0, p_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(proof, vector_0, p_len) == 0); test_rangeproof_fixed_vectors_reproducible_helper(vector_0, 
sizeof(vector_0), commit_0, &value_r, &min_value_r, &max_value_r, message_r, &m_len_r); CHECK(value_r == value); CHECK(m_len_r == m_len); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message_r, message, m_len_r) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message_r, message, m_len_r) == 0); CHECK(min_value_r == min_value); CHECK(max_value_r == UINT64_MAX); memset(message_r, 0, sizeof(message_r)); @@ -1255,7 +1255,7 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { int exp = 1; unsigned char proof[267]; size_t p_len = sizeof(proof); - rustsecp256k1zkp_v0_10_0_pedersen_commitment pc; + rustsecp256k1zkp_v0_10_1_pedersen_commitment pc; unsigned char vector_1[] = { 0x61, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0xcb, 0xdc, 0xbe, 0x42, 0xe6, @@ -1281,19 +1281,19 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { 0x66, 0x16, 0x2e, 0x44, 0xc8, 0x65, 0x8e, 0xe6, 0x3a, 0x1a, 0x57, 0x2c, 0xb9, 0x6c, 0x07, 0x85, 0xf0, }; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(p_len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, value, min_bits)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(p_len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, value, min_bits)); CHECK(p_len == sizeof(proof)); /* Uncomment the next line to print the test vector */ /* print_vector(1, proof, p_len, &pc); */ CHECK(p_len == sizeof(vector_1)); - 
CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(proof, vector_1, p_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(proof, vector_1, p_len) == 0); test_rangeproof_fixed_vectors_reproducible_helper(vector_1, sizeof(vector_1), commit_1, &value_r, &min_value_r, &max_value_r, message_r, &m_len_r); CHECK(value_r == value); CHECK(m_len_r == m_len); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message_r, message, m_len_r) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message_r, message, m_len_r) == 0); CHECK(min_value_r == 3); CHECK(max_value_r == 73); memset(message_r, 0, sizeof(message_r)); @@ -1310,7 +1310,7 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { int exp = 0; unsigned char proof[106]; size_t p_len = sizeof(proof); - rustsecp256k1zkp_v0_10_0_pedersen_commitment pc; + rustsecp256k1zkp_v0_10_1_pedersen_commitment pc; unsigned char vector_2[] = { 0x60, 0x00, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0x81, 0xd8, 0x21, 0x12, 0x4d, 0xa4, @@ -1327,19 +1327,19 @@ static void test_rangeproof_fixed_vectors_reproducible(void) { 0x70, }; - CHECK(rustsecp256k1zkp_v0_10_0_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(rustsecp256k1zkp_v0_10_0_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_0_generator_h)); - CHECK(p_len <= rustsecp256k1zkp_v0_10_0_rangeproof_max_size(CTX, value, min_bits)); + CHECK(rustsecp256k1zkp_v0_10_1_pedersen_commit(CTX, &pc, vector_blind, value, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(rustsecp256k1zkp_v0_10_1_rangeproof_sign(CTX, proof, &p_len, min_value, &pc, vector_blind, vector_nonce, exp, min_bits, value, message, m_len, NULL, 0, rustsecp256k1zkp_v0_10_1_generator_h)); + CHECK(p_len <= rustsecp256k1zkp_v0_10_1_rangeproof_max_size(CTX, value, min_bits)); CHECK(p_len == sizeof(proof)); /* Uncomment the next line to print the test vector */ /* print_vector(2, proof, 
p_len, &pc); */ CHECK(p_len == sizeof(vector_2)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(proof, vector_2, p_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(proof, vector_2, p_len) == 0); test_rangeproof_fixed_vectors_reproducible_helper(vector_2, sizeof(vector_2), commit_2, &value_r, &min_value_r, &max_value_r, message_r, &m_len_r); CHECK(value_r == value); CHECK(m_len_r == m_len); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(message_r, message, m_len_r) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(message_r, message, m_len_r) == 0); CHECK(min_value_r == INT64_MAX-1); CHECK(max_value_r == INT64_MAX); memset(message_r, 0, sizeof(message_r)); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/Makefile.am.include index 769b8a44..15edee1e 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_recovery.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_recovery.h noinst_HEADERS += src/modules/recovery/main_impl.h noinst_HEADERS += src/modules/recovery/tests_impl.h noinst_HEADERS += src/modules/recovery/tests_exhaustive_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/bench_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/bench_impl.h index daa19704..2c0ddc79 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/bench_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/bench_impl.h @@ -10,7 +10,7 @@ #include "../../../include/secp256k1_recovery.h" typedef struct { - rustsecp256k1zkp_v0_10_0_context *ctx; + rustsecp256k1zkp_v0_10_1_context *ctx; unsigned char msg[32]; unsigned char sig[64]; } bench_recover_data; @@ -18,16 +18,16 @@ typedef struct { static void bench_recover(void* arg, int iters) { int 
i; bench_recover_data *data = (bench_recover_data*)arg; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; unsigned char pubkeyc[33]; for (i = 0; i < iters; i++) { int j; size_t pubkeylen = 33; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature sig; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(data->ctx, &sig, data->sig, i % 2)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(data->ctx, &pubkey, &sig, data->msg)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(data->ctx, pubkeyc, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED)); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature sig; + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(data->ctx, &sig, data->sig, i % 2)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(data->ctx, &pubkey, &sig, data->msg)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(data->ctx, pubkeyc, &pubkeylen, &pubkey, SECP256K1_EC_COMPRESSED)); for (j = 0; j < 32; j++) { data->sig[j + 32] = data->msg[j]; /* Move former message to S. */ data->msg[j] = data->sig[j]; /* Move former R to message. 
*/ @@ -52,11 +52,11 @@ static void run_recovery_bench(int iters, int argc, char** argv) { bench_recover_data data; int d = argc == 1; - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (d || have_flag(argc, argv, "ecdsa") || have_flag(argc, argv, "recover") || have_flag(argc, argv, "ecdsa_recover")) run_benchmark("ecdsa_recover", bench_recover, bench_recover_setup, NULL, &data, 10, iters); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); } #endif /* SECP256K1_MODULE_RECOVERY_BENCH_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/main_impl.h index dc665e6d..1cb071b2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/main_impl.h @@ -9,34 +9,34 @@ #include "../../../include/secp256k1_recovery.h" -static void rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar* r, rustsecp256k1zkp_v0_10_0_scalar* s, int* recid, const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature* sig) { +static void rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar* r, rustsecp256k1zkp_v0_10_1_scalar* s, int* recid, const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature* sig) { (void)ctx; - if (sizeof(rustsecp256k1zkp_v0_10_0_scalar) == 32) { - /* When the rustsecp256k1zkp_v0_10_0_scalar type is exactly 32 byte, use its - * representation inside rustsecp256k1zkp_v0_10_0_ecdsa_signature, as conversion is very fast. - * Note that rustsecp256k1zkp_v0_10_0_ecdsa_signature_save must use the same representation. 
*/ + if (sizeof(rustsecp256k1zkp_v0_10_1_scalar) == 32) { + /* When the rustsecp256k1zkp_v0_10_1_scalar type is exactly 32 byte, use its + * representation inside rustsecp256k1zkp_v0_10_1_ecdsa_signature, as conversion is very fast. + * Note that rustsecp256k1zkp_v0_10_1_ecdsa_signature_save must use the same representation. */ memcpy(r, &sig->data[0], 32); memcpy(s, &sig->data[32], 32); } else { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, &sig->data[0], NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(s, &sig->data[32], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, &sig->data[0], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(s, &sig->data[32], NULL); } *recid = sig->data[64]; } -static void rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_save(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature* sig, const rustsecp256k1zkp_v0_10_0_scalar* r, const rustsecp256k1zkp_v0_10_0_scalar* s, int recid) { - if (sizeof(rustsecp256k1zkp_v0_10_0_scalar) == 32) { +static void rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_save(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature* sig, const rustsecp256k1zkp_v0_10_1_scalar* r, const rustsecp256k1zkp_v0_10_1_scalar* s, int recid) { + if (sizeof(rustsecp256k1zkp_v0_10_1_scalar) == 32) { memcpy(&sig->data[0], r, 32); memcpy(&sig->data[32], s, 32); } else { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[0], r); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[32], s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[0], r); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[32], s); } sig->data[64] = recid; } -int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature* sig, const unsigned char *input64, int recid) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(const rustsecp256k1zkp_v0_10_1_context* ctx, 
rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature* sig, const unsigned char *input64, int recid) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret = 1; int overflow = 0; @@ -45,110 +45,110 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(const rus ARG_CHECK(input64 != NULL); ARG_CHECK(recid >= 0 && recid <= 3); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&r, &input64[0], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&r, &input64[0], &overflow); ret &= !overflow; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &input64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &input64[32], &overflow); ret &= !overflow; if (ret) { - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_save(sig, &r, &s, recid); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_save(sig, &r, &s, recid); } else { memset(sig, 0, sizeof(*sig)); } return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output64, int *recid, const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature* sig) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output64, int *recid, const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature* sig) { + rustsecp256k1zkp_v0_10_1_scalar r, s; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output64 != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(recid != NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_load(ctx, &r, &s, recid, sig); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&output64[0], &r); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&output64[32], &s); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_load(ctx, &r, &s, recid, sig); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&output64[0], &r); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&output64[32], &s); return 1; } -int 
rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature* sigin) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature* sigin) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int recid; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(sigin != NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, sigin); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(sig, &r, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, sigin); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(sig, &r, &s); return 1; } -static int rustsecp256k1zkp_v0_10_0_ecdsa_sig_recover(const rustsecp256k1zkp_v0_10_0_scalar *sigr, const rustsecp256k1zkp_v0_10_0_scalar* sigs, rustsecp256k1zkp_v0_10_0_ge *pubkey, const rustsecp256k1zkp_v0_10_0_scalar *message, int recid) { +static int rustsecp256k1zkp_v0_10_1_ecdsa_sig_recover(const rustsecp256k1zkp_v0_10_1_scalar *sigr, const rustsecp256k1zkp_v0_10_1_scalar* sigs, rustsecp256k1zkp_v0_10_1_ge *pubkey, const rustsecp256k1zkp_v0_10_1_scalar *message, int recid) { unsigned char brx[32]; - rustsecp256k1zkp_v0_10_0_fe fx; - rustsecp256k1zkp_v0_10_0_ge x; - rustsecp256k1zkp_v0_10_0_gej xj; - rustsecp256k1zkp_v0_10_0_scalar rn, u1, u2; - rustsecp256k1zkp_v0_10_0_gej qj; + rustsecp256k1zkp_v0_10_1_fe fx; + rustsecp256k1zkp_v0_10_1_ge x; + rustsecp256k1zkp_v0_10_1_gej xj; + rustsecp256k1zkp_v0_10_1_scalar rn, u1, u2; + rustsecp256k1zkp_v0_10_1_gej qj; int r; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigr) || rustsecp256k1zkp_v0_10_0_scalar_is_zero(sigs)) { + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigr) || 
rustsecp256k1zkp_v0_10_1_scalar_is_zero(sigs)) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(brx, sigr); - r = rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fx, brx); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(brx, sigr); + r = rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fx, brx); (void)r; VERIFY_CHECK(r); /* brx comes from a scalar, so is less than the order; certainly less than p */ if (recid & 2) { - if (rustsecp256k1zkp_v0_10_0_fe_cmp_var(&fx, &rustsecp256k1zkp_v0_10_0_ecdsa_const_p_minus_order) >= 0) { + if (rustsecp256k1zkp_v0_10_1_fe_cmp_var(&fx, &rustsecp256k1zkp_v0_10_1_ecdsa_const_p_minus_order) >= 0) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_add(&fx, &rustsecp256k1zkp_v0_10_0_ecdsa_const_order_as_fe); + rustsecp256k1zkp_v0_10_1_fe_add(&fx, &rustsecp256k1zkp_v0_10_1_ecdsa_const_order_as_fe); } - if (!rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&x, &fx, recid & 1)) { + if (!rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&x, &fx, recid & 1)) { return 0; } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&xj, &x); - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&rn, sigr); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u1, &rn, message); - rustsecp256k1zkp_v0_10_0_scalar_negate(&u1, &u1); - rustsecp256k1zkp_v0_10_0_scalar_mul(&u2, &rn, sigs); - rustsecp256k1zkp_v0_10_0_ecmult(&qj, &xj, &u2, &u1); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(pubkey, &qj); - return !rustsecp256k1zkp_v0_10_0_gej_is_infinity(&qj); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&xj, &x); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&rn, sigr); + rustsecp256k1zkp_v0_10_1_scalar_mul(&u1, &rn, message); + rustsecp256k1zkp_v0_10_1_scalar_negate(&u1, &u1); + rustsecp256k1zkp_v0_10_1_scalar_mul(&u2, &rn, sigs); + rustsecp256k1zkp_v0_10_1_ecmult(&qj, &xj, &u2, &u1); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(pubkey, &qj); + return !rustsecp256k1zkp_v0_10_1_gej_is_infinity(&qj); } -int rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(const rustsecp256k1zkp_v0_10_0_context* ctx, 
rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *signature, const unsigned char *msghash32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_0_nonce_function noncefp, const void* noncedata) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *signature, const unsigned char *msghash32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_1_nonce_function noncefp, const void* noncedata) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret, recid; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(msghash32 != NULL); ARG_CHECK(signature != NULL); ARG_CHECK(seckey != NULL); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign_inner(ctx, &r, &s, &recid, NULL, NULL, NULL, msghash32, seckey, noncefp, noncedata); - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_save(signature, &r, &s, recid); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sign_inner(ctx, &r, &s, &recid, NULL, NULL, NULL, msghash32, seckey, noncefp, noncedata); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_save(signature, &r, &s, recid); return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_recover(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature *signature, const unsigned char *msghash32) { - rustsecp256k1zkp_v0_10_0_ge q; - rustsecp256k1zkp_v0_10_0_scalar r, s; - rustsecp256k1zkp_v0_10_0_scalar m; +int rustsecp256k1zkp_v0_10_1_ecdsa_recover(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature *signature, const unsigned char *msghash32) { + rustsecp256k1zkp_v0_10_1_ge q; + rustsecp256k1zkp_v0_10_1_scalar r, s; + 
rustsecp256k1zkp_v0_10_1_scalar m; int recid; VERIFY_CHECK(ctx != NULL); ARG_CHECK(msghash32 != NULL); ARG_CHECK(signature != NULL); ARG_CHECK(pubkey != NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, signature); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, signature); VERIFY_CHECK(recid >= 0 && recid < 4); /* should have been caught in parse_compact */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&m, msghash32, NULL); - if (rustsecp256k1zkp_v0_10_0_ecdsa_sig_recover(&r, &s, &q, &m, recid)) { - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &q); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&m, msghash32, NULL); + if (rustsecp256k1zkp_v0_10_1_ecdsa_sig_recover(&r, &s, &q, &m, recid)) { + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &q); return 1; } else { memset(pubkey, 0, sizeof(*pubkey)); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_exhaustive_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_exhaustive_impl.h index a462c7b6..223086f4 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_exhaustive_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_exhaustive_impl.h @@ -10,7 +10,7 @@ #include "main_impl.h" #include "../../../include/secp256k1_recovery.h" -static void test_exhaustive_recovery_sign(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_recovery_sign(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { int i, j, k; uint64_t iter = 0; @@ -20,23 +20,23 @@ static void test_exhaustive_recovery_sign(const rustsecp256k1zkp_v0_10_0_context if (skip_section(&iter)) continue; for (k = 1; k < EXHAUSTIVE_TEST_ORDER; k++) { /* nonce */ const int starting_k = k; - rustsecp256k1zkp_v0_10_0_fe r_dot_y_normalized; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature rsig; - 
rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; - rustsecp256k1zkp_v0_10_0_scalar sk, msg, r, s, expected_r; + rustsecp256k1zkp_v0_10_1_fe r_dot_y_normalized; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature rsig; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_scalar sk, msg, r, s, expected_r; unsigned char sk32[32], msg32[32]; int expected_recid; int recid; int overflow; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, i); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sk, j); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sk32, &sk); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msg32, &msg); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sk, j); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sk32, &sk); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(msg32, &msg); - rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(ctx, &rsig, msg32, sk32, rustsecp256k1zkp_v0_10_0_nonce_function_smallint, &k); + rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(ctx, &rsig, msg32, sk32, rustsecp256k1zkp_v0_10_1_nonce_function_smallint, &k); /* Check directly */ - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, &rsig); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_load(ctx, &r, &s, &recid, &rsig); r_from_k(&expected_r, group, k, &overflow); CHECK(r == expected_r); CHECK((k * s) % EXHAUSTIVE_TEST_ORDER == (i + r * j) % EXHAUSTIVE_TEST_ORDER || @@ -49,18 +49,18 @@ static void test_exhaustive_recovery_sign(const rustsecp256k1zkp_v0_10_0_context * in the real group. */ expected_recid = overflow ? 
2 : 0; r_dot_y_normalized = group[k].y; - rustsecp256k1zkp_v0_10_0_fe_normalize(&r_dot_y_normalized); + rustsecp256k1zkp_v0_10_1_fe_normalize(&r_dot_y_normalized); /* Also the recovery id is flipped depending if we hit the low-s branch */ if ((k * s) % EXHAUSTIVE_TEST_ORDER == (i + r * j) % EXHAUSTIVE_TEST_ORDER) { - expected_recid |= rustsecp256k1zkp_v0_10_0_fe_is_odd(&r_dot_y_normalized); + expected_recid |= rustsecp256k1zkp_v0_10_1_fe_is_odd(&r_dot_y_normalized); } else { - expected_recid |= !rustsecp256k1zkp_v0_10_0_fe_is_odd(&r_dot_y_normalized); + expected_recid |= !rustsecp256k1zkp_v0_10_1_fe_is_odd(&r_dot_y_normalized); } CHECK(recid == expected_recid); /* Convert to a standard sig then check */ - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, &sig); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, &sig); /* Note that we compute expected_r *after* signing -- this is important * because our nonce-computing function function might change k during * signing. 
*/ @@ -78,7 +78,7 @@ static void test_exhaustive_recovery_sign(const rustsecp256k1zkp_v0_10_0_context } } -static void test_exhaustive_recovery_verify(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_recovery_verify(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { /* This is essentially a copy of test_exhaustive_verify, with recovery added */ int s, r, msg, key; uint64_t iter = 0; @@ -86,41 +86,41 @@ static void test_exhaustive_recovery_verify(const rustsecp256k1zkp_v0_10_0_conte for (r = 1; r < EXHAUSTIVE_TEST_ORDER; r++) { for (msg = 1; msg < EXHAUSTIVE_TEST_ORDER; msg++) { for (key = 1; key < EXHAUSTIVE_TEST_ORDER; key++) { - rustsecp256k1zkp_v0_10_0_ge nonconst_ge; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature rsig; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; - rustsecp256k1zkp_v0_10_0_pubkey pk; - rustsecp256k1zkp_v0_10_0_scalar sk_s, msg_s, r_s, s_s; - rustsecp256k1zkp_v0_10_0_scalar s_times_k_s, msg_plus_r_times_sk_s; + rustsecp256k1zkp_v0_10_1_ge nonconst_ge; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature rsig; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pk; + rustsecp256k1zkp_v0_10_1_scalar sk_s, msg_s, r_s, s_s; + rustsecp256k1zkp_v0_10_1_scalar s_times_k_s, msg_plus_r_times_sk_s; int recid = 0; int k, should_verify; unsigned char msg32[32]; if (skip_section(&iter)) continue; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s_s, s); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&r_s, r); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg_s, msg); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sk_s, key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msg32, &msg_s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s_s, s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&r_s, r); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg_s, msg); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sk_s, key); + 
rustsecp256k1zkp_v0_10_1_scalar_get_b32(msg32, &msg_s); /* Verify by hand */ /* Run through every k value that gives us this r and check that *one* works. * Note there could be none, there could be multiple, ECDSA is weird. */ should_verify = 0; for (k = 0; k < EXHAUSTIVE_TEST_ORDER; k++) { - rustsecp256k1zkp_v0_10_0_scalar check_x_s; + rustsecp256k1zkp_v0_10_1_scalar check_x_s; r_from_k(&check_x_s, group, k, NULL); if (r_s == check_x_s) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s_times_k_s, k); - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s); - rustsecp256k1zkp_v0_10_0_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s); - should_verify |= rustsecp256k1zkp_v0_10_0_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s_times_k_s, k); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s); + rustsecp256k1zkp_v0_10_1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s); + should_verify |= rustsecp256k1zkp_v0_10_1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s); } } /* nb we have a "high s" rule */ - should_verify &= !rustsecp256k1zkp_v0_10_0_scalar_is_high(&s_s); + should_verify &= !rustsecp256k1zkp_v0_10_1_scalar_is_high(&s_s); /* We would like to try recovering the pubkey and checking that it matches, * but pubkey recovery is impossible in the exhaustive tests (the reason @@ -128,19 +128,19 @@ static void test_exhaustive_recovery_verify(const rustsecp256k1zkp_v0_10_0_conte * overlap between the sets, so there are no valid signatures). 
*/ /* Verify by converting to a standard signature and calling verify */ - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_save(&rsig, &r_s, &s_s, recid); - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_save(&rsig, &r_s, &s_s, recid); + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(ctx, &sig, &rsig); memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge)); - rustsecp256k1zkp_v0_10_0_pubkey_save(&pk, &nonconst_ge); + rustsecp256k1zkp_v0_10_1_pubkey_save(&pk, &nonconst_ge); CHECK(should_verify == - rustsecp256k1zkp_v0_10_0_ecdsa_verify(ctx, &sig, msg32, &pk)); + rustsecp256k1zkp_v0_10_1_ecdsa_verify(ctx, &sig, msg32, &pk)); } } } } } -static void test_exhaustive_recovery(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_recovery(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { test_exhaustive_recovery_sign(ctx, group); test_exhaustive_recovery_verify(ctx, group); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_impl.h index c497879f..c9abe5a7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/recovery/tests_impl.h @@ -25,15 +25,15 @@ static int recovery_test_nonce_function(unsigned char *nonce32, const unsigned c } /* On the next run, return a valid nonce, but flip a coin as to whether or not to fail signing. 
*/ memset(nonce32, 1, 32); - return rustsecp256k1zkp_v0_10_0_testrand_bits(1); + return rustsecp256k1zkp_v0_10_1_testrand_bits(1); } static void test_ecdsa_recovery_api(void) { /* Setup contexts that just count errors */ - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey recpubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature normal_sig; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature recsig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey recpubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature normal_sig; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature recsig; unsigned char privkey[32] = { 1 }; unsigned char message[32] = { 2 }; int recid = 0; @@ -45,109 +45,109 @@ static void test_ecdsa_recovery_api(void) { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff }; /* Construct and verify corresponding public key. */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, privkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, privkey) == 1); /* Check bad contexts and NULLs for signing */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, NULL, message, privkey, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, NULL, privkey, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, NULL, NULL, NULL)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(STATIC_CTX, &recsig, message, privkey, NULL, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, NULL, 
message, privkey, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, NULL, privkey, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, NULL, NULL, NULL)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(STATIC_CTX, &recsig, message, privkey, NULL, NULL)); /* This will fail or succeed randomly, and in either case will not ARG_CHECK failure */ - rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, recovery_test_nonce_function, NULL); + rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, recovery_test_nonce_function, NULL); /* These will all fail, but not in ARG_CHECK way */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, zero_privkey, NULL, NULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, over_privkey, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, zero_privkey, NULL, NULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, over_privkey, NULL, NULL) == 0); /* This one will succeed. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); /* Check signing with a goofy nonce function */ /* Check bad contexts and NULLs for recovery */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &recpubkey, &recsig, message) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, NULL, &recsig, message)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &recpubkey, NULL, message)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &recpubkey, &recsig, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &recpubkey, &recsig, message) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, NULL, &recsig, message)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &recpubkey, NULL, message)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &recpubkey, &recsig, NULL)); /* Check NULLs for conversion */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &normal_sig, message, privkey, NULL, NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, NULL, &recsig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, &normal_sig, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, &normal_sig, &recsig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &normal_sig, message, privkey, NULL, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, NULL, &recsig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, &normal_sig, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, &normal_sig, &recsig) == 1); /* Check NULLs for de/serialization */ - 
CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, NULL, &recid, &recsig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, sig, NULL, &recsig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &recsig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &recsig, message, privkey, NULL, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, NULL, &recid, &recsig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, sig, NULL, &recsig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &recsig) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, NULL, sig, recid)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, NULL, recid)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, -1)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, 5)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, NULL, sig, recid)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, NULL, recid)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, -1)); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, 5)); /* overflow in signature will not result in calling illegal_callback */ memcpy(sig, over_privkey, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, recid) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &recsig, sig, recid) == 0); } static void test_ecdsa_recovery_end_to_end(void) { unsigned char extra[32] = {0x00}; unsigned char privkey[32]; unsigned char message[32]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature[5]; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature rsignature[5]; + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature[5]; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature rsignature[5]; unsigned char sig[74]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey recpubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey recpubkey; int recid = 0; /* Generate a random key and message. */ { - rustsecp256k1zkp_v0_10_0_scalar msg, key; + rustsecp256k1zkp_v0_10_1_scalar msg, key; random_scalar_order_test(&msg); random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(privkey, &key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(message, &msg); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(privkey, &key); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(message, &msg); } /* Construct and verify corresponding public key. */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, privkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, privkey) == 1); /* Serialize/parse compact and verify/recover. 
*/ extra[0] = 0; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &rsignature[0], message, privkey, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[0], message, privkey, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &rsignature[4], message, privkey, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &rsignature[1], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &rsignature[0], message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[0], message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &rsignature[4], message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &rsignature[1], message, privkey, NULL, extra) == 1); extra[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &rsignature[2], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &rsignature[2], message, privkey, NULL, extra) == 1); extra[31] = 0; extra[0] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign_recoverable(CTX, &rsignature[3], message, privkey, NULL, extra) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &rsignature[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[4], &signature[0], 64) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign_recoverable(CTX, &rsignature[3], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &rsignature[4]) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[4], &signature[0], 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 1); memset(&rsignature[4], 0, sizeof(rsignature[4])); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 1); /* Parse compact (with recovery id) and recover. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &recpubkey, &rsignature[4], message) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &recpubkey, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &recpubkey, &rsignature[4], message) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &recpubkey, sizeof(pubkey)) == 0); /* Serialize/destroy/parse signature and verify again. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &rsignature[4]) == 1); - sig[rustsecp256k1zkp_v0_10_0_testrand_bits(6)] += 1 + rustsecp256k1zkp_v0_10_0_testrand_int(255); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_serialize_compact(CTX, sig, &recid, &rsignature[4]) == 1); + sig[rustsecp256k1zkp_v0_10_1_testrand_bits(6)] += 1 + rustsecp256k1zkp_v0_10_1_testrand_int(255); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsignature[4], sig, recid) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_convert(CTX, &signature[4], &rsignature[4]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[4], message, &pubkey) == 0); /* Recover again */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &recpubkey, &rsignature[4], message) == 0 || - rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &recpubkey, sizeof(pubkey)) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &recpubkey, &rsignature[4], message) == 0 || + rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &recpubkey, sizeof(pubkey)) != 0); } /* Tests several edge cases. */ @@ -170,7 +170,7 @@ static void test_ecdsa_recovery_edge_cases(void) { 0x7D, 0xD7, 0x3E, 0x38, 0x7E, 0xE4, 0xFC, 0x86, 0x6E, 0x1B, 0xE8, 0xEC, 0xC7, 0xDD, 0x95, 0x57 }; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; /* signature (r,s) = (4,4), which can be recovered with all 4 recids. 
*/ const unsigned char sigb64[64] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, @@ -182,19 +182,19 @@ static void test_ecdsa_recovery_edge_cases(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, }; - rustsecp256k1zkp_v0_10_0_pubkey pubkeyb; - rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature rsig; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkeyb; + rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature rsig; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; int recid; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 0)); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 2)); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 3)); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 0)); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 2)); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sig64, 3)); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkey, &rsig, msg32)); for (recid = 
0; recid < 4; recid++) { int i; @@ -239,40 +239,40 @@ static void test_ecdsa_recovery_edge_cases(void) { 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x45, 0x02, 0x01, 0x04 }; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigb64, recid) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigb64, recid) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 1); for (recid2 = 0; recid2 < 4; recid2++) { - rustsecp256k1zkp_v0_10_0_pubkey pubkey2b; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigb64, recid2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkey2b, &rsig, msg32) == 1); + rustsecp256k1zkp_v0_10_1_pubkey pubkey2b; + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigb64, recid2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkey2b, &rsig, msg32) == 1); /* Verifying with (order + r,4) should always fail. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderlong, sizeof(sigbderlong)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderlong, sizeof(sigbderlong)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); } /* DER parsing tests. */ /* Zero length r/s. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigcder_zr, sizeof(sigcder_zr)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigcder_zs, sizeof(sigcder_zs)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigcder_zr, sizeof(sigcder_zr)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigcder_zs, sizeof(sigcder_zs)) == 0); /* Leading zeros. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt1, sizeof(sigbderalt1)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt2, sizeof(sigbderalt2)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt3, sizeof(sigbderalt3)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt4, sizeof(sigbderalt4)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt1, sizeof(sigbderalt1)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt2, sizeof(sigbderalt2)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt3, sizeof(sigbderalt3)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt4, sizeof(sigbderalt4)) == 0); sigbderalt3[4] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt3, sizeof(sigbderalt3)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt3, sizeof(sigbderalt3)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); sigbderalt4[7] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbderalt4, sizeof(sigbderalt4)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbderalt4, sizeof(sigbderalt4)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); /* Damage signature. */ sigbder[7]++; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); sigbder[7]--; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbder, 6) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder) - 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbder, 6) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder) - 1) == 0); for(i = 0; i < 8; i++) { int c; unsigned char orig = sigbder[i]; @@ -282,7 +282,7 @@ static void test_ecdsa_recovery_edge_cases(void) { continue; } sigbder[i] = c; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 0 || rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigbder, sizeof(sigbder)) == 0 || rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyb) == 0); } sigbder[i] = orig; } @@ -302,25 +302,25 @@ static void test_ecdsa_recovery_edge_cases(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, }; - rustsecp256k1zkp_v0_10_0_pubkey pubkeyc; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkeyc, &rsig, msg32) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 1); + rustsecp256k1zkp_v0_10_1_pubkey pubkeyc; + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkeyc, &rsig, msg32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 1); sigcder[4] = 0; sigc64[31] = 0; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 0); sigcder[4] = 1; sigcder[7] = 0; sigc64[31] = 1; sigc64[63] = 0; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recoverable_signature_parse_compact(CTX, &rsig, sigc64, 0) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_recover(CTX, &pubkeyb, &rsig, msg32) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, sigcder, sizeof(sigcder)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg32, &pubkeyc) == 0); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/Makefile.am.include new file mode 100644 index 00000000..c7aaa378 --- /dev/null +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/Makefile.am.include @@ -0,0 +1,3 @@ +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_schnorr_adaptor.h +noinst_HEADERS += src/modules/schnorr_adaptor/main_impl.h +noinst_HEADERS += src/modules/schnorr_adaptor/tests_impl.h \ No newline at end of file diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/main_impl.h new file mode 100644 index 00000000..275fa41d --- /dev/null +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/main_impl.h @@ -0,0 +1,370 @@ +/********************************************************************** + * Copyright (c) 2023-2024 Zhe Pang and Sivaram Dhakshinamoorthy * + * Distributed under the MIT software license, see the accompanying * + * file COPYING or http://www.opensource.org/licenses/mit-license.php.* + **********************************************************************/ + +#ifndef SECP256K1_MODULE_SCHNORR_ADAPTOR_MAIN_H +#define SECP256K1_MODULE_SCHNORR_ADAPTOR_MAIN_H + +#include "../../../include/secp256k1.h" +#include "../../../include/secp256k1_schnorr_adaptor.h" + +#include "../../hash.h" +#include "../../scalar.h" + +/* Initializes SHA256 with fixed midstate. This midstate was computed by applying + * SHA256 to SHA256("SchnorrAdaptor/nonce")||SHA256("SchnorrAdaptor/nonce"). 
*/ +static void rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); + sha->s[0] = 0xe268ac2aul; + sha->s[1] = 0x3a221b84ul; + sha->s[2] = 0x69612afdul; + sha->s[3] = 0x92ce3040ul; + sha->s[4] = 0xc83ca35ful; + sha->s[5] = 0xec2ee152ul; + sha->s[6] = 0xba136ab7ul; + sha->s[7] = 0x3bf6ec7ful; + + sha->bytes = 64; +} + +/* Initializes SHA256 with fixed midstate. This midstate was computed by applying + * SHA256 to SHA256("SchnorrAdaptor/aux")||SHA256("SchnorrAdaptor/aux"). */ +static void rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged_aux(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); + sha->s[0] = 0x50685e98ul; + sha->s[1] = 0x6313905eul; + sha->s[2] = 0x6db24fa0ul; + sha->s[3] = 0xc8b15c48ul; + sha->s[4] = 0x6b318921ul; + sha->s[5] = 0x441d8ff3ul; + sha->s[6] = 0xa7033a66ul; + sha->s[7] = 0xc3545cddul; + + sha->bytes = 64; +} + +/* algo argument for `nonce_function_schnorr_adaptor` to derive the nonce using a tagged hash function. 
*/ +static const unsigned char schnorr_adaptor_algo[20] = "SchnorrAdaptor/nonce"; + +/* Modified BIP-340 nonce function */ +static int nonce_function_schnorr_adaptor(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *adaptor33, const unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void *data) { + rustsecp256k1zkp_v0_10_1_sha256 sha; + unsigned char masked_key[32]; + int i; + + if (algo == NULL) { + return 0; + } + + if (data != NULL) { + rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged_aux(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, data, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, masked_key); + for (i = 0; i < 32; i++) { + masked_key[i] ^= key32[i]; + } + } else { + /* Precomputed TaggedHash("SchnorrAdaptor/aux", 0x0000...00); */ + static const unsigned char ZERO_MASK[32] = { + 65, 206, 231, 5, 44, 99, 30, 162, + 119, 101, 143, 108, 176, 134, 217, 23, + 54, 150, 157, 221, 198, 161, 164, 85, + 235, 82, 28, 56, 164, 220, 113, 53 + }; + for (i = 0; i < 32; i++) { + masked_key[i] = key32[i] ^ ZERO_MASK[i]; + } + } + + /* Tag the hash with algo which is important to avoid nonce reuse across + * algorithms. An optimized tagging implementation is used if the default + * tag is provided. 
*/ + if (algolen == sizeof(schnorr_adaptor_algo) + && rustsecp256k1zkp_v0_10_1_memcmp_var(algo, schnorr_adaptor_algo, algolen) == 0) { + rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged(&sha); + } else { + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, algo, algolen); + } + + /* Hash masked-key||adaptor33||pk||msg using the tagged hash */ + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, masked_key, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, adaptor33, 33); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, xonly_pk32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg32, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, nonce32); + return 1; +} + +const rustsecp256k1zkp_v0_10_1_nonce_function_hardened_schnorr_adaptor rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor = nonce_function_schnorr_adaptor; + +static int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign_internal(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *pre_sig65, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_keypair *keypair, const rustsecp256k1zkp_v0_10_1_pubkey *adaptor, rustsecp256k1zkp_v0_10_1_nonce_function_hardened_schnorr_adaptor noncefp, void *ndata) { + rustsecp256k1zkp_v0_10_1_scalar sk; + rustsecp256k1zkp_v0_10_1_scalar e; + rustsecp256k1zkp_v0_10_1_scalar k; + rustsecp256k1zkp_v0_10_1_gej rj, rpj; + rustsecp256k1zkp_v0_10_1_ge r, rp; + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_ge adaptor_ge; + unsigned char nonce32[32] = { 0 }; + unsigned char pk_buf[32]; + unsigned char seckey[32]; + unsigned char adaptor_buff[33]; + size_t cmprssd_len = 33; /* for serializing `adaptor_ge` and `pre_sig65` */ + int serialize_ret = 0; + int ret = 1; + + VERIFY_CHECK(ctx != NULL); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(pre_sig65 != NULL); + ARG_CHECK(msg32 != NULL); + ARG_CHECK(keypair != NULL); + ARG_CHECK(adaptor != NULL); + + if (noncefp == NULL) { + noncefp 
= rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor; + } + + /* T := adaptor_ge */ + if(!rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &adaptor_ge, adaptor)){ + return 0; + } + + ret &= rustsecp256k1zkp_v0_10_1_keypair_load(ctx, &sk, &pk, keypair); + /* Because we are signing for a x-only pubkey, the secret key is negated + * before signing if the point corresponding to the secret key does not + * have an even Y. */ + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&pk.y)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&sk, &sk); + } + + /* Generate the nonce k */ + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &sk); + rustsecp256k1zkp_v0_10_1_fe_get_b32(pk_buf, &pk.x); + serialize_ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&adaptor_ge, adaptor_buff, &cmprssd_len, 1); + VERIFY_CHECK(serialize_ret); + ret &= !!noncefp(nonce32, msg32, seckey, adaptor_buff, pk_buf, schnorr_adaptor_algo, sizeof(schnorr_adaptor_algo), ndata); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k, nonce32, NULL); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&k, &rustsecp256k1zkp_v0_10_1_scalar_one, !ret); + + /* R = k*G */ + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &rj, &k); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rj); + + /* We declassify the non-secret values R and T to allow using them + * as branch points. */ + rustsecp256k1zkp_v0_10_1_declassify(ctx, &rj, sizeof(rj)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &adaptor_ge, sizeof(adaptor_ge)); + /* R' = R + T */ + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&rpj, &rj, &adaptor_ge, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&rp, &rpj); + + /* We declassify R' (non-secret value) to branch on it */ + rustsecp256k1zkp_v0_10_1_declassify(ctx, &rp, sizeof(rp)); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&rp.y); + + /* Determine if the secret nonce should be negated. 
+ * + * pre_sig65[0:33] contains the compressed 33-byte encoding of the public + * nonce R' = k*G + T, where k is the secret nonce and T is the adaptor point. + * + * Since a BIP340 signature requires an x-only public nonce, in the case where + * R' = k*G + T has odd Y-coordinate, the x-only public nonce corresponding to + * the signature is actually -k*G - T. Therefore, we negate k to ensure that the + * adapted pre-signature will result in a valid BIP340 signature, with an even R'.y + * + * pre_sig65[33:65] = k + e * d if R'.y is even + * = -k + e * d if R'.y is odd + */ + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&rp.y)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&k, &k); + } + serialize_ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&rp, pre_sig65, &cmprssd_len, 1); + /* R' is not the point at infinity with overwhelming probability */ + VERIFY_CHECK(serialize_ret); + (void) serialize_ret; + + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, &pre_sig65[1], msg32, 32, pk_buf); + rustsecp256k1zkp_v0_10_1_scalar_mul(&e, &e, &sk); + rustsecp256k1zkp_v0_10_1_scalar_add(&e, &e, &k); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&pre_sig65[33], &e); + + rustsecp256k1zkp_v0_10_1_memczero(pre_sig65, 65, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); + memset(seckey, 0, sizeof(seckey)); + + return ret; +} + +int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *pre_sig65, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_keypair *keypair, const rustsecp256k1zkp_v0_10_1_pubkey *adaptor, const unsigned char *aux_rand32) { + /* We cast away const from the passed aux_rand32 argument since we know the default nonce function does not modify it. 
*/ + return rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign_internal(ctx, pre_sig65, msg32, keypair, adaptor, rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor, (unsigned char*)aux_rand32); +} + + +int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(const rustsecp256k1zkp_v0_10_1_context *ctx, rustsecp256k1zkp_v0_10_1_pubkey *adaptor, const unsigned char *pre_sig65, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey) { + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar e; + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_gej pkj; + rustsecp256k1zkp_v0_10_1_ge adaptor_ge; + rustsecp256k1zkp_v0_10_1_gej adaptor_gej; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_ge rp; + unsigned char buf[32]; + int overflow; + + VERIFY_CHECK(ctx != NULL); + ARG_CHECK(adaptor != NULL); + ARG_CHECK(pre_sig65 != NULL); + ARG_CHECK(msg32 != NULL); + ARG_CHECK(pubkey != NULL); + + /* R' := pre_sig65[0:33] */ + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&rp, &pre_sig65[0], 33)) { + return 0; + } + /* s := pre_sig65[33:65] */ + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig65[33], &overflow); + if (overflow) { + return 0; + } + + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pk, pubkey)) { + return 0; + } + + /* Compute e */ + rustsecp256k1zkp_v0_10_1_fe_get_b32(buf, &pk.x); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, &pre_sig65[1], msg32, 32, buf); + + /* Compute R = s*G + (-e)*P */ + rustsecp256k1zkp_v0_10_1_scalar_negate(&e, &e); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pkj, &pk); + rustsecp256k1zkp_v0_10_1_ecmult(&rj, &pkj, &e, &s); + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&rj)) { + return 0; + } + + /* Determine if R needs to be negated + * + * `adaptor_presign` negates the secret nonce k when R’.y is odd, during + * the computation of the s value (i.e., presig[33:65]). Therefore, we need + * to negate R = k*G (if R'.y is odd) before subtracting it from R' = R + T. 
+ * + * T = R' - R if R'.y is even + * = R' + R if R'.y is odd + */ + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&rp.y); + if (!rustsecp256k1zkp_v0_10_1_fe_is_odd(&rp.y)) { + rustsecp256k1zkp_v0_10_1_gej_neg(&rj, &rj); + } + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&adaptor_gej, &rj, &rp, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&adaptor_ge, &adaptor_gej); + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(&adaptor_ge)) { + return 0; + } + rustsecp256k1zkp_v0_10_1_pubkey_save(adaptor, &adaptor_ge); + + return 1; +} + +int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sig64, const unsigned char *pre_sig65, const unsigned char *sec_adaptor32) { + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar t; + int overflow; + int ret = 1; + + VERIFY_CHECK(ctx != NULL); + ARG_CHECK(sig64 != NULL); + ARG_CHECK(pre_sig65 != NULL); + ARG_CHECK(sec_adaptor32 != NULL); + + if (pre_sig65[0] != SECP256K1_TAG_PUBKEY_EVEN && pre_sig65[0] != SECP256K1_TAG_PUBKEY_ODD) { + return 0; + } + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig65[33], &overflow); + if (overflow) { + return 0; + } + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&t, sec_adaptor32, &overflow); + ret &= !overflow; + + /* Determine if the secret adaptor should be negated. + * + * pre_sig65[0:33] contains the compressed 33-byte encoding of the public + * nonce R' = (k + t)*G, where r is the secret nonce generated by + * `adaptor_presign` and t is the secret adaptor. + * + * Since a BIP340 signature requires an x-only public nonce, in the case where + * (k + t)*G has odd Y-coordinate, the x-only public nonce corresponding to the + * signature is actually (-k - t)*G. Thus adapting a pre-signature requires + * negating t in this case. 
+ * + * sig64[32:64] = s + t if R'.y is even + * = s - t if R'.y is odd + */ + if (pre_sig65[0] == SECP256K1_TAG_PUBKEY_ODD) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); + } + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &t); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig64[32], &s); + memmove(sig64, &pre_sig65[1], 32); + + rustsecp256k1zkp_v0_10_1_memczero(sig64, 64, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&t); + return ret; +} + +int rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *sec_adaptor32, const unsigned char *pre_sig65, const unsigned char *sig64) { + rustsecp256k1zkp_v0_10_1_scalar t; + rustsecp256k1zkp_v0_10_1_scalar s; + int overflow; + int ret = 1; + + VERIFY_CHECK(ctx != NULL); + ARG_CHECK(sec_adaptor32 != NULL); + ARG_CHECK(pre_sig65 != NULL); + ARG_CHECK(sig64 != NULL); + + if (pre_sig65[0] != SECP256K1_TAG_PUBKEY_EVEN && pre_sig65[0] != SECP256K1_TAG_PUBKEY_ODD) { + return 0; + } + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig65[33], &overflow); + if (overflow) { + return 0; + } + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&t, &sig64[32], &overflow); + ret &= !overflow; + + /*TODO: should we parse presig[0:33] & sig[0:32], to make sure the presig & + * has valid public nonce point? + * + * But we don't care about their validity here right? Then why do we ARG_CHECK + * presig[0] parity byte? + * + * Here, the inputs are invalid but the output is valid :/ */ + + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&t, &t, &s); + /* `adaptor_adapt` negates the secret adaptor t when R’.y is odd, during + * the computation of the BIP340 signature. Therefore, we need negate + * (sig[32:64] - pre_sig65[33:65]) in this case. 
+ * + * t = (sig[32:64] - pre_sig65[33:65]) if R'.y is even + * = -(sig[32:64] - pre_sig65[33:65]) if R'.y is odd + */ + if (pre_sig65[0] == SECP256K1_TAG_PUBKEY_ODD) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); + } + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sec_adaptor32, &t); + + rustsecp256k1zkp_v0_10_1_memczero(sec_adaptor32, 32, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&t); + return ret; +} + +#endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/tests_impl.h new file mode 100644 index 00000000..6e673864 --- /dev/null +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorr_adaptor/tests_impl.h @@ -0,0 +1,1264 @@ +/********************************************************************** + * Copyright (c) 2023-2024 Zhe Pang and Sivaram Dhakshinamoorthy * + * Distributed under the MIT software license, see the accompanying * + * file COPYING or http://www.opensource.org/licenses/mit-license.php.* + **********************************************************************/ + +#ifndef SECP256K1_MODULE_SCHNORR_ADAPTOR_TESTS_H +#define SECP256K1_MODULE_SCHNORR_ADAPTOR_TESTS_H + +#include "../../../include/secp256k1_schnorrsig.h" +#include "../../../include/secp256k1_schnorr_adaptor.h" + +/* Checks that a bit flip in the n_flip-th argument (that has n_bytes many + * bytes) changes the hash function + */ +static void nonce_function_schnorr_adaptor_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes, size_t algolen) { + unsigned char nonces[2][32]; + CHECK(nonce_function_schnorr_adaptor(nonces[0], args[0], args[1], args[2], args[3], args[4], algolen, args[5]) == 1); + rustsecp256k1zkp_v0_10_1_testrand_flip(args[n_flip], n_bytes); + CHECK(nonce_function_schnorr_adaptor(nonces[1], args[0], args[1], args[2], args[3], args[4], algolen, args[5]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonces[0], nonces[1], 32) != 0); +} + +static void 
run_nonce_function_schnorr_adaptor_tests(void) { + unsigned char tag[20] = "SchnorrAdaptor/nonce"; + unsigned char aux_tag[18] = "SchnorrAdaptor/aux"; + unsigned char algo[20] = "SchnorrAdaptor/nonce"; + size_t algolen = sizeof(algo); + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; + unsigned char nonce[32], nonce_z[32]; + unsigned char msg[32]; + unsigned char key[32]; + unsigned char adaptor[33]; + unsigned char pk[32]; + unsigned char aux_rand[32]; + unsigned char *args[6]; + int i; + + /* Check that hash initialized by + * rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged has the expected + * state. */ + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag, sizeof(tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged(&sha_optimized); + test_sha256_eq(&sha, &sha_optimized); + + /* Check that hash initialized by + * rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged_aux has the expected + * state. */ + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, aux_tag, sizeof(aux_tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor_sha256_tagged_aux(&sha_optimized); + test_sha256_eq(&sha, &sha_optimized); + + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(key); + /* The random function below may generate an invalid (serialized) adaptor + * point, but for testing the nonce function, this invalid argument + * is acceptable. */ + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(adaptor, sizeof(adaptor)); + rustsecp256k1zkp_v0_10_1_testrand256(pk); + rustsecp256k1zkp_v0_10_1_testrand256(aux_rand); + + /* Check that a bitflip in an argument results in different nonces. 
*/ + args[0] = msg; + args[1] = key; + args[2] = adaptor; + args[3] = pk; + args[4] = algo; + args[5] = aux_rand; + for (i = 0; i < COUNT; i++) { + nonce_function_schnorr_adaptor_bitflip(args, 0, 32, algolen); + nonce_function_schnorr_adaptor_bitflip(args, 1, 32, algolen); + nonce_function_schnorr_adaptor_bitflip(args, 2, 33, algolen); + nonce_function_schnorr_adaptor_bitflip(args, 3, 32, algolen); + /* Flip algo special case "SchnorrAdaptor/nonce" */ + nonce_function_schnorr_adaptor_bitflip(args, 4, algolen, algolen); + /* Flip algo again */ + nonce_function_schnorr_adaptor_bitflip(args, 4, algolen, algolen); + nonce_function_schnorr_adaptor_bitflip(args, 5, 32, algolen); + } + + /* NULL algo is disallowed */ + CHECK(nonce_function_schnorr_adaptor(nonce, msg, key, adaptor, pk, NULL, 0, NULL) == 0); + CHECK(nonce_function_schnorr_adaptor(nonce, msg, key, adaptor, pk, algo, algolen, NULL) == 1); + /* Other algo is fine */ + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(algo, algolen); + CHECK(nonce_function_schnorr_adaptor(nonce, msg, key, adaptor, pk, algo, algolen, NULL) == 1); + + /* Different algolen gives different nonce */ + for (i = 0; i < COUNT; i++) { + unsigned char nonce2[32]; + uint32_t offset = rustsecp256k1zkp_v0_10_1_testrand_int(algolen - 1); + size_t algolen_tmp = (algolen + offset) % algolen; + + CHECK(nonce_function_schnorr_adaptor(nonce2, msg, key, adaptor, pk, algo, algolen_tmp, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce2, 32) != 0); + } + + /* NULL aux_rand argument is allowed, and identical to passing all zero aux_rand. 
*/ + memset(aux_rand, 0, 32); + CHECK(nonce_function_schnorr_adaptor(nonce_z, msg, key, adaptor, pk, algo, algolen, &aux_rand) == 1); + CHECK(nonce_function_schnorr_adaptor(nonce, msg, key, adaptor, pk, algo, algolen, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce_z, nonce, 32) == 0); +} + +static void test_schnorr_adaptor_api(void) { + unsigned char sk[32]; + unsigned char msg[32]; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_keypair invalid_keypair = {{ 0 }}; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey zero_pk; + unsigned char pre_sig[65]; + unsigned char invalid_pre_sig[65] = { 0 }; + unsigned char sig[64]; + unsigned char sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey adaptor; + rustsecp256k1zkp_v0_10_1_pubkey invalid_adaptor = {{ 0 }}; + unsigned char extracted_sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor; + + /* setup */ + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(sec_adaptor); + + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair) == 1); + memset(&zero_pk, 0, sizeof(zero_pk)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &adaptor, sec_adaptor) == 1); + + /* main test body */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, NULL, msg, &keypair, &adaptor, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, NULL, &keypair, &adaptor, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, NULL, &adaptor, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, NULL, NULL)); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &invalid_keypair, &adaptor, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &invalid_adaptor, NULL)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(STATIC_CTX, pre_sig, msg, &keypair, &adaptor, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, msg, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&extracted_adaptor, &adaptor, sizeof(adaptor)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, NULL, pre_sig, msg, &pk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, NULL, msg, &pk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, NULL, &pk)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, msg, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, msg, &zero_pk)); + + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, sec_adaptor) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, NULL, pre_sig, sec_adaptor)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, NULL, sec_adaptor)); + /* invalid pre_sig[0] byte */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, invalid_pre_sig, sec_adaptor) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, sec_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, pre_sig, sig) == 1); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, NULL, pre_sig, sig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, NULL, sig)); + /* invalid pre_sig[0] byte */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, invalid_pre_sig, sig) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, pre_sig, NULL)); +} + +/* Helper function for schnorr_adaptor_vectors + * Signs the message and checks that it's the same as expected_presig. */ +static void test_schnorr_adaptor_spec_vectors_check_presigning(const unsigned char *sk, const unsigned char *pk_serialized, const unsigned char *aux_rand, const unsigned char *msg32, const unsigned char *adaptor_serialized, const unsigned char *expected_presig) { + unsigned char pre_sig[65]; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk, pk_expected; + rustsecp256k1zkp_v0_10_1_pubkey adaptor, adaptor_extracted; + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &adaptor, adaptor_serialized, 33)); + + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg32, &keypair, &adaptor, aux_rand)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(pre_sig, expected_presig, 65) == 0); + + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_expected, pk_serialized)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pk, &pk_expected, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &adaptor_extracted, pre_sig, msg32, &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &adaptor_extracted, &adaptor) == 0); +} + +/* Helper function for schnorr_adaptor_vectors + * Extracts the adaptor point and checks if it returns the same value as 
expected. */ +static void test_schnorr_adaptor_spec_vectors_check_extract(const unsigned char *pk_serialized, const unsigned char *msg32, const unsigned char *adaptor_serialized, const unsigned char *pre_sig, int extract_success, int extracted_val_correct) { + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_pubkey adaptor, adaptor_extracted; + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk, pk_serialized)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &adaptor, adaptor_serialized, 33)); + CHECK(extract_success == rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &adaptor_extracted, pre_sig, msg32, &pk)); + if (extract_success) { + CHECK(extracted_val_correct == (rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &adaptor_extracted, &adaptor) == 0)); + } + } + +/* Helper function for schnorr_adaptor_vectors + * Adapts a Schnorr pre-signature in a BIP340 signature + * and checks if it is [1] same as expected_sig64, and + * [2] valid BIP340 signature. */ +static void test_schnorr_adaptor_spec_vectors_check_adapt(const unsigned char *pk_serialized, const unsigned char *msg32, const unsigned char *pre_sig, const unsigned char *secadaptor, const unsigned char *expected_sig, int expected) { + unsigned char sig[64]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, secadaptor)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, expected_sig, 64) == 0); + + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk, pk_serialized)); + CHECK(expected == rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg32, 32, &pk)); +} + +/* Helper function for schnorr_adaptor_vectors + * Extracts the secret adaptor from a pre-signature and a BIP340 + * signature and checks if it is the same as expected_secadaptor. 
*/ +static void test_schnorr_adaptor_spec_vectors_check_extract_sec(const unsigned char *pre_sig, const unsigned char *sig, const unsigned char *expected_secadaptor, int expected) { + unsigned char sec_adaptor[32]; + + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, sec_adaptor, pre_sig, sig)); + CHECK(expected == (rustsecp256k1zkp_v0_10_1_memcmp_var(sec_adaptor, expected_secadaptor, 32) == 0)); +} + +/* Test vectors according to Schnorr adaptor signature spec. + * See https://github.com/ZhePang/Python_Specification_for_Schnorr_Adaptor */ +static void test_schnorr_adaptor_spec_vectors(void) { + { + /* Presig: Test vector 0 */ + const unsigned char sk[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03 + }; + const unsigned char pk[32] = { + 0xF9, 0x30, 0x8A, 0x01, 0x92, 0x58, 0xC3, 0x10, + 0x49, 0x34, 0x4F, 0x85, 0xF8, 0x9D, 0x52, 0x29, + 0xB5, 0x31, 0xC8, 0x45, 0x83, 0x6F, 0x99, 0xB0, + 0x86, 0x01, 0xF1, 0x13, 0xBC, 0xE0, 0x36, 0xF9 + }; + unsigned char aux_rand[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }; + const unsigned char msg[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }; + const unsigned char adaptor[33] = { + 0x02, 0xC6, 0x04, 0x7F, 0x94, 0x41, 0xED, 0x7D, + 0x6D, 0x30, 0x45, 0x40, 0x6E, 0x95, 0xC0, 0x7C, + 0xD8, 0x5C, 0x77, 0x8E, 0x4B, 0x8C, 0xEF, 0x3C, + 0xA7, 0xAB, 0xAC, 0x09, 0xB9, 0x5C, 0x70, 0x9E, + 0xE5 + }; + const unsigned char pre_sig[65] = { + 0x03, 0x61, 0x79, 0xDB, 0xF3, 0xE1, 0x32, 0x07, + 0x85, 0x3F, 0x88, 0x0C, 0x7A, 0x7A, 0x85, 0xEC, + 0x67, 
0x8B, 0xAD, 0x64, 0xB8, 0x97, 0xF1, 0x08, + 0xD4, 0x76, 0x43, 0x8A, 0xC4, 0xA9, 0x32, 0xEE, + 0x94, 0x97, 0xCC, 0x73, 0xB8, 0xC3, 0x51, 0xF1, + 0x89, 0xB9, 0xD4, 0xFD, 0xE8, 0x93, 0xE3, 0x82, + 0x0D, 0x4B, 0xFF, 0x7F, 0x49, 0xD4, 0xBE, 0x1F, + 0x8B, 0x02, 0xCB, 0x80, 0x8C, 0xD3, 0x19, 0x23, + 0xA0 + }; + test_schnorr_adaptor_spec_vectors_check_presigning(sk, pk, aux_rand, msg, adaptor, pre_sig); + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 1); + }; + { + /* Presig: Test vector 1 */ + const unsigned char sk[32] = { + 0x0B, 0x43, 0x2B, 0x26, 0x77, 0x93, 0x73, 0x81, + 0xAE, 0xF0, 0x5B, 0xB0, 0x2A, 0x66, 0xEC, 0xD0, + 0x12, 0x77, 0x30, 0x62, 0xCF, 0x3F, 0xA2, 0x54, + 0x9E, 0x44, 0xF5, 0x8E, 0xD2, 0x40, 0x17, 0x10 + }; + const unsigned char pk[32] = { + 0x25, 0xD1, 0xDF, 0xF9, 0x51, 0x05, 0xF5, 0x25, + 0x3C, 0x40, 0x22, 0xF6, 0x28, 0xA9, 0x96, 0xAD, + 0x3A, 0x0D, 0x95, 0xFB, 0xF2, 0x1D, 0x46, 0x8A, + 0x1B, 0x33, 0xF8, 0xC1, 0x60, 0xD8, 0xF5, 0x17 + }; + const unsigned char aux_rand[32] = { + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF + }; + const unsigned char msg[32] = { + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF + }; + const unsigned char adaptor[33] = { + 0x03, 0x97, 0x72, 0x0B, 0x39, 0x10, 0x29, 0xF5, + 0x79, 0xF1, 0xF5, 0x71, 0x73, 0x35, 0x0B, 0x76, + 0xE4, 0xA7, 0xC3, 0xF4, 0x71, 0x53, 0xA5, 0x0E, + 0x46, 0xFA, 0x3A, 0x5F, 0x08, 0xBE, 0x66, 0xB1, + 0x4A + }; + const unsigned char pre_sig[65] = { + 0x02, 0xC9, 0x74, 0xF5, 0x2A, 0xEC, 0xE9, 0x7C, + 0x75, 0xE4, 0x40, 0xA8, 0xD8, 0x67, 0x7F, 0xC5, + 0x10, 0x5D, 0x85, 0x12, 0x28, 0x7B, 0x9C, 0x03, + 0x04, 0xFA, 0x8D, 0x51, 0xF0, 0xBF, 0x48, 0x60, + 0xBA, 0xA5, 0x30, 0x46, 0xD2, 
0x22, 0x1B, 0xB1, + 0x23, 0xBA, 0x04, 0x5F, 0xF5, 0xE5, 0xBD, 0x26, + 0xD8, 0x8D, 0x0B, 0xF0, 0xD6, 0x3B, 0x80, 0xE6, + 0x40, 0x59, 0x99, 0xC1, 0xD2, 0xB6, 0xFF, 0x00, + 0x71 + }; + test_schnorr_adaptor_spec_vectors_check_presigning(sk, pk, aux_rand, msg, adaptor, pre_sig); + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 1); + }; + { + /* Presig: Test vector 2 */ + const unsigned char pk[32] = { + 0xD6, 0x9C, 0x35, 0x09, 0xBB, 0x99, 0xE4, 0x12, + 0xE6, 0x8B, 0x0F, 0xE8, 0x54, 0x4E, 0x72, 0x83, + 0x7D, 0xFA, 0x30, 0x74, 0x6D, 0x8B, 0xE2, 0xAA, + 0x65, 0x97, 0x5F, 0x29, 0xD2, 0x2D, 0xC7, 0xB9 + }; + const unsigned char msg[32] = { + 0x4D, 0xF3, 0xC3, 0xF6, 0x8F, 0xCC, 0x83, 0xB2, + 0x7E, 0x9D, 0x42, 0xC9, 0x04, 0x31, 0xA7, 0x24, + 0x99, 0xF1, 0x78, 0x75, 0xC8, 0x1A, 0x59, 0x9B, + 0x56, 0x6C, 0x98, 0x89, 0xB9, 0x69, 0x67, 0x03 + }; + const unsigned char adaptor[33] = { + 0x02, 0xA6, 0xB5, 0x94, 0xB3, 0x8F, 0xB3, 0xE7, + 0x7C, 0x6E, 0xDF, 0x78, 0x16, 0x1F, 0xAD, 0xE2, + 0x04, 0x1F, 0x4E, 0x09, 0xFD, 0x84, 0x97, 0xDB, + 0x77, 0x6E, 0x54, 0x6C, 0x41, 0x56, 0x7F, 0xEB, + 0x3C + }; + const unsigned char pre_sig[65] = { + 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x3B, 0x78, 0xCE, 0x56, + 0x3F, 0x89, 0xA0, 0xED, 0x94, 0x14, 0xF5, 0xAA, + 0x28, 0xAD, 0x0D, 0x96, 0xD6, 0x79, 0x5F, 0x9C, + 0x63, 0xB6, 0xAF, 0xB1, 0x54, 0x8A, 0xF6, 0x03, + 0xB3, 0xEB, 0x45, 0xC9, 0xF8, 0x20, 0x7D, 0xEE, + 0x10, 0x0F, 0x77, 0x28, 0xF9, 0xFA, 0x53, 0x1D, + 0xA1, 0xF5, 0xFF, 0x9F, 0x75, 0xB7, 0x16, 0x68, + 0x44 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 1); + }; + { + /* Presig: Test vector 3 */ + const unsigned char pk[32] = { + 0xEE, 0xFD, 0xEA, 0x4C, 0xDB, 0x67, 0x77, 0x50, + 0xA4, 0x20, 0xFE, 0xE8, 0x07, 0xEA, 0xCF, 0x21, + 0xEB, 0x98, 0x98, 0xAE, 0x79, 0xB9, 0x76, 0x87, + 0x66, 0xE4, 0xFA, 0xA0, 0x4A, 0x2D, 0x4A, 0x34 + }; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk_parsed; + /* No need to 
check adaptor_extract as parsing the pubkey already fails */ + CHECK(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_parsed, pk)); + }; + { + /* Presig: Test vector 4 */ + const unsigned char pk[32] = { + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFC, 0x2F + }; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk_parsed; + /* No need to check adaptor_extract as parsing the pubkey already fails */ + CHECK(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_parsed, pk)); + }; + { + /* Presig: Test vector 5 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x03, 0xEE, 0xFD, 0xEA, 0x4C, 0xDB, 0x67, 0x77, + 0x50, 0xA4, 0x20, 0xFE, 0xE8, 0x07, 0xEA, 0xCF, + 0x21, 0xEB, 0x98, 0x98, 0xAE, 0x79, 0xB9, 0x76, + 0x87, 0x66, 0xE4, 0xFA, 0xA0, 0x4A, 0x2D, 0x4A, + 0x34, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 0, 0); + }; + { + /* Presig: Test vector 6 */ + const unsigned char pk[32] = { + 0xDF, 
0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x03, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFC, + 0x2F, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 0, 0); + }; + { + /* Presig: Test vector 7 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned 
char pre_sig[65] = { + 0x04, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 0, 0); + }; + { + /* Presig: Test vector 8 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x02, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 0); + }; + { + /* Presig: Test vector 9 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 
0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x03, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFE, 0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, + 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, + 0x41 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 0, 0); + }; + { + /* Presig: Test vector 10 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x02, 0x70, 0x7A, 0x89, 0x22, 0xA7, 0xF9, 0x26, + 0x1A, 0x22, 0xA7, 0x05, 
0x9E, 0x60, 0x35, 0x70, + 0x57, 0x2A, 0x71, 0x2A, 0x47, 0x53, 0xE3, 0x06, + 0xEE, 0xA5, 0xA6, 0x8B, 0x83, 0xC6, 0xC2, 0x48, + 0xA8, 0x8F, 0x77, 0x9F, 0xA0, 0x55, 0x99, 0x8A, + 0x02, 0x93, 0x8B, 0x38, 0x25, 0x5E, 0x37, 0x61, + 0x90, 0x17, 0x06, 0xFD, 0xDB, 0xF4, 0x77, 0x70, + 0x63, 0x87, 0xC0, 0x46, 0x0B, 0x6C, 0xFB, 0x89, + 0x2D + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 0); + }; + { + /* Presig: Test vector 11 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x03, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x86, 0x87, 0xC5, 0x33, 0x55, 0xF1, 0x9D, + 0xF9, 0xEF, 0x9E, 0x62, 0x4B, 0xB4, 0xF3, 0x2B, + 0x00, 0xB0, 0x66, 0xFC, 0x82, 0xEC, 0x78, 0x03, + 0x18, 0xCD, 0xBE, 0x8E, 0x9C, 0x7A, 0xDA, 0xE3, + 0xF1 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 1, 0); + }; + { + /* Presig: Test vector 12 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + 
const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char adaptor[33] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83 + }; + const unsigned char pre_sig[65] = { + 0x02, 0x67, 0x8F, 0x02, 0xC8, 0x63, 0xB8, 0x2C, + 0xB7, 0xB6, 0xF0, 0xE5, 0x21, 0x60, 0x6A, 0xD8, + 0xDC, 0x75, 0xC1, 0xAC, 0x71, 0xD9, 0x74, 0x53, + 0xDB, 0x1A, 0x15, 0x61, 0x0E, 0x15, 0xA1, 0x2C, + 0x83, 0x92, 0x62, 0xEC, 0xE4, 0xF1, 0x76, 0xC3, + 0xDE, 0x64, 0x2D, 0xDD, 0x09, 0xE8, 0xF5, 0xBF, + 0xB1, 0xB0, 0x84, 0xFA, 0x17, 0x78, 0x76, 0x41, + 0xB1, 0xD8, 0x76, 0x1E, 0x3C, 0xE9, 0x7E, 0xE0, + 0xA2 + }; + test_schnorr_adaptor_spec_vectors_check_extract(pk, msg, adaptor, pre_sig, 0, 0); + }; + { + /* Presig: Test vector 13 & 14 */ + const unsigned char adaptor[33] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00 + }; + rustsecp256k1zkp_v0_10_1_pubkey adaptor_parsed; + /* No need to check adaptor_extract as parsing the adaptor point already fails */ + CHECK(!rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &adaptor_parsed, adaptor, sizeof(adaptor))); + }; + { + /* Adapt: Test vector 0 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 
0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char sec_adaptor[32] = { + 0x84, 0x8B, 0xC8, 0x7F, 0x32, 0xC6, 0xF7, 0x1D, + 0x3A, 0x93, 0xA5, 0x94, 0x24, 0x58, 0x45, 0x62, + 0x04, 0x6F, 0x31, 0x69, 0x37, 0x16, 0xFF, 0x73, + 0xA8, 0x97, 0xCC, 0xC1, 0x65, 0x9C, 0x5F, 0x5D + }; + const unsigned char pre_sig[65] = { + 0x02, 0x70, 0x7A, 0x89, 0x22, 0xA7, 0xF9, 0x26, + 0x1A, 0x22, 0xA7, 0x05, 0x9E, 0x60, 0x35, 0x70, + 0x57, 0x2A, 0x71, 0x2A, 0x47, 0x53, 0xE3, 0x06, + 0xEE, 0xA5, 0xA6, 0x8B, 0x83, 0xC6, 0xC2, 0x48, + 0xA8, 0x8F, 0x77, 0x9F, 0xA0, 0x55, 0x99, 0x8A, + 0x02, 0x93, 0x8B, 0x38, 0x25, 0x5E, 0x37, 0x61, + 0x90, 0x17, 0x06, 0xFD, 0xDB, 0xF4, 0x77, 0x70, + 0x63, 0x87, 0xC0, 0x46, 0x0B, 0x6C, 0xFB, 0x89, + 0x2D + }; + const unsigned char sig[64] = { + 0x70, 0x7A, 0x89, 0x22, 0xA7, 0xF9, 0x26, 0x1A, + 0x22, 0xA7, 0x05, 0x9E, 0x60, 0x35, 0x70, 0x57, + 0x2A, 0x71, 0x2A, 0x47, 0x53, 0xE3, 0x06, 0xEE, + 0xA5, 0xA6, 0x8B, 0x83, 0xC6, 0xC2, 0x48, 0xA8, + 0x14, 0x03, 0x68, 0x1F, 0x88, 0x60, 0x81, 0x1F, + 0xCE, 0x1E, 0xDD, 0xB9, 0x82, 0x8F, 0xA6, 0xF3, + 0x60, 0xC7, 0x52, 0x5E, 0x7C, 0x45, 0xCF, 0x9B, + 0x70, 0x85, 0xB4, 0x40, 0x02, 0x61, 0xA7, 0x49 + }; + test_schnorr_adaptor_spec_vectors_check_adapt(pk, msg, pre_sig, sec_adaptor, sig, 0); + }; + { + /* Adapt: Test vector 1 */ + const unsigned char pk[32] = { + 0xA1, 0x8D, 0xBC, 0x8D, 0xBF, 0x16, 0x3C, 0x60, + 0xDF, 0xA2, 0xEC, 0x7C, 0x87, 0xAC, 0x11, 0x3C, + 0xA4, 0x82, 0xFA, 0x2E, 0x1F, 0xDD, 0xD3, 0xEC, + 0x84, 0x21, 0xBF, 0x9B, 0xFA, 0x2E, 0xF1, 0xF2 + }; + const unsigned char msg[32] = { + 0x38, 0x95, 0x75, 0xB9, 0x2B, 0x58, 0x6B, 0xE2, + 0x73, 0x0A, 0x99, 0x82, 0x41, 0xE5, 0xCF, 0x65, + 0x1D, 0x6C, 0x19, 0x1F, 0xA6, 0x4E, 0xEA, 0x3D, + 0x00, 0x25, 0x6A, 0xFF, 0x7D, 0x18, 0x48, 0x4F + }; + const unsigned char sec_adaptor[32] = { + 0xE5, 0xE6, 0x8D, 0x0E, 0x63, 0x7D, 0xA4, 0x82, + 0x27, 0x32, 0xE2, 0x0F, 0x3E, 0xEB, 0xE1, 0x82, + 0x68, 0x92, 0xE8, 0x1C, 0x2C, 0xFE, 0xC6, 0xBE, + 
0xC6, 0x00, 0xCD, 0xA3, 0xF6, 0x6A, 0x53, 0xF1 + }; + const unsigned char pre_sig[65] = { + 0x03, 0x73, 0xDB, 0x6D, 0x58, 0x03, 0xED, 0xD6, + 0x50, 0x2D, 0xE3, 0xD0, 0x95, 0x6B, 0xA3, 0xBD, + 0x3F, 0xAB, 0xA9, 0x88, 0x41, 0xC9, 0xAB, 0x07, + 0x4C, 0x79, 0x5E, 0x3A, 0x90, 0x12, 0xC4, 0x29, + 0x8A, 0x4B, 0x6C, 0x99, 0xEE, 0x31, 0xDB, 0x1C, + 0x15, 0x81, 0x30, 0x28, 0x26, 0x2E, 0xC2, 0x5E, + 0x0E, 0x04, 0xC9, 0xD3, 0x04, 0x15, 0x55, 0x85, + 0xF8, 0x5E, 0xC0, 0x02, 0x42, 0x2D, 0x20, 0x5D, + 0x6E + }; + const unsigned char sig[64] = { + 0x73, 0xDB, 0x6D, 0x58, 0x03, 0xED, 0xD6, 0x50, + 0x2D, 0xE3, 0xD0, 0x95, 0x6B, 0xA3, 0xBD, 0x3F, + 0xAB, 0xA9, 0x88, 0x41, 0xC9, 0xAB, 0x07, 0x4C, + 0x79, 0x5E, 0x3A, 0x90, 0x12, 0xC4, 0x29, 0x8A, + 0x65, 0x86, 0x0C, 0xDF, 0xCE, 0x5D, 0x77, 0x93, + 0x59, 0xFD, 0x46, 0x16, 0xEF, 0xD6, 0x7C, 0x8A, + 0x56, 0xE5, 0xC7, 0xCE, 0x97, 0x9F, 0x5F, 0x75, + 0x58, 0x91, 0x93, 0x2B, 0x06, 0xEC, 0x4A, 0xBE + }; + test_schnorr_adaptor_spec_vectors_check_adapt(pk, msg, pre_sig, sec_adaptor, sig, 1); + }; + { + /* Adapt: Test vector 2 */ + const unsigned char pk[32] = { + 0x15, 0xBF, 0x35, 0x43, 0x75, 0xCF, 0xBE, 0xDB, + 0x43, 0xDA, 0xC7, 0xD6, 0x6B, 0xCA, 0x46, 0xF5, + 0xCF, 0x0A, 0x42, 0xFF, 0xF0, 0x60, 0xEC, 0x0C, + 0xCC, 0x59, 0xA3, 0x96, 0xB2, 0x25, 0x43, 0x85 + }; + const unsigned char msg[32] = { + 0x2F, 0x4E, 0x50, 0x5E, 0x2C, 0x70, 0xE8, 0x1B, + 0x94, 0x43, 0x18, 0x00, 0xF8, 0x10, 0xEC, 0xB0, + 0x4F, 0xD0, 0xAA, 0xEE, 0xB0, 0xC7, 0x03, 0xF8, + 0xDC, 0xE4, 0x4E, 0xED, 0xFA, 0x0A, 0xB8, 0xC2 + }; + const unsigned char sec_adaptor[32] = { + 0x53, 0x92, 0x12, 0xA1, 0xB9, 0xFC, 0x42, 0xF4, + 0x4A, 0xD1, 0xA5, 0x77, 0x20, 0xC7, 0x44, 0x40, + 0x84, 0x03, 0xFF, 0xFF, 0x80, 0x58, 0x48, 0x66, + 0x40, 0x27, 0xFC, 0x9C, 0x74, 0xB3, 0x87, 0x6A + }; + const unsigned char pre_sig[65] = { + 0x02, 0xE4, 0xC4, 0x16, 0x13, 0x45, 0xD8, 0xC9, + 0xD8, 0x4A, 0x50, 0xD5, 0x76, 0x25, 0x76, 0xBE, + 0x7B, 0xCF, 0x1E, 0xA3, 0x13, 0x21, 0xBA, 0x6A, + 0x6F, 0x3C, 0x64, 
0x18, 0x53, 0x6D, 0xB4, 0x3D, + 0xB4, 0x96, 0x72, 0x85, 0x58, 0xD7, 0x79, 0x65, + 0xA6, 0x42, 0x6D, 0x5C, 0x6A, 0x25, 0xB3, 0xF0, + 0xB0, 0x2E, 0xCE, 0xC4, 0x9F, 0xBB, 0xC7, 0xC9, + 0x66, 0xB9, 0xE3, 0x87, 0x6C, 0x33, 0x70, 0x4C, + 0xB5 + }; + const unsigned char sig[64] = { + 0xE4, 0xC4, 0x16, 0x13, 0x45, 0xD8, 0xC9, 0xD8, + 0x4A, 0x50, 0xD5, 0x76, 0x25, 0x76, 0xBE, 0x7B, + 0xCF, 0x1E, 0xA3, 0x13, 0x21, 0xBA, 0x6A, 0x6F, + 0x3C, 0x64, 0x18, 0x53, 0x6D, 0xB4, 0x3D, 0xB4, + 0xEA, 0x04, 0x97, 0xFA, 0x91, 0x75, 0xA8, 0x9A, + 0x8D, 0x3F, 0x01, 0xE1, 0x46, 0x7B, 0x34, 0xF0, + 0xB2, 0xD2, 0xC4, 0x9F, 0x3C, 0x20, 0x11, 0xCC, + 0xFA, 0x0B, 0x84, 0x08, 0xA8, 0x23, 0xD4, 0x1F + }; + test_schnorr_adaptor_spec_vectors_check_adapt(pk, msg, pre_sig, sec_adaptor, sig, 1); + }; + { + /* Adapt: Test vector 3 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char sec_adaptor[32] = { + 0x84, 0x8B, 0xC8, 0x7F, 0x32, 0xC6, 0xF7, 0x1D, + 0x3A, 0x93, 0xA5, 0x94, 0x24, 0x58, 0x45, 0x62, + 0x04, 0x6F, 0x31, 0x69, 0x37, 0x16, 0xFF, 0x73, + 0xA8, 0x97, 0xCC, 0xC1, 0x65, 0x9C, 0x5F, 0x5D + }; + const unsigned char pre_sig[65] = { + 0x03, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x86, 0x87, 0xC5, 0x33, 0x55, 0xF1, 0x9D, + 0xF9, 0xEF, 0x9E, 0x62, 0x4B, 0xB4, 0xF3, 0x2B, + 0x00, 0xB0, 0x66, 0xFC, 0x82, 0xEC, 0x78, 0x03, + 0x18, 0xCD, 0xBE, 0x8E, 0x9C, 0x7A, 0xDA, 0xE3, + 0xF1 + }; + const unsigned char 
sig[64] = { + 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, 0x80, + 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, 0xD4, + 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, 0xA9, + 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, 0x13, + 0x01, 0xFB, 0xFC, 0xB4, 0x23, 0x2A, 0xA6, 0xDC, + 0xB5, 0x0A, 0xBC, 0xB7, 0x90, 0x9A, 0xE5, 0x9E, + 0xAB, 0xF7, 0xCB, 0x19, 0xB5, 0x61, 0x03, 0xA5, + 0x25, 0x26, 0xC1, 0xDB, 0x15, 0x3E, 0x84, 0x94 + }; + test_schnorr_adaptor_spec_vectors_check_adapt(pk, msg, pre_sig, sec_adaptor, sig, 0); + }; + { + /* Adapt: Test vector 4 */ + const unsigned char pk[32] = { + 0xDF, 0xF1, 0xD7, 0x7F, 0x2A, 0x67, 0x1C, 0x5F, + 0x36, 0x18, 0x37, 0x26, 0xDB, 0x23, 0x41, 0xBE, + 0x58, 0xFE, 0xAE, 0x1D, 0xA2, 0xDE, 0xCE, 0xD8, + 0x43, 0x24, 0x0F, 0x7B, 0x50, 0x2B, 0xA6, 0x59 + }; + const unsigned char msg[32] = { + 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, + 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 0x44, + 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, + 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 0x89 + }; + const unsigned char sec_adaptor[32] = { + 0x84, 0x8B, 0xC8, 0x7F, 0x32, 0xC6, 0xF7, 0x1D, + 0x3A, 0x93, 0xA5, 0x94, 0x24, 0x58, 0x45, 0x62, + 0x04, 0x6F, 0x31, 0x69, 0x37, 0x16, 0xFF, 0x73, + 0xA8, 0x97, 0xCC, 0xC1, 0x65, 0x9C, 0x5F, 0x5D + }; + const unsigned char pre_sig[65] = { + 0x02, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + const unsigned char sig[64] = { + 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, 0x80, + 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, 0xD4, + 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, 0xA9, + 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, 0x13, + 0xFE, 0x04, 0x03, 0x4B, 0xDC, 0xD5, 0x59, 0x23, + 
0x4A, 0xF5, 0x43, 0x48, 0x6F, 0x65, 0x1A, 0x60, + 0x0E, 0xB7, 0x11, 0xCC, 0xF9, 0xE7, 0x9C, 0x96, + 0x9A, 0xAB, 0x9C, 0xB1, 0xBA, 0xF7, 0xBC, 0xAD + }; + test_schnorr_adaptor_spec_vectors_check_adapt(pk, msg, pre_sig, sec_adaptor, sig, 0); + }; + { + /* Secadapt: Test vector 0 */ + const unsigned char pre_sig[65] = { + 0x03, 0x73, 0xDB, 0x6D, 0x58, 0x03, 0xED, 0xD6, + 0x50, 0x2D, 0xE3, 0xD0, 0x95, 0x6B, 0xA3, 0xBD, + 0x3F, 0xAB, 0xA9, 0x88, 0x41, 0xC9, 0xAB, 0x07, + 0x4C, 0x79, 0x5E, 0x3A, 0x90, 0x12, 0xC4, 0x29, + 0x8A, 0x4B, 0x6C, 0x99, 0xEE, 0x31, 0xDB, 0x1C, + 0x15, 0x81, 0x30, 0x28, 0x26, 0x2E, 0xC2, 0x5E, + 0x0E, 0x04, 0xC9, 0xD3, 0x04, 0x15, 0x55, 0x85, + 0xF8, 0x5E, 0xC0, 0x02, 0x42, 0x2D, 0x20, 0x5D, + 0x6E + }; + const unsigned char sig[64] = { + 0x73, 0xDB, 0x6D, 0x58, 0x03, 0xED, 0xD6, 0x50, + 0x2D, 0xE3, 0xD0, 0x95, 0x6B, 0xA3, 0xBD, 0x3F, + 0xAB, 0xA9, 0x88, 0x41, 0xC9, 0xAB, 0x07, 0x4C, + 0x79, 0x5E, 0x3A, 0x90, 0x12, 0xC4, 0x29, 0x8A, + 0x65, 0x86, 0x0C, 0xDF, 0xCE, 0x5D, 0x77, 0x93, + 0x59, 0xFD, 0x46, 0x16, 0xEF, 0xD6, 0x7C, 0x8A, + 0x56, 0xE5, 0xC7, 0xCE, 0x97, 0x9F, 0x5F, 0x75, + 0x58, 0x91, 0x93, 0x2B, 0x06, 0xEC, 0x4A, 0xBE + }; + const unsigned char sec_adaptor[32] = { + 0xE5, 0xE6, 0x8D, 0x0E, 0x63, 0x7D, 0xA4, 0x82, + 0x27, 0x32, 0xE2, 0x0F, 0x3E, 0xEB, 0xE1, 0x82, + 0x68, 0x92, 0xE8, 0x1C, 0x2C, 0xFE, 0xC6, 0xBE, + 0xC6, 0x00, 0xCD, 0xA3, 0xF6, 0x6A, 0x53, 0xF1 + }; + test_schnorr_adaptor_spec_vectors_check_extract_sec(pre_sig, sig, sec_adaptor, 1); + }; + { + /* Secadapt: Test vector 1 */ + const unsigned char pre_sig[65] = { + 0x02, 0xE4, 0xC4, 0x16, 0x13, 0x45, 0xD8, 0xC9, + 0xD8, 0x4A, 0x50, 0xD5, 0x76, 0x25, 0x76, 0xBE, + 0x7B, 0xCF, 0x1E, 0xA3, 0x13, 0x21, 0xBA, 0x6A, + 0x6F, 0x3C, 0x64, 0x18, 0x53, 0x6D, 0xB4, 0x3D, + 0xB4, 0x96, 0x72, 0x85, 0x58, 0xD7, 0x79, 0x65, + 0xA6, 0x42, 0x6D, 0x5C, 0x6A, 0x25, 0xB3, 0xF0, + 0xB0, 0x2E, 0xCE, 0xC4, 0x9F, 0xBB, 0xC7, 0xC9, + 0x66, 0xB9, 0xE3, 0x87, 0x6C, 0x33, 0x70, 0x4C, + 0xB5 + }; + const 
unsigned char sig[64] = { + 0xE4, 0xC4, 0x16, 0x13, 0x45, 0xD8, 0xC9, 0xD8, + 0x4A, 0x50, 0xD5, 0x76, 0x25, 0x76, 0xBE, 0x7B, + 0xCF, 0x1E, 0xA3, 0x13, 0x21, 0xBA, 0x6A, 0x6F, + 0x3C, 0x64, 0x18, 0x53, 0x6D, 0xB4, 0x3D, 0xB4, + 0xEA, 0x04, 0x97, 0xFA, 0x91, 0x75, 0xA8, 0x9A, + 0x8D, 0x3F, 0x01, 0xE1, 0x46, 0x7B, 0x34, 0xF0, + 0xB2, 0xD2, 0xC4, 0x9F, 0x3C, 0x20, 0x11, 0xCC, + 0xFA, 0x0B, 0x84, 0x08, 0xA8, 0x23, 0xD4, 0x1F + }; + const unsigned char sec_adaptor[32] = { + 0x53, 0x92, 0x12, 0xA1, 0xB9, 0xFC, 0x42, 0xF4, + 0x4A, 0xD1, 0xA5, 0x77, 0x20, 0xC7, 0x44, 0x40, + 0x84, 0x03, 0xFF, 0xFF, 0x80, 0x58, 0x48, 0x66, + 0x40, 0x27, 0xFC, 0x9C, 0x74, 0xB3, 0x87, 0x6A + }; + test_schnorr_adaptor_spec_vectors_check_extract_sec(pre_sig, sig, sec_adaptor, 1); + }; + { + /* Secadapt: Test vector 2 */ + const unsigned char pre_sig[65] = { + 0x03, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + const unsigned char sig[64] = { + 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, 0x80, + 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, 0xD4, + 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, 0xA9, + 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, 0x13, + 0x01, 0xFB, 0xFC, 0xB4, 0x23, 0x2A, 0xA6, 0xDC, + 0xB5, 0x0A, 0xBC, 0xB7, 0x90, 0x9A, 0xE5, 0x9E, + 0xAB, 0xF7, 0xCB, 0x19, 0xB5, 0x61, 0x03, 0xA5, + 0x25, 0x26, 0xC1, 0xDB, 0x15, 0x3E, 0x84, 0x94 + }; + const unsigned char sec_adaptor[32] = { + 0x84, 0x8B, 0xC8, 0x7F, 0x32, 0xC6, 0xF7, 0x1D, + 0x3A, 0x93, 0xA5, 0x94, 0x24, 0x58, 0x45, 0x62, + 0x04, 0x6F, 0x31, 0x69, 0x37, 0x16, 0xFF, 0x73, + 0xA8, 0x97, 0xCC, 0xC1, 0x65, 0x9C, 0x5F, 0x5D + }; + test_schnorr_adaptor_spec_vectors_check_extract_sec(pre_sig, 
sig, sec_adaptor, 0); + }; + { + /* Secadapt: Test vector 3 */ + const unsigned char pre_sig[65] = { + 0x03, 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, + 0x80, 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, + 0xD4, 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, + 0xA9, 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, + 0x13, 0x79, 0x78, 0x3A, 0xCC, 0xAA, 0x0E, 0x62, + 0x06, 0x10, 0x61, 0x9D, 0xB4, 0x4B, 0x0C, 0xD4, + 0xFE, 0x0A, 0x47, 0xE0, 0x63, 0xC2, 0xD0, 0x9D, + 0x22, 0xF2, 0x13, 0xCF, 0xF0, 0x55, 0x5B, 0x5D, + 0x50 + }; + const unsigned char sig[64] = { + 0xB8, 0x61, 0x3C, 0xCA, 0x78, 0xF4, 0xFA, 0x80, + 0xEA, 0x58, 0xEE, 0xD0, 0xC2, 0x6B, 0x4A, 0xD4, + 0x91, 0xEF, 0xFC, 0x44, 0x50, 0x3C, 0x8D, 0xA9, + 0x0D, 0x15, 0xA9, 0xC1, 0x7E, 0xD2, 0x60, 0x13, + 0xFE, 0x04, 0x03, 0x4B, 0xDC, 0xD5, 0x59, 0x23, + 0x4A, 0xF5, 0x43, 0x48, 0x6F, 0x65, 0x1A, 0x60, + 0x0E, 0xB7, 0x11, 0xCC, 0xF9, 0xE7, 0x9C, 0x96, + 0x9A, 0xAB, 0x9C, 0xB1, 0xBA, 0xF7, 0xBC, 0xAD + }; + const unsigned char sec_adaptor[32] = { + 0x84, 0x8B, 0xC8, 0x7F, 0x32, 0xC6, 0xF7, 0x1D, + 0x3A, 0x93, 0xA5, 0x94, 0x24, 0x58, 0x45, 0x62, + 0x04, 0x6F, 0x31, 0x69, 0x37, 0x16, 0xFF, 0x73, + 0xA8, 0x97, 0xCC, 0xC1, 0x65, 0x9C, 0x5F, 0x5D + }; + test_schnorr_adaptor_spec_vectors_check_extract_sec(pre_sig, sig, sec_adaptor, 0); + }; +} + +static void test_schnorr_adaptor_edge_cases(void) { + unsigned char sk[32]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_keypair keypair; + const unsigned char msg[32] = "this is a schnorr adaptor msg..."; + unsigned char sec_adaptor[32]; + unsigned char extracted_sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey adaptor; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor; + unsigned char aux_rand[32]; + unsigned char pre_sig[65]; + unsigned char sig[64]; + + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(sec_adaptor); + rustsecp256k1zkp_v0_10_1_testrand256(aux_rand); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); 
+ CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &adaptor, sec_adaptor)); + + /* Test schnorr_adaptor_presign */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, aux_rand) == 1); + + /* TODO: test with different nonce functions after `schnorr_adaptor_presign_custom` + * gets implemented */ + + /* Test schnorr_adaptor_extract */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, aux_rand) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, msg, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &extracted_adaptor, &adaptor) == 0); + { + /* invalid R' (= pre_sig[0:33]) */ + unsigned char pre_sig_tmp[65]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor_tmp; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + memset(pre_sig_tmp, 0xFF, 33); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor_tmp, pre_sig_tmp, msg, &pk) == 0); + } + { + /* overflowing s */ + unsigned char pre_sig_tmp[65]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor_tmp; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + memset(&pre_sig_tmp[33], 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor_tmp, pre_sig_tmp, msg, &pk) == 0); + } + { + /* negated s */ + unsigned char pre_sig_tmp[65]; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor_tmp; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig_tmp[33], NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&pre_sig_tmp[33], &s); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor_tmp, pre_sig_tmp, msg, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, 
&extracted_adaptor_tmp, &adaptor) != 0); + } + { + /* any flipped bit in the pre-signature will extract + * an invalid adaptor point */ + unsigned char pre_sig_tmp[65]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor_tmp; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + rand_flip_bit(&pre_sig_tmp[1], sizeof(pre_sig_tmp) - 1); + /* depending on which bit was flipped adaptor_extract can either + * return 0 (parsing pre-signature failed) or 1 (parsing + * pre-signature success) */ + if(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor_tmp, pre_sig_tmp, msg, &pk)) { + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &extracted_adaptor_tmp, &adaptor) != 0); + } + } + { + /* any flipped bit in the message will extract an invalid + * adaptor point */ + unsigned char msg_tmp[32]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor_tmp; + memcpy(msg_tmp, msg, sizeof(msg_tmp)); + rand_flip_bit(msg_tmp, sizeof(msg_tmp)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor_tmp, pre_sig, msg_tmp, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &extracted_adaptor_tmp, &adaptor) != 0); + } + /* Note: presig test vectors 12, 13, 14 will cover the case where + * adaptor_extract returns 0 when [1] R = infinity, or [2] T = infinity. 
+ * So, we don't need to test those scenarios here */ + + /* Test schnorr_adaptor_adapt */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, aux_rand) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, sec_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk) == 1); + { + /* overflowing sec_adaptor */ + unsigned char sig_tmp[64]; + unsigned char sec_adaptor_tmp[32]; + memset(sec_adaptor_tmp, 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig_tmp, pre_sig, sec_adaptor_tmp) == 0); + } + { + /* overflowing s */ + unsigned char sig_tmp[64]; + unsigned char pre_sig_tmp[65]; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + memset(&pre_sig_tmp[33], 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig_tmp, pre_sig_tmp, sec_adaptor) == 0); + } + { + /* negated s */ + unsigned char sig_tmp[64]; + unsigned char pre_sig_tmp[65]; + rustsecp256k1zkp_v0_10_1_scalar s; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &pre_sig_tmp[33], NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&pre_sig_tmp[33], &s); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig_tmp, pre_sig_tmp, sec_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig_tmp, msg, sizeof(msg), &pk) == 0); + } + { + /* any flipped bit in the pre-signature will result in + * an invalid signature */ + unsigned char sig_tmp[64]; + unsigned char pre_sig_tmp[65]; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + rand_flip_bit(&pre_sig_tmp[1], sizeof(pre_sig_tmp) - 1); + /* depending on which bit was flipped adaptor_adapt can either + * return 0 (parsing pre_sig_tmp[33:65] failed) or 1 (parsing + * success but invalid sig will be generated) */ + if (rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig_tmp, 
pre_sig_tmp, sec_adaptor)) { + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig_tmp, msg, sizeof(msg), &pk) == 0); + } + } + { + /* any flipped bit in the sec_adaptor will result in an + * invalid signature */ + unsigned char sig_tmp[64]; + unsigned char sec_adaptor_tmp[32]; + memcpy(sec_adaptor_tmp, sec_adaptor, sizeof(sec_adaptor_tmp)); + rand_flip_bit(sec_adaptor_tmp, sizeof(sec_adaptor_tmp)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig_tmp, pre_sig, sec_adaptor_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig_tmp, msg, sizeof(msg), &pk) == 0); + } + + /* Test schnorr_adaptor_extract_sec */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &keypair, &adaptor, aux_rand) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, sec_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, pre_sig, sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extracted_sec_adaptor, sec_adaptor, sizeof(extracted_sec_adaptor)) == 0); + { + /* overflowing pre_sig[33:65] */ + unsigned char extracted_sec_adaptor_tmp[32]; + unsigned char pre_sig_tmp[65]; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + memset(&pre_sig_tmp[33], 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor_tmp, pre_sig_tmp, sig) == 0); + } + { + /* overflowing sig[32:64] */ + unsigned char extracted_sec_adaptor_tmp[32]; + unsigned char sig_tmp[64]; + memcpy(sig_tmp, sig, sizeof(sig_tmp)); + memset(&sig_tmp[32], 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor_tmp, pre_sig, sig_tmp) == 0); + } + { + /* any flipped bit in pre_sig[33:65] will extract + * an invalid secret adaptor */ + unsigned char extracted_sec_adaptor_tmp[32]; + unsigned char pre_sig_tmp[65]; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + rand_flip_bit(&pre_sig_tmp[33], 
sizeof(pre_sig_tmp) - 33); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor_tmp, pre_sig_tmp, sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extracted_sec_adaptor_tmp, sec_adaptor, sizeof(extracted_sec_adaptor_tmp)) != 0); + } + { + /* any flipped bit in sig[32:64] will extract + * an invalid secret adaptor */ + unsigned char extracted_sec_adaptor_tmp[32]; + unsigned char sig_tmp[64]; + memcpy(sig_tmp, sig, sizeof(sig_tmp)); + rand_flip_bit(&sig_tmp[32], sizeof(sig_tmp) - 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor_tmp, pre_sig, sig_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extracted_sec_adaptor_tmp, sec_adaptor, sizeof(extracted_sec_adaptor_tmp)) != 0); + } + { + /* invalid pre_sig[0:33] or sig[0:32] does not + * necessarily result in an invalid output */ + unsigned char extracted_sec_adaptor_tmp[32]; + unsigned char pre_sig_tmp[65]; + unsigned char sig_tmp[64]; + memcpy(pre_sig_tmp, pre_sig, sizeof(pre_sig_tmp)); + memcpy(sig_tmp, sig, sizeof(sig_tmp)); + memset(&pre_sig_tmp[1], 0xFF, 32); + memset(sig_tmp, 0xFF, 32); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor_tmp, pre_sig_tmp, sig_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extracted_sec_adaptor_tmp, sec_adaptor, sizeof(extracted_sec_adaptor_tmp)) == 0); + } +} + +static void test_schnorr_adaptor_correctness(void) { + unsigned char alice_sk[32]; + rustsecp256k1zkp_v0_10_1_keypair alice_keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey alice_pk; + unsigned char sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey adaptor; + unsigned char extracted_sec_adaptor[32]; + rustsecp256k1zkp_v0_10_1_pubkey extracted_adaptor; + unsigned char msg[32]; + unsigned char pre_sig[65]; + unsigned char sig[64]; + + /* Alice setup */ + rustsecp256k1zkp_v0_10_1_testrand256(alice_sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &alice_keypair, alice_sk) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &alice_pk, NULL, &alice_keypair) == 1); + + /* t := sec_adaptor + * There exists an adaptor T = t*G, where t is unknown to Bob */ + rustsecp256k1zkp_v0_10_1_testrand256(sec_adaptor); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &adaptor, sec_adaptor)); + + /* Alice creates a pre-signature for the adaptor point T, + * and sends it to Bob. */ + rustsecp256k1zkp_v0_10_1_testrand256(msg); + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign(CTX, pre_sig, msg, &alice_keypair, &adaptor, NULL) == 1); + + /* Bob extracts the adaptor point from the pre-signature, + * and verifies if it is equal to T */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract(CTX, &extracted_adaptor, pre_sig, msg, &alice_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &extracted_adaptor, &adaptor) == 0); + + /* Bob learns t (the discrete logarithm of T). For example, Bob can + * pay a Lightning invoice that reveals t, assuming Lightning uses + * PTLC (Point Time Locked Contracts). */ + + /* Bob adapts the pre-signature with the discrete logarithm of T to + * create a valid BIP 340 Schnorr signature. */ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt(CTX, sig, pre_sig, sec_adaptor) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &alice_pk) == 1); + + /* Alice learns the BIP340 signature after Bob publishes it on the blockchain. */ + + /* Alice extracts the discrete logarithm of T from the pre-signature and the + * BIP 340 signature. 
*/ + CHECK(rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec(CTX, extracted_sec_adaptor, pre_sig, sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extracted_sec_adaptor, sec_adaptor, sizeof(extracted_sec_adaptor)) == 0); +} + +static void run_schnorr_adaptor_tests(void) { + int i; + run_nonce_function_schnorr_adaptor_tests(); + + test_schnorr_adaptor_api(); + test_schnorr_adaptor_spec_vectors(); + for (i = 0; i < COUNT; i++) { + test_schnorr_adaptor_edge_cases(); + } + test_schnorr_adaptor_correctness(); +} + +#endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/Makefile.am.include index 9180b4ef..35eab0ba 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_schnorrsig.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_schnorrsig.h noinst_HEADERS += src/modules/schnorrsig/main_impl.h noinst_HEADERS += src/modules/schnorrsig/tests_impl.h noinst_HEADERS += src/modules/schnorrsig/tests_exhaustive_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/bench_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/bench_impl.h index f1709863..8531d96a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/bench_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/bench_impl.h @@ -12,10 +12,10 @@ #define MSGLEN 32 typedef struct { - rustsecp256k1zkp_v0_10_0_context *ctx; + rustsecp256k1zkp_v0_10_1_context *ctx; int n; - const rustsecp256k1zkp_v0_10_0_keypair **keypairs; + const rustsecp256k1zkp_v0_10_1_keypair **keypairs; const unsigned char **pk; const unsigned char **sigs; const unsigned char **msgs; @@ -30,7 +30,7 @@ static void bench_schnorrsig_sign(void* arg, int iters) { for (i = 0; i < iters; 
i++) { msg[0] = i; msg[1] = i >> 8; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(data->ctx, sig, msg, MSGLEN, data->keypairs[i], NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(data->ctx, sig, msg, MSGLEN, data->keypairs[i], NULL)); } } @@ -39,9 +39,9 @@ static void bench_schnorrsig_verify(void* arg, int iters) { int i; for (i = 0; i < iters; i++) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(data->ctx, &pk, data->pk[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(data->ctx, data->sigs[i], data->msgs[i], MSGLEN, &pk)); + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(data->ctx, &pk, data->pk[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(data->ctx, data->sigs[i], data->msgs[i], MSGLEN, &pk)); } } @@ -50,8 +50,8 @@ static void run_schnorrsig_bench(int iters, int argc, char** argv) { bench_schnorrsig_data data; int d = argc == 1; - data.ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); - data.keypairs = (const rustsecp256k1zkp_v0_10_0_keypair **)malloc(iters * sizeof(rustsecp256k1zkp_v0_10_0_keypair *)); + data.ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); + data.keypairs = (const rustsecp256k1zkp_v0_10_1_keypair **)malloc(iters * sizeof(rustsecp256k1zkp_v0_10_1_keypair *)); data.pk = (const unsigned char **)malloc(iters * sizeof(unsigned char *)); data.msgs = (const unsigned char **)malloc(iters * sizeof(unsigned char *)); data.sigs = (const unsigned char **)malloc(iters * sizeof(unsigned char *)); @@ -61,9 +61,9 @@ static void run_schnorrsig_bench(int iters, int argc, char** argv) { unsigned char sk[32]; unsigned char *msg = (unsigned char *)malloc(MSGLEN); unsigned char *sig = (unsigned char *)malloc(64); - rustsecp256k1zkp_v0_10_0_keypair *keypair = (rustsecp256k1zkp_v0_10_0_keypair *)malloc(sizeof(*keypair)); + rustsecp256k1zkp_v0_10_1_keypair *keypair = 
(rustsecp256k1zkp_v0_10_1_keypair *)malloc(sizeof(*keypair)); unsigned char *pk_char = (unsigned char *)malloc(32); - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; msg[0] = sk[0] = i; msg[1] = sk[1] = i >> 8; msg[2] = sk[2] = i >> 16; @@ -76,10 +76,10 @@ static void run_schnorrsig_bench(int iters, int argc, char** argv) { data.msgs[i] = msg; data.sigs[i] = sig; - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(data.ctx, keypair, sk)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(data.ctx, sig, msg, MSGLEN, keypair, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(data.ctx, &pk, NULL, keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(data.ctx, pk_char, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(data.ctx, keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(data.ctx, sig, msg, MSGLEN, keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(data.ctx, &pk, NULL, keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(data.ctx, pk_char, &pk) == 1); } if (d || have_flag(argc, argv, "schnorrsig") || have_flag(argc, argv, "sign") || have_flag(argc, argv, "schnorrsig_sign")) run_benchmark("schnorrsig_sign", bench_schnorrsig_sign, NULL, NULL, (void *) &data, 10, iters); @@ -98,7 +98,7 @@ static void run_schnorrsig_bench(int iters, int argc, char** argv) { free((void *)data.msgs); free((void *)data.sigs); - rustsecp256k1zkp_v0_10_0_context_destroy(data.ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(data.ctx); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/main_impl.h index 4585383a..b3188ffd 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/main_impl.h @@ -13,8 +13,8 @@ /* Initializes SHA256 with fixed midstate. 
This midstate was computed by applying * SHA256 to SHA256("BIP0340/nonce")||SHA256("BIP0340/nonce"). */ -static void rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x46615b35ul; sha->s[1] = 0xf4bfbff7ul; sha->s[2] = 0x9f8dc671ul; @@ -29,8 +29,8 @@ static void rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged(rustsec /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("BIP0340/aux")||SHA256("BIP0340/aux"). */ -static void rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged_aux(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged_aux(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x24dd3219ul; sha->s[1] = 0x4eba7e70ul; sha->s[2] = 0xca0fabb9ul; @@ -50,7 +50,7 @@ static const unsigned char bip340_algo[13] = "BIP0340/nonce"; static const unsigned char schnorrsig_extraparams_magic[4] = SECP256K1_SCHNORRSIG_EXTRAPARAMS_MAGIC; static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *msg, size_t msglen, const unsigned char *key32, const unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void *data) { - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; unsigned char masked_key[32]; int i; @@ -59,9 +59,9 @@ static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *ms } if (data != NULL) { - rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged_aux(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, data, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, masked_key); + 
rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged_aux(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, data, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, masked_key); for (i = 0; i < 32; i++) { masked_key[i] ^= key32[i]; } @@ -82,26 +82,26 @@ static int nonce_function_bip340(unsigned char *nonce32, const unsigned char *ms * algorithms. If this nonce function is used in BIP-340 signing as defined * in the spec, an optimized tagging implementation is used. */ if (algolen == sizeof(bip340_algo) - && rustsecp256k1zkp_v0_10_0_memcmp_var(algo, bip340_algo, algolen) == 0) { - rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged(&sha); + && rustsecp256k1zkp_v0_10_1_memcmp_var(algo, bip340_algo, algolen) == 0) { + rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged(&sha); } else { - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, algo, algolen); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, algo, algolen); } /* Hash masked-key||pk||msg using the tagged hash as per the spec */ - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, masked_key, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, xonly_pk32, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, msg, msglen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, nonce32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, masked_key, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, xonly_pk32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg, msglen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, nonce32); return 1; } -const rustsecp256k1zkp_v0_10_0_nonce_function_hardened rustsecp256k1zkp_v0_10_0_nonce_function_bip340 = nonce_function_bip340; +const rustsecp256k1zkp_v0_10_1_nonce_function_hardened rustsecp256k1zkp_v0_10_1_nonce_function_bip340 = nonce_function_bip340; /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("BIP0340/challenge")||SHA256("BIP0340/challenge"). 
*/ -static void rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +static void rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0x9cecba11ul; sha->s[1] = 0x23925381ul; sha->s[2] = 0x11679112ul; @@ -113,117 +113,117 @@ static void rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged(rustsecp256k1zkp_v sha->bytes = 64; } -static void rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(rustsecp256k1zkp_v0_10_0_scalar* e, const unsigned char *r32, const unsigned char *msg, size_t msglen, const unsigned char *pubkey32) +static void rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(rustsecp256k1zkp_v0_10_1_scalar* e, const unsigned char *r32, const unsigned char *msg, size_t msglen, const unsigned char *pubkey32) { unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; /* tagged hash(r.x, pk.x, msg) */ - rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, r32, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, pubkey32, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, msg, msglen); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, buf); + rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, r32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, pubkey32, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg, msglen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, buf); /* Set scalar e to the challenge hash modulo the curve order as per * BIP340. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(e, buf, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(e, buf, NULL); } -static int rustsecp256k1zkp_v0_10_0_schnorrsig_sign_internal(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_0_keypair *keypair, rustsecp256k1zkp_v0_10_0_nonce_function_hardened noncefp, void *ndata) { - rustsecp256k1zkp_v0_10_0_scalar sk; - rustsecp256k1zkp_v0_10_0_scalar e; - rustsecp256k1zkp_v0_10_0_scalar k; - rustsecp256k1zkp_v0_10_0_gej rj; - rustsecp256k1zkp_v0_10_0_ge pk; - rustsecp256k1zkp_v0_10_0_ge r; +static int rustsecp256k1zkp_v0_10_1_schnorrsig_sign_internal(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_1_keypair *keypair, rustsecp256k1zkp_v0_10_1_nonce_function_hardened noncefp, void *ndata) { + rustsecp256k1zkp_v0_10_1_scalar sk; + rustsecp256k1zkp_v0_10_1_scalar e; + rustsecp256k1zkp_v0_10_1_scalar k; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_ge r; unsigned char buf[32] = { 0 }; unsigned char pk_buf[32]; unsigned char seckey[32]; int ret = 1; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(sig64 != NULL); ARG_CHECK(msg != NULL || msglen == 0); ARG_CHECK(keypair != NULL); if (noncefp == NULL) { - noncefp = rustsecp256k1zkp_v0_10_0_nonce_function_bip340; + noncefp = rustsecp256k1zkp_v0_10_1_nonce_function_bip340; } - ret &= rustsecp256k1zkp_v0_10_0_keypair_load(ctx, &sk, &pk, keypair); + ret &= rustsecp256k1zkp_v0_10_1_keypair_load(ctx, &sk, &pk, keypair); /* Because we are signing for a x-only pubkey, the secret key is negated * before signing if the point corresponding to the secret key does not * have an even Y. 
*/ - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&pk.y)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&sk, &sk); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&pk.y)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&sk, &sk); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey, &sk); - rustsecp256k1zkp_v0_10_0_fe_get_b32(pk_buf, &pk.x); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &sk); + rustsecp256k1zkp_v0_10_1_fe_get_b32(pk_buf, &pk.x); ret &= !!noncefp(buf, msg, msglen, seckey, pk_buf, bip340_algo, sizeof(bip340_algo), ndata); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&k, buf, NULL); - ret &= !rustsecp256k1zkp_v0_10_0_scalar_is_zero(&k); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&k, &rustsecp256k1zkp_v0_10_0_scalar_one, !ret); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&k, buf, NULL); + ret &= !rustsecp256k1zkp_v0_10_1_scalar_is_zero(&k); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&k, &rustsecp256k1zkp_v0_10_1_scalar_one, !ret); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &rj, &k); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&r, &rj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &rj, &k); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&r, &rj); /* We declassify r to allow using it as a branch point. This is fine * because r is not a secret. 
*/ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &r, sizeof(r)); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&r.y); - if (rustsecp256k1zkp_v0_10_0_fe_is_odd(&r.y)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&k, &k); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &r, sizeof(r)); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&r.y); + if (rustsecp256k1zkp_v0_10_1_fe_is_odd(&r.y)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&k, &k); } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&r.x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(&sig64[0], &r.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&r.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(&sig64[0], &r.x); - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&e, &sig64[0], msg, msglen, pk_buf); - rustsecp256k1zkp_v0_10_0_scalar_mul(&e, &e, &sk); - rustsecp256k1zkp_v0_10_0_scalar_add(&e, &e, &k); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig64[32], &e); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, &sig64[0], msg, msglen, pk_buf); + rustsecp256k1zkp_v0_10_1_scalar_mul(&e, &e, &sk); + rustsecp256k1zkp_v0_10_1_scalar_add(&e, &e, &k); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig64[32], &e); - rustsecp256k1zkp_v0_10_0_memczero(sig64, 64, !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&k); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sk); + rustsecp256k1zkp_v0_10_1_memczero(sig64, 64, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&k); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sk); memset(seckey, 0, sizeof(seckey)); return ret; } -int rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_keypair *keypair, const unsigned char *aux_rand32) { +int rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *aux_rand32) { /* We cast away const from the passed aux_rand32 argument since we know 
the default nonce function does not modify it. */ - return rustsecp256k1zkp_v0_10_0_schnorrsig_sign_internal(ctx, sig64, msg32, 32, keypair, rustsecp256k1zkp_v0_10_0_nonce_function_bip340, (unsigned char*)aux_rand32); + return rustsecp256k1zkp_v0_10_1_schnorrsig_sign_internal(ctx, sig64, msg32, 32, keypair, rustsecp256k1zkp_v0_10_1_nonce_function_bip340, (unsigned char*)aux_rand32); } -int rustsecp256k1zkp_v0_10_0_schnorrsig_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_keypair *keypair, const unsigned char *aux_rand32) { - return rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(ctx, sig64, msg32, keypair, aux_rand32); +int rustsecp256k1zkp_v0_10_1_schnorrsig_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_keypair *keypair, const unsigned char *aux_rand32) { + return rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(ctx, sig64, msg32, keypair, aux_rand32); } -int rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_0_keypair *keypair, rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams *extraparams) { - rustsecp256k1zkp_v0_10_0_nonce_function_hardened noncefp = NULL; +int rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_1_keypair *keypair, rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams *extraparams) { + rustsecp256k1zkp_v0_10_1_nonce_function_hardened noncefp = NULL; void *ndata = NULL; VERIFY_CHECK(ctx != NULL); if (extraparams != NULL) { - ARG_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(extraparams->magic, + ARG_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(extraparams->magic, schnorrsig_extraparams_magic, sizeof(extraparams->magic)) == 0); noncefp = 
extraparams->noncefp; ndata = extraparams->ndata; } - return rustsecp256k1zkp_v0_10_0_schnorrsig_sign_internal(ctx, sig64, msg, msglen, keypair, noncefp, ndata); + return rustsecp256k1zkp_v0_10_1_schnorrsig_sign_internal(ctx, sig64, msg, msglen, keypair, noncefp, ndata); } -int rustsecp256k1zkp_v0_10_0_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkey) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar e; - rustsecp256k1zkp_v0_10_0_gej rj; - rustsecp256k1zkp_v0_10_0_ge pk; - rustsecp256k1zkp_v0_10_0_gej pkj; - rustsecp256k1zkp_v0_10_0_fe rx; - rustsecp256k1zkp_v0_10_0_ge r; +int rustsecp256k1zkp_v0_10_1_schnorrsig_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char *sig64, const unsigned char *msg, size_t msglen, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkey) { + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar e; + rustsecp256k1zkp_v0_10_1_gej rj; + rustsecp256k1zkp_v0_10_1_ge pk; + rustsecp256k1zkp_v0_10_1_gej pkj; + rustsecp256k1zkp_v0_10_1_fe rx; + rustsecp256k1zkp_v0_10_1_ge r; unsigned char buf[32]; int overflow; @@ -232,36 +232,36 @@ int rustsecp256k1zkp_v0_10_0_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_co ARG_CHECK(msg != NULL || msglen == 0); ARG_CHECK(pubkey != NULL); - if (!rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&rx, &sig64[0])) { + if (!rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&rx, &sig64[0])) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &sig64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &sig64[32], &overflow); if (overflow) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(ctx, &pk, pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pk, pubkey)) { return 0; } /* Compute e. 
*/ - rustsecp256k1zkp_v0_10_0_fe_get_b32(buf, &pk.x); - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&e, &sig64[0], msg, msglen, buf); + rustsecp256k1zkp_v0_10_1_fe_get_b32(buf, &pk.x); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, &sig64[0], msg, msglen, buf); /* Compute rj = s*G + (-e)*pkj */ - rustsecp256k1zkp_v0_10_0_scalar_negate(&e, &e); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pkj, &pk); - rustsecp256k1zkp_v0_10_0_ecmult(&rj, &pkj, &e, &s); + rustsecp256k1zkp_v0_10_1_scalar_negate(&e, &e); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pkj, &pk); + rustsecp256k1zkp_v0_10_1_ecmult(&rj, &pkj, &e, &s); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&r, &rj); - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(&r)) { + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&r, &rj); + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(&r)) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&r.y); - return !rustsecp256k1zkp_v0_10_0_fe_is_odd(&r.y) && - rustsecp256k1zkp_v0_10_0_fe_equal(&rx, &r.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&r.y); + return !rustsecp256k1zkp_v0_10_1_fe_is_odd(&r.y) && + rustsecp256k1zkp_v0_10_1_fe_equal(&rx, &r.x); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_exhaustive_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_exhaustive_impl.h index 18ae9891..f66a822b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_exhaustive_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_exhaustive_impl.h @@ -58,12 +58,12 @@ static const unsigned char invalid_pubkey_bytes[][32] = { #define NUM_INVALID_KEYS (sizeof(invalid_pubkey_bytes) / sizeof(invalid_pubkey_bytes[0])) -static int rustsecp256k1zkp_v0_10_0_hardened_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg, +static int rustsecp256k1zkp_v0_10_1_hardened_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg, size_t msglen, const unsigned char *key32, const 
unsigned char *xonly_pk32, const unsigned char *algo, size_t algolen, void* data) { - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; int *idata = data; (void)msg; (void)msglen; @@ -71,12 +71,12 @@ static int rustsecp256k1zkp_v0_10_0_hardened_nonce_function_smallint(unsigned ch (void)xonly_pk32; (void)algo; (void)algolen; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s, *idata); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(nonce32, &s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s, *idata); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(nonce32, &s); return 1; } -static void test_exhaustive_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_xonly_pubkey* pubkeys, unsigned char (*xonly_pubkey_bytes)[32], const int* parities) { +static void test_exhaustive_schnorrsig_verify(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_xonly_pubkey* pubkeys, unsigned char (*xonly_pubkey_bytes)[32], const int* parities) { int d; uint64_t iter = 0; /* Iterate over the possible public keys to verify against (through their corresponding DL d). */ @@ -102,10 +102,10 @@ static void test_exhaustive_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_con } /* Randomly generate messages until all challenges have been hit. */ while (e_count_done < EXHAUSTIVE_TEST_ORDER) { - rustsecp256k1zkp_v0_10_0_scalar e; + rustsecp256k1zkp_v0_10_1_scalar e; unsigned char msg32[32]; - rustsecp256k1zkp_v0_10_0_testrand256(msg32); - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&e, sig64, msg32, sizeof(msg32), pk32); + rustsecp256k1zkp_v0_10_1_testrand256(msg32); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, sig64, msg32, sizeof(msg32), pk32); /* Only do work if we hit a challenge we haven't tried before. */ if (!e_done[e]) { /* Iterate over the possible valid last 32 bytes in the signature. 
@@ -116,14 +116,14 @@ static void test_exhaustive_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_con int expect_valid, valid; if (s <= EXHAUSTIVE_TEST_ORDER) { memset(sig64 + 32, 0, 32); - rustsecp256k1zkp_v0_10_0_write_be32(sig64 + 60, s); + rustsecp256k1zkp_v0_10_1_write_be32(sig64 + 60, s); expect_valid = actual_k != -1 && s != EXHAUSTIVE_TEST_ORDER && (s == (actual_k + actual_d * e) % EXHAUSTIVE_TEST_ORDER); } else { - rustsecp256k1zkp_v0_10_0_testrand256(sig64 + 32); + rustsecp256k1zkp_v0_10_1_testrand256(sig64 + 32); expect_valid = 0; } - valid = rustsecp256k1zkp_v0_10_0_schnorrsig_verify(ctx, sig64, msg32, sizeof(msg32), &pubkeys[d - 1]); + valid = rustsecp256k1zkp_v0_10_1_schnorrsig_verify(ctx, sig64, msg32, sizeof(msg32), &pubkeys[d - 1]); CHECK(valid == expect_valid); count_valid += valid; } @@ -138,10 +138,10 @@ static void test_exhaustive_schnorrsig_verify(const rustsecp256k1zkp_v0_10_0_con } } -static void test_exhaustive_schnorrsig_sign(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char (*xonly_pubkey_bytes)[32], const rustsecp256k1zkp_v0_10_0_keypair* keypairs, const int* parities) { +static void test_exhaustive_schnorrsig_sign(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char (*xonly_pubkey_bytes)[32], const rustsecp256k1zkp_v0_10_1_keypair* keypairs, const int* parities) { int d, k; uint64_t iter = 0; - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; /* Loop over keys. 
*/ for (d = 1; d < EXHAUSTIVE_TEST_ORDER; ++d) { @@ -155,25 +155,25 @@ static void test_exhaustive_schnorrsig_sign(const rustsecp256k1zkp_v0_10_0_conte unsigned char sig64[64]; int actual_k = k; if (skip_section(&iter)) continue; - extraparams.noncefp = rustsecp256k1zkp_v0_10_0_hardened_nonce_function_smallint; + extraparams.noncefp = rustsecp256k1zkp_v0_10_1_hardened_nonce_function_smallint; extraparams.ndata = &k; if (parities[k - 1]) actual_k = EXHAUSTIVE_TEST_ORDER - k; /* Generate random messages until all challenges have been tried. */ while (e_count_done < EXHAUSTIVE_TEST_ORDER) { - rustsecp256k1zkp_v0_10_0_scalar e; - rustsecp256k1zkp_v0_10_0_testrand256(msg32); - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&e, xonly_pubkey_bytes[k - 1], msg32, sizeof(msg32), xonly_pubkey_bytes[d - 1]); + rustsecp256k1zkp_v0_10_1_scalar e; + rustsecp256k1zkp_v0_10_1_testrand256(msg32); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&e, xonly_pubkey_bytes[k - 1], msg32, sizeof(msg32), xonly_pubkey_bytes[d - 1]); /* Only do work if we hit a challenge we haven't tried before. */ if (!e_done[e]) { - rustsecp256k1zkp_v0_10_0_scalar expected_s = (actual_k + e * actual_d) % EXHAUSTIVE_TEST_ORDER; + rustsecp256k1zkp_v0_10_1_scalar expected_s = (actual_k + e * actual_d) % EXHAUSTIVE_TEST_ORDER; unsigned char expected_s_bytes[32]; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(expected_s_bytes, &expected_s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(expected_s_bytes, &expected_s); /* Invoke the real function to construct a signature. */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(ctx, sig64, msg32, sizeof(msg32), &keypairs[d - 1], &extraparams)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(ctx, sig64, msg32, sizeof(msg32), &keypairs[d - 1], &extraparams)); /* The first 32 bytes must match the xonly pubkey for the specified k. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig64, xonly_pubkey_bytes[k - 1], 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig64, xonly_pubkey_bytes[k - 1], 32) == 0); /* The last 32 bytes must match the expected s value. */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig64 + 32, expected_s_bytes, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig64 + 32, expected_s_bytes, 32) == 0); /* Don't retry other messages that result in the same challenge. */ e_done[e] = 1; ++e_count_done; @@ -183,28 +183,28 @@ static void test_exhaustive_schnorrsig_sign(const rustsecp256k1zkp_v0_10_0_conte } } -static void test_exhaustive_schnorrsig(const rustsecp256k1zkp_v0_10_0_context *ctx) { - rustsecp256k1zkp_v0_10_0_keypair keypair[EXHAUSTIVE_TEST_ORDER - 1]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey xonly_pubkey[EXHAUSTIVE_TEST_ORDER - 1]; +static void test_exhaustive_schnorrsig(const rustsecp256k1zkp_v0_10_1_context *ctx) { + rustsecp256k1zkp_v0_10_1_keypair keypair[EXHAUSTIVE_TEST_ORDER - 1]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey xonly_pubkey[EXHAUSTIVE_TEST_ORDER - 1]; int parity[EXHAUSTIVE_TEST_ORDER - 1]; unsigned char xonly_pubkey_bytes[EXHAUSTIVE_TEST_ORDER - 1][32]; unsigned i; /* Verify that all invalid_pubkey_bytes are actually invalid. */ for (i = 0; i < NUM_INVALID_KEYS; ++i) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; - CHECK(!rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(ctx, &pk, invalid_pubkey_bytes[i])); + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + CHECK(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(ctx, &pk, invalid_pubkey_bytes[i])); } /* Construct keypairs and xonly-pubkeys for the entire group. 
*/ for (i = 1; i < EXHAUSTIVE_TEST_ORDER; ++i) { - rustsecp256k1zkp_v0_10_0_scalar scalar_i; + rustsecp256k1zkp_v0_10_1_scalar scalar_i; unsigned char buf[32]; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalar_i, i); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf, &scalar_i); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(ctx, &keypair[i - 1], buf)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(ctx, &xonly_pubkey[i - 1], &parity[i - 1], &keypair[i - 1])); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, xonly_pubkey_bytes[i - 1], &xonly_pubkey[i - 1])); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalar_i, i); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf, &scalar_i); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(ctx, &keypair[i - 1], buf)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(ctx, &xonly_pubkey[i - 1], &parity[i - 1], &keypair[i - 1])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, xonly_pubkey_bytes[i - 1], &xonly_pubkey[i - 1])); } test_exhaustive_schnorrsig_sign(ctx, xonly_pubkey_bytes, keypair, parity); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_impl.h index 1b87817c..ac40aaf4 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig/tests_impl.h @@ -15,9 +15,9 @@ static void nonce_function_bip340_bitflip(unsigned char **args, size_t n_flip, size_t n_bytes, size_t msglen, size_t algolen) { unsigned char nonces[2][32]; CHECK(nonce_function_bip340(nonces[0], args[0], msglen, args[1], args[2], args[3], algolen, args[4]) == 1); - rustsecp256k1zkp_v0_10_0_testrand_flip(args[n_flip], n_bytes); + rustsecp256k1zkp_v0_10_1_testrand_flip(args[n_flip], n_bytes); CHECK(nonce_function_bip340(nonces[1], args[0], msglen, args[1], args[2], args[3], algolen, args[4]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonces[0], 
nonces[1], 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonces[0], nonces[1], 32) != 0); } static void run_nonce_function_bip340_tests(void) { @@ -25,8 +25,8 @@ static void run_nonce_function_bip340_tests(void) { unsigned char aux_tag[11] = "BIP0340/aux"; unsigned char algo[13] = "BIP0340/nonce"; size_t algolen = sizeof(algo); - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_optimized; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; unsigned char nonce[32], nonce_z[32]; unsigned char msg[32]; size_t msglen = sizeof(msg); @@ -37,23 +37,23 @@ static void run_nonce_function_bip340_tests(void) { int i; /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged has the expected + * rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged has the expected * state. */ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag, sizeof(tag)); - rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag, sizeof(tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); /* Check that hash initialized by - * rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged_aux has the expected + * rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged_aux has the expected * state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, aux_tag, sizeof(aux_tag)); - rustsecp256k1zkp_v0_10_0_nonce_function_bip340_sha256_tagged_aux(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, aux_tag, sizeof(aux_tag)); + rustsecp256k1zkp_v0_10_1_nonce_function_bip340_sha256_tagged_aux(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); - rustsecp256k1zkp_v0_10_0_testrand256(msg); - rustsecp256k1zkp_v0_10_0_testrand256(key); - rustsecp256k1zkp_v0_10_0_testrand256(pk); - rustsecp256k1zkp_v0_10_0_testrand256(aux_rand); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + rustsecp256k1zkp_v0_10_1_testrand256(key); + rustsecp256k1zkp_v0_10_1_testrand256(pk); + rustsecp256k1zkp_v0_10_1_testrand256(aux_rand); /* Check that a bitflip in an argument results in different nonces. */ args[0] = msg; @@ -76,31 +76,31 @@ static void run_nonce_function_bip340_tests(void) { CHECK(nonce_function_bip340(nonce, msg, msglen, key, pk, NULL, 0, NULL) == 0); CHECK(nonce_function_bip340(nonce, msg, msglen, key, pk, algo, algolen, NULL) == 1); /* Other algo is fine */ - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(algo, algolen); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(algo, algolen); CHECK(nonce_function_bip340(nonce, msg, msglen, key, pk, algo, algolen, NULL) == 1); for (i = 0; i < COUNT; i++) { unsigned char nonce2[32]; - uint32_t offset = rustsecp256k1zkp_v0_10_0_testrand_int(msglen - 1); + uint32_t offset = rustsecp256k1zkp_v0_10_1_testrand_int(msglen - 1); size_t msglen_tmp = (msglen + offset) % msglen; size_t algolen_tmp; /* Different msglen gives different nonce */ CHECK(nonce_function_bip340(nonce2, msg, msglen_tmp, key, pk, algo, algolen, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce2, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce2, 32) != 0); /* Different algolen gives different nonce */ - offset = rustsecp256k1zkp_v0_10_0_testrand_int(algolen - 1); + offset = 
rustsecp256k1zkp_v0_10_1_testrand_int(algolen - 1); algolen_tmp = (algolen + offset) % algolen; CHECK(nonce_function_bip340(nonce2, msg, msglen, key, pk, algo, algolen_tmp, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce2, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce2, 32) != 0); } /* NULL aux_rand argument is allowed, and identical to passing all zero aux_rand. */ memset(aux_rand, 0, 32); CHECK(nonce_function_bip340(nonce_z, msg, msglen, key, pk, algo, algolen, &aux_rand) == 1); CHECK(nonce_function_bip340(nonce, msg, msglen, key, pk, algo, algolen, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce_z, nonce, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce_z, nonce, 32) == 0); } static void test_schnorrsig_api(void) { @@ -108,62 +108,62 @@ static void test_schnorrsig_api(void) { unsigned char sk2[32]; unsigned char sk3[32]; unsigned char msg[32]; - rustsecp256k1zkp_v0_10_0_keypair keypairs[3]; - rustsecp256k1zkp_v0_10_0_keypair invalid_keypair = {{ 0 }}; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk[3]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey zero_pk; + rustsecp256k1zkp_v0_10_1_keypair keypairs[3]; + rustsecp256k1zkp_v0_10_1_keypair invalid_keypair = {{ 0 }}; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk[3]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey zero_pk; unsigned char sig[64]; - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams invalid_extraparams = {{ 0 }, NULL, NULL}; - - rustsecp256k1zkp_v0_10_0_testrand256(sk1); - rustsecp256k1zkp_v0_10_0_testrand256(sk2); - rustsecp256k1zkp_v0_10_0_testrand256(sk3); - rustsecp256k1zkp_v0_10_0_testrand256(msg); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypairs[0], sk1) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypairs[1], sk2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypairs[2], sk3) == 1); - 
CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk[0], NULL, &keypairs[0]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk[1], NULL, &keypairs[1]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk[2], NULL, &keypairs[2]) == 1); + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams invalid_extraparams = {{ 0 }, NULL, NULL}; + + rustsecp256k1zkp_v0_10_1_testrand256(sk1); + rustsecp256k1zkp_v0_10_1_testrand256(sk2); + rustsecp256k1zkp_v0_10_1_testrand256(sk3); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypairs[0], sk1) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypairs[1], sk2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypairs[2], sk3) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk[0], NULL, &keypairs[0]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk[1], NULL, &keypairs[1]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk[2], NULL, &keypairs[2]) == 1); memset(&zero_pk, 0, sizeof(zero_pk)); /** main test body **/ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &keypairs[0], NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, NULL, msg, &keypairs[0], NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, NULL, &keypairs[0], NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &invalid_keypair, NULL)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(STATIC_CTX, sig, msg, &keypairs[0], NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], &extraparams) == 1); - CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, NULL, msg, sizeof(msg), &keypairs[0], &extraparams)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, NULL, sizeof(msg), &keypairs[0], &extraparams)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, NULL, 0, &keypairs[0], &extraparams) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), NULL, &extraparams)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &invalid_keypair, &extraparams)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], NULL) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], &invalid_extraparams)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(STATIC_CTX, sig, msg, sizeof(msg), &keypairs[0], &extraparams)); - - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &keypairs[0], NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk[0]) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, NULL, msg, sizeof(msg), &pk[0])); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, NULL, sizeof(msg), &pk[0])); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, NULL, 0, &pk[0]) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &zero_pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &keypairs[0], NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, NULL, msg, &keypairs[0], NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, NULL, &keypairs[0], NULL)); + CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &invalid_keypair, NULL)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(STATIC_CTX, sig, msg, &keypairs[0], NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], &extraparams) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, NULL, msg, sizeof(msg), &keypairs[0], &extraparams)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, NULL, sizeof(msg), &keypairs[0], &extraparams)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, NULL, 0, &keypairs[0], &extraparams) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), NULL, &extraparams)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &invalid_keypair, &extraparams)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], NULL) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypairs[0], &invalid_extraparams)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(STATIC_CTX, sig, msg, sizeof(msg), &keypairs[0], &extraparams)); + + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &keypairs[0], NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk[0]) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, NULL, msg, sizeof(msg), &pk[0])); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, NULL, sizeof(msg), &pk[0])); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, NULL, 0, &pk[0]) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), NULL)); + 
CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &zero_pk)); } -/* Checks that hash initialized by rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged has the +/* Checks that hash initialized by rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged has the * expected state. */ static void test_schnorrsig_sha256_tagged(void) { unsigned char tag[17] = "BIP0340/challenge"; - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_optimized; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, (unsigned char *) tag, sizeof(tag)); - rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, (unsigned char *) tag, sizeof(tag)); + rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); } @@ -171,34 +171,34 @@ static void test_schnorrsig_sha256_tagged(void) { * Signs the message and checks that it's the same as expected_sig. 
*/ static void test_schnorrsig_bip_vectors_check_signing(const unsigned char *sk, const unsigned char *pk_serialized, const unsigned char *aux_rand, const unsigned char *msg, size_t msglen, const unsigned char *expected_sig) { unsigned char sig[64]; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk, pk_expected; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk, pk_expected; - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; extraparams.ndata = (unsigned char*)aux_rand; - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, msglen, &keypair, &extraparams)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, expected_sig, 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, msglen, &keypair, &extraparams)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, expected_sig, 64) == 0); if (msglen == 32) { memset(sig, 0, 64); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &keypair, aux_rand)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, expected_sig, 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &keypair, aux_rand)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, expected_sig, 64) == 0); } - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk_expected, pk_serialized)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pk, &pk_expected, sizeof(pk)) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, msglen, &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_expected, pk_serialized)); + 
CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pk, &pk_expected, sizeof(pk)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, msglen, &pk)); } /* Helper function for schnorrsig_bip_vectors * Checks that both verify and verify_batch (TODO) return the same value as expected. */ static void test_schnorrsig_bip_vectors_check_verify(const unsigned char *pk_serialized, const unsigned char *msg, size_t msglen, const unsigned char *sig, int expected) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk, pk_serialized)); - CHECK(expected == rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, msglen, &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk, pk_serialized)); + CHECK(expected == rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, msglen, &pk)); } /* Test vectors according to BIP-340 ("Schnorr Signatures for secp256k1"). 
See @@ -394,9 +394,9 @@ static void test_schnorrsig_bip_vectors(void) { 0xEB, 0x98, 0x98, 0xAE, 0x79, 0xB9, 0x76, 0x87, 0x66, 0xE4, 0xFA, 0xA0, 0x4A, 0x2D, 0x4A, 0x34 }; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk_parsed; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk_parsed; /* No need to check the signature of the test vector as parsing the pubkey already fails */ - CHECK(!rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk_parsed, pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_parsed, pk)); } { /* Test vector 6 */ @@ -614,9 +614,9 @@ static void test_schnorrsig_bip_vectors(void) { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFC, 0x30 }; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk_parsed; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk_parsed; /* No need to check the signature of the test vector as parsing the pubkey already fails */ - CHECK(!rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pk_parsed, pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pk_parsed, pk)); } { /* Test vector 15 */ @@ -804,48 +804,48 @@ static int nonce_function_overflowing(unsigned char *nonce32, const unsigned cha static void test_schnorrsig_sign(void) { unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; - rustsecp256k1zkp_v0_10_0_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_keypair keypair; const unsigned char msg[32] = "this is a msg for a schnorrsig.."; unsigned char sig[64]; unsigned char sig2[64]; unsigned char zeros64[64] = { 0 }; - rustsecp256k1zkp_v0_10_0_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; + rustsecp256k1zkp_v0_10_1_schnorrsig_extraparams extraparams = SECP256K1_SCHNORRSIG_EXTRAPARAMS_INIT; unsigned char aux_rand[32]; - rustsecp256k1zkp_v0_10_0_testrand256(sk); - rustsecp256k1zkp_v0_10_0_testrand256(aux_rand); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk)); - 
CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &keypair, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(aux_rand); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &keypair, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); /* Check that deprecated alias gives the same result */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign(CTX, sig2, msg, &keypair, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, sig2, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign(CTX, sig2, msg, &keypair, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, sig2, sizeof(sig)) == 0); /* Test different nonce functions */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); memset(sig, 1, sizeof(sig)); extraparams.noncefp = nonce_function_failing; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, zeros64, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, zeros64, sizeof(sig)) == 0); memset(&sig, 1, sizeof(sig)); 
extraparams.noncefp = nonce_function_0; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, zeros64, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, zeros64, sizeof(sig)) == 0); memset(&sig, 1, sizeof(sig)); extraparams.noncefp = nonce_function_overflowing; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &pk)); /* When using the default nonce function, schnorrsig_sign_custom produces * the same result as schnorrsig_sign with aux_rand = extraparams.ndata */ extraparams.noncefp = NULL; extraparams.ndata = aux_rand; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig2, msg, &keypair, extraparams.ndata) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sig, sig2, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig, msg, sizeof(msg), &keypair, &extraparams) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig2, msg, &keypair, extraparams.ndata) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sig, sig2, sizeof(sig)) == 0); } #define N_SIGS 3 @@ -857,84 +857,84 @@ static void test_schnorrsig_sign_verify(void) { unsigned char msg[N_SIGS][32]; unsigned char sig[N_SIGS][64]; size_t i; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pk; - rustsecp256k1zkp_v0_10_0_scalar s; + 
rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pk; + rustsecp256k1zkp_v0_10_1_scalar s; - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pk, NULL, &keypair)); for (i = 0; i < N_SIGS; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(msg[i]); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig[i], msg[i], &keypair, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[i], msg[i], sizeof(msg[i]), &pk)); + rustsecp256k1zkp_v0_10_1_testrand256(msg[i]); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig[i], msg[i], &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[i], msg[i], sizeof(msg[i]), &pk)); } { /* Flip a few bits in the signature and in the message and check that * verify and verify_batch (TODO) fail */ - size_t sig_idx = rustsecp256k1zkp_v0_10_0_testrand_int(N_SIGS); - size_t byte_idx = rustsecp256k1zkp_v0_10_0_testrand_bits(5); - unsigned char xorbyte = rustsecp256k1zkp_v0_10_0_testrand_int(254)+1; + size_t sig_idx = rustsecp256k1zkp_v0_10_1_testrand_int(N_SIGS); + size_t byte_idx = rustsecp256k1zkp_v0_10_1_testrand_bits(5); + unsigned char xorbyte = rustsecp256k1zkp_v0_10_1_testrand_int(254)+1; sig[sig_idx][byte_idx] ^= xorbyte; - CHECK(!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); sig[sig_idx][byte_idx] ^= xorbyte; - byte_idx = rustsecp256k1zkp_v0_10_0_testrand_bits(5); + byte_idx = rustsecp256k1zkp_v0_10_1_testrand_bits(5); sig[sig_idx][32+byte_idx] ^= xorbyte; - 
CHECK(!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); sig[sig_idx][32+byte_idx] ^= xorbyte; - byte_idx = rustsecp256k1zkp_v0_10_0_testrand_bits(5); + byte_idx = rustsecp256k1zkp_v0_10_1_testrand_bits(5); msg[sig_idx][byte_idx] ^= xorbyte; - CHECK(!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); msg[sig_idx][byte_idx] ^= xorbyte; /* Check that above bitflips have been reversed correctly */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[sig_idx], msg[sig_idx], sizeof(msg[sig_idx]), &pk)); } /* Test overflowing s */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig[0], msg[0], &keypair, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig[0], msg[0], &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); memset(&sig[0][32], 0xFF, 32); - CHECK(!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); + CHECK(!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); /* Test negative s */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig[0], msg[0], &keypair, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &sig[0][32], NULL); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig[0][32], &s); - 
CHECK(!rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig[0], msg[0], &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &sig[0][32], NULL); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig[0][32], &s); + CHECK(!rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg[0], sizeof(msg[0]), &pk)); /* The empty message can be signed & verified */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig[0], NULL, 0, &keypair, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], NULL, 0, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig[0], NULL, 0, &keypair, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], NULL, 0, &pk) == 1); { /* Test varying message lengths */ unsigned char msg_large[32 * 8]; - uint32_t msglen = rustsecp256k1zkp_v0_10_0_testrand_int(sizeof(msg_large)); + uint32_t msglen = rustsecp256k1zkp_v0_10_1_testrand_int(sizeof(msg_large)); for (i = 0; i < sizeof(msg_large); i += 32) { - rustsecp256k1zkp_v0_10_0_testrand256(&msg_large[i]); + rustsecp256k1zkp_v0_10_1_testrand256(&msg_large[i]); } - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign_custom(CTX, sig[0], msg_large, msglen, &keypair, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg_large, msglen, &pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign_custom(CTX, sig[0], msg_large, msglen, &keypair, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg_large, msglen, &pk) == 1); /* Verification for a random wrong message length fails */ msglen = (msglen + (sizeof(msg_large) - 1)) % sizeof(msg_large); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig[0], msg_large, msglen, &pk) == 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig[0], msg_large, msglen, &pk) == 0); } } #undef N_SIGS static void test_schnorrsig_taproot(void) { unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_xonly_pubkey internal_pk; + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_xonly_pubkey internal_pk; unsigned char internal_pk_bytes[32]; - rustsecp256k1zkp_v0_10_0_xonly_pubkey output_pk; + rustsecp256k1zkp_v0_10_1_xonly_pubkey output_pk; unsigned char output_pk_bytes[32]; unsigned char tweak[32]; int pk_parity; @@ -942,27 +942,27 @@ static void test_schnorrsig_taproot(void) { unsigned char sig[64]; /* Create output key */ - rustsecp256k1zkp_v0_10_0_testrand256(sk); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &internal_pk, NULL, &keypair) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(sk); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &internal_pk, NULL, &keypair) == 1); /* In actual taproot the tweak would be hash of internal_pk */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, tweak, &internal_pk) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &output_pk, &pk_parity, &keypair) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, output_pk_bytes, &output_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, tweak, &internal_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_tweak_add(CTX, &keypair, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &output_pk, &pk_parity, &keypair) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, output_pk_bytes, &output_pk) == 1); /* Key spend */ - rustsecp256k1zkp_v0_10_0_testrand256(msg); - 
CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign32(CTX, sig, msg, &keypair, NULL) == 1); + rustsecp256k1zkp_v0_10_1_testrand256(msg); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign32(CTX, sig, msg, &keypair, NULL) == 1); /* Verify key spend */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &output_pk, output_pk_bytes) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &output_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &output_pk, output_pk_bytes) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_verify(CTX, sig, msg, sizeof(msg), &output_pk) == 1); /* Script spend */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(CTX, internal_pk_bytes, &internal_pk) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(CTX, internal_pk_bytes, &internal_pk) == 1); /* Verify script spend */ - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &internal_pk, internal_pk_bytes) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_tweak_add_check(CTX, output_pk_bytes, pk_parity, &internal_pk, tweak) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &internal_pk, internal_pk_bytes) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_tweak_add_check(CTX, output_pk_bytes, pk_parity, &internal_pk, tweak) == 1); } static void run_schnorrsig_tests(void) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/Makefile.am.include index b396f709..93743269 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/Makefile.am.include @@ -1,3 +1,3 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_schnorrsig_halfagg.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_schnorrsig_halfagg.h noinst_HEADERS += src/modules/schnorrsig_halfagg/main_impl.h noinst_HEADERS 
+= src/modules/schnorrsig_halfagg/tests_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/main_impl.h index 64b1807d..4874c2a1 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/main_impl.h @@ -8,8 +8,8 @@ /* Initializes SHA256 with fixed midstate. This midstate was computed by applying * SHA256 to SHA256("HalfAgg/randomizer")||SHA256("HalfAgg/randomizer"). */ -void rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregation(rustsecp256k1zkp_v0_10_0_sha256 *sha) { - rustsecp256k1zkp_v0_10_0_sha256_initialize(sha); +void rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged_aggregation(rustsecp256k1zkp_v0_10_1_sha256 *sha) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(sha); sha->s[0] = 0xd11f5532ul; sha->s[1] = 0xfa57f70ful; sha->s[2] = 0x5db0d728ul; @@ -22,11 +22,11 @@ void rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregation(rustsecp256k1 sha->bytes = 64; } -int rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *aggsig, size_t *aggsig_len, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *all_pubkeys, const unsigned char *all_msgs32, const unsigned char *new_sigs64, size_t n_before, size_t n_new) { +int rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *aggsig, size_t *aggsig_len, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *all_pubkeys, const unsigned char *all_msgs32, const unsigned char *new_sigs64, size_t n_before, size_t n_new) { size_t i; size_t n; - rustsecp256k1zkp_v0_10_0_sha256 hash; - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_sha256 hash; + rustsecp256k1zkp_v0_10_1_scalar s; VERIFY_CHECK(ctx != NULL); ARG_CHECK(aggsig != NULL); @@ -45,54 +45,54 @@ int 
rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(const rustsecp256k1zkp_v0_ /* Prepare hash with common prefix. The prefix is the tag and */ /* r_0 || pk_0 || m_0 || .... || r_{n'-1} || pk_{n'-1} || m_{n'-1} */ /* where n' = n_before */ - rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregation(&hash); + rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged_aggregation(&hash); for (i = 0; i < n_before; ++i) { /* serialize pk_i */ unsigned char pk_ser[32]; - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, pk_ser, &all_pubkeys[i])) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, pk_ser, &all_pubkeys[i])) { return 0; } /* write r_i */ - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &aggsig[i*32], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &aggsig[i*32], 32); /* write pk_i */ - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, pk_ser, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, pk_ser, 32); /* write m_i*/ - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &all_msgs32[i*32], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &all_msgs32[i*32], 32); } /* Compute s = s_old + sum_{i = n_before}^{n} z_i*s_i */ /* where s_old = 0 if n_before = 0 */ - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s, 0); - if (n_before > 0) rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &aggsig[n_before*32], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s, 0); + if (n_before > 0) rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &aggsig[n_before*32], NULL); for (i = n_before; i < n; ++i) { unsigned char pk_ser[32]; unsigned char hashoutput[32]; - rustsecp256k1zkp_v0_10_0_sha256 hashcopy; - rustsecp256k1zkp_v0_10_0_scalar si; - rustsecp256k1zkp_v0_10_0_scalar zi; + rustsecp256k1zkp_v0_10_1_sha256 hashcopy; + rustsecp256k1zkp_v0_10_1_scalar si; + rustsecp256k1zkp_v0_10_1_scalar zi; /* Step 0: Serialize pk_i into pk_ser */ - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_serialize(ctx, pk_ser, &all_pubkeys[i])) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_serialize(ctx, 
pk_ser, &all_pubkeys[i])) { return 0; } /* Step 1: z_i = TaggedHash(...) */ /* 1.a) Write into hash r_i, pk_i, m_i, r_i */ - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &new_sigs64[(i-n_before)*64], 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, pk_ser, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &all_msgs32[i*32], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &new_sigs64[(i-n_before)*64], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, pk_ser, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &all_msgs32[i*32], 32); /* 1.b) Copy the hash */ hashcopy = hash; /* 1.c) Finalize the copy to get zi*/ - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hashcopy, hashoutput); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hashcopy, hashoutput); /* Note: No need to check overflow, comes from hash */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&zi, hashoutput, NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&zi, hashoutput, NULL); /* Step 2: s := s + zi*si */ /* except if i == 0, then zi = 1 implicitly */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&si, &new_sigs64[(i-n_before)*64+32], NULL); - if (i != 0) rustsecp256k1zkp_v0_10_0_scalar_mul(&si, &si, &zi); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &si); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&si, &new_sigs64[(i-n_before)*64+32], NULL); + if (i != 0) rustsecp256k1zkp_v0_10_1_scalar_mul(&si, &si, &zi); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &si); } /* copy new rs into aggsig */ @@ -100,27 +100,27 @@ int rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(const rustsecp256k1zkp_v0_ memcpy(&aggsig[i*32], &new_sigs64[(i-n_before)*64], 32); } /* copy new s into aggsig */ - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&aggsig[n*32], &s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&aggsig[n*32], &s); *aggsig_len = 32 * (1 + n); return 1; } -int rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(const rustsecp256k1zkp_v0_10_0_context *ctx, unsigned char *aggsig, size_t *aggsig_len, const 
rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkeys, const unsigned char *msgs32, const unsigned char *sigs64, size_t n) { - return rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(ctx, aggsig, aggsig_len, pubkeys, msgs32, sigs64, 0, n); +int rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(const rustsecp256k1zkp_v0_10_1_context *ctx, unsigned char *aggsig, size_t *aggsig_len, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkeys, const unsigned char *msgs32, const unsigned char *sigs64, size_t n) { + return rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(ctx, aggsig, aggsig_len, pubkeys, msgs32, sigs64, 0, n); } -int rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkeys, const unsigned char *msgs32, size_t n, const unsigned char *aggsig, size_t aggsig_len) { +int rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkeys, const unsigned char *msgs32, size_t n, const unsigned char *aggsig, size_t aggsig_len) { size_t i; - rustsecp256k1zkp_v0_10_0_gej lhs, rhs; - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_sha256 hash; + rustsecp256k1zkp_v0_10_1_gej lhs, rhs; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_sha256 hash; int overflow; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkeys != NULL || n == 0); ARG_CHECK(msgs32 != NULL || n == 0); ARG_CHECK(aggsig != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); /* Check that aggsig_len is correct, i.e., aggsig_len = 32*(n+1) */ if ((aggsig_len / 32) <= 0 || ((aggsig_len / 32)-1) != n || (aggsig_len % 32) != 0) { @@ -133,70 +133,70 @@ int rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(const rustsecp256k1zkp_v0_10_0 /* (1) z_i = TaggedHash(...) 
*/ /* (2) T_i = R_i+e_i*P_i */ /* (3) rhs = rhs + z_i*T_i */ - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&rhs); - rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregation(&hash); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&rhs); + rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged_aggregation(&hash); for (i = 0; i < n; ++i) { - rustsecp256k1zkp_v0_10_0_fe rx; - rustsecp256k1zkp_v0_10_0_ge rp, pp; - rustsecp256k1zkp_v0_10_0_scalar ei; - rustsecp256k1zkp_v0_10_0_gej ppj, ti; + rustsecp256k1zkp_v0_10_1_fe rx; + rustsecp256k1zkp_v0_10_1_ge rp, pp; + rustsecp256k1zkp_v0_10_1_scalar ei; + rustsecp256k1zkp_v0_10_1_gej ppj, ti; unsigned char pk_ser[32]; unsigned char hashoutput[32]; - rustsecp256k1zkp_v0_10_0_sha256 hashcopy; - rustsecp256k1zkp_v0_10_0_scalar zi; + rustsecp256k1zkp_v0_10_1_sha256 hashcopy; + rustsecp256k1zkp_v0_10_1_scalar zi; /* Step 0: Serialize pk_i into pk_ser */ /* We need that in Step 1 and in Step 2 */ - if (!rustsecp256k1zkp_v0_10_0_xonly_pubkey_load(ctx, &pp, &pubkeys[i])) { + if (!rustsecp256k1zkp_v0_10_1_xonly_pubkey_load(ctx, &pp, &pubkeys[i])) { return 0; } - rustsecp256k1zkp_v0_10_0_fe_get_b32(pk_ser, &pp.x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(pk_ser, &pp.x); /* Step 1: z_i = TaggedHash(...) 
*/ /* 1.a) Write into hash r_i, pk_i, m_i, r_i */ - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &aggsig[i*32], 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, pk_ser, 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, &msgs32[i*32], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &aggsig[i*32], 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, pk_ser, 32); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, &msgs32[i*32], 32); /* 1.b) Copy the hash */ hashcopy = hash; /* 1.c) Finalize the copy to get zi*/ - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hashcopy, hashoutput); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&zi, hashoutput, NULL); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hashcopy, hashoutput); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&zi, hashoutput, NULL); /* Step 2: T_i = R_i+e_i*P_i */ /* 2.a) R_i = lift_x(int(r_i)); fail if that fails */ - if (!rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&rx, &aggsig[i*32])) { + if (!rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&rx, &aggsig[i*32])) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&rp, &rx, 0)) { + if (!rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&rp, &rx, 0)) { return 0; } /* 2.b) e_i = int(hash_{BIP0340/challenge}(bytes(r_i) || pk_i || m_i)) mod n */ - rustsecp256k1zkp_v0_10_0_schnorrsig_challenge(&ei, &aggsig[i*32], &msgs32[i*32], 32, pk_ser); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ppj, &pp); + rustsecp256k1zkp_v0_10_1_schnorrsig_challenge(&ei, &aggsig[i*32], &msgs32[i*32], 32, pk_ser); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ppj, &pp); /* 2.c) T_i = R_i + e_i*P_i */ - rustsecp256k1zkp_v0_10_0_ecmult(&ti, &ppj, &ei, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&ti, &ti, &rp, NULL); + rustsecp256k1zkp_v0_10_1_ecmult(&ti, &ppj, &ei, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&ti, &ti, &rp, NULL); /* Step 3: rhs = rhs + zi*T_i */ /* Note that if i == 0, then zi = 1 implicitly */ - if (i != 0) rustsecp256k1zkp_v0_10_0_ecmult(&ti, &ti, &zi, NULL); - 
rustsecp256k1zkp_v0_10_0_gej_add_var(&rhs, &rhs, &ti, NULL); + if (i != 0) rustsecp256k1zkp_v0_10_1_ecmult(&ti, &ti, &zi, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&rhs, &rhs, &ti, NULL); } /* Compute the lhs as lhs = s*G */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &aggsig[n*32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &aggsig[n*32], &overflow); if (overflow) { return 0; } - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &lhs, &s); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &lhs, &s); /* Check that lhs == rhs */ - rustsecp256k1zkp_v0_10_0_gej_neg(&lhs, &lhs); - rustsecp256k1zkp_v0_10_0_gej_add_var(&lhs, &lhs, &rhs, NULL); - return rustsecp256k1zkp_v0_10_0_gej_is_infinity(&lhs); + rustsecp256k1zkp_v0_10_1_gej_neg(&lhs, &lhs); + rustsecp256k1zkp_v0_10_1_gej_add_var(&lhs, &lhs, &rhs, NULL); + return rustsecp256k1zkp_v0_10_1_gej_is_infinity(&lhs); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/tests_impl.h index 47db0697..abc6625a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/schnorrsig_halfagg/tests_impl.h @@ -5,30 +5,30 @@ #define N_MAX 50 -/* We test that the hash initialized by rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregate +/* We test that the hash initialized by rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged_aggregate * has the expected state. 
*/ void test_schnorrsig_sha256_tagged_aggregate(void) { unsigned char tag[18] = "HalfAgg/randomizer"; - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256 sha_optimized; + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha_optimized; - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, (unsigned char *) tag, sizeof(tag)); - rustsecp256k1zkp_v0_10_0_schnorrsig_sha256_tagged_aggregation(&sha_optimized); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, (unsigned char *) tag, sizeof(tag)); + rustsecp256k1zkp_v0_10_1_schnorrsig_sha256_tagged_aggregation(&sha_optimized); test_sha256_eq(&sha, &sha_optimized); } /* Create n many x-only pubkeys and sigs for random messages */ -void test_schnorrsig_aggregate_input_helper(rustsecp256k1zkp_v0_10_0_xonly_pubkey *pubkeys, unsigned char *msgs32, unsigned char *sigs64, size_t n) { +void test_schnorrsig_aggregate_input_helper(rustsecp256k1zkp_v0_10_1_xonly_pubkey *pubkeys, unsigned char *msgs32, unsigned char *sigs64, size_t n) { size_t i; for (i = 0; i < n; ++i) { unsigned char sk[32]; - rustsecp256k1zkp_v0_10_0_keypair keypair; - rustsecp256k1zkp_v0_10_0_testrand256(sk); - rustsecp256k1zkp_v0_10_0_testrand256(&msgs32[i*32]); + rustsecp256k1zkp_v0_10_1_keypair keypair; + rustsecp256k1zkp_v0_10_1_testrand256(sk); + rustsecp256k1zkp_v0_10_1_testrand256(&msgs32[i*32]); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_create(CTX, &keypair, sk)); - CHECK(rustsecp256k1zkp_v0_10_0_keypair_xonly_pub(CTX, &pubkeys[i], NULL, &keypair)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_sign(CTX, &sigs64[i*64], &msgs32[i*32], &keypair, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_create(CTX, &keypair, sk)); + CHECK(rustsecp256k1zkp_v0_10_1_keypair_xonly_pub(CTX, &pubkeys[i], NULL, &keypair)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_sign(CTX, &sigs64[i*64], &msgs32[i*32], &keypair, NULL)); } } @@ -37,36 +37,36 @@ void 
test_schnorrsig_aggregate_input_helper(rustsecp256k1zkp_v0_10_0_xonly_pubke * aggregate the others incrementally to the already aggregated ones. * The aggregate signature should verify after both steps. */ void test_schnorrsig_aggregate(void) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[N_MAX]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[N_MAX]; unsigned char msgs32[N_MAX*32]; unsigned char sigs64[N_MAX*64]; unsigned char aggsig[32*(N_MAX + 1) + 17]; size_t aggsig_len = sizeof(aggsig); - size_t n = rustsecp256k1zkp_v0_10_0_testrand_int(N_MAX + 1); - size_t n_initial = rustsecp256k1zkp_v0_10_0_testrand_int(n + 1); + size_t n = rustsecp256k1zkp_v0_10_1_testrand_int(N_MAX + 1); + size_t n_initial = rustsecp256k1zkp_v0_10_1_testrand_int(n + 1); size_t n_new = n - n_initial; test_schnorrsig_aggregate_input_helper(pubkeys, msgs32, sigs64, n); /* Aggregate the first n_initial of them */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); /* Make sure that the aggregate signature verifies */ CHECK(aggsig_len == 32*(n_initial + 1)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n_initial, aggsig, aggsig_len)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n_initial, aggsig, aggsig_len)); /* Aggregate the remaining n_new many signatures to the already existing ones */ aggsig_len = sizeof(aggsig); - rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new); + rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new); /* Make sure that the aggregate signature verifies */ CHECK(aggsig_len == 32*(n + 1)); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, 
n, aggsig, aggsig_len)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len)); /* Check that a direct aggregation of the n sigs yields an identical aggsig */ { unsigned char aggsig2[sizeof(aggsig)]; size_t aggsig_len2 = sizeof(aggsig2); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig2, &aggsig_len2, pubkeys, msgs32, sigs64, n)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig2, &aggsig_len2, pubkeys, msgs32, sigs64, n)); CHECK(aggsig_len == aggsig_len2); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(aggsig, aggsig2, aggsig_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(aggsig, aggsig2, aggsig_len) == 0); } } @@ -83,7 +83,7 @@ void test_schnorrsig_aggverify_spec_vectors(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; size_t aggsig_len = sizeof(aggsig); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, NULL, NULL, n, aggsig, aggsig_len)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, NULL, NULL, n, aggsig, aggsig_len)); } /* Test vector 1 */ { @@ -94,7 +94,7 @@ void test_schnorrsig_aggverify_spec_vectors(void) { 0xd7, 0x1e, 0x18, 0x34, 0x60, 0x48, 0x19, 0xff, 0x9c, 0x17, 0xf5, 0xe9, 0xd5, 0xdd, 0x07, 0x8f }; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[1]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[1]; const unsigned char msgs32[1*32] = { 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, @@ -114,9 +114,9 @@ void test_schnorrsig_aggverify_spec_vectors(void) { size_t aggsig_len = sizeof(aggsig); size_t i; for (i = 0; i < n; ++i) { - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pubkeys[i], &pubkeys_ser[i*32])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pubkeys[i], &pubkeys_ser[i*32])); } - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, 
aggsig, aggsig_len)); } /* Test vector 2 */ { @@ -132,7 +132,7 @@ void test_schnorrsig_aggverify_spec_vectors(void) { 0x10, 0xe1, 0xc7, 0xa5, 0x93, 0xe4, 0xe0, 0x30, 0xef, 0xb5, 0xb8, 0x72, 0x1c, 0xe5, 0x5b, 0x0b, }; - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[2]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[2]; const unsigned char msgs32[2*32] = { 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, @@ -161,19 +161,19 @@ void test_schnorrsig_aggverify_spec_vectors(void) { size_t aggsig_len = sizeof(aggsig); size_t i; for (i = 0; i < n; ++i) { - CHECK(rustsecp256k1zkp_v0_10_0_xonly_pubkey_parse(CTX, &pubkeys[i], &pubkeys_ser[i*32])); + CHECK(rustsecp256k1zkp_v0_10_1_xonly_pubkey_parse(CTX, &pubkeys[i], &pubkeys_ser[i*32])); } - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len)); } } static void test_schnorrsig_aggregate_api(void) { - size_t n = rustsecp256k1zkp_v0_10_0_testrand_int(N_MAX + 1); - size_t n_initial = rustsecp256k1zkp_v0_10_0_testrand_int(n + 1); + size_t n = rustsecp256k1zkp_v0_10_1_testrand_int(N_MAX + 1); + size_t n_initial = rustsecp256k1zkp_v0_10_1_testrand_int(n + 1); size_t n_new = n - n_initial; /* Test preparation. 
*/ - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[N_MAX]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[N_MAX]; unsigned char msgs32[N_MAX*32]; unsigned char sigs64[N_MAX*64]; unsigned char aggsig[32*(N_MAX + 1)]; @@ -183,73 +183,73 @@ static void test_schnorrsig_aggregate_api(void) { { /* Should not accept NULL for aggsig or aggsig length */ size_t aggsig_len = sizeof(aggsig); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, NULL, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, NULL, pubkeys, msgs32, sigs64, n_initial)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, NULL, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, NULL, pubkeys, msgs32, sigs64, n_initial)); /* Should not accept NULL for keys, messages, or signatures if n_initial is not 0 */ if (n_initial != 0) { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, NULL, msgs32, sigs64, n_initial)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, NULL, sigs64, n_initial)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, NULL, n_initial)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, NULL, msgs32, sigs64, n_initial)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, NULL, sigs64, n_initial)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, NULL, n_initial)); } } /* Test body 2: Check API of function inc_aggregate. 
*/ { size_t aggsig_len = sizeof(aggsig); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n_initial)); aggsig_len = 32*(n+1); /* Should not accept NULL for aggsig or aggsig length */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, NULL, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, NULL, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, NULL, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, NULL, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); /* Should not accept NULL for keys or messages if n is not 0 */ if (n != 0) { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, NULL, msgs32, &sigs64[n_initial*64], n_initial, n_new)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, NULL, &sigs64[n_initial*64], n_initial, n_new)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, NULL, msgs32, &sigs64[n_initial*64], n_initial, n_new)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, NULL, &sigs64[n_initial*64], n_initial, n_new)); } /* Should not accept NULL for new_sigs64 if n_new is not 0 */ if (n_new != 0) { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, NULL, n_initial, n_new)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, 
NULL, n_initial, n_new)); } /* Should not accept overflowing number of sigs. */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], SIZE_MAX, SIZE_MAX)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], SIZE_MAX, SIZE_MAX)); if (n_initial > 0) { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, SIZE_MAX)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, SIZE_MAX)); } /* Should reject if aggsig_len is too small. */ aggsig_len = 32*n; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new) == 0); aggsig_len = 32*(n+1) - 1; - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new) == 0); } /* Test body 3: Check API of function aggverify. 
*/ { size_t aggsig_len = sizeof(aggsig); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_inc_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, &sigs64[n_initial*64], n_initial, n_new)); /* Should not accept NULL for keys or messages if n is not 0 */ if (n != 0) { - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, NULL, msgs32, n, aggsig, aggsig_len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, NULL, n, aggsig, aggsig_len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, NULL, msgs32, n, aggsig, aggsig_len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, NULL, n, aggsig, aggsig_len)); } /* Should never accept NULL the aggsig */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, NULL, aggsig_len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, NULL, aggsig_len)); /* Should reject for invalid aggsig_len. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len + 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len - 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len + 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len - 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len + 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len - 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len + 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len - 32) == 0); } } /* In this test, we make sure that trivial attempts to break * the security of verification do not work. */ static void test_schnorrsig_aggregate_unforge(void) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[N_MAX]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[N_MAX]; unsigned char msgs32[N_MAX*32]; unsigned char sigs64[N_MAX*64]; unsigned char aggsig[32*(N_MAX + 1)]; - size_t n = rustsecp256k1zkp_v0_10_0_testrand_int(N_MAX + 1); + size_t n = rustsecp256k1zkp_v0_10_1_testrand_int(N_MAX + 1); /* Test 1: We fix a set of n messages and compute * a random aggsig for them. This should not verify. 
*/ @@ -259,10 +259,10 @@ static void test_schnorrsig_aggregate_unforge(void) { size_t i; /* Sample aggsig randomly */ for (i = 0; i < n + 1; ++i) { - rustsecp256k1zkp_v0_10_0_testrand256(&aggsig[i*32]); + rustsecp256k1zkp_v0_10_1_testrand256(&aggsig[i*32]); } /* Make sure that it does not verify */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); } /* Test 2: We fix a set of n messages and compute valid @@ -272,13 +272,13 @@ static void test_schnorrsig_aggregate_unforge(void) { if (n > 0) { size_t aggsig_len = sizeof(aggsig); /* Replace a randomly chosen real sig with a random one. */ - size_t k = rustsecp256k1zkp_v0_10_0_testrand_int(n); - rustsecp256k1zkp_v0_10_0_testrand256(&sigs64[k*64]); - rustsecp256k1zkp_v0_10_0_testrand256(&sigs64[k*64+32]); + size_t k = rustsecp256k1zkp_v0_10_1_testrand_int(n); + rustsecp256k1zkp_v0_10_1_testrand256(&sigs64[k*64]); + rustsecp256k1zkp_v0_10_1_testrand256(&sigs64[k*64+32]); /* Aggregate the n signatures */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n)); /* Make sure the result does not verify */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); } /* Test 3: We generate a valid aggregate signature and then @@ -288,34 +288,34 @@ static void test_schnorrsig_aggregate_unforge(void) { size_t aggsig_len = sizeof(aggsig); size_t k; /* Aggregate the n signatures */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, 
&aggsig_len, pubkeys, msgs32, sigs64, n)); /* Change one of the messages */ - k = rustsecp256k1zkp_v0_10_0_testrand_int(32*n); + k = rustsecp256k1zkp_v0_10_1_testrand_int(32*n); msgs32[k] = msgs32[k]^0xff; /* Make sure the result does not verify */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); } } /* In this test, we make sure that the algorithms properly reject * for overflowing and non parseable values. */ static void test_schnorrsig_aggregate_overflow(void) { - rustsecp256k1zkp_v0_10_0_xonly_pubkey pubkeys[N_MAX]; + rustsecp256k1zkp_v0_10_1_xonly_pubkey pubkeys[N_MAX]; unsigned char msgs32[N_MAX*32]; unsigned char sigs64[N_MAX*64]; unsigned char aggsig[32*(N_MAX + 1)]; - size_t n = rustsecp256k1zkp_v0_10_0_testrand_int(N_MAX + 1); + size_t n = rustsecp256k1zkp_v0_10_1_testrand_int(N_MAX + 1); /* We check that verification returns 0 if the s in aggsig overflows. 
*/ test_schnorrsig_aggregate_input_helper(pubkeys, msgs32, sigs64, n); { size_t aggsig_len = sizeof(aggsig); /* Aggregate */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n)); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggregate(CTX, aggsig, &aggsig_len, pubkeys, msgs32, sigs64, n)); /* Make s in the aggsig overflow */ memset(&aggsig[n*32], 0xFF, 32); /* Should not verify */ - CHECK(rustsecp256k1zkp_v0_10_0_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_schnorrsig_aggverify(CTX, pubkeys, msgs32, n, aggsig, aggsig_len) == 0); } } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/Makefile.am.include index 32baaee6..f4b815ba 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_surjectionproof.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_surjectionproof.h noinst_HEADERS += src/modules/surjection/main_impl.h noinst_HEADERS += src/modules/surjection/surjection.h noinst_HEADERS += src/modules/surjection/surjection_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/main_impl.h index 831a9994..3a45587b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/main_impl.h @@ -19,7 +19,7 @@ #define SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS 16 #endif -static size_t rustsecp256k1zkp_v0_10_0_count_bits_set(const unsigned char* data, size_t count) { +static size_t rustsecp256k1zkp_v0_10_1_count_bits_set(const unsigned char* data, size_t count) { size_t ret = 0; size_t i; for (i = 0; 
i < count; i++) { @@ -42,7 +42,7 @@ static size_t rustsecp256k1zkp_v0_10_0_count_bits_set(const unsigned char* data, #ifdef USE_REDUCED_SURJECTION_PROOF_SIZE static #endif -int rustsecp256k1zkp_v0_10_0_surjectionproof_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_surjectionproof *proof, const unsigned char *input, size_t inputlen) { +int rustsecp256k1zkp_v0_10_1_surjectionproof_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_surjectionproof *proof, const unsigned char *input, size_t inputlen) { size_t n_inputs; size_t signature_len; @@ -71,7 +71,7 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_parse(const rustsecp256k1zkp_v0_10_ } } - signature_len = 32 * (1 + rustsecp256k1zkp_v0_10_0_count_bits_set(&input[2], (n_inputs + 7) / 8)); + signature_len = 32 * (1 + rustsecp256k1zkp_v0_10_1_count_bits_set(&input[2], (n_inputs + 7) / 8)); if (inputlen != 2 + (n_inputs + 7) / 8 + signature_len) { return 0; } @@ -82,7 +82,7 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_parse(const rustsecp256k1zkp_v0_10_ return 1; } -int rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1zkp_v0_10_0_surjectionproof *proof) { +int rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1zkp_v0_10_1_surjectionproof *proof) { size_t signature_len; size_t serialized_len; @@ -92,7 +92,7 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(const rustsecp256k1zkp_v0 ARG_CHECK(proof != NULL); (void) ctx; - signature_len = 32 * (1 + rustsecp256k1zkp_v0_10_0_count_bits_set(proof->used_inputs, (proof->n_inputs + 7) / 8)); + signature_len = 32 * (1 + rustsecp256k1zkp_v0_10_1_count_bits_set(proof->used_inputs, (proof->n_inputs + 7) / 8)); serialized_len = 2 + (proof->n_inputs + 7) / 8 + signature_len; if (*outputlen < serialized_len) { 
return 0; @@ -107,37 +107,37 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(const rustsecp256k1zkp_v0 return 1; } -size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_surjectionproof* proof) { +size_t rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_surjectionproof* proof) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(proof != NULL); (void) ctx; return proof->n_inputs; } -size_t rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_surjectionproof* proof) { +size_t rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_surjectionproof* proof) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(proof != NULL); (void) ctx; - return rustsecp256k1zkp_v0_10_0_count_bits_set(proof->used_inputs, (proof->n_inputs + 7) / 8); + return rustsecp256k1zkp_v0_10_1_count_bits_set(proof->used_inputs, (proof->n_inputs + 7) / 8); } -size_t rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_surjectionproof* proof) { +size_t rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_surjectionproof* proof) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(proof != NULL); - return 2 + (proof->n_inputs + 7) / 8 + 32 * (1 + rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(ctx, proof)); + return 2 + (proof->n_inputs + 7) / 8 + 32 * (1 + rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(ctx, proof)); } typedef struct { unsigned char state[32]; size_t state_i; -} rustsecp256k1zkp_v0_10_0_surjectionproof_csprng; +} rustsecp256k1zkp_v0_10_1_surjectionproof_csprng; -static void 
rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_init(rustsecp256k1zkp_v0_10_0_surjectionproof_csprng *csprng, const unsigned char* state) { +static void rustsecp256k1zkp_v0_10_1_surjectionproof_csprng_init(rustsecp256k1zkp_v0_10_1_surjectionproof_csprng *csprng, const unsigned char* state) { memcpy(csprng->state, state, 32); csprng->state_i = 0; } -static size_t rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_next(rustsecp256k1zkp_v0_10_0_surjectionproof_csprng *csprng, size_t rand_max) { +static size_t rustsecp256k1zkp_v0_10_1_surjectionproof_csprng_next(rustsecp256k1zkp_v0_10_1_surjectionproof_csprng *csprng, size_t rand_max) { /* The number of random bytes to read for each random sample */ const size_t increment = rand_max > 256 ? 2 : 1; /* The maximum value expressable by the number of random bytes we read */ @@ -148,10 +148,10 @@ static size_t rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_next(rustsecp256k1 while (1) { size_t val; if (csprng->state_i + increment >= 32) { - rustsecp256k1zkp_v0_10_0_sha256 sha; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, csprng->state, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, csprng->state); + rustsecp256k1zkp_v0_10_1_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, csprng->state, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, csprng->state); csprng->state_i = 0; } val = csprng->state[csprng->state_i]; @@ -167,8 +167,8 @@ static size_t rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_next(rustsecp256k1 } } -int rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_surjectionproof* proof, size_t *input_index, const rustsecp256k1zkp_v0_10_0_fixed_asset_tag* fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, const rustsecp256k1zkp_v0_10_0_fixed_asset_tag* fixed_output_tag, const size_t n_max_iterations, 
const unsigned char *random_seed32) { - rustsecp256k1zkp_v0_10_0_surjectionproof_csprng csprng; +int rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_surjectionproof* proof, size_t *input_index, const rustsecp256k1zkp_v0_10_1_fixed_asset_tag* fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, const rustsecp256k1zkp_v0_10_1_fixed_asset_tag* fixed_output_tag, const size_t n_max_iterations, const unsigned char *random_seed32) { + rustsecp256k1zkp_v0_10_1_surjectionproof_csprng csprng; size_t n_iterations = 0; VERIFY_CHECK(ctx != NULL); @@ -182,7 +182,7 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(const rustsecp256k1zkp_v ARG_CHECK(n_input_tags_to_use <= n_input_tags); (void) ctx; - rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_init(&csprng, random_seed32); + rustsecp256k1zkp_v0_10_1_surjectionproof_csprng_init(&csprng, random_seed32); memset(proof->data, 0, sizeof(proof->data)); proof->n_inputs = n_input_tags; @@ -195,8 +195,8 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(const rustsecp256k1zkp_v for (i = 0; i < n_input_tags_to_use; i++) { while (1) { size_t next_input_index; - next_input_index = rustsecp256k1zkp_v0_10_0_surjectionproof_csprng_next(&csprng, n_input_tags); - if (rustsecp256k1zkp_v0_10_0_memcmp_var(&fixed_input_tags[next_input_index], fixed_output_tag, sizeof(*fixed_output_tag)) == 0) { + next_input_index = rustsecp256k1zkp_v0_10_1_surjectionproof_csprng_next(&csprng, n_input_tags); + if (rustsecp256k1zkp_v0_10_1_memcmp_var(&fixed_input_tags[next_input_index], fixed_output_tag, sizeof(*fixed_output_tag)) == 0) { *input_index = next_input_index; has_output_tag = 1; } @@ -225,10 +225,10 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(const rustsecp256k1zkp_v } } -int rustsecp256k1zkp_v0_10_0_surjectionproof_generate(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_surjectionproof* proof, const 
rustsecp256k1zkp_v0_10_0_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const rustsecp256k1zkp_v0_10_0_generator* ephemeral_output_tag, size_t input_index, const unsigned char *input_blinding_key, const unsigned char *output_blinding_key) { - rustsecp256k1zkp_v0_10_0_scalar blinding_key; - rustsecp256k1zkp_v0_10_0_scalar tmps; - rustsecp256k1zkp_v0_10_0_scalar nonce; +int rustsecp256k1zkp_v0_10_1_surjectionproof_generate(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_surjectionproof* proof, const rustsecp256k1zkp_v0_10_1_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const rustsecp256k1zkp_v0_10_1_generator* ephemeral_output_tag, size_t input_index, const unsigned char *input_blinding_key, const unsigned char *output_blinding_key) { + rustsecp256k1zkp_v0_10_1_scalar blinding_key; + rustsecp256k1zkp_v0_10_1_scalar tmps; + rustsecp256k1zkp_v0_10_1_scalar nonce; int overflow = 0; size_t rsizes[1]; /* array needed for borromean sig API */ size_t indices[1]; /* array needed for borromean sig API */ @@ -236,12 +236,12 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_generate(const rustsecp256k1zkp_v0_ size_t n_total_pubkeys; size_t n_used_pubkeys; size_t ring_input_index = 0; - rustsecp256k1zkp_v0_10_0_gej ring_pubkeys[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; - rustsecp256k1zkp_v0_10_0_scalar borromean_s[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; + rustsecp256k1zkp_v0_10_1_gej ring_pubkeys[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; + rustsecp256k1zkp_v0_10_1_scalar borromean_s[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; unsigned char msg32[32]; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(proof != NULL); ARG_CHECK(ephemeral_input_tags != NULL); ARG_CHECK(ephemeral_output_tag != NULL); @@ -251,16 +251,16 @@ int 
rustsecp256k1zkp_v0_10_0_surjectionproof_generate(const rustsecp256k1zkp_v0_ CHECK(proof->initialized == 1); #endif - n_used_pubkeys = rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(ctx, proof); + n_used_pubkeys = rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(ctx, proof); /* This must be true if the proof was created with surjectionproof_initialize */ ARG_CHECK(n_used_pubkeys > 0); /* Compute secret key */ - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&tmps, input_blinding_key, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&tmps, input_blinding_key, &overflow); if (overflow) { return 0; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&blinding_key, output_blinding_key, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&blinding_key, output_blinding_key, &overflow); if (overflow) { return 0; } @@ -269,41 +269,41 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_generate(const rustsecp256k1zkp_v0_ * it is deployed on Liquid and cannot be fixed without a hardfork. We should review * this at the same time that we relax the max-256-inputs rule. 
*/ for (i = 0; i < n_ephemeral_input_tags; i++) { - if (rustsecp256k1zkp_v0_10_0_memcmp_var(ephemeral_input_tags[i].data, ephemeral_output_tag->data, sizeof(ephemeral_output_tag->data)) == 0) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(ephemeral_input_tags[i].data, ephemeral_output_tag->data, sizeof(ephemeral_output_tag->data)) == 0) { return 0; } } - rustsecp256k1zkp_v0_10_0_scalar_negate(&tmps, &tmps); - rustsecp256k1zkp_v0_10_0_scalar_add(&blinding_key, &blinding_key, &tmps); + rustsecp256k1zkp_v0_10_1_scalar_negate(&tmps, &tmps); + rustsecp256k1zkp_v0_10_1_scalar_add(&blinding_key, &blinding_key, &tmps); /* Compute public keys */ - n_total_pubkeys = rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(ctx, proof); + n_total_pubkeys = rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(ctx, proof); if (n_used_pubkeys > n_total_pubkeys || n_total_pubkeys != n_ephemeral_input_tags) { return 0; } - if (rustsecp256k1zkp_v0_10_0_surjection_compute_public_keys(ring_pubkeys, n_used_pubkeys, ephemeral_input_tags, n_total_pubkeys, proof->used_inputs, ephemeral_output_tag, input_index, &ring_input_index) == 0) { + if (rustsecp256k1zkp_v0_10_1_surjection_compute_public_keys(ring_pubkeys, n_used_pubkeys, ephemeral_input_tags, n_total_pubkeys, proof->used_inputs, ephemeral_output_tag, input_index, &ring_input_index) == 0) { return 0; } /* Produce signature */ rsizes[0] = (int) n_used_pubkeys; indices[0] = (int) ring_input_index; - rustsecp256k1zkp_v0_10_0_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); - if (rustsecp256k1zkp_v0_10_0_surjection_genrand(borromean_s, n_used_pubkeys, &blinding_key) == 0) { + rustsecp256k1zkp_v0_10_1_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); + if (rustsecp256k1zkp_v0_10_1_surjection_genrand(borromean_s, n_used_pubkeys, &blinding_key) == 0) { return 0; } /* Borromean sign will overwrite one of the s values we just generated, so use * it as a nonce 
instead. This avoids extra random generation and also is an * homage to the rangeproof code which does this very cleverly to encode messages. */ nonce = borromean_s[ring_input_index]; - rustsecp256k1zkp_v0_10_0_scalar_clear(&borromean_s[ring_input_index]); - if (rustsecp256k1zkp_v0_10_0_borromean_sign(&ctx->ecmult_gen_ctx, &proof->data[0], borromean_s, ring_pubkeys, &nonce, &blinding_key, rsizes, indices, 1, msg32, 32) == 0) { + rustsecp256k1zkp_v0_10_1_scalar_clear(&borromean_s[ring_input_index]); + if (rustsecp256k1zkp_v0_10_1_borromean_sign(&ctx->ecmult_gen_ctx, &proof->data[0], borromean_s, ring_pubkeys, &nonce, &blinding_key, rsizes, indices, 1, msg32, 32) == 0) { return 0; } for (i = 0; i < n_used_pubkeys; i++) { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&proof->data[32 + 32 * i], &borromean_s[i]); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&proof->data[32 + 32 * i], &borromean_s[i]); } return 1; } @@ -311,13 +311,13 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_generate(const rustsecp256k1zkp_v0_ #ifdef USE_REDUCED_SURJECTION_PROOF_SIZE static #endif -int rustsecp256k1zkp_v0_10_0_surjectionproof_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_surjectionproof* proof, const rustsecp256k1zkp_v0_10_0_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const rustsecp256k1zkp_v0_10_0_generator* ephemeral_output_tag) { +int rustsecp256k1zkp_v0_10_1_surjectionproof_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_surjectionproof* proof, const rustsecp256k1zkp_v0_10_1_generator* ephemeral_input_tags, size_t n_ephemeral_input_tags, const rustsecp256k1zkp_v0_10_1_generator* ephemeral_output_tag) { size_t rsizes[1]; /* array needed for borromean sig API */ size_t i; size_t n_total_pubkeys; size_t n_used_pubkeys; - rustsecp256k1zkp_v0_10_0_gej ring_pubkeys[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; - rustsecp256k1zkp_v0_10_0_scalar borromean_s[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; + 
rustsecp256k1zkp_v0_10_1_gej ring_pubkeys[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; + rustsecp256k1zkp_v0_10_1_scalar borromean_s[SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS]; unsigned char msg32[32]; VERIFY_CHECK(ctx != NULL); @@ -326,8 +326,8 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_verify(const rustsecp256k1zkp_v0_10 ARG_CHECK(ephemeral_output_tag != NULL); /* Compute public keys */ - n_total_pubkeys = rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(ctx, proof); - n_used_pubkeys = rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(ctx, proof); + n_total_pubkeys = rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(ctx, proof); + n_used_pubkeys = rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(ctx, proof); if (n_used_pubkeys == 0 || n_used_pubkeys > n_total_pubkeys || n_total_pubkeys != n_ephemeral_input_tags) { return 0; } @@ -337,7 +337,7 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_verify(const rustsecp256k1zkp_v0_10 return 0; } - if (rustsecp256k1zkp_v0_10_0_surjection_compute_public_keys(ring_pubkeys, n_used_pubkeys, ephemeral_input_tags, n_total_pubkeys, proof->used_inputs, ephemeral_output_tag, 0, NULL) == 0) { + if (rustsecp256k1zkp_v0_10_1_surjection_compute_public_keys(ring_pubkeys, n_used_pubkeys, ephemeral_input_tags, n_total_pubkeys, proof->used_inputs, ephemeral_output_tag, 0, NULL) == 0) { return 0; } @@ -345,13 +345,13 @@ int rustsecp256k1zkp_v0_10_0_surjectionproof_verify(const rustsecp256k1zkp_v0_10 rsizes[0] = (int) n_used_pubkeys; for (i = 0; i < n_used_pubkeys; i++) { int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&borromean_s[i], &proof->data[32 + 32 * i], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&borromean_s[i], &proof->data[32 + 32 * i], &overflow); if (overflow == 1) { return 0; } } - rustsecp256k1zkp_v0_10_0_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); - return rustsecp256k1zkp_v0_10_0_borromean_verify(NULL, &proof->data[0], 
borromean_s, ring_pubkeys, rsizes, 1, msg32, 32); + rustsecp256k1zkp_v0_10_1_surjection_genmessage(msg32, ephemeral_input_tags, n_total_pubkeys, ephemeral_output_tag); + return rustsecp256k1zkp_v0_10_1_borromean_verify(NULL, &proof->data[0], borromean_s, ring_pubkeys, rsizes, 1, msg32, 32); } #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection.h index 7ddef58e..6e11a347 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection.h @@ -10,10 +10,10 @@ #include "../../group.h" #include "../../scalar.h" -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_genmessage(unsigned char *msg32, rustsecp256k1zkp_v0_10_0_ge *ephemeral_input_tags, size_t n_input_tags, rustsecp256k1zkp_v0_10_0_ge *ephemeral_output_tag); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_surjection_genmessage(unsigned char *msg32, rustsecp256k1zkp_v0_10_1_ge *ephemeral_input_tags, size_t n_input_tags, rustsecp256k1zkp_v0_10_1_ge *ephemeral_output_tag); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_genrand(rustsecp256k1zkp_v0_10_0_scalar *s, size_t ns, const rustsecp256k1zkp_v0_10_0_scalar *blinding_key); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_surjection_genrand(rustsecp256k1zkp_v0_10_1_scalar *s, size_t ns, const rustsecp256k1zkp_v0_10_1_scalar *blinding_key); -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_compute_public_keys(rustsecp256k1zkp_v0_10_0_gej *pubkeys, size_t n_pubkeys, const rustsecp256k1zkp_v0_10_0_ge *input_tags, size_t n_input_tags, const unsigned char *used_tags, const rustsecp256k1zkp_v0_10_0_ge *output_tag, size_t input_index, size_t *ring_input_index); +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_surjection_compute_public_keys(rustsecp256k1zkp_v0_10_1_gej *pubkeys, size_t n_pubkeys, const 
rustsecp256k1zkp_v0_10_1_ge *input_tags, size_t n_input_tags, const unsigned char *used_tags, const rustsecp256k1zkp_v0_10_1_ge *output_tag, size_t input_index, size_t *ring_input_index); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection_impl.h index 54501093..d19cff53 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/surjection_impl.h @@ -14,32 +14,32 @@ #include "../../scalar.h" #include "../../hash.h" -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_surjection_genmessage(unsigned char *msg32, const rustsecp256k1zkp_v0_10_0_generator *ephemeral_input_tags, size_t n_input_tags, const rustsecp256k1zkp_v0_10_0_generator *ephemeral_output_tag) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_surjection_genmessage(unsigned char *msg32, const rustsecp256k1zkp_v0_10_1_generator *ephemeral_input_tags, size_t n_input_tags, const rustsecp256k1zkp_v0_10_1_generator *ephemeral_output_tag) { /* compute message */ size_t i; unsigned char pk_ser[33]; size_t pk_len = sizeof(pk_ser); - rustsecp256k1zkp_v0_10_0_sha256 sha256_en; + rustsecp256k1zkp_v0_10_1_sha256 sha256_en; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_en); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_en); for (i = 0; i < n_input_tags; i++) { pk_ser[0] = 2 + (ephemeral_input_tags[i].data[63] & 1); memcpy(&pk_ser[1], &ephemeral_input_tags[i].data[0], 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, pk_ser, pk_len); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, pk_ser, pk_len); } pk_ser[0] = 2 + (ephemeral_output_tag->data[63] & 1); memcpy(&pk_ser[1], &ephemeral_output_tag->data[0], 32); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, pk_ser, pk_len); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_en, msg32); + 
rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, pk_ser, pk_len); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_en, msg32); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_genrand(rustsecp256k1zkp_v0_10_0_scalar *s, size_t ns, const rustsecp256k1zkp_v0_10_0_scalar *blinding_key) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_surjection_genrand(rustsecp256k1zkp_v0_10_1_scalar *s, size_t ns, const rustsecp256k1zkp_v0_10_1_scalar *blinding_key) { size_t i; unsigned char sec_input[36]; - rustsecp256k1zkp_v0_10_0_sha256 sha256_en; + rustsecp256k1zkp_v0_10_1_sha256 sha256_en; /* compute s values */ - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sec_input[4], blinding_key); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sec_input[4], blinding_key); for (i = 0; i < ns; i++) { int overflow = 0; sec_input[0] = i; @@ -47,10 +47,10 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_genrand(rustsecp sec_input[2] = i >> 16; sec_input[3] = i >> 24; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_en); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_en, sec_input, 36); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_en, sec_input); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s[i], sec_input, &overflow); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_en); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_en, sec_input, 36); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_en, sec_input); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s[i], sec_input, &overflow); if (overflow == 1) { memset(sec_input, 0, 32); return 0; @@ -60,21 +60,21 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_genrand(rustsecp return 1; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_surjection_compute_public_keys(rustsecp256k1zkp_v0_10_0_gej *pubkeys, size_t n_pubkeys, const rustsecp256k1zkp_v0_10_0_generator *input_tags, size_t n_input_tags, const unsigned char *used_tags, const rustsecp256k1zkp_v0_10_0_generator *output_tag, size_t 
input_index, size_t *ring_input_index) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_surjection_compute_public_keys(rustsecp256k1zkp_v0_10_1_gej *pubkeys, size_t n_pubkeys, const rustsecp256k1zkp_v0_10_1_generator *input_tags, size_t n_input_tags, const unsigned char *used_tags, const rustsecp256k1zkp_v0_10_1_generator *output_tag, size_t input_index, size_t *ring_input_index) { size_t i; size_t j = 0; for (i = 0; i < n_input_tags; i++) { if (used_tags[i / 8] & (1 << (i % 8))) { - rustsecp256k1zkp_v0_10_0_ge tmpge; - rustsecp256k1zkp_v0_10_0_generator_load(&tmpge, &input_tags[i]); - rustsecp256k1zkp_v0_10_0_ge_neg(&tmpge, &tmpge); + rustsecp256k1zkp_v0_10_1_ge tmpge; + rustsecp256k1zkp_v0_10_1_generator_load(&tmpge, &input_tags[i]); + rustsecp256k1zkp_v0_10_1_ge_neg(&tmpge, &tmpge); VERIFY_CHECK(j < SECP256K1_SURJECTIONPROOF_MAX_USED_INPUTS); VERIFY_CHECK(j < n_pubkeys); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pubkeys[j], &tmpge); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pubkeys[j], &tmpge); - rustsecp256k1zkp_v0_10_0_generator_load(&tmpge, output_tag); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&pubkeys[j], &pubkeys[j], &tmpge, NULL); + rustsecp256k1zkp_v0_10_1_generator_load(&tmpge, output_tag); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&pubkeys[j], &pubkeys[j], &tmpge, NULL); if (ring_input_index != NULL && input_index == i) { *ring_input_index = j; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/tests_impl.h index 046db41b..4c84c0dd 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/surjection/tests_impl.h @@ -15,127 +15,127 @@ static void test_surjectionproof_api(void) { unsigned char seed[32]; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_input_tags[10]; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_output_tag; - rustsecp256k1zkp_v0_10_0_generator 
ephemeral_input_tags[10]; - rustsecp256k1zkp_v0_10_0_generator ephemeral_output_tag; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_input_tags[10]; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_output_tag; + rustsecp256k1zkp_v0_10_1_generator ephemeral_input_tags[10]; + rustsecp256k1zkp_v0_10_1_generator ephemeral_output_tag; unsigned char input_blinding_key[10][32]; unsigned char output_blinding_key[32]; unsigned char serialized_proof[SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES_MAX]; size_t serialized_len; - rustsecp256k1zkp_v0_10_0_surjectionproof proof; - rustsecp256k1zkp_v0_10_0_surjectionproof* proof_on_heap; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_surjectionproof* proof_on_heap; size_t n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]); size_t input_index; size_t i; - rustsecp256k1zkp_v0_10_0_testrand256(seed); + rustsecp256k1zkp_v0_10_1_testrand256(seed); for (i = 0; i < n_inputs; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(input_blinding_key[i]); - rustsecp256k1zkp_v0_10_0_testrand256(fixed_input_tags[i].data); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_input_tags[i], fixed_input_tags[i].data, input_blinding_key[i])); + rustsecp256k1zkp_v0_10_1_testrand256(input_blinding_key[i]); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_input_tags[i].data); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_input_tags[i], fixed_input_tags[i].data, input_blinding_key[i])); } - rustsecp256k1zkp_v0_10_0_testrand256(output_blinding_key); + rustsecp256k1zkp_v0_10_1_testrand256(output_blinding_key); memcpy(&fixed_output_tag, &fixed_input_tags[0], sizeof(fixed_input_tags[0])); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_output_tag, fixed_output_tag.data, output_blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_output_tag, fixed_output_tag.data, output_blinding_key)); /* check 
allocate_initialized */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, seed) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, seed) == 0); CHECK(proof_on_heap == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); CHECK(proof_on_heap != 0); - rustsecp256k1zkp_v0_10_0_surjectionproof_destroy(proof_on_heap); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, NULL, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, NULL, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + rustsecp256k1zkp_v0_10_1_surjectionproof_destroy(proof_on_heap); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, NULL, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, NULL, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); CHECK(proof_on_heap == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, NULL, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, NULL, n_inputs, 3, &fixed_input_tags[0], 100, seed)); 
CHECK(proof_on_heap == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS + 1, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS + 1, 3, &fixed_input_tags[0], 100, seed)); CHECK(proof_on_heap == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], 100, seed) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], 100, seed) != 0); CHECK(proof_on_heap != 0); - rustsecp256k1zkp_v0_10_0_surjectionproof_destroy(proof_on_heap); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, n_inputs + 1, &fixed_input_tags[0], 100, seed)); + rustsecp256k1zkp_v0_10_1_surjectionproof_destroy(proof_on_heap); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, n_inputs + 1, &fixed_input_tags[0], 100, seed)); CHECK(proof_on_heap == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 3, NULL, 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 3, NULL, 100, seed)); CHECK(proof_on_heap == 0); - CHECK((rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 0, seed) & 1) == 0); + 
CHECK((rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 0, seed) & 1) == 0); CHECK(proof_on_heap == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_allocate_initialized(CTX, &proof_on_heap, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, NULL)); CHECK(proof_on_heap == 0); /* we are now going to test essentially the same functions, just without * heap allocation. */ /* check initialize */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, seed) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, NULL, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, NULL, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, NULL, n_inputs, 3, &fixed_input_tags[0], 100, seed)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS + 1, 3, &fixed_input_tags[0], 100, seed)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], 100, seed) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, 
n_inputs, n_inputs + 1, &fixed_input_tags[0], 100, seed)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, NULL, 100, seed)); - CHECK((rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 0, seed) & 1) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, NULL)); - - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, seed) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, NULL, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, NULL, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, NULL, n_inputs, 3, &fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS + 1, 3, &fixed_input_tags[0], 100, seed)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], 100, seed) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs + 1, 
&fixed_input_tags[0], 100, seed)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, NULL, 100, seed)); + CHECK((rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 0, seed) & 1) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], 100, NULL)); + + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[0], 100, seed) != 0); /* check generate */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) != 0); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(STATIC_CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); - - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, NULL, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, NULL, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs + 1, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs - 1, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, 0, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); - CHECK_ILLEGAL(CTX, 
rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, NULL, 0, input_blinding_key[0], output_blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) != 0); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(STATIC_CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); + + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, NULL, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, NULL, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs + 1, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs - 1, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, 0, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, NULL, 0, input_blinding_key[0], output_blinding_key)); /* the below line "succeeds" but generates an invalid proof as the input_index is wrong. it is fairly expensive to detect this. should we? 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 1, input_blinding_key[0], output_blinding_key) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, n_inputs + 1, input_blinding_key[0], output_blinding_key) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, NULL, output_blinding_key)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 1, input_blinding_key[0], output_blinding_key) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, n_inputs + 1, input_blinding_key[0], output_blinding_key) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, NULL, output_blinding_key)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) != 0); /* check verify */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, NULL, ephemeral_input_tags, n_inputs, &ephemeral_output_tag)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, NULL, n_inputs, &ephemeral_output_tag)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs - 1, &ephemeral_output_tag) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs + 1, &ephemeral_output_tag) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, NULL, ephemeral_input_tags, n_inputs, &ephemeral_output_tag)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, NULL, n_inputs, &ephemeral_output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs - 1, &ephemeral_output_tag) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs + 1, &ephemeral_output_tag) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, NULL)); /* Test how surjectionproof_generate fails when the proof was not created * with surjectionproof_initialize */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 1); { - rustsecp256k1zkp_v0_10_0_surjectionproof tmp_proof = proof; + 
rustsecp256k1zkp_v0_10_1_surjectionproof tmp_proof = proof; tmp_proof.n_inputs = 0; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &tmp_proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &tmp_proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key)); } - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_output_tag, 0, input_blinding_key[0], output_blinding_key) == 1); /* Check serialize */ serialized_len = sizeof(serialized_proof); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) != 0); serialized_len = sizeof(serialized_proof); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, NULL, &serialized_len, &proof)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, NULL, &serialized_len, &proof)); serialized_len = sizeof(serialized_proof); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, NULL, &proof)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, NULL, &proof)); serialized_len = sizeof(serialized_proof); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, NULL)); serialized_len = sizeof(serialized_proof); - 
CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) != 0); /* Check parse */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof, serialized_len) != 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, NULL, serialized_proof, serialized_len)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, NULL, serialized_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof, serialized_len) != 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, NULL, serialized_proof, serialized_len)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, NULL, serialized_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof, 0) == 0); } static void test_input_selection(size_t n_inputs) { @@ -144,70 +144,70 @@ static void test_input_selection(size_t n_inputs) { size_t result; size_t input_index; size_t try_count = n_inputs * 100; - rustsecp256k1zkp_v0_10_0_surjectionproof proof; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_input_tags[1000]; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_input_tags[1000]; const size_t max_n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]) - 1; CHECK(n_inputs < max_n_inputs); - rustsecp256k1zkp_v0_10_0_testrand256(seed); + rustsecp256k1zkp_v0_10_1_testrand256(seed); for (i = 0; i < n_inputs + 1; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(fixed_input_tags[i].data); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_input_tags[i].data); } /* cannot match output when told to use zero keys */ - result = 
rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 0, &fixed_input_tags[0], try_count, seed); CHECK(result == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(CTX, &proof) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof) == 34 + (n_inputs + 7) / 8); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(CTX, &proof) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof) == 34 + (n_inputs + 7) / 8); if (n_inputs > 0) { /* succeed in 100*n_inputs tries (probability of failure e^-100) */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 1, &fixed_input_tags[0], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 1, &fixed_input_tags[0], try_count, seed); CHECK(result > 0); CHECK(result < n_inputs * 10); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(CTX, &proof) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof) == 66 + (n_inputs + 7) / 8); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(CTX, &proof) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof) == 66 + (n_inputs + 7) / 8); CHECK(input_index == 0); } if (n_inputs >= 3) { /* succeed in 10*n_inputs tries 
(probability of failure e^-10) */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[1], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[1], try_count, seed); CHECK(result > 0); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(CTX, &proof) == 3); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof) == 130 + (n_inputs + 7) / 8); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(CTX, &proof) == 3); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof) == 130 + (n_inputs + 7) / 8); CHECK(input_index == 1); /* fail, key not found */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[n_inputs], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, 3, &fixed_input_tags[n_inputs], try_count, seed); CHECK(result == 0); /* succeed on first try when told to use all keys */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs, &fixed_input_tags[0], try_count, seed); CHECK(result == 1); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(CTX, &proof) == n_inputs); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); - 
CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof) == 2 + 32 * (n_inputs + 1) + (n_inputs + 7) / 8); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof) == 2 + 32 * (n_inputs + 1) + (n_inputs + 7) / 8); CHECK(input_index == 0); /* succeed in less than 64 tries when told to use half keys. (probability of failure 2^-64) */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs / 2, &fixed_input_tags[0], 64, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_inputs / 2, &fixed_input_tags[0], 64, seed); CHECK(result > 0); CHECK(result < 64); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_used_inputs(CTX, &proof) == n_inputs / 2); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof) == 2 + 32 * (n_inputs / 2 + 1) + (n_inputs + 7) / 8); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_used_inputs(CTX, &proof) == n_inputs / 2); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_n_total_inputs(CTX, &proof) == n_inputs); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof) == 2 + 32 * (n_inputs / 2 + 1) + (n_inputs + 7) / 8); CHECK(input_index == 0); } } /** Runs surjectionproof_initilize multiple times and records the number of times each input was used. 
*/ -static void test_input_selection_distribution_helper(const rustsecp256k1zkp_v0_10_0_fixed_asset_tag* fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, size_t *used_inputs) { - rustsecp256k1zkp_v0_10_0_surjectionproof proof; +static void test_input_selection_distribution_helper(const rustsecp256k1zkp_v0_10_1_fixed_asset_tag* fixed_input_tags, const size_t n_input_tags, const size_t n_input_tags_to_use, size_t *used_inputs) { + rustsecp256k1zkp_v0_10_1_surjectionproof proof; size_t input_index; size_t i; size_t j; @@ -217,8 +217,8 @@ static void test_input_selection_distribution_helper(const rustsecp256k1zkp_v0_1 used_inputs[i] = 0; } for(j = 0; j < 10000; j++) { - rustsecp256k1zkp_v0_10_0_testrand256(seed); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_input_tags, n_input_tags_to_use, &fixed_input_tags[0], 64, seed); + rustsecp256k1zkp_v0_10_1_testrand256(seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_input_tags, n_input_tags_to_use, &fixed_input_tags[0], 64, seed); CHECK(result > 0); for (i = 0; i < n_input_tags; i++) { @@ -236,11 +236,11 @@ static void test_input_selection_distribution(void) { size_t i; size_t n_input_tags_to_use; const size_t n_inputs = 4; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_input_tags[4]; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_input_tags[4]; size_t used_inputs[4]; for (i = 0; i < n_inputs; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(fixed_input_tags[i].data); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_input_tags[i].data); } /* If there is one input tag to use, initialize must choose the one equal to fixed_output_tag. 
*/ @@ -306,12 +306,12 @@ static void test_input_selection_distribution(void) { static void test_gen_verify(size_t n_inputs, size_t n_used) { unsigned char seed[32]; - rustsecp256k1zkp_v0_10_0_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; unsigned char serialized_proof[SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES_MAX]; unsigned char serialized_proof_trailing[SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES_MAX + 1]; size_t serialized_len = SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES_MAX; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_input_tags[1000]; - rustsecp256k1zkp_v0_10_0_generator ephemeral_input_tags[1000]; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_input_tags[1000]; + rustsecp256k1zkp_v0_10_1_generator ephemeral_input_tags[1000]; unsigned char *input_blinding_key[1000]; const size_t max_n_inputs = sizeof(fixed_input_tags) / sizeof(fixed_input_tags[0]) - 1; size_t try_count = n_inputs * 100; @@ -323,24 +323,24 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { /* setup */ CHECK(n_used <= n_inputs); CHECK(n_inputs < max_n_inputs); - rustsecp256k1zkp_v0_10_0_testrand256(seed); + rustsecp256k1zkp_v0_10_1_testrand256(seed); key_index = (((size_t) seed[0] << 8) + seed[1]) % n_inputs; for (i = 0; i < n_inputs + 1; i++) { input_blinding_key[i] = malloc(32); - rustsecp256k1zkp_v0_10_0_testrand256(input_blinding_key[i]); + rustsecp256k1zkp_v0_10_1_testrand256(input_blinding_key[i]); /* choose random fixed tag, except that for the output one copy from the key_index */ if (i < n_inputs) { - rustsecp256k1zkp_v0_10_0_testrand256(fixed_input_tags[i].data); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_input_tags[i].data); } else { memcpy(&fixed_input_tags[i], &fixed_input_tags[key_index], sizeof(fixed_input_tags[i])); } - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_input_tags[i], fixed_input_tags[i].data, input_blinding_key[i])); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, 
&ephemeral_input_tags[i], fixed_input_tags[i].data, input_blinding_key[i])); } /* test */ - result = rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_used, &fixed_input_tags[key_index], try_count, seed); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, fixed_input_tags, n_inputs, n_used, &fixed_input_tags[key_index], try_count, seed); if (n_used == 0) { CHECK(result == 0); return; @@ -348,32 +348,32 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { CHECK(result > 0); CHECK(input_index == key_index); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs], input_index, input_blinding_key[input_index], input_blinding_key[n_inputs]); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs], input_index, input_blinding_key[input_index], input_blinding_key[n_inputs]); CHECK(result == 1); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof)); - CHECK(serialized_len == rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size(CTX, &proof)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof)); + CHECK(serialized_len == rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size(CTX, &proof)); CHECK(serialized_len == SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES(n_inputs, n_used)); /* trailing garbage */ memcpy(&serialized_proof_trailing, &serialized_proof, serialized_len); serialized_proof_trailing[serialized_len] = seed[0]; - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof_trailing, serialized_len + 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof_trailing, serialized_len + 1) == 0); - 
CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof, serialized_len)); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs]); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof, serialized_len)); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs]); CHECK(result == 1); /* various fail cases */ if (n_inputs > 1) { - result = rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs - 1]); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs - 1]); CHECK(result == 0); /* number of entries in ephemeral_input_tags array is less than proof.n_inputs */ n_inputs -= 1; - result = rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs], input_index, input_blinding_key[input_index], input_blinding_key[n_inputs]); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs], input_index, input_blinding_key[input_index], input_blinding_key[n_inputs]); CHECK(result == 0); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs - 1]); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs - 1]); CHECK(result == 0); n_inputs += 1; } @@ -381,7 +381,7 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { for (i = 0; i < n_inputs; i++) { /* flip bit */ proof.used_inputs[i / 8] ^= (1 << (i % 8)); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, 
&ephemeral_input_tags[n_inputs]); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_inputs, &ephemeral_input_tags[n_inputs]); CHECK(result == 0); /* reset the bit */ proof.used_inputs[i / 8] ^= (1 << (i % 8)); @@ -395,90 +395,90 @@ static void test_gen_verify(size_t n_inputs, size_t n_used) { /* check that a proof with empty n_used_inputs is invalid */ static void test_no_used_inputs_verify(void) { - rustsecp256k1zkp_v0_10_0_surjectionproof proof; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_input_tag; - rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_output_tag; - rustsecp256k1zkp_v0_10_0_generator ephemeral_input_tags[1]; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_input_tag; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_output_tag; + rustsecp256k1zkp_v0_10_1_generator ephemeral_input_tags[1]; size_t n_ephemeral_input_tags = 1; - rustsecp256k1zkp_v0_10_0_generator ephemeral_output_tag; + rustsecp256k1zkp_v0_10_1_generator ephemeral_output_tag; unsigned char blinding_key[32]; - rustsecp256k1zkp_v0_10_0_ge output; - rustsecp256k1zkp_v0_10_0_sha256 sha256_e0; + rustsecp256k1zkp_v0_10_1_ge output; + rustsecp256k1zkp_v0_10_1_sha256 sha256_e0; int result; - /* Create proof that doesn't use inputs. rustsecp256k1zkp_v0_10_0_surjectionproof_initialize + /* Create proof that doesn't use inputs. rustsecp256k1zkp_v0_10_1_surjectionproof_initialize * will not work here since it insists on selecting an input that matches the output. 
*/ proof.n_inputs = 1; memset(proof.used_inputs, 0, SECP256K1_SURJECTIONPROOF_MAX_N_INPUTS / 8); /* create different fixed input and output tags */ - rustsecp256k1zkp_v0_10_0_testrand256(fixed_input_tag.data); - rustsecp256k1zkp_v0_10_0_testrand256(fixed_output_tag.data); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_input_tag.data); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_output_tag.data); /* blind fixed output tags with random blinding key */ - rustsecp256k1zkp_v0_10_0_testrand256(blinding_key); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_input_tags[0], fixed_input_tag.data, blinding_key)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_output_tag, fixed_output_tag.data, blinding_key)); + rustsecp256k1zkp_v0_10_1_testrand256(blinding_key); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_input_tags[0], fixed_input_tag.data, blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_output_tag, fixed_output_tag.data, blinding_key)); /* create "borromean signature" which is just a hash of metadata (pubkeys, etc) in this case */ - rustsecp256k1zkp_v0_10_0_generator_load(&output, &ephemeral_output_tag); - rustsecp256k1zkp_v0_10_0_surjection_genmessage(proof.data, ephemeral_input_tags, 1, &ephemeral_output_tag); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha256_e0); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha256_e0, proof.data, 32); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha256_e0, proof.data); + rustsecp256k1zkp_v0_10_1_generator_load(&output, &ephemeral_output_tag); + rustsecp256k1zkp_v0_10_1_surjection_genmessage(proof.data, ephemeral_input_tags, 1, &ephemeral_output_tag); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha256_e0); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha256_e0, proof.data, 32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha256_e0, proof.data); - result = rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, 
&proof, ephemeral_input_tags, n_ephemeral_input_tags, &ephemeral_output_tag); + result = rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, ephemeral_input_tags, n_ephemeral_input_tags, &ephemeral_output_tag); CHECK(result == 0); } static void test_bad_serialize(void) { - rustsecp256k1zkp_v0_10_0_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; unsigned char serialized_proof[SECP256K1_SURJECTIONPROOF_SERIALIZATION_BYTES_MAX]; size_t serialized_len; proof.n_inputs = 0; serialized_len = 2 + 31; /* e0 is one byte too short */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_serialize(CTX, serialized_proof, &serialized_len, &proof) == 0); } static void test_bad_parse(void) { - rustsecp256k1zkp_v0_10_0_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; unsigned char serialized_proof0[] = { 0x00 }; unsigned char serialized_proof1[] = { 0x01, 0x00 }; unsigned char serialized_proof2[33] = { 0 }; /* Missing total input count */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof0, sizeof(serialized_proof0)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof0, sizeof(serialized_proof0)) == 0); /* Missing bitmap */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof1, sizeof(serialized_proof1)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof1, sizeof(serialized_proof1)) == 0); /* Missing e0 value */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, serialized_proof2, sizeof(serialized_proof2)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, serialized_proof2, sizeof(serialized_proof2)) == 0); } static void test_input_eq_output(void) { - rustsecp256k1zkp_v0_10_0_surjectionproof proof; - 
rustsecp256k1zkp_v0_10_0_fixed_asset_tag fixed_tag; - rustsecp256k1zkp_v0_10_0_generator ephemeral_tag; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_fixed_asset_tag fixed_tag; + rustsecp256k1zkp_v0_10_1_generator ephemeral_tag; unsigned char blinding_key[32]; unsigned char entropy[32]; size_t input_index; - rustsecp256k1zkp_v0_10_0_testrand256(fixed_tag.data); - rustsecp256k1zkp_v0_10_0_testrand256(blinding_key); - rustsecp256k1zkp_v0_10_0_testrand256(entropy); + rustsecp256k1zkp_v0_10_1_testrand256(fixed_tag.data); + rustsecp256k1zkp_v0_10_1_testrand256(blinding_key); + rustsecp256k1zkp_v0_10_1_testrand256(entropy); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_initialize(CTX, &proof, &input_index, &fixed_tag, 1, 1, &fixed_tag, 100, entropy) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_initialize(CTX, &proof, &input_index, &fixed_tag, 1, 1, &fixed_tag, 100, entropy) == 1); CHECK(input_index == 0); /* Generation should fail */ - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_tag, fixed_tag.data, blinding_key)); - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, &ephemeral_tag, 1, &ephemeral_tag, input_index, blinding_key, blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_tag, fixed_tag.data, blinding_key)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, &ephemeral_tag, 1, &ephemeral_tag, input_index, blinding_key, blinding_key)); /* ...even when the blinding key is zero */ memset(blinding_key, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_generator_generate_blinded(CTX, &ephemeral_tag, fixed_tag.data, blinding_key)); - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_generate(CTX, &proof, &ephemeral_tag, 1, &ephemeral_tag, input_index, blinding_key, blinding_key)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_generate_blinded(CTX, &ephemeral_tag, fixed_tag.data, blinding_key)); + 
CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_generate(CTX, &proof, &ephemeral_tag, 1, &ephemeral_tag, input_index, blinding_key, blinding_key)); } static void test_fixed_vectors(void) { @@ -574,57 +574,57 @@ static void test_fixed_vectors(void) { unsigned char bad[sizeof(total5_used5) + 32] = { 0 }; - rustsecp256k1zkp_v0_10_0_generator input_tags[5]; - rustsecp256k1zkp_v0_10_0_generator output_tag; - rustsecp256k1zkp_v0_10_0_surjectionproof proof; + rustsecp256k1zkp_v0_10_1_generator input_tags[5]; + rustsecp256k1zkp_v0_10_1_generator output_tag; + rustsecp256k1zkp_v0_10_1_surjectionproof proof; - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &input_tags[0], tag0_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &input_tags[1], tag1_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &input_tags[2], tag2_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &input_tags[3], tag3_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &input_tags[4], tag4_ser)); - CHECK(rustsecp256k1zkp_v0_10_0_generator_parse(CTX, &output_tag, output_tag_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &input_tags[0], tag0_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &input_tags[1], tag1_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &input_tags[2], tag2_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &input_tags[3], tag3_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &input_tags[4], tag4_ser)); + CHECK(rustsecp256k1zkp_v0_10_1_generator_parse(CTX, &output_tag, output_tag_ser)); /* check 1-of-1 */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total1_used1, total1_used1_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 1, &output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total1_used1, total1_used1_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 
1, &output_tag)); /* check 1-of-2 */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total2_used1, total2_used1_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 2, &output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total2_used1, total2_used1_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 2, &output_tag)); /* check 2-of-3 */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total3_used2, total3_used2_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 3, &output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total3_used2, total3_used2_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 3, &output_tag)); /* check 3-of-5 */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total5_used3, total5_used3_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 5, &output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total5_used3, total5_used3_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 5, &output_tag)); /* check 5-of-5 */ - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total5_used5, total5_used5_len)); - CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 5, &output_tag)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total5_used5, total5_used5_len)); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 5, &output_tag)); /* check invalid length fails */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total5_used5, total5_used3_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total5_used5, total5_used3_len)); /* check invalid keys fail */ - 
CHECK(rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, total1_used1, total1_used1_len)); - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, &input_tags[1], 1, &output_tag)); - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_verify(CTX, &proof, input_tags, 1, &input_tags[0])); + CHECK(rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, total1_used1, total1_used1_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, &input_tags[1], 1, &output_tag)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_verify(CTX, &proof, input_tags, 1, &input_tags[0])); /* Try setting 6 bits on the total5-used-5; check that parsing fails */ memcpy(bad, total5_used5, total5_used5_len); bad[2] = 0x3f; /* 0x1f -> 0x3f */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used5_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used5_len)); /* Correct for the length */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used5_len + 32)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used5_len + 32)); /* Alternately just turn off one of the "legit" bits */ bad[2] = 0x37; /* 0x1f -> 0x37 */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used5_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used5_len)); /* Similarly try setting 4 bits on the total5-used-3, with one bit out of range */ memcpy(bad, total5_used3, total5_used3_len); bad[2] = 0x35; /* 0x15 -> 0x35 */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used3_len)); - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used3_len + 32)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used3_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used3_len 
+ 32)); bad[2] = 0x34; /* 0x15 -> 0x34 */ - CHECK(!rustsecp256k1zkp_v0_10_0_surjectionproof_parse(CTX, &proof, bad, total5_used3_len)); + CHECK(!rustsecp256k1zkp_v0_10_1_surjectionproof_parse(CTX, &proof, bad, total5_used3_len)); } static void run_surjection_tests(void) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/Makefile.am.include b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/Makefile.am.include index 88c9ac06..5be3eab5 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/Makefile.am.include +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/Makefile.am.include @@ -1,4 +1,4 @@ -include_HEADERS += include/rustsecp256k1zkp_v0_10_0_whitelist.h +include_HEADERS += include/rustsecp256k1zkp_v0_10_1_whitelist.h noinst_HEADERS += src/modules/whitelist/whitelist_impl.h noinst_HEADERS += src/modules/whitelist/main_impl.h noinst_HEADERS += src/modules/whitelist/tests_impl.h diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/main_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/main_impl.h index 7f597400..407fac31 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/main_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/main_impl.h @@ -12,16 +12,16 @@ #define MAX_KEYS SECP256K1_WHITELIST_MAX_N_KEYS /* shorter alias */ -int rustsecp256k1zkp_v0_10_0_whitelist_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, const size_t n_keys, const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey, const unsigned char *online_seckey, const unsigned char *summed_seckey, const size_t index) { - rustsecp256k1zkp_v0_10_0_gej pubs[MAX_KEYS]; - rustsecp256k1zkp_v0_10_0_scalar s[MAX_KEYS]; - rustsecp256k1zkp_v0_10_0_scalar sec, non; +int rustsecp256k1zkp_v0_10_1_whitelist_sign(const 
rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const size_t n_keys, const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey, const unsigned char *online_seckey, const unsigned char *summed_seckey, const size_t index) { + rustsecp256k1zkp_v0_10_1_gej pubs[MAX_KEYS]; + rustsecp256k1zkp_v0_10_1_scalar s[MAX_KEYS]; + rustsecp256k1zkp_v0_10_1_scalar sec, non; unsigned char msg32[32]; int ret; /* Sanity checks */ VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(sig != NULL); ARG_CHECK(online_pubkeys != NULL); ARG_CHECK(offline_pubkeys != NULL); @@ -32,11 +32,11 @@ int rustsecp256k1zkp_v0_10_0_whitelist_sign(const rustsecp256k1zkp_v0_10_0_conte ARG_CHECK(index < n_keys); /* Compute pubkeys: online_pubkey + tweaked(offline_pubkey + address), and message */ - ret = rustsecp256k1zkp_v0_10_0_whitelist_compute_keys_and_message(ctx, msg32, pubs, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey); + ret = rustsecp256k1zkp_v0_10_1_whitelist_compute_keys_and_message(ctx, msg32, pubs, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey); /* Compute signing key: online_seckey + tweaked(summed_seckey) */ if (ret) { - ret = rustsecp256k1zkp_v0_10_0_whitelist_compute_tweaked_privkey(ctx, &sec, online_seckey, summed_seckey); + ret = rustsecp256k1zkp_v0_10_1_whitelist_compute_tweaked_privkey(ctx, &sec, online_seckey, summed_seckey); } /* Compute nonce and random s-values */ if (ret) { @@ -44,18 +44,18 @@ int rustsecp256k1zkp_v0_10_0_whitelist_sign(const rustsecp256k1zkp_v0_10_0_conte unsigned int count = 0; int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey32, &sec); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey32, &sec); while (1) { size_t i; unsigned char 
nonce32[32]; int done; - ret = rustsecp256k1zkp_v0_10_0_nonce_function_default(nonce32, msg32, seckey32, NULL, NULL, count); + ret = rustsecp256k1zkp_v0_10_1_nonce_function_default(nonce32, msg32, seckey32, NULL, NULL, count); if (!ret) { break; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&non, nonce32, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&non, nonce32, &overflow); memset(nonce32, 0, 32); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&non)) { + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&non)) { count++; continue; } @@ -63,14 +63,14 @@ int rustsecp256k1zkp_v0_10_0_whitelist_sign(const rustsecp256k1zkp_v0_10_0_conte for (i = 0; i < n_keys; i++) { msg32[0] ^= i + 1; msg32[1] ^= (i + 1) / 0x100; - ret = rustsecp256k1zkp_v0_10_0_nonce_function_default(&sig->data[32 * (i + 1)], msg32, seckey32, NULL, NULL, count); + ret = rustsecp256k1zkp_v0_10_1_nonce_function_default(&sig->data[32 * (i + 1)], msg32, seckey32, NULL, NULL, count); if (!ret) { break; } - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s[i], &sig->data[32 * (i + 1)], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s[i], &sig->data[32 * (i + 1)], &overflow); msg32[0] ^= i + 1; msg32[1] ^= (i + 1) / 0x100; - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s[i])) { + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s[i])) { count++; done = 0; break; @@ -85,19 +85,19 @@ int rustsecp256k1zkp_v0_10_0_whitelist_sign(const rustsecp256k1zkp_v0_10_0_conte /* Actually sign */ if (ret) { sig->n_keys = n_keys; - ret = rustsecp256k1zkp_v0_10_0_borromean_sign(&ctx->ecmult_gen_ctx, &sig->data[0], s, pubs, &non, &sec, &n_keys, &index, 1, msg32, 32); + ret = rustsecp256k1zkp_v0_10_1_borromean_sign(&ctx->ecmult_gen_ctx, &sig->data[0], s, pubs, &non, &sec, &n_keys, &index, 1, msg32, 32); /* Signing will change s[index], so update in the sig structure */ - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[32 * (index + 1)], &s[index]); + 
rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[32 * (index + 1)], &s[index]); } - rustsecp256k1zkp_v0_10_0_scalar_clear(&non); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_clear(&non); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); return ret; } -int rustsecp256k1zkp_v0_10_0_whitelist_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, const size_t n_keys, const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey) { - rustsecp256k1zkp_v0_10_0_scalar s[MAX_KEYS]; - rustsecp256k1zkp_v0_10_0_gej pubs[MAX_KEYS]; +int rustsecp256k1zkp_v0_10_1_whitelist_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const size_t n_keys, const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey) { + rustsecp256k1zkp_v0_10_1_scalar s[MAX_KEYS]; + rustsecp256k1zkp_v0_10_1_gej pubs[MAX_KEYS]; unsigned char msg32[32]; size_t i; @@ -112,25 +112,25 @@ int rustsecp256k1zkp_v0_10_0_whitelist_verify(const rustsecp256k1zkp_v0_10_0_con } for (i = 0; i < sig->n_keys; i++) { int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s[i], &sig->data[32 * (i + 1)], &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&s[i])) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s[i], &sig->data[32 * (i + 1)], &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&s[i])) { return 0; } } /* Compute pubkeys: online_pubkey + tweaked(offline_pubkey + address), and message */ - if (!rustsecp256k1zkp_v0_10_0_whitelist_compute_keys_and_message(ctx, msg32, pubs, online_pubkeys, offline_pubkeys, sig->n_keys, sub_pubkey)) { + if (!rustsecp256k1zkp_v0_10_1_whitelist_compute_keys_and_message(ctx, msg32, pubs, online_pubkeys, 
offline_pubkeys, sig->n_keys, sub_pubkey)) { return 0; } /* Do verification */ - return rustsecp256k1zkp_v0_10_0_borromean_verify(NULL, &sig->data[0], s, pubs, &sig->n_keys, 1, msg32, 32); + return rustsecp256k1zkp_v0_10_1_borromean_verify(NULL, &sig->data[0], s, pubs, &sig->n_keys, 1, msg32, 32); } -size_t rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys(const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig) { +size_t rustsecp256k1zkp_v0_10_1_whitelist_signature_n_keys(const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig) { return sig->n_keys; } -int rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_whitelist_signature *sig, const unsigned char *input, size_t input_len) { +int rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_whitelist_signature *sig, const unsigned char *input, size_t input_len) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(input != NULL); @@ -148,7 +148,7 @@ int rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(const rustsecp256k1zkp_v0 return 1; } -int rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, size_t *output_len, const rustsecp256k1zkp_v0_10_0_whitelist_signature *sig) { +int rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, size_t *output_len, const rustsecp256k1zkp_v0_10_1_whitelist_signature *sig) { VERIFY_CHECK(ctx != NULL); ARG_CHECK(output != NULL); ARG_CHECK(output_len != NULL); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/tests_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/tests_impl.h index 2ccd992a..1dc2f3fc 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/tests_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/tests_impl.h @@ -9,77 
+9,77 @@ #include "../../../include/secp256k1_whitelist.h" -static void test_whitelist_end_to_end_internal(const unsigned char *summed_seckey, const unsigned char *online_seckey, const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey, const size_t signer_i, const size_t n_keys) { +static void test_whitelist_end_to_end_internal(const unsigned char *summed_seckey, const unsigned char *online_seckey, const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey, const size_t signer_i, const size_t n_keys) { unsigned char serialized[32 + 4 + 32 * SECP256K1_WHITELIST_MAX_N_KEYS] = {0}; size_t slen = sizeof(serialized); - rustsecp256k1zkp_v0_10_0_whitelist_signature sig; - rustsecp256k1zkp_v0_10_0_whitelist_signature sig1; + rustsecp256k1zkp_v0_10_1_whitelist_signature sig; + rustsecp256k1zkp_v0_10_1_whitelist_signature sig1; - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_sign(CTX, &sig, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey, online_seckey, summed_seckey, signer_i)); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(CTX, &sig, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_sign(CTX, &sig, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey, online_seckey, summed_seckey, signer_i)); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(CTX, &sig, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey) == 1); /* Check that exchanging keys causes a failure */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(CTX, &sig, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(CTX, &sig, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); /* Serialization round trip */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize(CTX, serialized, &slen, 
&sig) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize(CTX, serialized, &slen, &sig) == 1); CHECK(slen == 33 + 32 * n_keys); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig1, serialized, slen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig1, serialized, slen) == 1); /* (Check various bad-length conditions) */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig1, serialized, slen + 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig1, serialized, slen + 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig1, serialized, slen - 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig1, serialized, 0) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(CTX, &sig1, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(CTX, &sig1, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig1, serialized, slen + 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig1, serialized, slen + 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig1, serialized, slen - 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig1, serialized, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(CTX, &sig1, online_pubkeys, offline_pubkeys, n_keys, sub_pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(CTX, &sig1, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); /* Test n_keys */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys(&sig) == n_keys); - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_n_keys(&sig1) == n_keys); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_n_keys(&sig) == n_keys); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_n_keys(&sig1) == n_keys); /* Test 
bad number of keys in signature */ sig.n_keys = n_keys + 1; - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_verify(CTX, &sig, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_verify(CTX, &sig, offline_pubkeys, online_pubkeys, n_keys, sub_pubkey) != 1); sig.n_keys = n_keys; } static void test_whitelist_end_to_end(const size_t n_keys, int test_all_keys) { unsigned char **online_seckey = (unsigned char **) malloc(n_keys * sizeof(*online_seckey)); unsigned char **summed_seckey = (unsigned char **) malloc(n_keys * sizeof(*summed_seckey)); - rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys = (rustsecp256k1zkp_v0_10_0_pubkey *) malloc(n_keys * sizeof(*online_pubkeys)); - rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys = (rustsecp256k1zkp_v0_10_0_pubkey *) malloc(n_keys * sizeof(*offline_pubkeys)); + rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys = (rustsecp256k1zkp_v0_10_1_pubkey *) malloc(n_keys * sizeof(*online_pubkeys)); + rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys = (rustsecp256k1zkp_v0_10_1_pubkey *) malloc(n_keys * sizeof(*offline_pubkeys)); - rustsecp256k1zkp_v0_10_0_scalar ssub; + rustsecp256k1zkp_v0_10_1_scalar ssub; unsigned char csub[32]; - rustsecp256k1zkp_v0_10_0_pubkey sub_pubkey; + rustsecp256k1zkp_v0_10_1_pubkey sub_pubkey; /* Generate random keys */ size_t i; /* Start with subkey */ random_scalar_order_test(&ssub); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(csub, &ssub); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, csub) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &sub_pubkey, csub) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(csub, &ssub); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, csub) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &sub_pubkey, csub) == 1); /* Then offline and online whitelist keys */ for (i = 0; i < n_keys; i++) { - rustsecp256k1zkp_v0_10_0_scalar son, soff; + rustsecp256k1zkp_v0_10_1_scalar son, soff; online_seckey[i] = 
(unsigned char *) malloc(32); summed_seckey[i] = (unsigned char *) malloc(32); /* Create two keys */ random_scalar_order_test(&son); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(online_seckey[i], &son); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, online_seckey[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &online_pubkeys[i], online_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(online_seckey[i], &son); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, online_seckey[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &online_pubkeys[i], online_seckey[i]) == 1); random_scalar_order_test(&soff); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(summed_seckey[i], &soff); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, summed_seckey[i]) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &offline_pubkeys[i], summed_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(summed_seckey[i], &soff); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, summed_seckey[i]) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &offline_pubkeys[i], summed_seckey[i]) == 1); /* Make summed_seckey correspond to the sum of offline_pubkey and sub_pubkey */ - rustsecp256k1zkp_v0_10_0_scalar_add(&soff, &soff, &ssub); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(summed_seckey[i], &soff); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, summed_seckey[i]) == 1); + rustsecp256k1zkp_v0_10_1_scalar_add(&soff, &soff, &ssub); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(summed_seckey[i], &soff); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, summed_seckey[i]) == 1); } /* Sign/verify with each one */ @@ -88,7 +88,7 @@ static void test_whitelist_end_to_end(const size_t n_keys, int test_all_keys) { test_whitelist_end_to_end_internal(summed_seckey[i], online_seckey[i], online_pubkeys, offline_pubkeys, &sub_pubkey, i, n_keys); } } else { - uint32_t rand_idx = rustsecp256k1zkp_v0_10_0_testrand_int(n_keys-1); + 
uint32_t rand_idx = rustsecp256k1zkp_v0_10_1_testrand_int(n_keys-1); test_whitelist_end_to_end_internal(summed_seckey[0], online_seckey[0], online_pubkeys, offline_pubkeys, &sub_pubkey, 0, n_keys); test_whitelist_end_to_end_internal(summed_seckey[rand_idx], online_seckey[rand_idx], online_pubkeys, offline_pubkeys, &sub_pubkey, rand_idx, n_keys); test_whitelist_end_to_end_internal(summed_seckey[n_keys-1], online_seckey[n_keys-1], online_pubkeys, offline_pubkeys, &sub_pubkey, n_keys-1, n_keys); @@ -105,7 +105,7 @@ static void test_whitelist_end_to_end(const size_t n_keys, int test_all_keys) { } static void test_whitelist_bad_parse(void) { - rustsecp256k1zkp_v0_10_0_whitelist_signature sig; + rustsecp256k1zkp_v0_10_1_whitelist_signature sig; const unsigned char serialized0[] = { 1+32*(0+1) }; const unsigned char serialized1[] = { @@ -124,11 +124,11 @@ static void test_whitelist_bad_parse(void) { }; /* Empty input */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig, serialized0, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig, serialized0, 0) == 0); /* Misses one byte of e0 */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig, serialized1, sizeof(serialized1)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig, serialized1, sizeof(serialized1)) == 0); /* Enough bytes for e0, but there is no s value */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, &sig, serialized2, sizeof(serialized2)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig, serialized2, sizeof(serialized2)) == 0); } static void test_whitelist_bad_serialize(void) { @@ -140,12 +140,12 @@ static void test_whitelist_bad_serialize(void) { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 }; size_t serialized_len; - rustsecp256k1zkp_v0_10_0_whitelist_signature sig; + rustsecp256k1zkp_v0_10_1_whitelist_signature sig; - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_parse(CTX, 
&sig, serialized, sizeof(serialized)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_parse(CTX, &sig, serialized, sizeof(serialized)) == 1); serialized_len = sizeof(serialized) - 1; /* Output buffer is one byte too short */ - CHECK(rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize(CTX, serialized, &serialized_len, &sig) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize(CTX, serialized, &serialized_len, &sig) == 0); } static void run_whitelist_tests(void) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/whitelist_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/whitelist_impl.h index d2bab247..0f09d1c8 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/whitelist_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/modules/whitelist/whitelist_impl.h @@ -7,75 +7,75 @@ #ifndef SECP256K1_WHITELIST_IMPL_H #define SECP256K1_WHITELIST_IMPL_H -static int rustsecp256k1zkp_v0_10_0_whitelist_hash_pubkey(rustsecp256k1zkp_v0_10_0_scalar* output, rustsecp256k1zkp_v0_10_0_gej* pubkey) { +static int rustsecp256k1zkp_v0_10_1_whitelist_hash_pubkey(rustsecp256k1zkp_v0_10_1_scalar* output, rustsecp256k1zkp_v0_10_1_gej* pubkey) { unsigned char h[32]; unsigned char c[33]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; int overflow = 0; size_t size = 33; - rustsecp256k1zkp_v0_10_0_ge ge; + rustsecp256k1zkp_v0_10_1_ge ge; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&ge, pubkey); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&ge, pubkey); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&ge, c, &size, SECP256K1_EC_COMPRESSED)) { + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&ge, c, &size, SECP256K1_EC_COMPRESSED)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, c, size); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, h); + 
rustsecp256k1zkp_v0_10_1_sha256_write(&sha, c, size); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, h); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(output, h, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(output)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(output, h, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(output)) { /* This return path is mathematically impossible to hit */ - rustsecp256k1zkp_v0_10_0_scalar_clear(output); + rustsecp256k1zkp_v0_10_1_scalar_clear(output); return 0; } return 1; } -static int rustsecp256k1zkp_v0_10_0_whitelist_tweak_pubkey(rustsecp256k1zkp_v0_10_0_gej* pub_tweaked) { - rustsecp256k1zkp_v0_10_0_scalar tweak; - rustsecp256k1zkp_v0_10_0_scalar zero; +static int rustsecp256k1zkp_v0_10_1_whitelist_tweak_pubkey(rustsecp256k1zkp_v0_10_1_gej* pub_tweaked) { + rustsecp256k1zkp_v0_10_1_scalar tweak; + rustsecp256k1zkp_v0_10_1_scalar zero; int ret; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&zero, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&zero, 0); - ret = rustsecp256k1zkp_v0_10_0_whitelist_hash_pubkey(&tweak, pub_tweaked); + ret = rustsecp256k1zkp_v0_10_1_whitelist_hash_pubkey(&tweak, pub_tweaked); if (ret) { - rustsecp256k1zkp_v0_10_0_ecmult(pub_tweaked, pub_tweaked, &tweak, &zero); + rustsecp256k1zkp_v0_10_1_ecmult(pub_tweaked, pub_tweaked, &tweak, &zero); } return ret; } -static int rustsecp256k1zkp_v0_10_0_whitelist_compute_tweaked_privkey(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar* skey, const unsigned char *online_key, const unsigned char *summed_key) { - rustsecp256k1zkp_v0_10_0_scalar tweak; +static int rustsecp256k1zkp_v0_10_1_whitelist_compute_tweaked_privkey(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar* skey, const unsigned char *online_key, const unsigned char *summed_key) { + rustsecp256k1zkp_v0_10_1_scalar tweak; int ret = 1; int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(skey, summed_key, 
&overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(skey)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(skey, summed_key, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(skey)) { ret = 0; } if (ret) { - rustsecp256k1zkp_v0_10_0_gej pkeyj; - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &pkeyj, skey); - ret = rustsecp256k1zkp_v0_10_0_whitelist_hash_pubkey(&tweak, &pkeyj); + rustsecp256k1zkp_v0_10_1_gej pkeyj; + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &pkeyj, skey); + ret = rustsecp256k1zkp_v0_10_1_whitelist_hash_pubkey(&tweak, &pkeyj); } if (ret) { - rustsecp256k1zkp_v0_10_0_scalar sonline; - rustsecp256k1zkp_v0_10_0_scalar_mul(skey, skey, &tweak); + rustsecp256k1zkp_v0_10_1_scalar sonline; + rustsecp256k1zkp_v0_10_1_scalar_mul(skey, skey, &tweak); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sonline, online_key, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(&sonline)) { + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sonline, online_key, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(&sonline)) { ret = 0; } - rustsecp256k1zkp_v0_10_0_scalar_add(skey, skey, &sonline); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sonline); - rustsecp256k1zkp_v0_10_0_scalar_clear(&tweak); + rustsecp256k1zkp_v0_10_1_scalar_add(skey, skey, &sonline); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sonline); + rustsecp256k1zkp_v0_10_1_scalar_clear(&tweak); } if (!ret) { - rustsecp256k1zkp_v0_10_0_scalar_clear(skey); + rustsecp256k1zkp_v0_10_1_scalar_clear(skey); } return ret; } @@ -83,45 +83,45 @@ static int rustsecp256k1zkp_v0_10_0_whitelist_compute_tweaked_privkey(const rust /* Takes a list of pubkeys and combines them to form the public keys needed * for the ring signature; also produce a commitment to every one that will * be our "message". 
*/ -static int rustsecp256k1zkp_v0_10_0_whitelist_compute_keys_and_message(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *msg32, rustsecp256k1zkp_v0_10_0_gej *keys, const rustsecp256k1zkp_v0_10_0_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_0_pubkey *offline_pubkeys, const int n_keys, const rustsecp256k1zkp_v0_10_0_pubkey *sub_pubkey) { +static int rustsecp256k1zkp_v0_10_1_whitelist_compute_keys_and_message(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *msg32, rustsecp256k1zkp_v0_10_1_gej *keys, const rustsecp256k1zkp_v0_10_1_pubkey *online_pubkeys, const rustsecp256k1zkp_v0_10_1_pubkey *offline_pubkeys, const int n_keys, const rustsecp256k1zkp_v0_10_1_pubkey *sub_pubkey) { unsigned char c[33]; size_t size = 33; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; int i; - rustsecp256k1zkp_v0_10_0_ge subkey_ge; + rustsecp256k1zkp_v0_10_1_ge subkey_ge; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &subkey_ge, sub_pubkey); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &subkey_ge, sub_pubkey); /* commit to sub-key */ - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&subkey_ge, c, &size, SECP256K1_EC_COMPRESSED)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&subkey_ge, c, &size, SECP256K1_EC_COMPRESSED)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, c, size); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, c, size); for (i = 0; i < n_keys; i++) { - rustsecp256k1zkp_v0_10_0_ge offline_ge; - rustsecp256k1zkp_v0_10_0_ge online_ge; - rustsecp256k1zkp_v0_10_0_gej tweaked_gej; + rustsecp256k1zkp_v0_10_1_ge offline_ge; + rustsecp256k1zkp_v0_10_1_ge online_ge; + rustsecp256k1zkp_v0_10_1_gej tweaked_gej; /* commit to fixed keys */ - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &offline_ge, &offline_pubkeys[i]); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&offline_ge, c, &size, 
SECP256K1_EC_COMPRESSED)) { + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &offline_ge, &offline_pubkeys[i]); + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&offline_ge, c, &size, SECP256K1_EC_COMPRESSED)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, c, size); - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &online_ge, &online_pubkeys[i]); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&online_ge, c, &size, SECP256K1_EC_COMPRESSED)) { + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, c, size); + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &online_ge, &online_pubkeys[i]); + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&online_ge, c, &size, SECP256K1_EC_COMPRESSED)) { return 0; } - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, c, size); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, c, size); /* compute tweaked keys */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&tweaked_gej, &offline_ge); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&tweaked_gej, &tweaked_gej, &subkey_ge, NULL); - rustsecp256k1zkp_v0_10_0_whitelist_tweak_pubkey(&tweaked_gej); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&keys[i], &tweaked_gej, &online_ge, NULL); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&tweaked_gej, &offline_ge); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&tweaked_gej, &tweaked_gej, &subkey_ge, NULL); + rustsecp256k1zkp_v0_10_1_whitelist_tweak_pubkey(&tweaked_gej); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&keys[i], &tweaked_gej, &online_ge, NULL); } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, msg32); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, msg32); return 1; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult.c b/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult.c index a378d14e..f23fec87 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult.c @@ -18,11 +18,11 @@ #include "ecmult.h" #include "ecmult_compute_table_impl.h" -static void print_table(FILE *fp, 
const char *name, int window_g, const rustsecp256k1zkp_v0_10_0_ge_storage* table) { +static void print_table(FILE *fp, const char *name, int window_g, const rustsecp256k1zkp_v0_10_1_ge_storage* table) { int j; int i; - fprintf(fp, "const rustsecp256k1zkp_v0_10_0_ge_storage %s[ECMULT_TABLE_SIZE(WINDOW_G)] = {\n", name); + fprintf(fp, "const rustsecp256k1zkp_v0_10_1_ge_storage %s[ECMULT_TABLE_SIZE(WINDOW_G)] = {\n", name); fprintf(fp, " S(%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32 ",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32",%"PRIx32")\n", SECP256K1_GE_STORAGE_CONST_GET(table[0])); @@ -41,13 +41,13 @@ static void print_table(FILE *fp, const char *name, int window_g, const rustsecp } static void print_two_tables(FILE *fp, int window_g) { - rustsecp256k1zkp_v0_10_0_ge_storage* table = malloc(ECMULT_TABLE_SIZE(window_g) * sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)); - rustsecp256k1zkp_v0_10_0_ge_storage* table_128 = malloc(ECMULT_TABLE_SIZE(window_g) * sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)); + rustsecp256k1zkp_v0_10_1_ge_storage* table = malloc(ECMULT_TABLE_SIZE(window_g) * sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)); + rustsecp256k1zkp_v0_10_1_ge_storage* table_128 = malloc(ECMULT_TABLE_SIZE(window_g) * sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)); - rustsecp256k1zkp_v0_10_0_ecmult_compute_two_tables(table, table_128, window_g, &rustsecp256k1zkp_v0_10_0_ge_const_g); + rustsecp256k1zkp_v0_10_1_ecmult_compute_two_tables(table, table_128, window_g, &rustsecp256k1zkp_v0_10_1_ge_const_g); - print_table(fp, "rustsecp256k1zkp_v0_10_0_pre_g", window_g, table); - print_table(fp, "rustsecp256k1zkp_v0_10_0_pre_g_128", window_g, table_128); + print_table(fp, "rustsecp256k1zkp_v0_10_1_pre_g", window_g, table); + print_table(fp, "rustsecp256k1zkp_v0_10_1_pre_g_128", window_g, table_128); free(table); free(table_128); @@ -66,8 +66,8 @@ int main(void) { } fprintf(fp, "/* This file was automatically generated by 
precompute_ecmult. */\n"); - fprintf(fp, "/* This file contains an array rustsecp256k1zkp_v0_10_0_pre_g with odd multiples of the base point G and\n"); - fprintf(fp, " * an array rustsecp256k1zkp_v0_10_0_pre_g_128 with odd multiples of 2^128*G for accelerating the computation of a*P + b*G.\n"); + fprintf(fp, "/* This file contains an array rustsecp256k1zkp_v0_10_1_pre_g with odd multiples of the base point G and\n"); + fprintf(fp, " * an array rustsecp256k1zkp_v0_10_1_pre_g_128 with odd multiples of 2^128*G for accelerating the computation of a*P + b*G.\n"); fprintf(fp, " */\n"); fprintf(fp, "#include \"group.h\"\n"); fprintf(fp, "#include \"ecmult.h\"\n"); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult_gen.c b/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult_gen.c index b45d53ab..759654a0 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult_gen.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precompute_ecmult_gen.c @@ -40,15 +40,15 @@ int main(int argc, char **argv) { fprintf(fp, "# error Cannot compile precomputed_ecmult_gen.c in exhaustive test mode\n"); fprintf(fp, "#endif /* EXHAUSTIVE_TEST_ORDER */\n"); fprintf(fp, "#define S(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) SECP256K1_GE_STORAGE_CONST(0x##a##u,0x##b##u,0x##c##u,0x##d##u,0x##e##u,0x##f##u,0x##g##u,0x##h##u,0x##i##u,0x##j##u,0x##k##u,0x##l##u,0x##m##u,0x##n##u,0x##o##u,0x##p##u)\n"); - fprintf(fp, "const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)] = {\n"); + fprintf(fp, "const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)] = {\n"); for (bits = 2; bits <= 8; bits *= 2) { int g = ECMULT_GEN_PREC_G(bits); int n = ECMULT_GEN_PREC_N(bits); int inner, outer; - rustsecp256k1zkp_v0_10_0_ge_storage* table = 
checked_malloc(&default_error_callback, n * g * sizeof(rustsecp256k1zkp_v0_10_0_ge_storage)); - rustsecp256k1zkp_v0_10_0_ecmult_gen_compute_table(table, &rustsecp256k1zkp_v0_10_0_ge_const_g, bits); + rustsecp256k1zkp_v0_10_1_ge_storage* table = checked_malloc(&default_error_callback, n * g * sizeof(rustsecp256k1zkp_v0_10_1_ge_storage)); + rustsecp256k1zkp_v0_10_1_ecmult_gen_compute_table(table, &rustsecp256k1zkp_v0_10_1_ge_const_g, bits); fprintf(fp, "#if ECMULT_GEN_PREC_BITS == %d\n", bits); for(outer = 0; outer != n; outer++) { diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.c b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.c index 8c70920e..9b729a94 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.c @@ -1,6 +1,6 @@ /* This file was automatically generated by precompute_ecmult. */ -/* This file contains an array rustsecp256k1zkp_v0_10_0_pre_g with odd multiples of the base point G and - * an array rustsecp256k1zkp_v0_10_0_pre_g_128 with odd multiples of 2^128*G for accelerating the computation of a*P + b*G. +/* This file contains an array rustsecp256k1zkp_v0_10_1_pre_g with odd multiples of the base point G and + * an array rustsecp256k1zkp_v0_10_1_pre_g_128 with odd multiples of 2^128*G for accelerating the computation of a*P + b*G. 
*/ #include "group.h" #include "ecmult.h" @@ -13,7 +13,7 @@ # error Cannot compile precomputed_ecmult.c in exhaustive test mode #endif /* EXHAUSTIVE_TEST_ORDER */ #define WINDOW_G ECMULT_WINDOW_SIZE -const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)] = { +const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)] = { S(79be667e,f9dcbbac,55a06295,ce870b07,29bfcdb,2dce28d9,59f2815b,16f81798,483ada77,26a3c465,5da4fbfc,e1108a8,fd17b448,a6855419,9c47d08f,fb10d4b8) #if WINDOW_G > 2 ,S(f9308a01,9258c310,49344f85,f89d5229,b531c845,836f99b0,8601f113,bce036f9,388f7b0f,632de814,fe337e6,2a37f356,6500a999,34c2231b,6cb9fd75,84b8e672) @@ -8233,7 +8233,7 @@ const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g[ECMULT_ ,S(1e70619c,381a6adc,e5d925e0,c9c74f97,3c02ff64,ff2662d7,34efc485,d2bce895,c923f771,f543ffed,42935c28,8474aaaf,80a46ad4,3c579ce0,bb5e663d,668b24b3) #endif }; -const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)] = { +const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)] = { S(8f68b9d2,f63b5f33,9239c1ad,981f162e,e88c5678,723ea335,1b7b444c,9ec4c0da,662a9f2d,ba063986,de1d90c2,b6be215d,bbea2cfe,95510bfd,f23cbf79,501fff82) #if WINDOW_G > 2 ,S(38381dbe,2e509f22,8ba93363,f2451f08,fd845cb3,51d954be,18e2b8ed,d23809fa,e4a32d0a,fb917dc,b09405a5,520eb1cc,3681fccb,32d8f24d,bd707518,331fed52) diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.h b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.h index 05e0f6b2..fa67773b 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult.h @@ -23,12 +23,12 @@ extern "C" { # else # error No known generator for the specified exhaustive test group order. 
# endif -static rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)]; -static rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)]; +static rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)]; +static rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)]; #else /* !defined(EXHAUSTIVE_TEST_ORDER) */ # define WINDOW_G ECMULT_WINDOW_SIZE -extern const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)]; -extern const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)]; +extern const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g[ECMULT_TABLE_SIZE(WINDOW_G)]; +extern const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_pre_g_128[ECMULT_TABLE_SIZE(WINDOW_G)]; #endif /* defined(EXHAUSTIVE_TEST_ORDER) */ #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.c b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.c index 5442475f..0c34ab97 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.c @@ -7,7 +7,7 @@ # error Cannot compile precomputed_ecmult_gen.c in exhaustive test mode #endif /* EXHAUSTIVE_TEST_ORDER */ #define S(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) SECP256K1_GE_STORAGE_CONST(0x##a##u,0x##b##u,0x##c##u,0x##d##u,0x##e##u,0x##f##u,0x##g##u,0x##h##u,0x##i##u,0x##j##u,0x##k##u,0x##l##u,0x##m##u,0x##n##u,0x##o##u,0x##p##u) -const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)] = { +const rustsecp256k1zkp_v0_10_1_ge_storage 
rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)] = { #if ECMULT_GEN_PREC_BITS == 2 {S(3a9ed373,6eed3eec,9aeb5ac0,21b54652,56817b1f,8de6cd0,fbcee548,ba044bb5,7bcc5928,bdc9c023,dfc663b8,9e4f6969,ab751798,8e600ec1,d242010c,45c7974a), S(e44d7675,c3cb2857,4e133c01,a74f4afc,5ce684f8,4a789711,603f7c4f,50abef58,25bcb62f,fe2e2ce2,196ad86c,a006e20,8c64d21b,b25320a3,b5574b9c,1e1bfb4b), diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.h b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.h index 0d965a8b..a50aec94 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/precomputed_ecmult_gen.h @@ -14,9 +14,9 @@ extern "C" { #include "group.h" #include "ecmult_gen.h" #ifdef EXHAUSTIVE_TEST_ORDER -static rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)]; +static rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)]; #else -extern const rustsecp256k1zkp_v0_10_0_ge_storage rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)]; +extern const rustsecp256k1zkp_v0_10_1_ge_storage rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[ECMULT_GEN_PREC_N(ECMULT_GEN_PREC_BITS)][ECMULT_GEN_PREC_G(ECMULT_GEN_PREC_BITS)]; #endif /* defined(EXHAUSTIVE_TEST_ORDER) */ #ifdef __cplusplus diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar.h index 29aa8e47..55ce78a2 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar.h @@ -20,92 +20,92 @@ #endif /** Clear a scalar to prevent the leak of sensitive data. 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_clear(rustsecp256k1zkp_v0_10_0_scalar *r); +static void rustsecp256k1zkp_v0_10_1_scalar_clear(rustsecp256k1zkp_v0_10_1_scalar *r); /** Access bits from a scalar. All requested bits must belong to the same 32-bit limb. */ -static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count); +static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count); /** Access bits from a scalar. Not constant time in offset and count. */ -static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count); +static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count); /** Set a scalar from a big endian byte array. The scalar will be reduced modulo group order `n`. * In: bin: pointer to a 32-byte array. * Out: r: scalar to be set. * overflow: non-zero if the scalar was bigger or equal to `n` before reduction, zero otherwise (can be NULL). */ -static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *bin, int *overflow); +static void rustsecp256k1zkp_v0_10_1_scalar_set_b32(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *bin, int *overflow); /** Set a scalar from a big endian byte array and returns 1 if it is a valid * seckey and 0 otherwise. */ -static int rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *bin); +static int rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *bin); /** Set a scalar to an unsigned integer. 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int v); +static void rustsecp256k1zkp_v0_10_1_scalar_set_int(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int v); /** Set a scalar to an unsigned 64-bit integer */ -static void rustsecp256k1zkp_v0_10_0_scalar_set_u64(rustsecp256k1zkp_v0_10_0_scalar *r, uint64_t v); +static void rustsecp256k1zkp_v0_10_1_scalar_set_u64(rustsecp256k1zkp_v0_10_1_scalar *r, uint64_t v); /** Convert a scalar to a byte array. */ -static void rustsecp256k1zkp_v0_10_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_0_scalar* a); +static void rustsecp256k1zkp_v0_10_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_1_scalar* a); /** Add two scalars together (modulo the group order). Returns whether it overflowed. */ -static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b); +static int rustsecp256k1zkp_v0_10_1_scalar_add(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b); /** Conditionally add a power of two to a scalar. The result is not allowed to overflow. */ -static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int bit, int flag); +static void rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int bit, int flag); /** Multiply two scalars (modulo the group order). */ -static void rustsecp256k1zkp_v0_10_0_scalar_mul(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b); +static void rustsecp256k1zkp_v0_10_1_scalar_mul(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b); /** Compute the square of a scalar (modulo the group order). 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_sqr(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_scalar_sqr(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a); /** Compute the inverse of a scalar (modulo the group order). */ -static void rustsecp256k1zkp_v0_10_0_scalar_inverse(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_scalar_inverse(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a); /** Compute the inverse of a scalar (modulo the group order), without constant-time guarantee. */ -static void rustsecp256k1zkp_v0_10_0_scalar_inverse_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_scalar_inverse_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a); /** Compute the complement of a scalar (modulo the group order). */ -static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_scalar_negate(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a); /** Multiply a scalar with the multiplicative inverse of 2. */ -static void rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a); +static void rustsecp256k1zkp_v0_10_1_scalar_half(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a); /** Check whether a scalar equals zero. */ -static int rustsecp256k1zkp_v0_10_0_scalar_is_zero(const rustsecp256k1zkp_v0_10_0_scalar *a); +static int rustsecp256k1zkp_v0_10_1_scalar_is_zero(const rustsecp256k1zkp_v0_10_1_scalar *a); /** Check whether a scalar equals one. 
*/ -static int rustsecp256k1zkp_v0_10_0_scalar_is_one(const rustsecp256k1zkp_v0_10_0_scalar *a); +static int rustsecp256k1zkp_v0_10_1_scalar_is_one(const rustsecp256k1zkp_v0_10_1_scalar *a); /** Check whether a scalar, considered as an nonnegative integer, is even. */ -static int rustsecp256k1zkp_v0_10_0_scalar_is_even(const rustsecp256k1zkp_v0_10_0_scalar *a); +static int rustsecp256k1zkp_v0_10_1_scalar_is_even(const rustsecp256k1zkp_v0_10_1_scalar *a); /** Check whether a scalar is higher than the group order divided by 2. */ -static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_0_scalar *a); +static int rustsecp256k1zkp_v0_10_1_scalar_is_high(const rustsecp256k1zkp_v0_10_1_scalar *a); /** Conditionally negate a number, in constant time. * Returns -1 if the number was negated, 1 otherwise */ -static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_scalar *a, int flag); +static int rustsecp256k1zkp_v0_10_1_scalar_cond_negate(rustsecp256k1zkp_v0_10_1_scalar *a, int flag); /** Compare two scalars. */ -static int rustsecp256k1zkp_v0_10_0_scalar_eq(const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b); +static int rustsecp256k1zkp_v0_10_1_scalar_eq(const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b); /** Find r1 and r2 such that r1+r2*2^128 = k. */ -static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_scalar *r1, rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *k); +static void rustsecp256k1zkp_v0_10_1_scalar_split_128(rustsecp256k1zkp_v0_10_1_scalar *r1, rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *k); /** Find r1 and r2 such that r1+r2*lambda = k, where r1 and r2 or their - * negations are maximum 128 bits long (see rustsecp256k1zkp_v0_10_0_ge_mul_lambda). It is + * negations are maximum 128 bits long (see rustsecp256k1zkp_v0_10_1_ge_mul_lambda). 
It is * required that r1, r2, and k all point to different objects. */ -static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT k); +static void rustsecp256k1zkp_v0_10_1_scalar_split_lambda(rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT k); /** Multiply a and b (without taking the modulus!), divide by 2**shift, and round to the nearest integer. Shift must be at least 256. */ -static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b, unsigned int shift); +static void rustsecp256k1zkp_v0_10_1_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b, unsigned int shift); /** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. Both *r and *a must be initialized.*/ -static void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, int flag); +static void rustsecp256k1zkp_v0_10_1_scalar_cmov(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, int flag); /** Check invariants on a scalar (no-op unless VERIFY is enabled). 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_verify(const rustsecp256k1zkp_v0_10_0_scalar *r); -#define SECP256K1_SCALAR_VERIFY(r) rustsecp256k1zkp_v0_10_0_scalar_verify(r) +static void rustsecp256k1zkp_v0_10_1_scalar_verify(const rustsecp256k1zkp_v0_10_1_scalar *r); +#define SECP256K1_SCALAR_VERIFY(r) rustsecp256k1zkp_v0_10_1_scalar_verify(r) #endif /* SECP256K1_SCALAR_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64.h index 80166f30..91eb6121 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64.h @@ -12,7 +12,7 @@ /** A scalar modulo the group order of the secp256k1 curve. */ typedef struct { uint64_t d[4]; -} rustsecp256k1zkp_v0_10_0_scalar; +} rustsecp256k1zkp_v0_10_1_scalar; #define SECP256K1_SCALAR_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{((uint64_t)(d1)) << 32 | (d0), ((uint64_t)(d3)) << 32 | (d2), ((uint64_t)(d5)) << 32 | (d4), ((uint64_t)(d7)) << 32 | (d6)}} diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64_impl.h index 8ca2b962..10ccbbbe 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_4x64_impl.h @@ -30,14 +30,14 @@ #define SECP256K1_N_H_2 ((uint64_t)0xFFFFFFFFFFFFFFFFULL) #define SECP256K1_N_H_3 ((uint64_t)0x7FFFFFFFFFFFFFFFULL) -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_clear(rustsecp256k1zkp_v0_10_0_scalar *r) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_clear(rustsecp256k1zkp_v0_10_1_scalar *r) { r->d[0] = 0; r->d[1] = 0; r->d[2] = 0; r->d[3] = 0; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_int(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int v) { r->d[0] = v; r->d[1] = 0; 
r->d[2] = 0; @@ -46,34 +46,34 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256 SECP256K1_SCALAR_VERIFY(r); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_u64(rustsecp256k1zkp_v0_10_0_scalar *r, uint64_t v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_u64(rustsecp256k1zkp_v0_10_1_scalar *r, uint64_t v) { r->d[0] = v; r->d[1] = 0; r->d[2] = 0; r->d[3] = 0; } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); VERIFY_CHECK((offset + count - 1) >> 6 == offset >> 6); return (a->d[offset >> 6] >> (offset & 0x3F)) & ((((uint64_t)1) << count) - 1); } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); VERIFY_CHECK(count < 32); VERIFY_CHECK(offset + count <= 256); if ((offset + count - 1) >> 6 == offset >> 6) { - return rustsecp256k1zkp_v0_10_0_scalar_get_bits(a, offset, count); + return rustsecp256k1zkp_v0_10_1_scalar_get_bits(a, offset, count); } else { VERIFY_CHECK((offset >> 6) + 1 < 4); return ((a->d[offset >> 6] >> (offset & 0x3F)) | (a->d[(offset >> 6) + 1] << (64 - (offset & 0x3F)))) & ((((uint64_t)1) << count) - 1); } } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_check_overflow(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_check_overflow(const rustsecp256k1zkp_v0_10_1_scalar *a) { int yes = 0; int no = 0; no 
|= (a->d[3] < SECP256K1_N_3); /* No need for a > check. */ @@ -85,83 +85,83 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_check_overflow(const return yes; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_reduce(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int overflow) { - rustsecp256k1zkp_v0_10_0_uint128 t; +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_reduce(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int overflow) { + rustsecp256k1zkp_v0_10_1_uint128 t; VERIFY_CHECK(overflow <= 1); - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, r->d[0]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, overflow * SECP256K1_N_C_0); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, overflow * SECP256K1_N_C_1); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, overflow * SECP256K1_N_C_2); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[3]); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, r->d[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, overflow * SECP256K1_N_C_0); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, overflow * SECP256K1_N_C_1); + r->d[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, overflow * SECP256K1_N_C_2); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); 
rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[3]); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); SECP256K1_SCALAR_VERIFY(r); return overflow; } -static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static int rustsecp256k1zkp_v0_10_1_scalar_add(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { int overflow; - rustsecp256k1zkp_v0_10_0_uint128 t; + rustsecp256k1zkp_v0_10_1_uint128 t; SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, a->d[0]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, b->d[0]); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, a->d[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, b->d[1]); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, a->d[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, b->d[2]); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, a->d[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, b->d[3]); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - overflow = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) + rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r); + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, a->d[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, b->d[0]); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, a->d[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, b->d[1]); + r->d[1] = 
rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, a->d[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, b->d[2]); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, a->d[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, b->d[3]); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + overflow = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) + rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r); VERIFY_CHECK(overflow == 0 || overflow == 1); - rustsecp256k1zkp_v0_10_0_scalar_reduce(r, overflow); + rustsecp256k1zkp_v0_10_1_scalar_reduce(r, overflow); SECP256K1_SCALAR_VERIFY(r); return overflow; } -static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int bit, int flag) { - rustsecp256k1zkp_v0_10_0_uint128 t; +static void rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int bit, int flag) { + rustsecp256k1zkp_v0_10_1_uint128 t; volatile int vflag = flag; SECP256K1_SCALAR_VERIFY(r); VERIFY_CHECK(bit < 256); bit += ((uint32_t) vflag - 1) & 0x100; /* forcing (bit >> 6) > 3 makes this a noop */ - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, r->d[0]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 0)) << (bit & 0x3F)); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 1)) << (bit & 0x3F)); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 2)) << (bit & 0x3F)); - r->d[2] = 
rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 3)) << (bit & 0x3F)); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, r->d[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 0)) << (bit & 0x3F)); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 1)) << (bit & 0x3F)); + r->d[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 2)) << (bit & 0x3F)); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ((uint64_t)((bit >> 6) == 3)) << (bit & 0x3F)); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); SECP256K1_SCALAR_VERIFY(r); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_u128_hi_u64(&t) == 0); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_u128_hi_u64(&t) == 0); } -static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *b32, int *overflow) { +static void rustsecp256k1zkp_v0_10_1_scalar_set_b32(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *b32, int *overflow) { int over; - r->d[0] = rustsecp256k1zkp_v0_10_0_read_be64(&b32[24]); - r->d[1] = rustsecp256k1zkp_v0_10_0_read_be64(&b32[16]); - r->d[2] = rustsecp256k1zkp_v0_10_0_read_be64(&b32[8]); - r->d[3] = rustsecp256k1zkp_v0_10_0_read_be64(&b32[0]); - over = rustsecp256k1zkp_v0_10_0_scalar_reduce(r, 
rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r)); + r->d[0] = rustsecp256k1zkp_v0_10_1_read_be64(&b32[24]); + r->d[1] = rustsecp256k1zkp_v0_10_1_read_be64(&b32[16]); + r->d[2] = rustsecp256k1zkp_v0_10_1_read_be64(&b32[8]); + r->d[3] = rustsecp256k1zkp_v0_10_1_read_be64(&b32[0]); + over = rustsecp256k1zkp_v0_10_1_scalar_reduce(r, rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r)); if (overflow) { *overflow = over; } @@ -169,43 +169,43 @@ static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_sca SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_0_scalar* a) { +static void rustsecp256k1zkp_v0_10_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_1_scalar* a) { SECP256K1_SCALAR_VERIFY(a); - rustsecp256k1zkp_v0_10_0_write_be64(&bin[0], a->d[3]); - rustsecp256k1zkp_v0_10_0_write_be64(&bin[8], a->d[2]); - rustsecp256k1zkp_v0_10_0_write_be64(&bin[16], a->d[1]); - rustsecp256k1zkp_v0_10_0_write_be64(&bin[24], a->d[0]); + rustsecp256k1zkp_v0_10_1_write_be64(&bin[0], a->d[3]); + rustsecp256k1zkp_v0_10_1_write_be64(&bin[8], a->d[2]); + rustsecp256k1zkp_v0_10_1_write_be64(&bin[16], a->d[1]); + rustsecp256k1zkp_v0_10_1_write_be64(&bin[24], a->d[0]); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_zero(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_zero(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return (a->d[0] | a->d[1] | a->d[2] | a->d[3]) == 0; } -static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { - uint64_t nonzero = 0xFFFFFFFFFFFFFFFFULL * (rustsecp256k1zkp_v0_10_0_scalar_is_zero(a) == 0); - rustsecp256k1zkp_v0_10_0_uint128 t; +static void rustsecp256k1zkp_v0_10_1_scalar_negate(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { + uint64_t 
nonzero = 0xFFFFFFFFFFFFFFFFULL * (rustsecp256k1zkp_v0_10_1_scalar_is_zero(a) == 0); + rustsecp256k1zkp_v0_10_1_uint128 t; SECP256K1_SCALAR_VERIFY(a); - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, ~a->d[0]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_0 + 1); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ~a->d[1]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_1); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ~a->d[2]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_2); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, ~a->d[3]); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_3); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, ~a->d[0]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_0 + 1); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ~a->d[1]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_1); + r->d[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ~a->d[2]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_2); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, ~a->d[3]); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_3); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; SECP256K1_SCALAR_VERIFY(r); } -static void 
rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_half(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { /* Writing `/` for field division and `//` for integer division, we compute * * a/2 = (a - (a&1))/2 + (a&1)/2 @@ -221,38 +221,38 @@ static void rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar * Together they sum to (n-3)//2 + (n+1)//2 = (2n-2)//2 = n - 1, which is less than n. */ uint64_t mask = -(uint64_t)(a->d[0] & 1U); - rustsecp256k1zkp_v0_10_0_uint128 t; + rustsecp256k1zkp_v0_10_1_uint128 t; SECP256K1_SCALAR_VERIFY(a); - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, (a->d[0] >> 1) | (a->d[1] << 63)); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, (SECP256K1_N_H_0 + 1U) & mask); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, (a->d[1] >> 1) | (a->d[2] << 63)); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_H_1 & mask); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, (a->d[2] >> 1) | (a->d[3] << 63)); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_H_2 & mask); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) + (a->d[3] >> 1) + (SECP256K1_N_H_3 & mask); + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, (a->d[0] >> 1) | (a->d[1] << 63)); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, (SECP256K1_N_H_0 + 1U) & mask); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, (a->d[1] >> 1) | (a->d[2] << 63)); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_H_1 & mask); + r->d[1] = 
rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, (a->d[2] >> 1) | (a->d[3] << 63)); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_H_2 & mask); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) + (a->d[3] >> 1) + (SECP256K1_N_H_3 & mask); #ifdef VERIFY /* The line above only computed the bottom 64 bits of r->d[3]; redo the computation * in full 128 bits to make sure the top 64 bits are indeed zero. */ - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, a->d[3] >> 1); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_H_3 & mask); - rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) == 0); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, a->d[3] >> 1); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_H_3 & mask); + rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) == 0); SECP256K1_SCALAR_VERIFY(r); #endif } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_one(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_one(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return ((a->d[0] ^ 1) | a->d[1] | a->d[2] | a->d[3]) == 0; } -static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_0_scalar *a) { +static int rustsecp256k1zkp_v0_10_1_scalar_is_high(const rustsecp256k1zkp_v0_10_1_scalar *a) { int yes = 0; int no = 0; SECP256K1_SCALAR_VERIFY(a); @@ -266,27 +266,27 @@ static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_ return yes; } -static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_scalar *r, int flag) { +static int rustsecp256k1zkp_v0_10_1_scalar_cond_negate(rustsecp256k1zkp_v0_10_1_scalar *r, 
int flag) { /* If we are flag = 0, mask = 00...00 and this is a no-op; - * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1zkp_v0_10_0_scalar_negate */ + * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1zkp_v0_10_1_scalar_negate */ volatile int vflag = flag; uint64_t mask = -vflag; - uint64_t nonzero = (rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) != 0) - 1; - rustsecp256k1zkp_v0_10_0_uint128 t; + uint64_t nonzero = (rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) != 0) - 1; + rustsecp256k1zkp_v0_10_1_uint128 t; SECP256K1_SCALAR_VERIFY(r); - rustsecp256k1zkp_v0_10_0_u128_from_u64(&t, r->d[0] ^ mask); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, (SECP256K1_N_0 + 1) & mask); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[1] ^ mask); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_1 & mask); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[2] ^ mask); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_2 & mask); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_0_u128_rshift(&t, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, r->d[3] ^ mask); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&t, SECP256K1_N_3 & mask); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t) & nonzero; + rustsecp256k1zkp_v0_10_1_u128_from_u64(&t, r->d[0] ^ mask); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, (SECP256K1_N_0 + 1) & mask); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[1] ^ mask); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_1 & mask); + r->d[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; 
rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[2] ^ mask); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_2 & mask); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; rustsecp256k1zkp_v0_10_1_u128_rshift(&t, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, r->d[3] ^ mask); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&t, SECP256K1_N_3 & mask); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t) & nonzero; SECP256K1_SCALAR_VERIFY(r); return 2 * (mask == 0) - 1; @@ -298,10 +298,10 @@ static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_ #define muladd(a,b) { \ uint64_t tl, th; \ { \ - rustsecp256k1zkp_v0_10_0_uint128 t; \ - rustsecp256k1zkp_v0_10_0_u128_mul(&t, a, b); \ - th = rustsecp256k1zkp_v0_10_0_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ - tl = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); \ + rustsecp256k1zkp_v0_10_1_uint128 t; \ + rustsecp256k1zkp_v0_10_1_u128_mul(&t, a, b); \ + th = rustsecp256k1zkp_v0_10_1_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ + tl = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); \ } \ c0 += tl; /* overflow is handled on the next line */ \ th += (c0 < tl); /* at most 0xFFFFFFFFFFFFFFFF */ \ @@ -314,10 +314,10 @@ static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_ #define muladd_fast(a,b) { \ uint64_t tl, th; \ { \ - rustsecp256k1zkp_v0_10_0_uint128 t; \ - rustsecp256k1zkp_v0_10_0_u128_mul(&t, a, b); \ - th = rustsecp256k1zkp_v0_10_0_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ - tl = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); \ + rustsecp256k1zkp_v0_10_1_uint128 t; \ + rustsecp256k1zkp_v0_10_1_u128_mul(&t, a, b); \ + th = rustsecp256k1zkp_v0_10_1_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ + tl = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); \ } \ c0 += tl; /* overflow is handled on the next line */ \ th += (c0 < tl); /* at most 0xFFFFFFFFFFFFFFFF */ \ @@ -329,10 +329,10 @@ static int 
rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_ #define muladd2(a,b) { \ uint64_t tl, th, th2, tl2; \ { \ - rustsecp256k1zkp_v0_10_0_uint128 t; \ - rustsecp256k1zkp_v0_10_0_u128_mul(&t, a, b); \ - th = rustsecp256k1zkp_v0_10_0_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ - tl = rustsecp256k1zkp_v0_10_0_u128_to_u64(&t); \ + rustsecp256k1zkp_v0_10_1_uint128 t; \ + rustsecp256k1zkp_v0_10_1_u128_mul(&t, a, b); \ + th = rustsecp256k1zkp_v0_10_1_u128_hi_u64(&t); /* at most 0xFFFFFFFFFFFFFFFE */ \ + tl = rustsecp256k1zkp_v0_10_1_u128_to_u64(&t); \ } \ th2 = th + th; /* at most 0xFFFFFFFFFFFFFFFE (in case th was 0x7FFFFFFFFFFFFFFF) */ \ c2 += (th2 < th); /* never overflows by contract (verified the next line) */ \ @@ -381,7 +381,7 @@ static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_ VERIFY_CHECK(c2 == 0); \ } -static void rustsecp256k1zkp_v0_10_0_scalar_reduce_512(rustsecp256k1zkp_v0_10_0_scalar *r, const uint64_t *l) { +static void rustsecp256k1zkp_v0_10_1_scalar_reduce_512(rustsecp256k1zkp_v0_10_1_scalar *r, const uint64_t *l) { #ifdef USE_ASM_X86_64 /* Reduce 512 bits into 385. */ uint64_t m0, m1, m2, m3, m4, m5, m6; @@ -618,7 +618,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_reduce_512(rustsecp256k1zkp_v0_10_0_ : "g"(p0), "g"(p1), "g"(p2), "g"(p3), "g"(p4), "D"(r), "i"(SECP256K1_N_C_0), "i"(SECP256K1_N_C_1) : "rax", "rdx", "r8", "r9", "r10", "cc", "memory"); #else - rustsecp256k1zkp_v0_10_0_uint128 c128; + rustsecp256k1zkp_v0_10_1_uint128 c128; uint64_t c, c0, c1, c2; uint64_t n0 = l[4], n1 = l[5], n2 = l[6], n3 = l[7]; uint64_t m0, m1, m2, m3, m4, m5; @@ -676,25 +676,25 @@ static void rustsecp256k1zkp_v0_10_0_scalar_reduce_512(rustsecp256k1zkp_v0_10_0_ /* Reduce 258 bits into 256. */ /* r[0..3] = p[0..3] + p[4] * SECP256K1_N_C. 
*/ - rustsecp256k1zkp_v0_10_0_u128_from_u64(&c128, p0); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c128, SECP256K1_N_C_0, p4); - r->d[0] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_0_u128_rshift(&c128, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c128, p1); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&c128, SECP256K1_N_C_1, p4); - r->d[1] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_0_u128_rshift(&c128, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c128, p2); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c128, p4); - r->d[2] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_0_u128_rshift(&c128, 64); - rustsecp256k1zkp_v0_10_0_u128_accum_u64(&c128, p3); - r->d[3] = rustsecp256k1zkp_v0_10_0_u128_to_u64(&c128); - c = rustsecp256k1zkp_v0_10_0_u128_hi_u64(&c128); + rustsecp256k1zkp_v0_10_1_u128_from_u64(&c128, p0); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c128, SECP256K1_N_C_0, p4); + r->d[0] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_1_u128_rshift(&c128, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c128, p1); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&c128, SECP256K1_N_C_1, p4); + r->d[1] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_1_u128_rshift(&c128, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c128, p2); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c128, p4); + r->d[2] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c128); rustsecp256k1zkp_v0_10_1_u128_rshift(&c128, 64); + rustsecp256k1zkp_v0_10_1_u128_accum_u64(&c128, p3); + r->d[3] = rustsecp256k1zkp_v0_10_1_u128_to_u64(&c128); + c = rustsecp256k1zkp_v0_10_1_u128_hi_u64(&c128); #endif /* Final reduction of r. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_reduce(r, c + rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r)); + rustsecp256k1zkp_v0_10_1_scalar_reduce(r, c + rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r)); } -static void rustsecp256k1zkp_v0_10_0_scalar_mul_512(uint64_t l[8], const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static void rustsecp256k1zkp_v0_10_1_scalar_mul_512(uint64_t l[8], const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { #ifdef USE_ASM_X86_64 const uint64_t *pb = b->d; __asm__ __volatile__( @@ -861,7 +861,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_mul_512(uint64_t l[8], const rustsec #endif } -static void rustsecp256k1zkp_v0_10_0_scalar_sqr_512(uint64_t l[8], const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_sqr_512(uint64_t l[8], const rustsecp256k1zkp_v0_10_1_scalar *a) { #ifdef USE_ASM_X86_64 __asm__ __volatile__( /* Preload */ @@ -1006,24 +1006,24 @@ static void rustsecp256k1zkp_v0_10_0_scalar_sqr_512(uint64_t l[8], const rustsec #undef extract #undef extract_fast -static void rustsecp256k1zkp_v0_10_0_scalar_mul(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static void rustsecp256k1zkp_v0_10_1_scalar_mul(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { uint64_t l[8]; SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); - rustsecp256k1zkp_v0_10_0_scalar_mul_512(l, a, b); - rustsecp256k1zkp_v0_10_0_scalar_reduce_512(r, l); + rustsecp256k1zkp_v0_10_1_scalar_mul_512(l, a, b); + rustsecp256k1zkp_v0_10_1_scalar_reduce_512(r, l); SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_sqr(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_sqr(rustsecp256k1zkp_v0_10_1_scalar *r, const 
rustsecp256k1zkp_v0_10_1_scalar *a) { uint64_t l[8]; - rustsecp256k1zkp_v0_10_0_scalar_sqr_512(l, a); - rustsecp256k1zkp_v0_10_0_scalar_reduce_512(r, l); + rustsecp256k1zkp_v0_10_1_scalar_sqr_512(l, a); + rustsecp256k1zkp_v0_10_1_scalar_reduce_512(r, l); } -static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_scalar *r1, rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *k) { +static void rustsecp256k1zkp_v0_10_1_scalar_split_128(rustsecp256k1zkp_v0_10_1_scalar *r1, rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *k) { SECP256K1_SCALAR_VERIFY(k); r1->d[0] = k->d[0]; @@ -1039,14 +1039,14 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_s SECP256K1_SCALAR_VERIFY(r2); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_eq(const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_eq(const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); return ((a->d[0] ^ b->d[0]) | (a->d[1] ^ b->d[1]) | (a->d[2] ^ b->d[2]) | (a->d[3] ^ b->d[3])) == 0; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b, unsigned int shift) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b, unsigned int shift) { uint64_t l[8]; unsigned int shiftlimbs; unsigned int shiftlow; @@ -1055,7 +1055,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rusts SECP256K1_SCALAR_VERIFY(b); VERIFY_CHECK(shift >= 256); - rustsecp256k1zkp_v0_10_0_scalar_mul_512(l, a, b); + 
rustsecp256k1zkp_v0_10_1_scalar_mul_512(l, a, b); shiftlimbs = shift >> 6; shiftlow = shift & 0x3F; shifthigh = 64 - shiftlow; @@ -1063,12 +1063,12 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rusts r->d[1] = shift < 448 ? (l[1 + shiftlimbs] >> shiftlow | (shift < 384 && shiftlow ? (l[2 + shiftlimbs] << shifthigh) : 0)) : 0; r->d[2] = shift < 384 ? (l[2 + shiftlimbs] >> shiftlow | (shift < 320 && shiftlow ? (l[3 + shiftlimbs] << shifthigh) : 0)) : 0; r->d[3] = shift < 320 ? (l[3 + shiftlimbs] >> shiftlow) : 0; - rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 6] >> ((shift - 1) & 0x3f)) & 1); + rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 6] >> ((shift - 1) & 0x3f)) & 1); SECP256K1_SCALAR_VERIFY(r); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_scalar_cmov(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, int flag) { uint64_t mask0, mask1; volatile int vflag = flag; SECP256K1_SCALAR_VERIFY(a); @@ -1084,10 +1084,10 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1z SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_from_signed62(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_modinv64_signed62 *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_from_signed62(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_modinv64_signed62 *a) { const uint64_t a0 = a->v[0], a1 = a->v[1], a2 = a->v[2], a3 = a->v[3], a4 = a->v[4]; - /* The output from rustsecp256k1zkp_v0_10_0_modinv64{_var} should be normalized to range [0,modulus), and + /* The output from rustsecp256k1zkp_v0_10_1_modinv64{_var} should be normalized to range [0,modulus), and * have limbs in [0,2^62). 
The modulus is < 2^256, so the top limb must be below 2^(256-62*4). */ VERIFY_CHECK(a0 >> 62 == 0); @@ -1104,7 +1104,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_from_signed62(rustsecp256k1zkp_v0_10 SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_to_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_to_signed62(rustsecp256k1zkp_v0_10_1_modinv64_signed62 *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { const uint64_t M62 = UINT64_MAX >> 2; const uint64_t a0 = a->d[0], a1 = a->d[1], a2 = a->d[2], a3 = a->d[3]; SECP256K1_SCALAR_VERIFY(a); @@ -1116,42 +1116,42 @@ static void rustsecp256k1zkp_v0_10_0_scalar_to_signed62(rustsecp256k1zkp_v0_10_0 r->v[4] = a3 >> 56; } -static const rustsecp256k1zkp_v0_10_0_modinv64_modinfo rustsecp256k1zkp_v0_10_0_const_modinfo_scalar = { +static const rustsecp256k1zkp_v0_10_1_modinv64_modinfo rustsecp256k1zkp_v0_10_1_const_modinfo_scalar = { {{0x3FD25E8CD0364141LL, 0x2ABB739ABD2280EELL, -0x15LL, 0, 256}}, 0x34F20099AA774EC1LL }; -static void rustsecp256k1zkp_v0_10_0_scalar_inverse(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { - rustsecp256k1zkp_v0_10_0_modinv64_signed62 s; +static void rustsecp256k1zkp_v0_10_1_scalar_inverse(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { + rustsecp256k1zkp_v0_10_1_modinv64_signed62 s; #ifdef VERIFY - int zero_in = rustsecp256k1zkp_v0_10_0_scalar_is_zero(x); + int zero_in = rustsecp256k1zkp_v0_10_1_scalar_is_zero(x); #endif SECP256K1_SCALAR_VERIFY(x); - rustsecp256k1zkp_v0_10_0_scalar_to_signed62(&s, x); - rustsecp256k1zkp_v0_10_0_modinv64(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_scalar); - rustsecp256k1zkp_v0_10_0_scalar_from_signed62(r, &s); + rustsecp256k1zkp_v0_10_1_scalar_to_signed62(&s, x); + rustsecp256k1zkp_v0_10_1_modinv64(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_scalar); + 
rustsecp256k1zkp_v0_10_1_scalar_from_signed62(r, &s); SECP256K1_SCALAR_VERIFY(r); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) == zero_in); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) == zero_in); } -static void rustsecp256k1zkp_v0_10_0_scalar_inverse_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { - rustsecp256k1zkp_v0_10_0_modinv64_signed62 s; +static void rustsecp256k1zkp_v0_10_1_scalar_inverse_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { + rustsecp256k1zkp_v0_10_1_modinv64_signed62 s; #ifdef VERIFY - int zero_in = rustsecp256k1zkp_v0_10_0_scalar_is_zero(x); + int zero_in = rustsecp256k1zkp_v0_10_1_scalar_is_zero(x); #endif SECP256K1_SCALAR_VERIFY(x); - rustsecp256k1zkp_v0_10_0_scalar_to_signed62(&s, x); - rustsecp256k1zkp_v0_10_0_modinv64_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_scalar); - rustsecp256k1zkp_v0_10_0_scalar_from_signed62(r, &s); + rustsecp256k1zkp_v0_10_1_scalar_to_signed62(&s, x); + rustsecp256k1zkp_v0_10_1_modinv64_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_scalar); + rustsecp256k1zkp_v0_10_1_scalar_from_signed62(r, &s); SECP256K1_SCALAR_VERIFY(r); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) == zero_in); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) == zero_in); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_even(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_even(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return !(a->d[0] & 1); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32.h index 74c0d98b..18d359c6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32.h @@ -12,7 +12,7 @@ /** A scalar modulo the group order of the secp256k1 curve. 
*/ typedef struct { uint32_t d[8]; -} rustsecp256k1zkp_v0_10_0_scalar; +} rustsecp256k1zkp_v0_10_1_scalar; #define SECP256K1_SCALAR_CONST(d7, d6, d5, d4, d3, d2, d1, d0) {{(d0), (d1), (d2), (d3), (d4), (d5), (d6), (d7)}} diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32_impl.h index a94ce892..af9faef7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_8x32_impl.h @@ -40,7 +40,7 @@ #define SECP256K1_N_H_6 ((uint32_t)0xFFFFFFFFUL) #define SECP256K1_N_H_7 ((uint32_t)0x7FFFFFFFUL) -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_clear(rustsecp256k1zkp_v0_10_0_scalar *r) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_clear(rustsecp256k1zkp_v0_10_1_scalar *r) { r->d[0] = 0; r->d[1] = 0; r->d[2] = 0; @@ -51,7 +51,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_clear(rustsecp256k1 r->d[7] = 0; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_int(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int v) { r->d[0] = v; r->d[1] = 0; r->d[2] = 0; @@ -64,7 +64,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256 SECP256K1_SCALAR_VERIFY(r); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_u64(rustsecp256k1zkp_v0_10_0_scalar *r, uint64_t v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_u64(rustsecp256k1zkp_v0_10_1_scalar *r, uint64_t v) { r->d[0] = v; r->d[1] = v >> 32; r->d[2] = 0; @@ -75,27 +75,27 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_u64(rustsecp256 r->d[7] = 0; } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static 
unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); VERIFY_CHECK((offset + count - 1) >> 5 == offset >> 5); return (a->d[offset >> 5] >> (offset & 0x1F)) & ((1 << count) - 1); } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); VERIFY_CHECK(count < 32); VERIFY_CHECK(offset + count <= 256); if ((offset + count - 1) >> 5 == offset >> 5) { - return rustsecp256k1zkp_v0_10_0_scalar_get_bits(a, offset, count); + return rustsecp256k1zkp_v0_10_1_scalar_get_bits(a, offset, count); } else { VERIFY_CHECK((offset >> 5) + 1 < 8); return ((a->d[offset >> 5] >> (offset & 0x1F)) | (a->d[(offset >> 5) + 1] << (32 - (offset & 0x1F)))) & ((((uint32_t)1) << count) - 1); } } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_check_overflow(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_check_overflow(const rustsecp256k1zkp_v0_10_1_scalar *a) { int yes = 0; int no = 0; no |= (a->d[7] < SECP256K1_N_7); /* No need for a > check. 
*/ @@ -113,7 +113,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_check_overflow(const return yes; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_reduce(rustsecp256k1zkp_v0_10_0_scalar *r, uint32_t overflow) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_reduce(rustsecp256k1zkp_v0_10_1_scalar *r, uint32_t overflow) { uint64_t t; VERIFY_CHECK(overflow <= 1); @@ -138,7 +138,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_reduce(rustsecp256k1 return overflow; } -static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static int rustsecp256k1zkp_v0_10_1_scalar_add(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { int overflow; uint64_t t = (uint64_t)a->d[0] + b->d[0]; SECP256K1_SCALAR_VERIFY(a); @@ -159,15 +159,15 @@ static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar * r->d[6] = t & 0xFFFFFFFFULL; t >>= 32; t += (uint64_t)a->d[7] + b->d[7]; r->d[7] = t & 0xFFFFFFFFULL; t >>= 32; - overflow = t + rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r); + overflow = t + rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r); VERIFY_CHECK(overflow == 0 || overflow == 1); - rustsecp256k1zkp_v0_10_0_scalar_reduce(r, overflow); + rustsecp256k1zkp_v0_10_1_scalar_reduce(r, overflow); SECP256K1_SCALAR_VERIFY(r); return overflow; } -static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int bit, int flag) { +static void rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int bit, int flag) { uint64_t t; volatile int vflag = flag; SECP256K1_SCALAR_VERIFY(r); @@ -195,17 +195,17 @@ static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_sc VERIFY_CHECK((t >> 32) == 0); } -static void 
rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *b32, int *overflow) { +static void rustsecp256k1zkp_v0_10_1_scalar_set_b32(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *b32, int *overflow) { int over; - r->d[0] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[28]); - r->d[1] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[24]); - r->d[2] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[20]); - r->d[3] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[16]); - r->d[4] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[12]); - r->d[5] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[8]); - r->d[6] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[4]); - r->d[7] = rustsecp256k1zkp_v0_10_0_read_be32(&b32[0]); - over = rustsecp256k1zkp_v0_10_0_scalar_reduce(r, rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r)); + r->d[0] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[28]); + r->d[1] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[24]); + r->d[2] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[20]); + r->d[3] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[16]); + r->d[4] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[12]); + r->d[5] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[8]); + r->d[6] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[4]); + r->d[7] = rustsecp256k1zkp_v0_10_1_read_be32(&b32[0]); + over = rustsecp256k1zkp_v0_10_1_scalar_reduce(r, rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r)); if (overflow) { *overflow = over; } @@ -213,27 +213,27 @@ static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_sca SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_0_scalar* a) { +static void rustsecp256k1zkp_v0_10_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_1_scalar* a) { SECP256K1_SCALAR_VERIFY(a); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[0], a->d[7]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[4], a->d[6]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[8], a->d[5]); - 
rustsecp256k1zkp_v0_10_0_write_be32(&bin[12], a->d[4]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[16], a->d[3]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[20], a->d[2]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[24], a->d[1]); - rustsecp256k1zkp_v0_10_0_write_be32(&bin[28], a->d[0]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[0], a->d[7]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[4], a->d[6]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[8], a->d[5]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[12], a->d[4]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[16], a->d[3]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[20], a->d[2]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[24], a->d[1]); + rustsecp256k1zkp_v0_10_1_write_be32(&bin[28], a->d[0]); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_zero(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_zero(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return (a->d[0] | a->d[1] | a->d[2] | a->d[3] | a->d[4] | a->d[5] | a->d[6] | a->d[7]) == 0; } -static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { - uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1zkp_v0_10_0_scalar_is_zero(a) == 0); +static void rustsecp256k1zkp_v0_10_1_scalar_negate(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { + uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1zkp_v0_10_1_scalar_is_zero(a) == 0); uint64_t t = (uint64_t)(~a->d[0]) + SECP256K1_N_0 + 1; SECP256K1_SCALAR_VERIFY(a); @@ -256,7 +256,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scal SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_half(rustsecp256k1zkp_v0_10_1_scalar *r, const 
rustsecp256k1zkp_v0_10_1_scalar *a) { /* Writing `/` for field division and `//` for integer division, we compute * * a/2 = (a - (a&1))/2 + (a&1)/2 @@ -304,13 +304,13 @@ static void rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar SECP256K1_SCALAR_VERIFY(r); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_one(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_one(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return ((a->d[0] ^ 1) | a->d[1] | a->d[2] | a->d[3] | a->d[4] | a->d[5] | a->d[6] | a->d[7]) == 0; } -static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_0_scalar *a) { +static int rustsecp256k1zkp_v0_10_1_scalar_is_high(const rustsecp256k1zkp_v0_10_1_scalar *a) { int yes = 0; int no = 0; SECP256K1_SCALAR_VERIFY(a); @@ -330,12 +330,12 @@ static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_ return yes; } -static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_scalar *r, int flag) { +static int rustsecp256k1zkp_v0_10_1_scalar_cond_negate(rustsecp256k1zkp_v0_10_1_scalar *r, int flag) { /* If we are flag = 0, mask = 00...00 and this is a no-op; - * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1zkp_v0_10_0_scalar_negate */ + * if we are flag = 1, mask = 11...11 and this is identical to rustsecp256k1zkp_v0_10_1_scalar_negate */ volatile int vflag = flag; uint32_t mask = -vflag; - uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) == 0); + uint32_t nonzero = 0xFFFFFFFFUL * (rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) == 0); uint64_t t = (uint64_t)(r->d[0] ^ mask) + ((SECP256K1_N_0 + 1) & mask); SECP256K1_SCALAR_VERIFY(r); @@ -446,7 +446,7 @@ static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_ VERIFY_CHECK(c2 == 0); \ } -static void 
rustsecp256k1zkp_v0_10_0_scalar_reduce_512(rustsecp256k1zkp_v0_10_0_scalar *r, const uint32_t *l) { +static void rustsecp256k1zkp_v0_10_1_scalar_reduce_512(rustsecp256k1zkp_v0_10_1_scalar *r, const uint32_t *l) { uint64_t c; uint32_t n0 = l[8], n1 = l[9], n2 = l[10], n3 = l[11], n4 = l[12], n5 = l[13], n6 = l[14], n7 = l[15]; uint32_t m0, m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12; @@ -585,10 +585,10 @@ static void rustsecp256k1zkp_v0_10_0_scalar_reduce_512(rustsecp256k1zkp_v0_10_0_ r->d[7] = c & 0xFFFFFFFFUL; c >>= 32; /* Final reduction of r. */ - rustsecp256k1zkp_v0_10_0_scalar_reduce(r, c + rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r)); + rustsecp256k1zkp_v0_10_1_scalar_reduce(r, c + rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r)); } -static void rustsecp256k1zkp_v0_10_0_scalar_mul_512(uint32_t *l, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static void rustsecp256k1zkp_v0_10_1_scalar_mul_512(uint32_t *l, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { /* 96 bit accumulator. */ uint32_t c0 = 0, c1 = 0, c2 = 0; @@ -676,7 +676,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_mul_512(uint32_t *l, const rustsecp2 l[15] = c0; } -static void rustsecp256k1zkp_v0_10_0_scalar_sqr_512(uint32_t *l, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_sqr_512(uint32_t *l, const rustsecp256k1zkp_v0_10_1_scalar *a) { /* 96 bit accumulator. 
*/ uint32_t c0 = 0, c1 = 0, c2 = 0; @@ -744,24 +744,24 @@ static void rustsecp256k1zkp_v0_10_0_scalar_sqr_512(uint32_t *l, const rustsecp2 #undef extract #undef extract_fast -static void rustsecp256k1zkp_v0_10_0_scalar_mul(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static void rustsecp256k1zkp_v0_10_1_scalar_mul(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { uint32_t l[16]; SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); - rustsecp256k1zkp_v0_10_0_scalar_mul_512(l, a, b); - rustsecp256k1zkp_v0_10_0_scalar_reduce_512(r, l); + rustsecp256k1zkp_v0_10_1_scalar_mul_512(l, a, b); + rustsecp256k1zkp_v0_10_1_scalar_reduce_512(r, l); SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_sqr(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_sqr(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { uint32_t l[16]; - rustsecp256k1zkp_v0_10_0_scalar_sqr_512(l, a); - rustsecp256k1zkp_v0_10_0_scalar_reduce_512(r, l); + rustsecp256k1zkp_v0_10_1_scalar_sqr_512(l, a); + rustsecp256k1zkp_v0_10_1_scalar_reduce_512(r, l); } -static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_scalar *r1, rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *k) { +static void rustsecp256k1zkp_v0_10_1_scalar_split_128(rustsecp256k1zkp_v0_10_1_scalar *r1, rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *k) { SECP256K1_SCALAR_VERIFY(k); r1->d[0] = k->d[0]; @@ -785,14 +785,14 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_s SECP256K1_SCALAR_VERIFY(r2); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_eq(const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +SECP256K1_INLINE 
static int rustsecp256k1zkp_v0_10_1_scalar_eq(const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); return ((a->d[0] ^ b->d[0]) | (a->d[1] ^ b->d[1]) | (a->d[2] ^ b->d[2]) | (a->d[3] ^ b->d[3]) | (a->d[4] ^ b->d[4]) | (a->d[5] ^ b->d[5]) | (a->d[6] ^ b->d[6]) | (a->d[7] ^ b->d[7])) == 0; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b, unsigned int shift) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_mul_shift_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b, unsigned int shift) { uint32_t l[16]; unsigned int shiftlimbs; unsigned int shiftlow; @@ -801,7 +801,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rusts SECP256K1_SCALAR_VERIFY(b); VERIFY_CHECK(shift >= 256); - rustsecp256k1zkp_v0_10_0_scalar_mul_512(l, a, b); + rustsecp256k1zkp_v0_10_1_scalar_mul_512(l, a, b); shiftlimbs = shift >> 5; shiftlow = shift & 0x1F; shifthigh = 32 - shiftlow; @@ -813,12 +813,12 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(rusts r->d[5] = shift < 352 ? (l[5 + shiftlimbs] >> shiftlow | (shift < 320 && shiftlow ? (l[6 + shiftlimbs] << shifthigh) : 0)) : 0; r->d[6] = shift < 320 ? (l[6 + shiftlimbs] >> shiftlow | (shift < 288 && shiftlow ? (l[7 + shiftlimbs] << shifthigh) : 0)) : 0; r->d[7] = shift < 288 ? 
(l[7 + shiftlimbs] >> shiftlow) : 0; - rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 5] >> ((shift - 1) & 0x1f)) & 1); + rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(r, 0, (l[(shift - 1) >> 5] >> ((shift - 1) & 0x1f)) & 1); SECP256K1_SCALAR_VERIFY(r); } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_scalar_cmov(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, int flag) { uint32_t mask0, mask1; volatile int vflag = flag; SECP256K1_SCALAR_VERIFY(a); @@ -838,11 +838,11 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1z SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_from_signed30(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_modinv32_signed30 *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_from_signed30(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_modinv32_signed30 *a) { const uint32_t a0 = a->v[0], a1 = a->v[1], a2 = a->v[2], a3 = a->v[3], a4 = a->v[4], a5 = a->v[5], a6 = a->v[6], a7 = a->v[7], a8 = a->v[8]; - /* The output from rustsecp256k1zkp_v0_10_0_modinv32{_var} should be normalized to range [0,modulus), and + /* The output from rustsecp256k1zkp_v0_10_1_modinv32{_var} should be normalized to range [0,modulus), and * have limbs in [0,2^30). The modulus is < 2^256, so the top limb must be below 2^(256-30*8). 
*/ VERIFY_CHECK(a0 >> 30 == 0); @@ -867,7 +867,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_from_signed30(rustsecp256k1zkp_v0_10 SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_to_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_to_signed30(rustsecp256k1zkp_v0_10_1_modinv32_signed30 *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { const uint32_t M30 = UINT32_MAX >> 2; const uint32_t a0 = a->d[0], a1 = a->d[1], a2 = a->d[2], a3 = a->d[3], a4 = a->d[4], a5 = a->d[5], a6 = a->d[6], a7 = a->d[7]; @@ -884,42 +884,42 @@ static void rustsecp256k1zkp_v0_10_0_scalar_to_signed30(rustsecp256k1zkp_v0_10_0 r->v[8] = a7 >> 16; } -static const rustsecp256k1zkp_v0_10_0_modinv32_modinfo rustsecp256k1zkp_v0_10_0_const_modinfo_scalar = { +static const rustsecp256k1zkp_v0_10_1_modinv32_modinfo rustsecp256k1zkp_v0_10_1_const_modinfo_scalar = { {{0x10364141L, 0x3F497A33L, 0x348A03BBL, 0x2BB739ABL, -0x146L, 0, 0, 0, 65536}}, 0x2A774EC1L }; -static void rustsecp256k1zkp_v0_10_0_scalar_inverse(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { - rustsecp256k1zkp_v0_10_0_modinv32_signed30 s; +static void rustsecp256k1zkp_v0_10_1_scalar_inverse(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { + rustsecp256k1zkp_v0_10_1_modinv32_signed30 s; #ifdef VERIFY - int zero_in = rustsecp256k1zkp_v0_10_0_scalar_is_zero(x); + int zero_in = rustsecp256k1zkp_v0_10_1_scalar_is_zero(x); #endif SECP256K1_SCALAR_VERIFY(x); - rustsecp256k1zkp_v0_10_0_scalar_to_signed30(&s, x); - rustsecp256k1zkp_v0_10_0_modinv32(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_scalar); - rustsecp256k1zkp_v0_10_0_scalar_from_signed30(r, &s); + rustsecp256k1zkp_v0_10_1_scalar_to_signed30(&s, x); + rustsecp256k1zkp_v0_10_1_modinv32(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_scalar); + rustsecp256k1zkp_v0_10_1_scalar_from_signed30(r, &s); 
SECP256K1_SCALAR_VERIFY(r); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) == zero_in); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) == zero_in); } -static void rustsecp256k1zkp_v0_10_0_scalar_inverse_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { - rustsecp256k1zkp_v0_10_0_modinv32_signed30 s; +static void rustsecp256k1zkp_v0_10_1_scalar_inverse_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { + rustsecp256k1zkp_v0_10_1_modinv32_signed30 s; #ifdef VERIFY - int zero_in = rustsecp256k1zkp_v0_10_0_scalar_is_zero(x); + int zero_in = rustsecp256k1zkp_v0_10_1_scalar_is_zero(x); #endif SECP256K1_SCALAR_VERIFY(x); - rustsecp256k1zkp_v0_10_0_scalar_to_signed30(&s, x); - rustsecp256k1zkp_v0_10_0_modinv32_var(&s, &rustsecp256k1zkp_v0_10_0_const_modinfo_scalar); - rustsecp256k1zkp_v0_10_0_scalar_from_signed30(r, &s); + rustsecp256k1zkp_v0_10_1_scalar_to_signed30(&s, x); + rustsecp256k1zkp_v0_10_1_modinv32_var(&s, &rustsecp256k1zkp_v0_10_1_const_modinfo_scalar); + rustsecp256k1zkp_v0_10_1_scalar_from_signed30(r, &s); SECP256K1_SCALAR_VERIFY(r); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(r) == zero_in); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(r) == zero_in); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_even(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_even(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return !(a->d[0] & 1); diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_impl.h index 4f5e1bd0..e3da2047 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_impl.h @@ -24,19 +24,19 @@ #error "Please select wide multiplication implementation" #endif -static const rustsecp256k1zkp_v0_10_0_scalar 
rustsecp256k1zkp_v0_10_0_scalar_one = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 1); -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_scalar_zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0); +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_scalar_one = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 1); +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_scalar_zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0); -static int rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *bin) { +static int rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *bin) { int overflow; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, bin, &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, bin, &overflow); SECP256K1_SCALAR_VERIFY(r); - return (!overflow) & (!rustsecp256k1zkp_v0_10_0_scalar_is_zero(r)); + return (!overflow) & (!rustsecp256k1zkp_v0_10_1_scalar_is_zero(r)); } -static void rustsecp256k1zkp_v0_10_0_scalar_verify(const rustsecp256k1zkp_v0_10_0_scalar *r) { - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_check_overflow(r) == 0); +static void rustsecp256k1zkp_v0_10_1_scalar_verify(const rustsecp256k1zkp_v0_10_1_scalar *r) { + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_check_overflow(r) == 0); (void)r; } @@ -60,7 +60,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_verify(const rustsecp256k1zkp_v0_10_ * nontrivial to get full test coverage for the exhaustive tests. We therefore * (arbitrarily) set r2 = k + 5 (mod n) and r1 = k - r2 * lambda (mod n). 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT k) { +static void rustsecp256k1zkp_v0_10_1_scalar_split_lambda(rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT k) { SECP256K1_SCALAR_VERIFY(k); VERIFY_CHECK(r1 != k); VERIFY_CHECK(r2 != k); @@ -76,13 +76,13 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_ /** * The Secp256k1 curve has an endomorphism, where lambda * (x, y) = (beta * x, y), where * lambda is: */ -static const rustsecp256k1zkp_v0_10_0_scalar rustsecp256k1zkp_v0_10_0_const_lambda = SECP256K1_SCALAR_CONST( +static const rustsecp256k1zkp_v0_10_1_scalar rustsecp256k1zkp_v0_10_1_const_lambda = SECP256K1_SCALAR_CONST( 0x5363AD4CUL, 0xC05C30E0UL, 0xA5261C02UL, 0x8812645AUL, 0x122E22EAUL, 0x20816678UL, 0xDF02967CUL, 0x1B23BD72UL ); #ifdef VERIFY -static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda_verify(const rustsecp256k1zkp_v0_10_0_scalar *r1, const rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *k); +static void rustsecp256k1zkp_v0_10_1_scalar_split_lambda_verify(const rustsecp256k1zkp_v0_10_1_scalar *r1, const rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *k); #endif /* @@ -135,21 +135,21 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda_verify(const rustsecp25 * * See proof below. 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_0_scalar * SECP256K1_RESTRICT k) { - rustsecp256k1zkp_v0_10_0_scalar c1, c2; - static const rustsecp256k1zkp_v0_10_0_scalar minus_b1 = SECP256K1_SCALAR_CONST( +static void rustsecp256k1zkp_v0_10_1_scalar_split_lambda(rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r1, rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT r2, const rustsecp256k1zkp_v0_10_1_scalar * SECP256K1_RESTRICT k) { + rustsecp256k1zkp_v0_10_1_scalar c1, c2; + static const rustsecp256k1zkp_v0_10_1_scalar minus_b1 = SECP256K1_SCALAR_CONST( 0x00000000UL, 0x00000000UL, 0x00000000UL, 0x00000000UL, 0xE4437ED6UL, 0x010E8828UL, 0x6F547FA9UL, 0x0ABFE4C3UL ); - static const rustsecp256k1zkp_v0_10_0_scalar minus_b2 = SECP256K1_SCALAR_CONST( + static const rustsecp256k1zkp_v0_10_1_scalar minus_b2 = SECP256K1_SCALAR_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL, 0x8A280AC5UL, 0x0774346DUL, 0xD765CDA8UL, 0x3DB1562CUL ); - static const rustsecp256k1zkp_v0_10_0_scalar g1 = SECP256K1_SCALAR_CONST( + static const rustsecp256k1zkp_v0_10_1_scalar g1 = SECP256K1_SCALAR_CONST( 0x3086D221UL, 0xA7D46BCDUL, 0xE86C90E4UL, 0x9284EB15UL, 0x3DAA8A14UL, 0x71E8CA7FUL, 0xE893209AUL, 0x45DBB031UL ); - static const rustsecp256k1zkp_v0_10_0_scalar g2 = SECP256K1_SCALAR_CONST( + static const rustsecp256k1zkp_v0_10_1_scalar g2 = SECP256K1_SCALAR_CONST( 0xE4437ED6UL, 0x010E8828UL, 0x6F547FA9UL, 0x0ABFE4C4UL, 0x221208ACUL, 0x9DF506C6UL, 0x1571B4AEUL, 0x8AC47F71UL ); @@ -159,25 +159,25 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_ VERIFY_CHECK(r1 != r2); /* these _var calls are constant time since the shift amount is constant */ - rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(&c1, k, &g1, 384); - rustsecp256k1zkp_v0_10_0_scalar_mul_shift_var(&c2, k, &g2, 384); - 
rustsecp256k1zkp_v0_10_0_scalar_mul(&c1, &c1, &minus_b1); - rustsecp256k1zkp_v0_10_0_scalar_mul(&c2, &c2, &minus_b2); - rustsecp256k1zkp_v0_10_0_scalar_add(r2, &c1, &c2); - rustsecp256k1zkp_v0_10_0_scalar_mul(r1, r2, &rustsecp256k1zkp_v0_10_0_const_lambda); - rustsecp256k1zkp_v0_10_0_scalar_negate(r1, r1); - rustsecp256k1zkp_v0_10_0_scalar_add(r1, r1, k); + rustsecp256k1zkp_v0_10_1_scalar_mul_shift_var(&c1, k, &g1, 384); + rustsecp256k1zkp_v0_10_1_scalar_mul_shift_var(&c2, k, &g2, 384); + rustsecp256k1zkp_v0_10_1_scalar_mul(&c1, &c1, &minus_b1); + rustsecp256k1zkp_v0_10_1_scalar_mul(&c2, &c2, &minus_b2); + rustsecp256k1zkp_v0_10_1_scalar_add(r2, &c1, &c2); + rustsecp256k1zkp_v0_10_1_scalar_mul(r1, r2, &rustsecp256k1zkp_v0_10_1_const_lambda); + rustsecp256k1zkp_v0_10_1_scalar_negate(r1, r1); + rustsecp256k1zkp_v0_10_1_scalar_add(r1, r1, k); SECP256K1_SCALAR_VERIFY(r1); SECP256K1_SCALAR_VERIFY(r2); #ifdef VERIFY - rustsecp256k1zkp_v0_10_0_scalar_split_lambda_verify(r1, r2, k); + rustsecp256k1zkp_v0_10_1_scalar_split_lambda_verify(r1, r2, k); #endif } #ifdef VERIFY /* - * Proof for rustsecp256k1zkp_v0_10_0_scalar_split_lambda's bounds. + * Proof for rustsecp256k1zkp_v0_10_1_scalar_split_lambda's bounds. * * Let * - epsilon1 = 2^256 * |g1/2^384 - b2/d| @@ -280,8 +280,8 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda(rustsecp256k1zkp_v0_10_ * * Q.E.D. 
*/ -static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda_verify(const rustsecp256k1zkp_v0_10_0_scalar *r1, const rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *k) { - rustsecp256k1zkp_v0_10_0_scalar s; +static void rustsecp256k1zkp_v0_10_1_scalar_split_lambda_verify(const rustsecp256k1zkp_v0_10_1_scalar *r1, const rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *k) { + rustsecp256k1zkp_v0_10_1_scalar s; unsigned char buf1[32]; unsigned char buf2[32]; @@ -297,19 +297,19 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_lambda_verify(const rustsecp25 0x8a, 0x65, 0x28, 0x7b, 0xd4, 0x71, 0x79, 0xfb, 0x2b, 0xe0, 0x88, 0x46, 0xce, 0xa2, 0x67, 0xed }; - rustsecp256k1zkp_v0_10_0_scalar_mul(&s, &rustsecp256k1zkp_v0_10_0_const_lambda, r2); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, r1); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&s, k)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s, &rustsecp256k1zkp_v0_10_1_const_lambda, r2); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, r1); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&s, k)); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, r1); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf1, r1); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf2, &s); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf1, k1_bound, 32) < 0 || rustsecp256k1zkp_v0_10_0_memcmp_var(buf2, k1_bound, 32) < 0); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, r1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf1, r1); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf2, &s); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf1, k1_bound, 32) < 0 || rustsecp256k1zkp_v0_10_1_memcmp_var(buf2, k1_bound, 32) < 0); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, r2); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf1, r2); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(buf2, &s); - VERIFY_CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf1, k2_bound, 32) < 0 || rustsecp256k1zkp_v0_10_0_memcmp_var(buf2, k2_bound, 32) < 0); + 
rustsecp256k1zkp_v0_10_1_scalar_negate(&s, r2); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf1, r2); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(buf2, &s); + VERIFY_CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf1, k2_bound, 32) < 0 || rustsecp256k1zkp_v0_10_1_memcmp_var(buf2, k2_bound, 32) < 0); } #endif /* VERIFY */ #endif /* !defined(EXHAUSTIVE_TEST_ORDER) */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low.h index 97e202be..e403979f 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low.h @@ -10,7 +10,7 @@ #include /** A scalar modulo the group order of the secp256k1 curve. */ -typedef uint32_t rustsecp256k1zkp_v0_10_0_scalar; +typedef uint32_t rustsecp256k1zkp_v0_10_1_scalar; /* A compile-time constant equal to 2^32 (modulo order). */ #define SCALAR_2P32 ((0xffffffffUL % EXHAUSTIVE_TEST_ORDER) + 1U) diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low_impl.h index e3eebc29..748a1d04 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scalar_low_impl.h @@ -13,27 +13,27 @@ #include -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_even(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_even(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return !(*a & 1); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_clear(rustsecp256k1zkp_v0_10_0_scalar *r) { *r = 0; } +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_clear(rustsecp256k1zkp_v0_10_1_scalar *r) { *r = 0; } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_int(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_int(rustsecp256k1zkp_v0_10_1_scalar *r, 
unsigned int v) { *r = v % EXHAUSTIVE_TEST_ORDER; SECP256K1_SCALAR_VERIFY(r); } -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_scalar_set_u64(rustsecp256k1zkp_v0_10_0_scalar *r, uint64_t v) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_scalar_set_u64(rustsecp256k1zkp_v0_10_1_scalar *r, uint64_t v) { *r = v % EXHAUSTIVE_TEST_ORDER; - rustsecp256k1zkp_v0_10_0_scalar_verify(r); + rustsecp256k1zkp_v0_10_1_scalar_verify(r); } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); if (offset < 32) @@ -42,15 +42,15 @@ SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits(co return 0; } -SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_0_scalar *a, unsigned int offset, unsigned int count) { +SECP256K1_INLINE static unsigned int rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(const rustsecp256k1zkp_v0_10_1_scalar *a, unsigned int offset, unsigned int count) { SECP256K1_SCALAR_VERIFY(a); - return rustsecp256k1zkp_v0_10_0_scalar_get_bits(a, offset, count); + return rustsecp256k1zkp_v0_10_1_scalar_get_bits(a, offset, count); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_check_overflow(const rustsecp256k1zkp_v0_10_0_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; } +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_check_overflow(const rustsecp256k1zkp_v0_10_1_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; } -static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static int rustsecp256k1zkp_v0_10_1_scalar_add(rustsecp256k1zkp_v0_10_1_scalar 
*r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); @@ -60,7 +60,7 @@ static int rustsecp256k1zkp_v0_10_0_scalar_add(rustsecp256k1zkp_v0_10_0_scalar * return *r < *b; } -static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_scalar *r, unsigned int bit, int flag) { +static void rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(rustsecp256k1zkp_v0_10_1_scalar *r, unsigned int bit, int flag) { SECP256K1_SCALAR_VERIFY(r); if (flag && bit < 32) @@ -72,7 +72,7 @@ static void rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(rustsecp256k1zkp_v0_10_0_sc VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER); } -static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_scalar *r, const unsigned char *b32, int *overflow) { +static void rustsecp256k1zkp_v0_10_1_scalar_set_b32(rustsecp256k1zkp_v0_10_1_scalar *r, const unsigned char *b32, int *overflow) { int i; int over = 0; *r = 0; @@ -88,20 +88,20 @@ static void rustsecp256k1zkp_v0_10_0_scalar_set_b32(rustsecp256k1zkp_v0_10_0_sca SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_0_scalar* a) { +static void rustsecp256k1zkp_v0_10_1_scalar_get_b32(unsigned char *bin, const rustsecp256k1zkp_v0_10_1_scalar* a) { SECP256K1_SCALAR_VERIFY(a); memset(bin, 0, 32); bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_zero(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_zero(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return *a == 0; } -static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void 
rustsecp256k1zkp_v0_10_1_scalar_negate(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); if (*a == 0) { @@ -113,28 +113,28 @@ static void rustsecp256k1zkp_v0_10_0_scalar_negate(rustsecp256k1zkp_v0_10_0_scal SECP256K1_SCALAR_VERIFY(r); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_is_one(const rustsecp256k1zkp_v0_10_0_scalar *a) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_is_one(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return *a == 1; } -static int rustsecp256k1zkp_v0_10_0_scalar_is_high(const rustsecp256k1zkp_v0_10_0_scalar *a) { +static int rustsecp256k1zkp_v0_10_1_scalar_is_high(const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); return *a > EXHAUSTIVE_TEST_ORDER / 2; } -static int rustsecp256k1zkp_v0_10_0_scalar_cond_negate(rustsecp256k1zkp_v0_10_0_scalar *r, int flag) { +static int rustsecp256k1zkp_v0_10_1_scalar_cond_negate(rustsecp256k1zkp_v0_10_1_scalar *r, int flag) { SECP256K1_SCALAR_VERIFY(r); - if (flag) rustsecp256k1zkp_v0_10_0_scalar_negate(r, r); + if (flag) rustsecp256k1zkp_v0_10_1_scalar_negate(r, r); SECP256K1_SCALAR_VERIFY(r); return flag ? 
-1 : 1; } -static void rustsecp256k1zkp_v0_10_0_scalar_mul(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +static void rustsecp256k1zkp_v0_10_1_scalar_mul(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); @@ -143,11 +143,11 @@ static void rustsecp256k1zkp_v0_10_0_scalar_mul(rustsecp256k1zkp_v0_10_0_scalar SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_sqr(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_sqr(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { *r = (*a * *a) % EXHAUSTIVE_TEST_ORDER; } -static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_scalar *r1, rustsecp256k1zkp_v0_10_0_scalar *r2, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_split_128(rustsecp256k1zkp_v0_10_1_scalar *r1, rustsecp256k1zkp_v0_10_1_scalar *r2, const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); *r1 = *a; @@ -157,14 +157,14 @@ static void rustsecp256k1zkp_v0_10_0_scalar_split_128(rustsecp256k1zkp_v0_10_0_s SECP256K1_SCALAR_VERIFY(r2); } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_scalar_eq(const rustsecp256k1zkp_v0_10_0_scalar *a, const rustsecp256k1zkp_v0_10_0_scalar *b) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_scalar_eq(const rustsecp256k1zkp_v0_10_1_scalar *a, const rustsecp256k1zkp_v0_10_1_scalar *b) { SECP256K1_SCALAR_VERIFY(a); SECP256K1_SCALAR_VERIFY(b); return *a == *b; } -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_scalar_cmov(rustsecp256k1zkp_v0_10_1_scalar *r, const 
rustsecp256k1zkp_v0_10_1_scalar *a, int flag) { uint32_t mask0, mask1; volatile int vflag = flag; SECP256K1_SCALAR_VERIFY(a); @@ -177,7 +177,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_scalar_cmov(rustsecp256k1z SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_inverse(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { +static void rustsecp256k1zkp_v0_10_1_scalar_inverse(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { int i; *r = 0; SECP256K1_SCALAR_VERIFY(x); @@ -192,15 +192,15 @@ static void rustsecp256k1zkp_v0_10_0_scalar_inverse(rustsecp256k1zkp_v0_10_0_sca VERIFY_CHECK(*r != 0); } -static void rustsecp256k1zkp_v0_10_0_scalar_inverse_var(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *x) { +static void rustsecp256k1zkp_v0_10_1_scalar_inverse_var(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *x) { SECP256K1_SCALAR_VERIFY(x); - rustsecp256k1zkp_v0_10_0_scalar_inverse(r, x); + rustsecp256k1zkp_v0_10_1_scalar_inverse(r, x); SECP256K1_SCALAR_VERIFY(r); } -static void rustsecp256k1zkp_v0_10_0_scalar_half(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_scalar *a) { +static void rustsecp256k1zkp_v0_10_1_scalar_half(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_scalar *a) { SECP256K1_SCALAR_VERIFY(a); *r = (*a + ((-(uint32_t)(*a & 1)) & EXHAUSTIVE_TEST_ORDER)) >> 1; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scratch.h b/secp256k1-zkp-sys/depend/secp256k1/src/scratch.h index 0ff329bb..ae704fd6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scratch.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scratch.h @@ -9,7 +9,7 @@ /* The typedef is used internally; the struct name is used in the public API * (where it is exposed as a different typedef) */ -typedef struct rustsecp256k1zkp_v0_10_0_scratch_space_struct { +typedef struct 
rustsecp256k1zkp_v0_10_1_scratch_space_struct { /** guard against interpreting this object as other types */ unsigned char magic[8]; /** actual allocated data */ @@ -19,24 +19,24 @@ typedef struct rustsecp256k1zkp_v0_10_0_scratch_space_struct { size_t alloc_size; /** maximum size available to allocate */ size_t max_size; -} rustsecp256k1zkp_v0_10_0_scratch; +} rustsecp256k1zkp_v0_10_1_scratch; -static rustsecp256k1zkp_v0_10_0_scratch* rustsecp256k1zkp_v0_10_0_scratch_create(const rustsecp256k1zkp_v0_10_0_callback* error_callback, size_t max_size); +static rustsecp256k1zkp_v0_10_1_scratch* rustsecp256k1zkp_v0_10_1_scratch_create(const rustsecp256k1zkp_v0_10_1_callback* error_callback, size_t max_size); -static void rustsecp256k1zkp_v0_10_0_scratch_destroy(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch* scratch); +static void rustsecp256k1zkp_v0_10_1_scratch_destroy(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch* scratch); /** Returns an opaque object used to "checkpoint" a scratch space. Used - * with `rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint` to undo allocations. */ -static size_t rustsecp256k1zkp_v0_10_0_scratch_checkpoint(const rustsecp256k1zkp_v0_10_0_callback* error_callback, const rustsecp256k1zkp_v0_10_0_scratch* scratch); + * with `rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint` to undo allocations. */ +static size_t rustsecp256k1zkp_v0_10_1_scratch_checkpoint(const rustsecp256k1zkp_v0_10_1_callback* error_callback, const rustsecp256k1zkp_v0_10_1_scratch* scratch); -/** Applies a check point received from `rustsecp256k1zkp_v0_10_0_scratch_checkpoint`, +/** Applies a check point received from `rustsecp256k1zkp_v0_10_1_scratch_checkpoint`, * undoing all allocations since that point. 
*/ -static void rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t checkpoint); +static void rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t checkpoint); /** Returns the maximum allocation the scratch space will allow */ -static size_t rustsecp256k1zkp_v0_10_0_scratch_max_allocation(const rustsecp256k1zkp_v0_10_0_callback* error_callback, const rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t n_objects); +static size_t rustsecp256k1zkp_v0_10_1_scratch_max_allocation(const rustsecp256k1zkp_v0_10_1_callback* error_callback, const rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t n_objects); /** Returns a pointer into the most recently allocated frame, or NULL if there is insufficient available space */ -static void *rustsecp256k1zkp_v0_10_0_scratch_alloc(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t n); +static void *rustsecp256k1zkp_v0_10_1_scratch_alloc(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t n); #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/scratch_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/scratch_impl.h index 43187f09..713735f7 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/scratch_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/scratch_impl.h @@ -10,29 +10,29 @@ #include "util.h" #include "scratch.h" -static size_t rustsecp256k1zkp_v0_10_0_scratch_checkpoint(const rustsecp256k1zkp_v0_10_0_callback* error_callback, const rustsecp256k1zkp_v0_10_0_scratch* scratch) { - if (rustsecp256k1zkp_v0_10_0_memcmp_var(scratch->magic, "scratch", 8) != 0) { - rustsecp256k1zkp_v0_10_0_callback_call(error_callback, "invalid scratch space"); +static size_t rustsecp256k1zkp_v0_10_1_scratch_checkpoint(const 
rustsecp256k1zkp_v0_10_1_callback* error_callback, const rustsecp256k1zkp_v0_10_1_scratch* scratch) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(scratch->magic, "scratch", 8) != 0) { + rustsecp256k1zkp_v0_10_1_callback_call(error_callback, "invalid scratch space"); return 0; } return scratch->alloc_size; } -static void rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t checkpoint) { - if (rustsecp256k1zkp_v0_10_0_memcmp_var(scratch->magic, "scratch", 8) != 0) { - rustsecp256k1zkp_v0_10_0_callback_call(error_callback, "invalid scratch space"); +static void rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t checkpoint) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(scratch->magic, "scratch", 8) != 0) { + rustsecp256k1zkp_v0_10_1_callback_call(error_callback, "invalid scratch space"); return; } if (checkpoint > scratch->alloc_size) { - rustsecp256k1zkp_v0_10_0_callback_call(error_callback, "invalid checkpoint"); + rustsecp256k1zkp_v0_10_1_callback_call(error_callback, "invalid checkpoint"); return; } scratch->alloc_size = checkpoint; } -static size_t rustsecp256k1zkp_v0_10_0_scratch_max_allocation(const rustsecp256k1zkp_v0_10_0_callback* error_callback, const rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t objects) { - if (rustsecp256k1zkp_v0_10_0_memcmp_var(scratch->magic, "scratch", 8) != 0) { - rustsecp256k1zkp_v0_10_0_callback_call(error_callback, "invalid scratch space"); +static size_t rustsecp256k1zkp_v0_10_1_scratch_max_allocation(const rustsecp256k1zkp_v0_10_1_callback* error_callback, const rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t objects) { + if (rustsecp256k1zkp_v0_10_1_memcmp_var(scratch->magic, "scratch", 8) != 0) { + rustsecp256k1zkp_v0_10_1_callback_call(error_callback, "invalid scratch space"); return 0; } /* Ensure that multiplication 
will not wrap around */ @@ -45,7 +45,7 @@ static size_t rustsecp256k1zkp_v0_10_0_scratch_max_allocation(const rustsecp256k return scratch->max_size - scratch->alloc_size - objects * (ALIGNMENT - 1); } -static void *rustsecp256k1zkp_v0_10_0_scratch_alloc(const rustsecp256k1zkp_v0_10_0_callback* error_callback, rustsecp256k1zkp_v0_10_0_scratch* scratch, size_t size) { +static void *rustsecp256k1zkp_v0_10_1_scratch_alloc(const rustsecp256k1zkp_v0_10_1_callback* error_callback, rustsecp256k1zkp_v0_10_1_scratch* scratch, size_t size) { void *ret; size_t rounded_size; @@ -56,8 +56,8 @@ static void *rustsecp256k1zkp_v0_10_0_scratch_alloc(const rustsecp256k1zkp_v0_10 } size = rounded_size; - if (rustsecp256k1zkp_v0_10_0_memcmp_var(scratch->magic, "scratch", 8) != 0) { - rustsecp256k1zkp_v0_10_0_callback_call(error_callback, "invalid scratch space"); + if (rustsecp256k1zkp_v0_10_1_memcmp_var(scratch->magic, "scratch", 8) != 0) { + rustsecp256k1zkp_v0_10_1_callback_call(error_callback, "invalid scratch space"); return NULL; } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/secp256k1.c b/secp256k1-zkp-sys/depend/secp256k1/src/secp256k1.c index 4670f8d9..cf76ce8a 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/secp256k1.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/secp256k1.c @@ -52,10 +52,10 @@ #ifdef ENABLE_MODULE_ECDSA_S2C #include "../include/secp256k1_ecdsa_s2c.h" -static void rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_0_ge* ge); +static void rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_1_ge* ge); #else -typedef void rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening; -static void rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_0_ge* ge) { +typedef void rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening; +static void 
rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening* opening, rustsecp256k1zkp_v0_10_1_ge* ge) { (void) opening; (void) ge; VERIFY_CHECK(0); @@ -64,64 +64,64 @@ static void rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(rustsecp256k1zkp_v0_ #define ARG_CHECK(cond) do { \ if (EXPECT(!(cond), 0)) { \ - rustsecp256k1zkp_v0_10_0_callback_call(&ctx->illegal_callback, #cond); \ + rustsecp256k1zkp_v0_10_1_callback_call(&ctx->illegal_callback, #cond); \ return 0; \ } \ } while(0) #define ARG_CHECK_VOID(cond) do { \ if (EXPECT(!(cond), 0)) { \ - rustsecp256k1zkp_v0_10_0_callback_call(&ctx->illegal_callback, #cond); \ + rustsecp256k1zkp_v0_10_1_callback_call(&ctx->illegal_callback, #cond); \ return; \ } \ } while(0) /* Note that whenever you change the context struct, you must also change the * context_eq function. */ -struct rustsecp256k1zkp_v0_10_0_context_struct { - rustsecp256k1zkp_v0_10_0_ecmult_gen_context ecmult_gen_ctx; - rustsecp256k1zkp_v0_10_0_callback illegal_callback; - rustsecp256k1zkp_v0_10_0_callback error_callback; +struct rustsecp256k1zkp_v0_10_1_context_struct { + rustsecp256k1zkp_v0_10_1_ecmult_gen_context ecmult_gen_ctx; + rustsecp256k1zkp_v0_10_1_callback illegal_callback; + rustsecp256k1zkp_v0_10_1_callback error_callback; int declassify; }; -static const rustsecp256k1zkp_v0_10_0_context rustsecp256k1zkp_v0_10_0_context_static_ = { +static const rustsecp256k1zkp_v0_10_1_context rustsecp256k1zkp_v0_10_1_context_static_ = { { 0 }, - { rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn, 0 }, - { rustsecp256k1zkp_v0_10_0_default_error_callback_fn, 0 }, + { rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn, 0 }, + { rustsecp256k1zkp_v0_10_1_default_error_callback_fn, 0 }, 0 }; -const rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_static = &rustsecp256k1zkp_v0_10_0_context_static_; -const rustsecp256k1zkp_v0_10_0_context *rustsecp256k1zkp_v0_10_0_context_no_precomp = 
&rustsecp256k1zkp_v0_10_0_context_static_; +const rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_static = &rustsecp256k1zkp_v0_10_1_context_static_; +const rustsecp256k1zkp_v0_10_1_context *rustsecp256k1zkp_v0_10_1_context_no_precomp = &rustsecp256k1zkp_v0_10_1_context_static_; /* Helper function that determines if a context is proper, i.e., is not the static context or a copy thereof. * - * This is intended for "context" functions such as rustsecp256k1zkp_v0_10_0_context_clone. Function which need specific + * This is intended for "context" functions such as rustsecp256k1zkp_v0_10_1_context_clone. Function which need specific * features of a context should still check for these features directly. For example, a function that needs * ecmult_gen should directly check for the existence of the ecmult_gen context. */ -static int rustsecp256k1zkp_v0_10_0_context_is_proper(const rustsecp256k1zkp_v0_10_0_context* ctx) { - return rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx); +static int rustsecp256k1zkp_v0_10_1_context_is_proper(const rustsecp256k1zkp_v0_10_1_context* ctx) { + return rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx); } -void rustsecp256k1zkp_v0_10_0_selftest(void) { - if (!rustsecp256k1zkp_v0_10_0_selftest_passes()) { - rustsecp256k1zkp_v0_10_0_callback_call(&default_error_callback, "self test failed"); +void rustsecp256k1zkp_v0_10_1_selftest(void) { + if (!rustsecp256k1zkp_v0_10_1_selftest_passes()) { + rustsecp256k1zkp_v0_10_1_callback_call(&default_error_callback, "self test failed"); } } -size_t rustsecp256k1zkp_v0_10_0_context_preallocated_size(unsigned int flags) { - size_t ret = sizeof(rustsecp256k1zkp_v0_10_0_context); +size_t rustsecp256k1zkp_v0_10_1_context_preallocated_size(unsigned int flags) { + size_t ret = sizeof(rustsecp256k1zkp_v0_10_1_context); /* A return value of 0 is reserved as an indicator for errors when we call this function internally. 
*/ VERIFY_CHECK(ret != 0); if (EXPECT((flags & SECP256K1_FLAGS_TYPE_MASK) != SECP256K1_FLAGS_TYPE_CONTEXT, 0)) { - rustsecp256k1zkp_v0_10_0_callback_call(&default_illegal_callback, + rustsecp256k1zkp_v0_10_1_callback_call(&default_illegal_callback, "Invalid flags"); return 0; } if (EXPECT(!SECP256K1_CHECKMEM_RUNNING() && (flags & SECP256K1_FLAGS_BIT_CONTEXT_DECLASSIFY), 0)) { - rustsecp256k1zkp_v0_10_0_callback_call(&default_illegal_callback, + rustsecp256k1zkp_v0_10_1_callback_call(&default_illegal_callback, "Declassify flag requires running with memory checking"); return 0; } @@ -129,76 +129,76 @@ size_t rustsecp256k1zkp_v0_10_0_context_preallocated_size(unsigned int flags) { return ret; } -size_t rustsecp256k1zkp_v0_10_0_context_preallocated_clone_size(const rustsecp256k1zkp_v0_10_0_context* ctx) { +size_t rustsecp256k1zkp_v0_10_1_context_preallocated_clone_size(const rustsecp256k1zkp_v0_10_1_context* ctx) { VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_context_is_proper(ctx)); - return sizeof(rustsecp256k1zkp_v0_10_0_context); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_context_is_proper(ctx)); + return sizeof(rustsecp256k1zkp_v0_10_1_context); } -rustsecp256k1zkp_v0_10_0_context* rustsecp256k1zkp_v0_10_0_context_preallocated_create(void* prealloc, unsigned int flags) { +rustsecp256k1zkp_v0_10_1_context* rustsecp256k1zkp_v0_10_1_context_preallocated_create(void* prealloc, unsigned int flags) { size_t prealloc_size; - rustsecp256k1zkp_v0_10_0_context* ret; + rustsecp256k1zkp_v0_10_1_context* ret; - rustsecp256k1zkp_v0_10_0_selftest(); + rustsecp256k1zkp_v0_10_1_selftest(); - prealloc_size = rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags); + prealloc_size = rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags); if (prealloc_size == 0) { return NULL; } VERIFY_CHECK(prealloc != NULL); - ret = (rustsecp256k1zkp_v0_10_0_context*)prealloc; + ret = (rustsecp256k1zkp_v0_10_1_context*)prealloc; ret->illegal_callback = default_illegal_callback; 
ret->error_callback = default_error_callback; - /* Flags have been checked by rustsecp256k1zkp_v0_10_0_context_preallocated_size. */ + /* Flags have been checked by rustsecp256k1zkp_v0_10_1_context_preallocated_size. */ VERIFY_CHECK((flags & SECP256K1_FLAGS_TYPE_MASK) == SECP256K1_FLAGS_TYPE_CONTEXT); - rustsecp256k1zkp_v0_10_0_ecmult_gen_context_build(&ret->ecmult_gen_ctx); + rustsecp256k1zkp_v0_10_1_ecmult_gen_context_build(&ret->ecmult_gen_ctx); ret->declassify = !!(flags & SECP256K1_FLAGS_BIT_CONTEXT_DECLASSIFY); return ret; } -rustsecp256k1zkp_v0_10_0_context* rustsecp256k1zkp_v0_10_0_context_preallocated_clone(const rustsecp256k1zkp_v0_10_0_context* ctx, void* prealloc) { - rustsecp256k1zkp_v0_10_0_context* ret; +rustsecp256k1zkp_v0_10_1_context* rustsecp256k1zkp_v0_10_1_context_preallocated_clone(const rustsecp256k1zkp_v0_10_1_context* ctx, void* prealloc) { + rustsecp256k1zkp_v0_10_1_context* ret; VERIFY_CHECK(ctx != NULL); ARG_CHECK(prealloc != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_context_is_proper(ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_context_is_proper(ctx)); - ret = (rustsecp256k1zkp_v0_10_0_context*)prealloc; + ret = (rustsecp256k1zkp_v0_10_1_context*)prealloc; *ret = *ctx; return ret; } -void rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(rustsecp256k1zkp_v0_10_0_context* ctx) { - ARG_CHECK_VOID(ctx == NULL || rustsecp256k1zkp_v0_10_0_context_is_proper(ctx)); +void rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(rustsecp256k1zkp_v0_10_1_context* ctx) { + ARG_CHECK_VOID(ctx == NULL || rustsecp256k1zkp_v0_10_1_context_is_proper(ctx)); /* Defined as noop */ if (ctx == NULL) { return; } - rustsecp256k1zkp_v0_10_0_ecmult_gen_context_clear(&ctx->ecmult_gen_ctx); + rustsecp256k1zkp_v0_10_1_ecmult_gen_context_clear(&ctx->ecmult_gen_ctx); } -void rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(rustsecp256k1zkp_v0_10_0_context* ctx, void (*fun)(const char* message, void* data), const void* data) { - /* We compare pointers instead 
of checking rustsecp256k1zkp_v0_10_0_context_is_proper() here +void rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(rustsecp256k1zkp_v0_10_1_context* ctx, void (*fun)(const char* message, void* data), const void* data) { + /* We compare pointers instead of checking rustsecp256k1zkp_v0_10_1_context_is_proper() here because setting callbacks is allowed on *copies* of the static context: it's harmless and makes testing easier. */ - ARG_CHECK_VOID(ctx != rustsecp256k1zkp_v0_10_0_context_static); + ARG_CHECK_VOID(ctx != rustsecp256k1zkp_v0_10_1_context_static); if (fun == NULL) { - fun = rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn; + fun = rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn; } ctx->illegal_callback.fn = fun; ctx->illegal_callback.data = data; } -void rustsecp256k1zkp_v0_10_0_context_set_error_callback(rustsecp256k1zkp_v0_10_0_context* ctx, void (*fun)(const char* message, void* data), const void* data) { - /* We compare pointers instead of checking rustsecp256k1zkp_v0_10_0_context_is_proper() here +void rustsecp256k1zkp_v0_10_1_context_set_error_callback(rustsecp256k1zkp_v0_10_1_context* ctx, void (*fun)(const char* message, void* data), const void* data) { + /* We compare pointers instead of checking rustsecp256k1zkp_v0_10_1_context_is_proper() here because setting callbacks is allowed on *copies* of the static context: it's harmless and makes testing easier. */ - ARG_CHECK_VOID(ctx != rustsecp256k1zkp_v0_10_0_context_static); + ARG_CHECK_VOID(ctx != rustsecp256k1zkp_v0_10_1_context_static); if (fun == NULL) { - fun = rustsecp256k1zkp_v0_10_0_default_error_callback_fn; + fun = rustsecp256k1zkp_v0_10_1_default_error_callback_fn; } ctx->error_callback.fn = fun; ctx->error_callback.data = data; @@ -207,40 +207,40 @@ void rustsecp256k1zkp_v0_10_0_context_set_error_callback(rustsecp256k1zkp_v0_10_ /* Mark memory as no-longer-secret for the purpose of analysing constant-time behaviour * of the software. 
*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_declassify(const rustsecp256k1zkp_v0_10_0_context* ctx, const void *p, size_t len) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_declassify(const rustsecp256k1zkp_v0_10_1_context* ctx, const void *p, size_t len) { if (EXPECT(ctx->declassify, 0)) SECP256K1_CHECKMEM_DEFINE(p, len); } -static int rustsecp256k1zkp_v0_10_0_pubkey_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ge* ge, const rustsecp256k1zkp_v0_10_0_pubkey* pubkey) { - rustsecp256k1zkp_v0_10_0_ge_from_bytes(ge, pubkey->data); - ARG_CHECK(!rustsecp256k1zkp_v0_10_0_fe_is_zero(&ge->x)); +static int rustsecp256k1zkp_v0_10_1_pubkey_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ge* ge, const rustsecp256k1zkp_v0_10_1_pubkey* pubkey) { + rustsecp256k1zkp_v0_10_1_ge_from_bytes(ge, pubkey->data); + ARG_CHECK(!rustsecp256k1zkp_v0_10_1_fe_is_zero(&ge->x)); return 1; } -static void rustsecp256k1zkp_v0_10_0_pubkey_save(rustsecp256k1zkp_v0_10_0_pubkey* pubkey, rustsecp256k1zkp_v0_10_0_ge* ge) { - rustsecp256k1zkp_v0_10_0_ge_to_bytes(pubkey->data, ge); +static void rustsecp256k1zkp_v0_10_1_pubkey_save(rustsecp256k1zkp_v0_10_1_pubkey* pubkey, rustsecp256k1zkp_v0_10_1_ge* ge) { + rustsecp256k1zkp_v0_10_1_ge_to_bytes(pubkey->data, ge); } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey* pubkey, const unsigned char *input, size_t inputlen) { - rustsecp256k1zkp_v0_10_0_ge Q; +int rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey* pubkey, const unsigned char *input, size_t inputlen) { + rustsecp256k1zkp_v0_10_1_ge Q; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); memset(pubkey, 0, sizeof(*pubkey)); ARG_CHECK(input != NULL); - if (!rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&Q, input, inputlen)) { + if (!rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&Q, 
input, inputlen)) { return 0; } - if (!rustsecp256k1zkp_v0_10_0_ge_is_in_correct_subgroup(&Q)) { + if (!rustsecp256k1zkp_v0_10_1_ge_is_in_correct_subgroup(&Q)) { return 0; } - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &Q); - rustsecp256k1zkp_v0_10_0_ge_clear(&Q); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &Q); + rustsecp256k1zkp_v0_10_1_ge_clear(&Q); return 1; } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1zkp_v0_10_0_pubkey* pubkey, unsigned int flags) { - rustsecp256k1zkp_v0_10_0_ge Q; +int rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1zkp_v0_10_1_pubkey* pubkey, unsigned int flags) { + rustsecp256k1zkp_v0_10_1_ge Q; size_t len; int ret = 0; @@ -253,8 +253,8 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(const rustsecp256k1zkp_v0_10_0_ memset(output, 0, len); ARG_CHECK(pubkey != NULL); ARG_CHECK((flags & SECP256K1_FLAGS_TYPE_MASK) == SECP256K1_FLAGS_TYPE_COMPRESSION); - if (rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &Q, pubkey)) { - ret = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&Q, output, &len, flags & SECP256K1_FLAGS_BIT_COMPRESSION); + if (rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &Q, pubkey)) { + ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&Q, output, &len, flags & SECP256K1_FLAGS_BIT_COMPRESSION); if (ret) { *outputlen = len; } @@ -262,9 +262,9 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(const rustsecp256k1zkp_v0_10_0_ return ret; } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_pubkey* pubkey0, const rustsecp256k1zkp_v0_10_0_pubkey* pubkey1) { +int rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_pubkey* pubkey0, const rustsecp256k1zkp_v0_10_1_pubkey* pubkey1) { unsigned 
char out[2][33]; - const rustsecp256k1zkp_v0_10_0_pubkey* pk[2]; + const rustsecp256k1zkp_v0_10_1_pubkey* pk[2]; int i; VERIFY_CHECK(ctx != NULL); @@ -277,7 +277,7 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_contex * results in consistent comparisons even if NULL or invalid pubkeys are * involved and prevents edge cases such as sorting algorithms that use * this function and do not terminate as a result. */ - if (!rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(ctx, out[i], &out_size, pk[i], SECP256K1_EC_COMPRESSED)) { + if (!rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(ctx, out[i], &out_size, pk[i], SECP256K1_EC_COMPRESSED)) { /* Note that ec_pubkey_serialize should already set the output to * zero in that case, but it's not guaranteed by the API, we can't * test it and writing a VERIFY_CHECK is more complex than @@ -285,42 +285,42 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(const rustsecp256k1zkp_v0_10_0_contex memset(out[i], 0, sizeof(out[i])); } } - return rustsecp256k1zkp_v0_10_0_memcmp_var(out[0], out[1], sizeof(out[0])); + return rustsecp256k1zkp_v0_10_1_memcmp_var(out[0], out[1], sizeof(out[0])); } -static void rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar* r, rustsecp256k1zkp_v0_10_0_scalar* s, const rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig) { +static void rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar* r, rustsecp256k1zkp_v0_10_1_scalar* s, const rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig) { (void)ctx; - if (sizeof(rustsecp256k1zkp_v0_10_0_scalar) == 32) { - /* When the rustsecp256k1zkp_v0_10_0_scalar type is exactly 32 byte, use its - * representation inside rustsecp256k1zkp_v0_10_0_ecdsa_signature, as conversion is very fast. - * Note that rustsecp256k1zkp_v0_10_0_ecdsa_signature_save must use the same representation. 
*/ + if (sizeof(rustsecp256k1zkp_v0_10_1_scalar) == 32) { + /* When the rustsecp256k1zkp_v0_10_1_scalar type is exactly 32 byte, use its + * representation inside rustsecp256k1zkp_v0_10_1_ecdsa_signature, as conversion is very fast. + * Note that rustsecp256k1zkp_v0_10_1_ecdsa_signature_save must use the same representation. */ memcpy(r, &sig->data[0], 32); memcpy(s, &sig->data[32], 32); } else { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, &sig->data[0], NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(s, &sig->data[32], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, &sig->data[0], NULL); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(s, &sig->data[32], NULL); } } -static void rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const rustsecp256k1zkp_v0_10_0_scalar* r, const rustsecp256k1zkp_v0_10_0_scalar* s) { - if (sizeof(rustsecp256k1zkp_v0_10_0_scalar) == 32) { +static void rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const rustsecp256k1zkp_v0_10_1_scalar* r, const rustsecp256k1zkp_v0_10_1_scalar* s) { + if (sizeof(rustsecp256k1zkp_v0_10_1_scalar) == 32) { memcpy(&sig->data[0], r, 32); memcpy(&sig->data[32], s, 32); } else { - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[0], r); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&sig->data[32], s); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[0], r); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&sig->data[32], s); } } -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char *input, size_t inputlen) { + rustsecp256k1zkp_v0_10_1_scalar r, s; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sig 
!= NULL); ARG_CHECK(input != NULL); - if (rustsecp256k1zkp_v0_10_0_ecdsa_sig_parse(&r, &s, input, inputlen)) { - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(sig, &r, &s); + if (rustsecp256k1zkp_v0_10_1_ecdsa_sig_parse(&r, &s, input, inputlen)) { + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(sig, &r, &s); return 1; } else { memset(sig, 0, sizeof(*sig)); @@ -328,8 +328,8 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(const rustsecp256k1zkp_v0 } } -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig, const unsigned char *input64) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig, const unsigned char *input64) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret = 1; int overflow = 0; @@ -337,76 +337,76 @@ int rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(const rustsecp256k1zk ARG_CHECK(sig != NULL); ARG_CHECK(input64 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&r, &input64[0], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&r, &input64[0], &overflow); ret &= !overflow; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s, &input64[32], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s, &input64[32], &overflow); ret &= !overflow; if (ret) { - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(sig, &r, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(sig, &r, &s); } else { memset(sig, 0, sizeof(*sig)); } return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output, size_t *outputlen, const rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output, size_t *outputlen, 
const rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig) { + rustsecp256k1zkp_v0_10_1_scalar r, s; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output != NULL); ARG_CHECK(outputlen != NULL); ARG_CHECK(sig != NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, sig); - return rustsecp256k1zkp_v0_10_0_ecdsa_sig_serialize(output, outputlen, &r, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, sig); + return rustsecp256k1zkp_v0_10_1_ecdsa_sig_serialize(output, outputlen, &r, &s); } -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *output64, const rustsecp256k1zkp_v0_10_0_ecdsa_signature* sig) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *output64, const rustsecp256k1zkp_v0_10_1_ecdsa_signature* sig) { + rustsecp256k1zkp_v0_10_1_scalar r, s; VERIFY_CHECK(ctx != NULL); ARG_CHECK(output64 != NULL); ARG_CHECK(sig != NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, sig); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&output64[0], &r); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(&output64[32], &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, sig); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&output64[0], &r); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(&output64[32], &s); return 1; } -int rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature *sigout, const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sigin) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature *sigout, const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sigin) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(sigin != NULL); - 
rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, sigin); - ret = rustsecp256k1zkp_v0_10_0_scalar_is_high(&s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, sigin); + ret = rustsecp256k1zkp_v0_10_1_scalar_is_high(&s); if (sigout != NULL) { if (ret) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); } - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(sigout, &r, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(sigout, &r, &s); } return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig, const unsigned char *msghash32, const rustsecp256k1zkp_v0_10_0_pubkey *pubkey) { - rustsecp256k1zkp_v0_10_0_ge q; - rustsecp256k1zkp_v0_10_0_scalar r, s; - rustsecp256k1zkp_v0_10_0_scalar m; +int rustsecp256k1zkp_v0_10_1_ecdsa_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig, const unsigned char *msghash32, const rustsecp256k1zkp_v0_10_1_pubkey *pubkey) { + rustsecp256k1zkp_v0_10_1_ge q; + rustsecp256k1zkp_v0_10_1_scalar r, s; + rustsecp256k1zkp_v0_10_1_scalar m; VERIFY_CHECK(ctx != NULL); ARG_CHECK(msghash32 != NULL); ARG_CHECK(sig != NULL); ARG_CHECK(pubkey != NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&m, msghash32, NULL); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, sig); - return (!rustsecp256k1zkp_v0_10_0_scalar_is_high(&s) && - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &q, pubkey) && - rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&r, &s, &q, &m)); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&m, msghash32, NULL); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, sig); + return (!rustsecp256k1zkp_v0_10_1_scalar_is_high(&s) && + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &q, pubkey) && + rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&r, &s, &q, &m)); } static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int 
*offset, const void *data, unsigned int len) { @@ -417,12 +417,12 @@ static SECP256K1_INLINE void buffer_append(unsigned char *buf, unsigned int *off static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) { unsigned char keydata[112]; unsigned int offset = 0; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; unsigned int i; - rustsecp256k1zkp_v0_10_0_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar msg; unsigned char msgmod32[32]; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&msg, msg32, NULL); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msgmod32, &msg); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&msg, msg32, NULL); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(msgmod32, &msg); /* We feed a byte array to the PRNG as input, consisting of: * - the private key (32 bytes) and reduced message (32 bytes), see RFC 6979 3.2d. * - optionally 32 extra bytes of data, see RFC 6979 3.6 Additional Data. 
@@ -439,80 +439,80 @@ static int nonce_function_rfc6979(unsigned char *nonce32, const unsigned char *m if (algo16 != NULL) { buffer_append(keydata, &offset, algo16, 16); } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, keydata, offset); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, keydata, offset); memset(keydata, 0, sizeof(keydata)); for (i = 0; i <= counter; i++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, nonce32, 32); } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); return 1; } -const rustsecp256k1zkp_v0_10_0_nonce_function rustsecp256k1zkp_v0_10_0_nonce_function_rfc6979 = nonce_function_rfc6979; -const rustsecp256k1zkp_v0_10_0_nonce_function rustsecp256k1zkp_v0_10_0_nonce_function_default = nonce_function_rfc6979; +const rustsecp256k1zkp_v0_10_1_nonce_function rustsecp256k1zkp_v0_10_1_nonce_function_rfc6979 = nonce_function_rfc6979; +const rustsecp256k1zkp_v0_10_1_nonce_function rustsecp256k1zkp_v0_10_1_nonce_function_default = nonce_function_rfc6979; -static int rustsecp256k1zkp_v0_10_0_ecdsa_sign_inner(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_scalar* r, rustsecp256k1zkp_v0_10_0_scalar* s, int* recid, rustsecp256k1zkp_v0_10_0_sha256* s2c_sha, rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening *s2c_opening, const unsigned char* s2c_data32, const unsigned char *msg32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_0_nonce_function noncefp, const void* noncedata) { - rustsecp256k1zkp_v0_10_0_scalar sec, non, msg; +static int rustsecp256k1zkp_v0_10_1_ecdsa_sign_inner(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_scalar* r, rustsecp256k1zkp_v0_10_1_scalar* s, int* recid, rustsecp256k1zkp_v0_10_1_sha256* s2c_sha, rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening *s2c_opening, const unsigned char* s2c_data32, const 
unsigned char *msg32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_1_nonce_function noncefp, const void* noncedata) { + rustsecp256k1zkp_v0_10_1_scalar sec, non, msg; int ret = 0; int is_sec_valid; unsigned char nonce32[32]; unsigned int count = 0; /* Default initialization here is important so we won't pass uninit values to the cmov in the end */ - *r = rustsecp256k1zkp_v0_10_0_scalar_zero; - *s = rustsecp256k1zkp_v0_10_0_scalar_zero; + *r = rustsecp256k1zkp_v0_10_1_scalar_zero; + *s = rustsecp256k1zkp_v0_10_1_scalar_zero; if (recid) { *recid = 0; } if (noncefp == NULL) { - noncefp = rustsecp256k1zkp_v0_10_0_nonce_function_default; + noncefp = rustsecp256k1zkp_v0_10_1_nonce_function_default; } /* sign-to-contract commitments only work with the default nonce function, * because we need to ensure that s2c_data is actually hashed into the nonce and * not just ignored. Otherwise an attacker can exfiltrate the secret key by * signing the same message thrice with different commitments. */ - VERIFY_CHECK(s2c_data32 == NULL || noncefp == rustsecp256k1zkp_v0_10_0_nonce_function_default); + VERIFY_CHECK(s2c_data32 == NULL || noncefp == rustsecp256k1zkp_v0_10_1_nonce_function_default); /* Fail if the secret key is invalid. 
*/ - is_sec_valid = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sec, seckey); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_0_scalar_one, !is_sec_valid); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&msg, msg32, NULL); + is_sec_valid = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sec, seckey); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_1_scalar_one, !is_sec_valid); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&msg, msg32, NULL); while (1) { int is_nonce_valid; ret = !!noncefp(nonce32, msg32, seckey, NULL, (void*)noncedata, count); if (!ret) { break; } - is_nonce_valid = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&non, nonce32); + is_nonce_valid = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&non, nonce32); /* The nonce is still secret here, but it being invalid is is less likely than 1:2^255. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &is_nonce_valid, sizeof(is_nonce_valid)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &is_nonce_valid, sizeof(is_nonce_valid)); if (is_nonce_valid) { if (s2c_data32 != NULL) { - rustsecp256k1zkp_v0_10_0_gej nonce_pj; - rustsecp256k1zkp_v0_10_0_ge nonce_p; + rustsecp256k1zkp_v0_10_1_gej nonce_pj; + rustsecp256k1zkp_v0_10_1_ge nonce_p; /* Compute original nonce commitment/pubkey */ - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &nonce_pj, &non); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&nonce_p, &nonce_pj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &nonce_pj, &non); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&nonce_p, &nonce_pj); if (s2c_opening != NULL) { - rustsecp256k1zkp_v0_10_0_ecdsa_s2c_opening_save(s2c_opening, &nonce_p); + rustsecp256k1zkp_v0_10_1_ecdsa_s2c_opening_save(s2c_opening, &nonce_p); } /* Because the nonce is valid, the nonce point isn't the point * at infinity and we can declassify that information to be able to * serialize the point. 
*/ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &nonce_p.infinity, sizeof(nonce_p.infinity)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &nonce_p.infinity, sizeof(nonce_p.infinity)); /* Tweak nonce with s2c commitment. */ - ret = rustsecp256k1zkp_v0_10_0_ec_commit_seckey(&non, &nonce_p, s2c_sha, s2c_data32, 32); - rustsecp256k1zkp_v0_10_0_declassify(ctx, &ret, sizeof(ret)); /* may be secret that the tweak falied, but happens with negligible probability */ + ret = rustsecp256k1zkp_v0_10_1_ec_commit_seckey(&non, &nonce_p, s2c_sha, s2c_data32, 32); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &ret, sizeof(ret)); /* may be secret that the tweak falied, but happens with negligible probability */ if (!ret) { break; } } - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(&ctx->ecmult_gen_ctx, r, s, &sec, &msg, &non, recid); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(&ctx->ecmult_gen_ctx, r, s, &sec, &msg, &non, recid); /* The final signature is no longer a secret, nor is the fact that we were successful or not. */ - rustsecp256k1zkp_v0_10_0_declassify(ctx, &ret, sizeof(ret)); + rustsecp256k1zkp_v0_10_1_declassify(ctx, &ret, sizeof(ret)); if (ret) { break; } @@ -524,202 +524,202 @@ static int rustsecp256k1zkp_v0_10_0_ecdsa_sign_inner(const rustsecp256k1zkp_v0_1 * used as a branching variable. 
*/ ret &= is_sec_valid; memset(nonce32, 0, 32); - rustsecp256k1zkp_v0_10_0_scalar_clear(&msg); - rustsecp256k1zkp_v0_10_0_scalar_clear(&non); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); - rustsecp256k1zkp_v0_10_0_scalar_cmov(r, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_scalar_cmov(s, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_clear(&msg); + rustsecp256k1zkp_v0_10_1_scalar_clear(&non); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_cmov(r, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_cmov(s, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); if (recid) { const int zero = 0; - rustsecp256k1zkp_v0_10_0_int_cmov(recid, &zero, !ret); + rustsecp256k1zkp_v0_10_1_int_cmov(recid, &zero, !ret); } return ret; } -int rustsecp256k1zkp_v0_10_0_ecdsa_sign(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_ecdsa_signature *signature, const unsigned char *msghash32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_0_nonce_function noncefp, const void* noncedata) { - rustsecp256k1zkp_v0_10_0_scalar r, s; +int rustsecp256k1zkp_v0_10_1_ecdsa_sign(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_ecdsa_signature *signature, const unsigned char *msghash32, const unsigned char *seckey, rustsecp256k1zkp_v0_10_1_nonce_function noncefp, const void* noncedata) { + rustsecp256k1zkp_v0_10_1_scalar r, s; int ret; VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(msghash32 != NULL); ARG_CHECK(signature != NULL); ARG_CHECK(seckey != NULL); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign_inner(ctx, &r, &s, NULL, NULL, NULL, NULL, msghash32, seckey, noncefp, noncedata); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(signature, &r, &s); + ret = 
rustsecp256k1zkp_v0_10_1_ecdsa_sign_inner(ctx, &r, &s, NULL, NULL, NULL, NULL, msghash32, seckey, noncefp, noncedata); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(signature, &r, &s); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_seckey_verify(const rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char *seckey) { - rustsecp256k1zkp_v0_10_0_scalar sec; +int rustsecp256k1zkp_v0_10_1_ec_seckey_verify(const rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char *seckey) { + rustsecp256k1zkp_v0_10_1_scalar sec; int ret; VERIFY_CHECK(ctx != NULL); ARG_CHECK(seckey != NULL); - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sec, seckey); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); + ret = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sec, seckey); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); return ret; } -static int rustsecp256k1zkp_v0_10_0_ec_pubkey_create_helper(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_0_scalar *seckey_scalar, rustsecp256k1zkp_v0_10_0_ge *p, const unsigned char *seckey) { - rustsecp256k1zkp_v0_10_0_gej pj; +static int rustsecp256k1zkp_v0_10_1_ec_pubkey_create_helper(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *ecmult_gen_ctx, rustsecp256k1zkp_v0_10_1_scalar *seckey_scalar, rustsecp256k1zkp_v0_10_1_ge *p, const unsigned char *seckey) { + rustsecp256k1zkp_v0_10_1_gej pj; int ret; - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(seckey_scalar, seckey); - rustsecp256k1zkp_v0_10_0_scalar_cmov(seckey_scalar, &rustsecp256k1zkp_v0_10_0_scalar_one, !ret); + ret = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(seckey_scalar, seckey); + rustsecp256k1zkp_v0_10_1_scalar_cmov(seckey_scalar, &rustsecp256k1zkp_v0_10_1_scalar_one, !ret); - rustsecp256k1zkp_v0_10_0_ecmult_gen(ecmult_gen_ctx, &pj, seckey_scalar); - rustsecp256k1zkp_v0_10_0_ge_set_gej(p, &pj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(ecmult_gen_ctx, &pj, seckey_scalar); + rustsecp256k1zkp_v0_10_1_ge_set_gej(p, &pj); return 
ret; } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_create(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *seckey) { - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_scalar seckey_scalar; +int rustsecp256k1zkp_v0_10_1_ec_pubkey_create(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *seckey) { + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_scalar seckey_scalar; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); memset(pubkey, 0, sizeof(*pubkey)); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)); ARG_CHECK(seckey != NULL); - ret = rustsecp256k1zkp_v0_10_0_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &seckey_scalar, &p, seckey); - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &p); - rustsecp256k1zkp_v0_10_0_memczero(pubkey, sizeof(*pubkey), !ret); + ret = rustsecp256k1zkp_v0_10_1_ec_pubkey_create_helper(&ctx->ecmult_gen_ctx, &seckey_scalar, &p, seckey); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &p); + rustsecp256k1zkp_v0_10_1_memczero(pubkey, sizeof(*pubkey), !ret); - rustsecp256k1zkp_v0_10_0_scalar_clear(&seckey_scalar); + rustsecp256k1zkp_v0_10_1_scalar_clear(&seckey_scalar); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_seckey_negate(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey) { - rustsecp256k1zkp_v0_10_0_scalar sec; +int rustsecp256k1zkp_v0_10_1_ec_seckey_negate(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey) { + rustsecp256k1zkp_v0_10_1_scalar sec; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(seckey != NULL); - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sec, seckey); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_scalar_negate(&sec, &sec); - 
rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey, &sec); + ret = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sec, seckey); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sec, &sec); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &sec); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_privkey_negate(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey) { - return rustsecp256k1zkp_v0_10_0_ec_seckey_negate(ctx, seckey); +int rustsecp256k1zkp_v0_10_1_ec_privkey_negate(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey) { + return rustsecp256k1zkp_v0_10_1_ec_seckey_negate(ctx, seckey); } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey) { +int rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey) { int ret = 0; - rustsecp256k1zkp_v0_10_0_ge p; + rustsecp256k1zkp_v0_10_1_ge p; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); - ret = rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &p, pubkey); + ret = rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &p, pubkey); memset(pubkey, 0, sizeof(*pubkey)); if (ret) { - rustsecp256k1zkp_v0_10_0_ge_neg(&p, &p); - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &p); + rustsecp256k1zkp_v0_10_1_ge_neg(&p, &p); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &p); } return ret; } -static int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_0_scalar *sec, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_scalar term; +static int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(rustsecp256k1zkp_v0_10_1_scalar *sec, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_scalar term; int overflow = 0; int ret = 0; - 
rustsecp256k1zkp_v0_10_0_scalar_set_b32(&term, tweak32, &overflow); - ret = (!overflow) & rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_add(sec, &term); - rustsecp256k1zkp_v0_10_0_scalar_clear(&term); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&term, tweak32, &overflow); + ret = (!overflow) & rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_add(sec, &term); + rustsecp256k1zkp_v0_10_1_scalar_clear(&term); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_scalar sec; +int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_scalar sec; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(seckey != NULL); ARG_CHECK(tweak32 != NULL); - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sec, seckey); - ret &= rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add_helper(&sec, tweak32); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey, &sec); + ret = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sec, seckey); + ret &= rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add_helper(&sec, tweak32); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &sec); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { - return rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(ctx, seckey, tweak32); +int rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { + return 
rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(ctx, seckey, tweak32); } -static int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(rustsecp256k1zkp_v0_10_0_ge *p, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_scalar term; +static int rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(rustsecp256k1zkp_v0_10_1_ge *p, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_scalar term; int overflow = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&term, tweak32, &overflow); - return !overflow && rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_add(p, &term); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&term, tweak32, &overflow); + return !overflow && rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_add(p, &term); } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_ge p; +int rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_ge p; int ret = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); ARG_CHECK(tweak32 != NULL); - ret = rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &p, pubkey); + ret = rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &p, pubkey); memset(pubkey, 0, sizeof(*pubkey)); - ret = ret && rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add_helper(&p, tweak32); + ret = ret && rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add_helper(&p, tweak32); if (ret) { - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &p); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &p); } return ret; } -int rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_scalar factor; - rustsecp256k1zkp_v0_10_0_scalar sec; +int rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(const 
rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_scalar factor; + rustsecp256k1zkp_v0_10_1_scalar sec; int ret = 0; int overflow = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(seckey != NULL); ARG_CHECK(tweak32 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&factor, tweak32, &overflow); - ret = rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&sec, seckey); - ret &= (!overflow) & rustsecp256k1zkp_v0_10_0_eckey_privkey_tweak_mul(&sec, &factor); - rustsecp256k1zkp_v0_10_0_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_0_scalar_zero, !ret); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey, &sec); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&factor, tweak32, &overflow); + ret = rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&sec, seckey); + ret &= (!overflow) & rustsecp256k1zkp_v0_10_1_eckey_privkey_tweak_mul(&sec, &factor); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&sec, &rustsecp256k1zkp_v0_10_1_scalar_zero, !ret); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &sec); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sec); - rustsecp256k1zkp_v0_10_0_scalar_clear(&factor); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sec); + rustsecp256k1zkp_v0_10_1_scalar_clear(&factor); return ret; } -int rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_mul(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { - return rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(ctx, seckey, tweak32); +int rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_mul(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *seckey, const unsigned char *tweak32) { + return rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(ctx, seckey, tweak32); } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubkey, const unsigned char *tweak32) { - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_scalar factor; +int 
rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubkey, const unsigned char *tweak32) { + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_scalar factor; int ret = 0; int overflow = 0; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubkey != NULL); ARG_CHECK(tweak32 != NULL); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&factor, tweak32, &overflow); - ret = !overflow && rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &p, pubkey); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&factor, tweak32, &overflow); + ret = !overflow && rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &p, pubkey); memset(pubkey, 0, sizeof(*pubkey)); if (ret) { - if (rustsecp256k1zkp_v0_10_0_eckey_pubkey_tweak_mul(&p, &factor)) { - rustsecp256k1zkp_v0_10_0_pubkey_save(pubkey, &p); + if (rustsecp256k1zkp_v0_10_1_eckey_pubkey_tweak_mul(&p, &factor)) { + rustsecp256k1zkp_v0_10_1_pubkey_save(pubkey, &p); } else { ret = 0; } @@ -728,20 +728,20 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(const rustsecp256k1zkp_v0_10_0_ return ret; } -int rustsecp256k1zkp_v0_10_0_context_randomize(rustsecp256k1zkp_v0_10_0_context* ctx, const unsigned char *seed32) { +int rustsecp256k1zkp_v0_10_1_context_randomize(rustsecp256k1zkp_v0_10_1_context* ctx, const unsigned char *seed32) { VERIFY_CHECK(ctx != NULL); - ARG_CHECK(rustsecp256k1zkp_v0_10_0_context_is_proper(ctx)); + ARG_CHECK(rustsecp256k1zkp_v0_10_1_context_is_proper(ctx)); - if (rustsecp256k1zkp_v0_10_0_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) { - rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(&ctx->ecmult_gen_ctx, seed32); + if (rustsecp256k1zkp_v0_10_1_ecmult_gen_context_is_built(&ctx->ecmult_gen_ctx)) { + rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(&ctx->ecmult_gen_ctx, seed32); } return 1; } -int rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(const rustsecp256k1zkp_v0_10_0_context* ctx, rustsecp256k1zkp_v0_10_0_pubkey *pubnonce, const rustsecp256k1zkp_v0_10_0_pubkey * const *pubnonces, size_t n) { 
+int rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(const rustsecp256k1zkp_v0_10_1_context* ctx, rustsecp256k1zkp_v0_10_1_pubkey *pubnonce, const rustsecp256k1zkp_v0_10_1_pubkey * const *pubnonces, size_t n) { size_t i; - rustsecp256k1zkp_v0_10_0_gej Qj; - rustsecp256k1zkp_v0_10_0_ge Q; + rustsecp256k1zkp_v0_10_1_gej Qj; + rustsecp256k1zkp_v0_10_1_ge Q; VERIFY_CHECK(ctx != NULL); ARG_CHECK(pubnonce != NULL); @@ -749,43 +749,43 @@ int rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(const rustsecp256k1zkp_v0_10_0_co ARG_CHECK(n >= 1); ARG_CHECK(pubnonces != NULL); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&Qj); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&Qj); for (i = 0; i < n; i++) { ARG_CHECK(pubnonces[i] != NULL); - rustsecp256k1zkp_v0_10_0_pubkey_load(ctx, &Q, pubnonces[i]); - rustsecp256k1zkp_v0_10_0_gej_add_ge(&Qj, &Qj, &Q); + rustsecp256k1zkp_v0_10_1_pubkey_load(ctx, &Q, pubnonces[i]); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&Qj, &Qj, &Q); } - if (rustsecp256k1zkp_v0_10_0_gej_is_infinity(&Qj)) { + if (rustsecp256k1zkp_v0_10_1_gej_is_infinity(&Qj)) { return 0; } - rustsecp256k1zkp_v0_10_0_ge_set_gej(&Q, &Qj); - rustsecp256k1zkp_v0_10_0_pubkey_save(pubnonce, &Q); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&Q, &Qj); + rustsecp256k1zkp_v0_10_1_pubkey_save(pubnonce, &Q); return 1; } -int rustsecp256k1zkp_v0_10_0_tagged_sha256(const rustsecp256k1zkp_v0_10_0_context* ctx, unsigned char *hash32, const unsigned char *tag, size_t taglen, const unsigned char *msg, size_t msglen) { - rustsecp256k1zkp_v0_10_0_sha256 sha; +int rustsecp256k1zkp_v0_10_1_tagged_sha256(const rustsecp256k1zkp_v0_10_1_context* ctx, unsigned char *hash32, const unsigned char *tag, size_t taglen, const unsigned char *msg, size_t msglen) { + rustsecp256k1zkp_v0_10_1_sha256 sha; VERIFY_CHECK(ctx != NULL); ARG_CHECK(hash32 != NULL); ARG_CHECK(tag != NULL); ARG_CHECK(msg != NULL); - rustsecp256k1zkp_v0_10_0_sha256_initialize_tagged(&sha, tag, taglen); - rustsecp256k1zkp_v0_10_0_sha256_write(&sha, msg, msglen); - 
rustsecp256k1zkp_v0_10_0_sha256_finalize(&sha, hash32); + rustsecp256k1zkp_v0_10_1_sha256_initialize_tagged(&sha, tag, taglen); + rustsecp256k1zkp_v0_10_1_sha256_write(&sha, msg, msglen); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&sha, hash32); return 1; } /* Outputs 33 zero bytes if the given group element is the point at infinity and * otherwise outputs the compressed serialization */ -static void rustsecp256k1zkp_v0_10_0_ge_serialize_ext(unsigned char *out33, rustsecp256k1zkp_v0_10_0_ge* ge) { - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(ge)) { +static void rustsecp256k1zkp_v0_10_1_ge_serialize_ext(unsigned char *out33, rustsecp256k1zkp_v0_10_1_ge* ge) { + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(ge)) { memset(out33, 0, 33); } else { int ret; size_t size = 33; - ret = rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(ge, out33, &size, 1); + ret = rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(ge, out33, &size, 1); #ifdef VERIFY /* Serialize must succeed because the point is not at infinity */ VERIFY_CHECK(ret && size == 33); @@ -797,14 +797,14 @@ static void rustsecp256k1zkp_v0_10_0_ge_serialize_ext(unsigned char *out33, rust /* Outputs the point at infinity if the given byte array is all zero, otherwise * attempts to parse compressed point serialization. 
*/ -static int rustsecp256k1zkp_v0_10_0_ge_parse_ext(rustsecp256k1zkp_v0_10_0_ge* ge, const unsigned char *in33) { +static int rustsecp256k1zkp_v0_10_1_ge_parse_ext(rustsecp256k1zkp_v0_10_1_ge* ge, const unsigned char *in33) { unsigned char zeros[33] = { 0 }; - if (rustsecp256k1zkp_v0_10_0_memcmp_var(in33, zeros, sizeof(zeros)) == 0) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(ge); + if (rustsecp256k1zkp_v0_10_1_memcmp_var(in33, zeros, sizeof(zeros)) == 0) { + rustsecp256k1zkp_v0_10_1_ge_set_infinity(ge); return 1; } - return rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(ge, in33, 33); + return rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(ge, in33, 33); } #ifdef ENABLE_MODULE_BPPP @@ -831,6 +831,10 @@ static int rustsecp256k1zkp_v0_10_0_ge_parse_ext(rustsecp256k1zkp_v0_10_0_ge* ge #include "modules/schnorrsig_halfagg/main_impl.h" #endif +#ifdef ENABLE_MODULE_SCHNORR_ADAPTOR +#include "modules/schnorr_adaptor/main_impl.h" +#endif + #ifdef ENABLE_MODULE_ELLSWIFT #include "modules/ellswift/main_impl.h" #endif diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/selftest.h b/secp256k1-zkp-sys/depend/secp256k1/src/selftest.h index 007e9769..f17ffad1 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/selftest.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/selftest.h @@ -11,22 +11,22 @@ #include -static int rustsecp256k1zkp_v0_10_0_selftest_sha256(void) { +static int rustsecp256k1zkp_v0_10_1_selftest_sha256(void) { static const char *input63 = "For this sample, this 63-byte string will be used as input data"; static const unsigned char output32[32] = { 0xf0, 0x8a, 0x78, 0xcb, 0xba, 0xee, 0x08, 0x2b, 0x05, 0x2a, 0xe0, 0x70, 0x8f, 0x32, 0xfa, 0x1e, 0x50, 0xc5, 0xc4, 0x21, 0xaa, 0x77, 0x2b, 0xa5, 0xdb, 0xb4, 0x06, 0xa2, 0xea, 0x6b, 0xe3, 0x42, }; unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_sha256 hasher; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hasher); - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, (const unsigned char*)input63, 63); - 
rustsecp256k1zkp_v0_10_0_sha256_finalize(&hasher, out); - return rustsecp256k1zkp_v0_10_0_memcmp_var(out, output32, 32) == 0; + rustsecp256k1zkp_v0_10_1_sha256 hasher; + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hasher); + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, (const unsigned char*)input63, 63); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hasher, out); + return rustsecp256k1zkp_v0_10_1_memcmp_var(out, output32, 32) == 0; } -static int rustsecp256k1zkp_v0_10_0_selftest_passes(void) { - return rustsecp256k1zkp_v0_10_0_selftest_sha256(); +static int rustsecp256k1zkp_v0_10_1_selftest_passes(void) { + return rustsecp256k1zkp_v0_10_1_selftest_sha256(); } #endif /* SECP256K1_SELFTEST_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/testrand.h b/secp256k1-zkp-sys/depend/secp256k1/src/testrand.h index e541889a..b714b6e6 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/testrand.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/testrand.h @@ -12,40 +12,40 @@ /* A non-cryptographic RNG used only for test infrastructure. */ /** Seed the pseudorandom number generator for testing. */ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_testrand_seed(const unsigned char *seed16); +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_testrand_seed(const unsigned char *seed16); /** Generate a pseudorandom number in the range [0..2**32-1]. */ -SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_0_testrand32(void); +SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_1_testrand32(void); /** Generate a pseudorandom number in the range [0..2**64-1]. */ -SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_testrand64(void); +SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_1_testrand64(void); /** Generate a pseudorandom number in the range [0..2**bits-1]. Bits must be 1 or * more. 
*/ -SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_testrand_bits(int bits); +SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_1_testrand_bits(int bits); /** Generate a pseudorandom number in the range [0..range-1]. */ -static uint32_t rustsecp256k1zkp_v0_10_0_testrand_int(uint32_t range); +static uint32_t rustsecp256k1zkp_v0_10_1_testrand_int(uint32_t range); /** Generate a pseudorandom 32-byte array. */ -static void rustsecp256k1zkp_v0_10_0_testrand256(unsigned char *b32); +static void rustsecp256k1zkp_v0_10_1_testrand256(unsigned char *b32); /** Generate a pseudorandom 32-byte array with long sequences of zero and one bits. */ -static void rustsecp256k1zkp_v0_10_0_testrand256_test(unsigned char *b32); +static void rustsecp256k1zkp_v0_10_1_testrand256_test(unsigned char *b32); /** Generate pseudorandom bytes with long sequences of zero and one bits. */ -static void rustsecp256k1zkp_v0_10_0_testrand_bytes_test(unsigned char *bytes, size_t len); +static void rustsecp256k1zkp_v0_10_1_testrand_bytes_test(unsigned char *bytes, size_t len); /** Generate a pseudorandom 64-bit integer in the range min..max, inclusive. */ -static int64_t rustsecp256k1zkp_v0_10_0_testrandi64(uint64_t min, uint64_t max); +static int64_t rustsecp256k1zkp_v0_10_1_testrandi64(uint64_t min, uint64_t max); /** Flip a single random bit in a byte array */ -static void rustsecp256k1zkp_v0_10_0_testrand_flip(unsigned char *b, size_t len); +static void rustsecp256k1zkp_v0_10_1_testrand_flip(unsigned char *b, size_t len); /** Initialize the test RNG using (hex encoded) array up to 16 bytes, or randomly if hexseed is NULL. */ -static void rustsecp256k1zkp_v0_10_0_testrand_init(const char* hexseed); +static void rustsecp256k1zkp_v0_10_1_testrand_init(const char* hexseed); /** Print final test information. 
*/ -static void rustsecp256k1zkp_v0_10_0_testrand_finish(void); +static void rustsecp256k1zkp_v0_10_1_testrand_finish(void); #endif /* SECP256K1_TESTRAND_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/testrand_impl.h b/secp256k1-zkp-sys/depend/secp256k1/src/testrand_impl.h index 9b50017f..8e2bc7da 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/testrand_impl.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/testrand_impl.h @@ -16,24 +16,24 @@ #include "hash.h" #include "util.h" -static uint64_t rustsecp256k1zkp_v0_10_0_test_state[4]; +static uint64_t rustsecp256k1zkp_v0_10_1_test_state[4]; -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_testrand_seed(const unsigned char *seed16) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_testrand_seed(const unsigned char *seed16) { static const unsigned char PREFIX[19] = "secp256k1 test init"; unsigned char out32[32]; - rustsecp256k1zkp_v0_10_0_sha256 hash; + rustsecp256k1zkp_v0_10_1_sha256 hash; int i; /* Use SHA256(PREFIX || seed16) as initial state. 
*/ - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hash); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, PREFIX, sizeof(PREFIX)); - rustsecp256k1zkp_v0_10_0_sha256_write(&hash, seed16, 16); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hash, out32); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hash); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, PREFIX, sizeof(PREFIX)); + rustsecp256k1zkp_v0_10_1_sha256_write(&hash, seed16, 16); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hash, out32); for (i = 0; i < 4; ++i) { uint64_t s = 0; int j; for (j = 0; j < 8; ++j) s = (s << 8) | out32[8*i + j]; - rustsecp256k1zkp_v0_10_0_test_state[i] = s; + rustsecp256k1zkp_v0_10_1_test_state[i] = s; } } @@ -41,29 +41,29 @@ SECP256K1_INLINE static uint64_t rotl(const uint64_t x, int k) { return (x << k) | (x >> (64 - k)); } -SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_testrand64(void) { +SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_1_testrand64(void) { /* Test-only Xoshiro256++ RNG. See https://prng.di.unimi.it/ */ - const uint64_t result = rotl(rustsecp256k1zkp_v0_10_0_test_state[0] + rustsecp256k1zkp_v0_10_0_test_state[3], 23) + rustsecp256k1zkp_v0_10_0_test_state[0]; - const uint64_t t = rustsecp256k1zkp_v0_10_0_test_state[1] << 17; - rustsecp256k1zkp_v0_10_0_test_state[2] ^= rustsecp256k1zkp_v0_10_0_test_state[0]; - rustsecp256k1zkp_v0_10_0_test_state[3] ^= rustsecp256k1zkp_v0_10_0_test_state[1]; - rustsecp256k1zkp_v0_10_0_test_state[1] ^= rustsecp256k1zkp_v0_10_0_test_state[2]; - rustsecp256k1zkp_v0_10_0_test_state[0] ^= rustsecp256k1zkp_v0_10_0_test_state[3]; - rustsecp256k1zkp_v0_10_0_test_state[2] ^= t; - rustsecp256k1zkp_v0_10_0_test_state[3] = rotl(rustsecp256k1zkp_v0_10_0_test_state[3], 45); + const uint64_t result = rotl(rustsecp256k1zkp_v0_10_1_test_state[0] + rustsecp256k1zkp_v0_10_1_test_state[3], 23) + rustsecp256k1zkp_v0_10_1_test_state[0]; + const uint64_t t = rustsecp256k1zkp_v0_10_1_test_state[1] << 17; + rustsecp256k1zkp_v0_10_1_test_state[2] 
^= rustsecp256k1zkp_v0_10_1_test_state[0]; + rustsecp256k1zkp_v0_10_1_test_state[3] ^= rustsecp256k1zkp_v0_10_1_test_state[1]; + rustsecp256k1zkp_v0_10_1_test_state[1] ^= rustsecp256k1zkp_v0_10_1_test_state[2]; + rustsecp256k1zkp_v0_10_1_test_state[0] ^= rustsecp256k1zkp_v0_10_1_test_state[3]; + rustsecp256k1zkp_v0_10_1_test_state[2] ^= t; + rustsecp256k1zkp_v0_10_1_test_state[3] = rotl(rustsecp256k1zkp_v0_10_1_test_state[3], 45); return result; } -SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_testrand_bits(int bits) { +SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_1_testrand_bits(int bits) { if (bits == 0) return 0; - return rustsecp256k1zkp_v0_10_0_testrand64() >> (64 - bits); + return rustsecp256k1zkp_v0_10_1_testrand64() >> (64 - bits); } -SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_0_testrand32(void) { - return rustsecp256k1zkp_v0_10_0_testrand64() >> 32; +SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_1_testrand32(void) { + return rustsecp256k1zkp_v0_10_1_testrand64() >> 32; } -static uint32_t rustsecp256k1zkp_v0_10_0_testrand_int(uint32_t range) { +static uint32_t rustsecp256k1zkp_v0_10_1_testrand_int(uint32_t range) { uint32_t mask = 0; uint32_t range_copy; /* Reduce range by 1, changing its meaning to "maximum value". */ @@ -77,15 +77,15 @@ static uint32_t rustsecp256k1zkp_v0_10_0_testrand_int(uint32_t range) { } /* Generation loop. 
*/ while (1) { - uint32_t val = rustsecp256k1zkp_v0_10_0_testrand64() & mask; + uint32_t val = rustsecp256k1zkp_v0_10_1_testrand64() & mask; if (val <= range) return val; } } -static void rustsecp256k1zkp_v0_10_0_testrand256(unsigned char *b32) { +static void rustsecp256k1zkp_v0_10_1_testrand256(unsigned char *b32) { int i; for (i = 0; i < 4; ++i) { - uint64_t val = rustsecp256k1zkp_v0_10_0_testrand64(); + uint64_t val = rustsecp256k1zkp_v0_10_1_testrand64(); b32[0] = val; b32[1] = val >> 8; b32[2] = val >> 16; @@ -98,14 +98,14 @@ static void rustsecp256k1zkp_v0_10_0_testrand256(unsigned char *b32) { } } -static void rustsecp256k1zkp_v0_10_0_testrand_bytes_test(unsigned char *bytes, size_t len) { +static void rustsecp256k1zkp_v0_10_1_testrand_bytes_test(unsigned char *bytes, size_t len) { size_t bits = 0; memset(bytes, 0, len); while (bits < len * 8) { int now; uint32_t val; - now = 1 + (rustsecp256k1zkp_v0_10_0_testrand_bits(6) * rustsecp256k1zkp_v0_10_0_testrand_bits(5) + 16) / 31; - val = rustsecp256k1zkp_v0_10_0_testrand_bits(1); + now = 1 + (rustsecp256k1zkp_v0_10_1_testrand_bits(6) * rustsecp256k1zkp_v0_10_1_testrand_bits(5) + 16) / 31; + val = rustsecp256k1zkp_v0_10_1_testrand_bits(1); while (now > 0 && bits < len * 8) { bytes[bits / 8] |= val << (bits % 8); now--; @@ -114,11 +114,11 @@ static void rustsecp256k1zkp_v0_10_0_testrand_bytes_test(unsigned char *bytes, s } } -static void rustsecp256k1zkp_v0_10_0_testrand256_test(unsigned char *b32) { - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(b32, 32); +static void rustsecp256k1zkp_v0_10_1_testrand256_test(unsigned char *b32) { + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(b32, 32); } -SECP256K1_INLINE static int64_t rustsecp256k1zkp_v0_10_0_testrandi64(uint64_t min, uint64_t max) { +SECP256K1_INLINE static int64_t rustsecp256k1zkp_v0_10_1_testrandi64(uint64_t min, uint64_t max) { uint64_t range; uint64_t r; uint64_t clz; @@ -127,19 +127,19 @@ SECP256K1_INLINE static int64_t 
rustsecp256k1zkp_v0_10_0_testrandi64(uint64_t mi return min; } range = max - min; - clz = rustsecp256k1zkp_v0_10_0_clz64_var(range); + clz = rustsecp256k1zkp_v0_10_1_clz64_var(range); do { - r = ((uint64_t)rustsecp256k1zkp_v0_10_0_testrand32() << 32) | rustsecp256k1zkp_v0_10_0_testrand32(); + r = ((uint64_t)rustsecp256k1zkp_v0_10_1_testrand32() << 32) | rustsecp256k1zkp_v0_10_1_testrand32(); r >>= clz; } while (r > range); return min + (int64_t)r; } -static void rustsecp256k1zkp_v0_10_0_testrand_flip(unsigned char *b, size_t len) { - b[rustsecp256k1zkp_v0_10_0_testrand_int(len)] ^= (1 << rustsecp256k1zkp_v0_10_0_testrand_bits(3)); +static void rustsecp256k1zkp_v0_10_1_testrand_flip(unsigned char *b, size_t len) { + b[rustsecp256k1zkp_v0_10_1_testrand_int(len)] ^= (1 << rustsecp256k1zkp_v0_10_1_testrand_bits(3)); } -static void rustsecp256k1zkp_v0_10_0_testrand_init(const char* hexseed) { +static void rustsecp256k1zkp_v0_10_1_testrand_init(const char* hexseed) { unsigned char seed16[16] = {0}; if (hexseed && strlen(hexseed) != 0) { int pos = 0; @@ -173,12 +173,12 @@ static void rustsecp256k1zkp_v0_10_0_testrand_init(const char* hexseed) { } printf("random seed = %02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x\n", seed16[0], seed16[1], seed16[2], seed16[3], seed16[4], seed16[5], seed16[6], seed16[7], seed16[8], seed16[9], seed16[10], seed16[11], seed16[12], seed16[13], seed16[14], seed16[15]); - rustsecp256k1zkp_v0_10_0_testrand_seed(seed16); + rustsecp256k1zkp_v0_10_1_testrand_seed(seed16); } -static void rustsecp256k1zkp_v0_10_0_testrand_finish(void) { +static void rustsecp256k1zkp_v0_10_1_testrand_finish(void) { unsigned char run32[32]; - rustsecp256k1zkp_v0_10_0_testrand256(run32); + rustsecp256k1zkp_v0_10_1_testrand256(run32); printf("random run = %02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x\n", run32[0], run32[1], run32[2], run32[3], run32[4], run32[5], run32[6], run32[7], run32[8], run32[9], run32[10], run32[11], run32[12], 
run32[13], run32[14], run32[15]); } diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/tests.c b/secp256k1-zkp-sys/depend/secp256k1/src/tests.c index d872503e..c6615272 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/tests.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/tests.c @@ -38,8 +38,8 @@ #define CONDITIONAL_TEST(cnt, nam) if (COUNT < (cnt)) { printf("Skipping %s (iteration count too low)\n", nam); } else static int COUNT = 64; -static rustsecp256k1zkp_v0_10_0_context *CTX = NULL; -static rustsecp256k1zkp_v0_10_0_context *STATIC_CTX = NULL; +static rustsecp256k1zkp_v0_10_1_context *CTX = NULL; +static rustsecp256k1zkp_v0_10_1_context *STATIC_CTX = NULL; static int all_bytes_equal(const void* s, unsigned char value, size_t n) { const unsigned char *p = s; @@ -55,7 +55,7 @@ static int all_bytes_equal(const void* s, unsigned char value, size_t n) { #define CHECK_COUNTING_CALLBACK_VOID(ctx, expr_or_stmt, callback, callback_setter) do { \ int32_t _calls_to_callback = 0; \ - rustsecp256k1zkp_v0_10_0_callback _saved_callback = ctx->callback; \ + rustsecp256k1zkp_v0_10_1_callback _saved_callback = ctx->callback; \ callback_setter(ctx, counting_callback_fn, &_calls_to_callback); \ { expr_or_stmt; } \ ctx->callback = _saved_callback; \ @@ -66,9 +66,9 @@ static int all_bytes_equal(const void* s, unsigned char value, size_t n) { * * Useful for checking functions that return void (e.g., API functions that use ARG_CHECK_VOID) */ #define CHECK_ERROR_VOID(ctx, expr_or_stmt) \ - CHECK_COUNTING_CALLBACK_VOID(ctx, expr_or_stmt, error_callback, rustsecp256k1zkp_v0_10_0_context_set_error_callback) + CHECK_COUNTING_CALLBACK_VOID(ctx, expr_or_stmt, error_callback, rustsecp256k1zkp_v0_10_1_context_set_error_callback) #define CHECK_ILLEGAL_VOID(ctx, expr_or_stmt) \ - CHECK_COUNTING_CALLBACK_VOID(ctx, expr_or_stmt, illegal_callback, rustsecp256k1zkp_v0_10_0_context_set_illegal_callback) + CHECK_COUNTING_CALLBACK_VOID(ctx, expr_or_stmt, illegal_callback, 
rustsecp256k1zkp_v0_10_1_context_set_illegal_callback) /* CHECK that * - expr calls the illegal callback of ctx exactly once and, @@ -96,110 +96,110 @@ static void uncounting_illegal_callback_fn(const char* str, void* data) { (*p)--; } -static void random_field_element_magnitude(rustsecp256k1zkp_v0_10_0_fe *fe, int m) { - rustsecp256k1zkp_v0_10_0_fe zero; - int n = rustsecp256k1zkp_v0_10_0_testrand_int(m + 1); - rustsecp256k1zkp_v0_10_0_fe_normalize(fe); +static void random_field_element_magnitude(rustsecp256k1zkp_v0_10_1_fe *fe, int m) { + rustsecp256k1zkp_v0_10_1_fe zero; + int n = rustsecp256k1zkp_v0_10_1_testrand_int(m + 1); + rustsecp256k1zkp_v0_10_1_fe_normalize(fe); if (n == 0) { return; } - rustsecp256k1zkp_v0_10_0_fe_clear(&zero); - rustsecp256k1zkp_v0_10_0_fe_negate(&zero, &zero, 0); - rustsecp256k1zkp_v0_10_0_fe_mul_int_unchecked(&zero, n - 1); - rustsecp256k1zkp_v0_10_0_fe_add(fe, &zero); + rustsecp256k1zkp_v0_10_1_fe_clear(&zero); + rustsecp256k1zkp_v0_10_1_fe_negate(&zero, &zero, 0); + rustsecp256k1zkp_v0_10_1_fe_mul_int_unchecked(&zero, n - 1); + rustsecp256k1zkp_v0_10_1_fe_add(fe, &zero); #ifdef VERIFY CHECK(fe->magnitude == n); #endif } -static void random_fe_test(rustsecp256k1zkp_v0_10_0_fe *x) { +static void random_fe_test(rustsecp256k1zkp_v0_10_1_fe *x) { unsigned char bin[32]; do { - rustsecp256k1zkp_v0_10_0_testrand256_test(bin); - if (rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(x, bin)) { + rustsecp256k1zkp_v0_10_1_testrand256_test(bin); + if (rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(x, bin)) { return; } } while(1); } -static void random_fe_non_zero_test(rustsecp256k1zkp_v0_10_0_fe *fe) { +static void random_fe_non_zero_test(rustsecp256k1zkp_v0_10_1_fe *fe) { do { random_fe_test(fe); - } while(rustsecp256k1zkp_v0_10_0_fe_is_zero(fe)); + } while(rustsecp256k1zkp_v0_10_1_fe_is_zero(fe)); } -static void random_fe_magnitude(rustsecp256k1zkp_v0_10_0_fe *fe) { +static void random_fe_magnitude(rustsecp256k1zkp_v0_10_1_fe *fe) { 
random_field_element_magnitude(fe, 8); } -static void random_ge_x_magnitude(rustsecp256k1zkp_v0_10_0_ge *ge) { +static void random_ge_x_magnitude(rustsecp256k1zkp_v0_10_1_ge *ge) { random_field_element_magnitude(&ge->x, SECP256K1_GE_X_MAGNITUDE_MAX); } -static void random_ge_y_magnitude(rustsecp256k1zkp_v0_10_0_ge *ge) { +static void random_ge_y_magnitude(rustsecp256k1zkp_v0_10_1_ge *ge) { random_field_element_magnitude(&ge->y, SECP256K1_GE_Y_MAGNITUDE_MAX); } -static void random_gej_x_magnitude(rustsecp256k1zkp_v0_10_0_gej *gej) { +static void random_gej_x_magnitude(rustsecp256k1zkp_v0_10_1_gej *gej) { random_field_element_magnitude(&gej->x, SECP256K1_GEJ_X_MAGNITUDE_MAX); } -static void random_gej_y_magnitude(rustsecp256k1zkp_v0_10_0_gej *gej) { +static void random_gej_y_magnitude(rustsecp256k1zkp_v0_10_1_gej *gej) { random_field_element_magnitude(&gej->y, SECP256K1_GEJ_Y_MAGNITUDE_MAX); } -static void random_gej_z_magnitude(rustsecp256k1zkp_v0_10_0_gej *gej) { +static void random_gej_z_magnitude(rustsecp256k1zkp_v0_10_1_gej *gej) { random_field_element_magnitude(&gej->z, SECP256K1_GEJ_Z_MAGNITUDE_MAX); } -static void random_group_element_test(rustsecp256k1zkp_v0_10_0_ge *ge) { - rustsecp256k1zkp_v0_10_0_fe fe; +static void random_group_element_test(rustsecp256k1zkp_v0_10_1_ge *ge) { + rustsecp256k1zkp_v0_10_1_fe fe; do { random_fe_test(&fe); - if (rustsecp256k1zkp_v0_10_0_ge_set_xo_var(ge, &fe, rustsecp256k1zkp_v0_10_0_testrand_bits(1))) { - rustsecp256k1zkp_v0_10_0_fe_normalize(&ge->y); + if (rustsecp256k1zkp_v0_10_1_ge_set_xo_var(ge, &fe, rustsecp256k1zkp_v0_10_1_testrand_bits(1))) { + rustsecp256k1zkp_v0_10_1_fe_normalize(&ge->y); break; } } while(1); ge->infinity = 0; } -static void random_group_element_jacobian_test(rustsecp256k1zkp_v0_10_0_gej *gej, const rustsecp256k1zkp_v0_10_0_ge *ge) { - rustsecp256k1zkp_v0_10_0_fe z2, z3; +static void random_group_element_jacobian_test(rustsecp256k1zkp_v0_10_1_gej *gej, const rustsecp256k1zkp_v0_10_1_ge *ge) { + 
rustsecp256k1zkp_v0_10_1_fe z2, z3; random_fe_non_zero_test(&gej->z); - rustsecp256k1zkp_v0_10_0_fe_sqr(&z2, &gej->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&z3, &z2, &gej->z); - rustsecp256k1zkp_v0_10_0_fe_mul(&gej->x, &ge->x, &z2); - rustsecp256k1zkp_v0_10_0_fe_mul(&gej->y, &ge->y, &z3); + rustsecp256k1zkp_v0_10_1_fe_sqr(&z2, &gej->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&z3, &z2, &gej->z); + rustsecp256k1zkp_v0_10_1_fe_mul(&gej->x, &ge->x, &z2); + rustsecp256k1zkp_v0_10_1_fe_mul(&gej->y, &ge->y, &z3); gej->infinity = ge->infinity; } -static void random_gej_test(rustsecp256k1zkp_v0_10_0_gej *gej) { - rustsecp256k1zkp_v0_10_0_ge ge; +static void random_gej_test(rustsecp256k1zkp_v0_10_1_gej *gej) { + rustsecp256k1zkp_v0_10_1_ge ge; random_group_element_test(&ge); random_group_element_jacobian_test(gej, &ge); } -static void random_scalar_order_test(rustsecp256k1zkp_v0_10_0_scalar *num) { +static void random_scalar_order_test(rustsecp256k1zkp_v0_10_1_scalar *num) { do { unsigned char b32[32]; int overflow = 0; - rustsecp256k1zkp_v0_10_0_testrand256_test(b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(num, b32, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(num)) { + rustsecp256k1zkp_v0_10_1_testrand256_test(b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(num, b32, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(num)) { continue; } break; } while(1); } -static void random_scalar_order(rustsecp256k1zkp_v0_10_0_scalar *num) { +static void random_scalar_order(rustsecp256k1zkp_v0_10_1_scalar *num) { do { unsigned char b32[32]; int overflow = 0; - rustsecp256k1zkp_v0_10_0_testrand256(b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(num, b32, &overflow); - if (overflow || rustsecp256k1zkp_v0_10_0_scalar_is_zero(num)) { + rustsecp256k1zkp_v0_10_1_testrand256(b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(num, b32, &overflow); + if (overflow || rustsecp256k1zkp_v0_10_1_scalar_is_zero(num)) { continue; } break; @@ -207,9 +207,9 @@ static 
void random_scalar_order(rustsecp256k1zkp_v0_10_0_scalar *num) { } static void random_scalar_order_b32(unsigned char *b32) { - rustsecp256k1zkp_v0_10_0_scalar num; + rustsecp256k1zkp_v0_10_1_scalar num; random_scalar_order(&num); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(b32, &num); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(b32, &num); } static void run_util_tests(void) { @@ -218,42 +218,42 @@ static void run_util_tests(void) { uint64_t r2; uint64_t r3; int64_t s; - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var(0) == 64); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var(1) == 63); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var(2) == 62); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var(3) == 62); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var(~0ULL) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var((~0ULL) - 1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var((~0ULL) >> 1) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var((~0ULL) >> 2) == 2); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, INT64_MAX) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var(0) == 64); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var(1) == 63); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var(2) == 62); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var(3) == 62); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var(~0ULL) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var((~0ULL) - 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var((~0ULL) >> 1) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var((~0ULL) >> 2) == 2); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, INT64_MAX) == 0); CHECK(r == INT64_MAX); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, INT64_MAX - 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, INT64_MAX - 1) == 0); CHECK(r == INT64_MAX - 1); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, INT64_MIN) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, INT64_MIN) == 1); CHECK(r == (uint64_t)INT64_MAX + 1); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, INT64_MIN + 1) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, INT64_MIN + 1) == 1); CHECK(r == (uint64_t)INT64_MAX); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, 0) == 0); CHECK(r == 0); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, 1) == 0); CHECK(r == 1); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, -1) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, -1) == 1); CHECK(r == 1); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, 2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, 2) == 0); CHECK(r == 2); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r, -2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r, -2) == 1); CHECK(r == 2); for (i = 0; i < 10; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_clz64_var((~0ULL) - rustsecp256k1zkp_v0_10_0_testrand32()) == 0); - r = ((uint64_t)rustsecp256k1zkp_v0_10_0_testrand32() << 32) | rustsecp256k1zkp_v0_10_0_testrand32(); - r2 = rustsecp256k1zkp_v0_10_0_testrandi64(0, r); + CHECK(rustsecp256k1zkp_v0_10_1_clz64_var((~0ULL) - rustsecp256k1zkp_v0_10_1_testrand32()) == 0); + r = ((uint64_t)rustsecp256k1zkp_v0_10_1_testrand32() << 32) | rustsecp256k1zkp_v0_10_1_testrand32(); + r2 = rustsecp256k1zkp_v0_10_1_testrandi64(0, r); CHECK(r2 <= r); - r3 = rustsecp256k1zkp_v0_10_0_testrandi64(r2, r); + r3 = rustsecp256k1zkp_v0_10_1_testrandi64(r2, r); CHECK((r3 >= r2) && (r3 <= r)); - r = rustsecp256k1zkp_v0_10_0_testrandi64(0, INT64_MAX); - s = (int64_t)r * (rustsecp256k1zkp_v0_10_0_testrand32()&1?-1:1); - CHECK(rustsecp256k1zkp_v0_10_0_sign_and_abs64(&r2, s) == (s < 0)); + r = rustsecp256k1zkp_v0_10_1_testrandi64(0, INT64_MAX); + s = (int64_t)r * (rustsecp256k1zkp_v0_10_1_testrand32()&1?-1:1); + CHECK(rustsecp256k1zkp_v0_10_1_sign_and_abs64(&r2, s) == (s < 0)); CHECK(r2 == r); } } @@ -262,8 +262,8 @@ static void run_xoshiro256pp_tests(void) { { size_t i; /* Sanity check that we run 
before the actual seeding. */ - for (i = 0; i < sizeof(rustsecp256k1zkp_v0_10_0_test_state)/sizeof(rustsecp256k1zkp_v0_10_0_test_state[0]); i++) { - CHECK(rustsecp256k1zkp_v0_10_0_test_state[i] == 0); + for (i = 0; i < sizeof(rustsecp256k1zkp_v0_10_1_test_state)/sizeof(rustsecp256k1zkp_v0_10_1_test_state[0]); i++) { + CHECK(rustsecp256k1zkp_v0_10_1_test_state[i] == 0); } } { @@ -279,26 +279,26 @@ static void run_xoshiro256pp_tests(void) { 0x4C, 0xCC, 0xC1, 0x18, 0xB2, 0xD8, 0x8F, 0xEF, 0x43, 0x26, 0x15, 0x57, 0x37, 0x00, 0xEF, 0x30, }; - rustsecp256k1zkp_v0_10_0_testrand_seed(seed16); + rustsecp256k1zkp_v0_10_1_testrand_seed(seed16); for (i = 0; i < 17; i++) { - rustsecp256k1zkp_v0_10_0_testrand256(buf32); + rustsecp256k1zkp_v0_10_1_testrand256(buf32); } - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf32, buf32_expected, sizeof(buf32)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf32, buf32_expected, sizeof(buf32)) == 0); } } static void run_selftest_tests(void) { /* Test public API */ - rustsecp256k1zkp_v0_10_0_selftest(); + rustsecp256k1zkp_v0_10_1_selftest(); } -static int ecmult_gen_context_eq(const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *a, const rustsecp256k1zkp_v0_10_0_ecmult_gen_context *b) { +static int ecmult_gen_context_eq(const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *a, const rustsecp256k1zkp_v0_10_1_ecmult_gen_context *b) { return a->built == b->built - && rustsecp256k1zkp_v0_10_0_scalar_eq(&a->blind, &b->blind) - && rustsecp256k1zkp_v0_10_0_gej_eq_var(&a->initial, &b->initial); + && rustsecp256k1zkp_v0_10_1_scalar_eq(&a->blind, &b->blind) + && rustsecp256k1zkp_v0_10_1_gej_eq_var(&a->initial, &b->initial); } -static int context_eq(const rustsecp256k1zkp_v0_10_0_context *a, const rustsecp256k1zkp_v0_10_0_context *b) { +static int context_eq(const rustsecp256k1zkp_v0_10_1_context *a, const rustsecp256k1zkp_v0_10_1_context *b) { return a->declassify == b->declassify && ecmult_gen_context_eq(&a->ecmult_gen_ctx, &b->ecmult_gen_ctx) && 
a->illegal_callback.fn == b->illegal_callback.fn @@ -313,22 +313,22 @@ static void run_deprecated_context_flags_test(void) { unsigned int flags[] = { SECP256K1_CONTEXT_SIGN, SECP256K1_CONTEXT_VERIFY, SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY }; - rustsecp256k1zkp_v0_10_0_context *none_ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + rustsecp256k1zkp_v0_10_1_context *none_ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); int i; for (i = 0; i < (int)(sizeof(flags)/sizeof(flags[0])); i++) { - rustsecp256k1zkp_v0_10_0_context *tmp_ctx; - CHECK(rustsecp256k1zkp_v0_10_0_context_preallocated_size(SECP256K1_CONTEXT_NONE) == rustsecp256k1zkp_v0_10_0_context_preallocated_size(flags[i])); - tmp_ctx = rustsecp256k1zkp_v0_10_0_context_create(flags[i]); + rustsecp256k1zkp_v0_10_1_context *tmp_ctx; + CHECK(rustsecp256k1zkp_v0_10_1_context_preallocated_size(SECP256K1_CONTEXT_NONE) == rustsecp256k1zkp_v0_10_1_context_preallocated_size(flags[i])); + tmp_ctx = rustsecp256k1zkp_v0_10_1_context_create(flags[i]); CHECK(context_eq(none_ctx, tmp_ctx)); - rustsecp256k1zkp_v0_10_0_context_destroy(tmp_ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(tmp_ctx); } - rustsecp256k1zkp_v0_10_0_context_destroy(none_ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(none_ctx); } static void run_ec_illegal_argument_tests(void) { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey zero_pubkey; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey zero_pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; unsigned char ctmp[32]; /* Setup */ @@ -336,187 +336,187 @@ static void run_ec_illegal_argument_tests(void) { memset(&zero_pubkey, 0, sizeof(zero_pubkey)); /* Verify context-type checking illegal-argument errors. 
*/ - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_create(STATIC_CTX, &pubkey, ctmp)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_create(STATIC_CTX, &pubkey, ctmp)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign(STATIC_CTX, &sig, ctmp, ctmp, NULL, NULL)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign(STATIC_CTX, &sig, ctmp, ctmp, NULL, NULL)); SECP256K1_CHECKMEM_UNDEFINE(&sig, sizeof(sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, ctmp, ctmp, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, ctmp, ctmp, NULL, NULL) == 1); SECP256K1_CHECKMEM_CHECK(&sig, sizeof(sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, ctmp, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(STATIC_CTX, &sig, ctmp, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(STATIC_CTX, &pubkey, ctmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey, ctmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(STATIC_CTX, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(CTX, &pubkey) == 1); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(STATIC_CTX, &zero_pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(CTX, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(STATIC_CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, ctmp, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(STATIC_CTX, &sig, ctmp, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, 
ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(STATIC_CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(STATIC_CTX, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(CTX, &pubkey) == 1); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(STATIC_CTX, &zero_pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(CTX, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(STATIC_CTX, &pubkey, ctmp) == 1); } static void run_static_context_tests(int use_prealloc) { - /* Check that deprecated rustsecp256k1zkp_v0_10_0_context_no_precomp is an alias to rustsecp256k1zkp_v0_10_0_context_static. */ - CHECK(rustsecp256k1zkp_v0_10_0_context_no_precomp == rustsecp256k1zkp_v0_10_0_context_static); + /* Check that deprecated rustsecp256k1zkp_v0_10_1_context_no_precomp is an alias to rustsecp256k1zkp_v0_10_1_context_static. */ + CHECK(rustsecp256k1zkp_v0_10_1_context_no_precomp == rustsecp256k1zkp_v0_10_1_context_static); { unsigned char seed[32] = {0x17}; - /* Randomizing rustsecp256k1zkp_v0_10_0_context_static is not supported. */ - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_randomize(STATIC_CTX, seed)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_randomize(STATIC_CTX, NULL)); + /* Randomizing rustsecp256k1zkp_v0_10_1_context_static is not supported. */ + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_randomize(STATIC_CTX, seed)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_randomize(STATIC_CTX, NULL)); - /* Destroying or cloning rustsecp256k1zkp_v0_10_0_context_static is not supported. */ + /* Destroying or cloning rustsecp256k1zkp_v0_10_1_context_static is not supported. 
*/ if (use_prealloc) { - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_preallocated_clone_size(STATIC_CTX)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_preallocated_clone_size(STATIC_CTX)); { - rustsecp256k1zkp_v0_10_0_context *my_static_ctx = malloc(sizeof(*STATIC_CTX)); + rustsecp256k1zkp_v0_10_1_context *my_static_ctx = malloc(sizeof(*STATIC_CTX)); CHECK(my_static_ctx != NULL); memset(my_static_ctx, 0x2a, sizeof(*my_static_ctx)); - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_preallocated_clone(STATIC_CTX, my_static_ctx)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_preallocated_clone(STATIC_CTX, my_static_ctx)); CHECK(all_bytes_equal(my_static_ctx, 0x2a, sizeof(*my_static_ctx))); free(my_static_ctx); } - CHECK_ILLEGAL_VOID(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(STATIC_CTX)); + CHECK_ILLEGAL_VOID(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(STATIC_CTX)); } else { - CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_clone(STATIC_CTX)); - CHECK_ILLEGAL_VOID(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_destroy(STATIC_CTX)); + CHECK_ILLEGAL(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_clone(STATIC_CTX)); + CHECK_ILLEGAL_VOID(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_destroy(STATIC_CTX)); } } { /* Verify that setting and resetting illegal callback works */ int32_t dummy = 0; - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(STATIC_CTX, counting_callback_fn, &dummy); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(STATIC_CTX, counting_callback_fn, &dummy); CHECK(STATIC_CTX->illegal_callback.fn == counting_callback_fn); CHECK(STATIC_CTX->illegal_callback.data == &dummy); - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(STATIC_CTX, NULL, NULL); - CHECK(STATIC_CTX->illegal_callback.fn == rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(STATIC_CTX, NULL, NULL); + 
CHECK(STATIC_CTX->illegal_callback.fn == rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn); CHECK(STATIC_CTX->illegal_callback.data == NULL); } } static void run_proper_context_tests(int use_prealloc) { int32_t dummy = 0; - rustsecp256k1zkp_v0_10_0_context *my_ctx, *my_ctx_fresh; + rustsecp256k1zkp_v0_10_1_context *my_ctx, *my_ctx_fresh; void *my_ctx_prealloc = NULL; unsigned char seed[32] = {0x17}; - rustsecp256k1zkp_v0_10_0_gej pubj; - rustsecp256k1zkp_v0_10_0_ge pub; - rustsecp256k1zkp_v0_10_0_scalar msg, key, nonce; - rustsecp256k1zkp_v0_10_0_scalar sigr, sigs; + rustsecp256k1zkp_v0_10_1_gej pubj; + rustsecp256k1zkp_v0_10_1_ge pub; + rustsecp256k1zkp_v0_10_1_scalar msg, key, nonce; + rustsecp256k1zkp_v0_10_1_scalar sigr, sigs; /* Fresh reference context for comparison */ - my_ctx_fresh = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + my_ctx_fresh = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); if (use_prealloc) { - my_ctx_prealloc = malloc(rustsecp256k1zkp_v0_10_0_context_preallocated_size(SECP256K1_CONTEXT_NONE)); + my_ctx_prealloc = malloc(rustsecp256k1zkp_v0_10_1_context_preallocated_size(SECP256K1_CONTEXT_NONE)); CHECK(my_ctx_prealloc != NULL); - my_ctx = rustsecp256k1zkp_v0_10_0_context_preallocated_create(my_ctx_prealloc, SECP256K1_CONTEXT_NONE); + my_ctx = rustsecp256k1zkp_v0_10_1_context_preallocated_create(my_ctx_prealloc, SECP256K1_CONTEXT_NONE); } else { - my_ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + my_ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); } /* Randomize and reset randomization */ CHECK(context_eq(my_ctx, my_ctx_fresh)); - CHECK(rustsecp256k1zkp_v0_10_0_context_randomize(my_ctx, seed) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_context_randomize(my_ctx, seed) == 1); CHECK(!context_eq(my_ctx, my_ctx_fresh)); - CHECK(rustsecp256k1zkp_v0_10_0_context_randomize(my_ctx, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_context_randomize(my_ctx, NULL) == 1); 
CHECK(context_eq(my_ctx, my_ctx_fresh)); - /* set error callback (to a function that still aborts in case malloc() fails in rustsecp256k1zkp_v0_10_0_context_clone() below) */ - rustsecp256k1zkp_v0_10_0_context_set_error_callback(my_ctx, rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn, NULL); - CHECK(my_ctx->error_callback.fn != rustsecp256k1zkp_v0_10_0_default_error_callback_fn); - CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn); + /* set error callback (to a function that still aborts in case malloc() fails in rustsecp256k1zkp_v0_10_1_context_clone() below) */ + rustsecp256k1zkp_v0_10_1_context_set_error_callback(my_ctx, rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn, NULL); + CHECK(my_ctx->error_callback.fn != rustsecp256k1zkp_v0_10_1_default_error_callback_fn); + CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn); /* check if sizes for cloning are consistent */ - CHECK(rustsecp256k1zkp_v0_10_0_context_preallocated_clone_size(my_ctx) == rustsecp256k1zkp_v0_10_0_context_preallocated_size(SECP256K1_CONTEXT_NONE)); + CHECK(rustsecp256k1zkp_v0_10_1_context_preallocated_clone_size(my_ctx) == rustsecp256k1zkp_v0_10_1_context_preallocated_size(SECP256K1_CONTEXT_NONE)); /*** clone and destroy all of them to make sure cloning was complete ***/ { - rustsecp256k1zkp_v0_10_0_context *ctx_tmp; + rustsecp256k1zkp_v0_10_1_context *ctx_tmp; if (use_prealloc) { /* clone into a non-preallocated context and then again into a new preallocated one. 
*/ ctx_tmp = my_ctx; - my_ctx = rustsecp256k1zkp_v0_10_0_context_clone(my_ctx); + my_ctx = rustsecp256k1zkp_v0_10_1_context_clone(my_ctx); CHECK(context_eq(ctx_tmp, my_ctx)); - rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(ctx_tmp); + rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(ctx_tmp); free(my_ctx_prealloc); - my_ctx_prealloc = malloc(rustsecp256k1zkp_v0_10_0_context_preallocated_size(SECP256K1_CONTEXT_NONE)); + my_ctx_prealloc = malloc(rustsecp256k1zkp_v0_10_1_context_preallocated_size(SECP256K1_CONTEXT_NONE)); CHECK(my_ctx_prealloc != NULL); ctx_tmp = my_ctx; - my_ctx = rustsecp256k1zkp_v0_10_0_context_preallocated_clone(my_ctx, my_ctx_prealloc); + my_ctx = rustsecp256k1zkp_v0_10_1_context_preallocated_clone(my_ctx, my_ctx_prealloc); CHECK(context_eq(ctx_tmp, my_ctx)); - rustsecp256k1zkp_v0_10_0_context_destroy(ctx_tmp); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx_tmp); } else { /* clone into a preallocated context and then again into a new non-preallocated one. */ void *prealloc_tmp; - prealloc_tmp = malloc(rustsecp256k1zkp_v0_10_0_context_preallocated_size(SECP256K1_CONTEXT_NONE)); + prealloc_tmp = malloc(rustsecp256k1zkp_v0_10_1_context_preallocated_size(SECP256K1_CONTEXT_NONE)); CHECK(prealloc_tmp != NULL); ctx_tmp = my_ctx; - my_ctx = rustsecp256k1zkp_v0_10_0_context_preallocated_clone(my_ctx, prealloc_tmp); + my_ctx = rustsecp256k1zkp_v0_10_1_context_preallocated_clone(my_ctx, prealloc_tmp); CHECK(context_eq(ctx_tmp, my_ctx)); - rustsecp256k1zkp_v0_10_0_context_destroy(ctx_tmp); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx_tmp); ctx_tmp = my_ctx; - my_ctx = rustsecp256k1zkp_v0_10_0_context_clone(my_ctx); + my_ctx = rustsecp256k1zkp_v0_10_1_context_clone(my_ctx); CHECK(context_eq(ctx_tmp, my_ctx)); - rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(ctx_tmp); + rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(ctx_tmp); free(prealloc_tmp); } } /* Verify that the error callback makes it across the clone. 
*/ - CHECK(my_ctx->error_callback.fn != rustsecp256k1zkp_v0_10_0_default_error_callback_fn); - CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn); + CHECK(my_ctx->error_callback.fn != rustsecp256k1zkp_v0_10_1_default_error_callback_fn); + CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn); /* And that it resets back to default. */ - rustsecp256k1zkp_v0_10_0_context_set_error_callback(my_ctx, NULL, NULL); - CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_0_default_error_callback_fn); + rustsecp256k1zkp_v0_10_1_context_set_error_callback(my_ctx, NULL, NULL); + CHECK(my_ctx->error_callback.fn == rustsecp256k1zkp_v0_10_1_default_error_callback_fn); CHECK(context_eq(my_ctx, my_ctx_fresh)); /* Verify that setting and resetting illegal callback works */ - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(my_ctx, counting_callback_fn, &dummy); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(my_ctx, counting_callback_fn, &dummy); CHECK(my_ctx->illegal_callback.fn == counting_callback_fn); CHECK(my_ctx->illegal_callback.data == &dummy); - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(my_ctx, NULL, NULL); - CHECK(my_ctx->illegal_callback.fn == rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(my_ctx, NULL, NULL); + CHECK(my_ctx->illegal_callback.fn == rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn); CHECK(my_ctx->illegal_callback.data == NULL); CHECK(context_eq(my_ctx, my_ctx_fresh)); /*** attempt to use them ***/ random_scalar_order_test(&msg); random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&my_ctx->ecmult_gen_ctx, &pubj, &key); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pub, &pubj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&my_ctx->ecmult_gen_ctx, &pubj, &key); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pub, &pubj); /* obtain a working nonce */ do { random_scalar_order_test(&nonce); - } 
while(!rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(&my_ctx->ecmult_gen_ctx, &sigr, &sigs, &key, &msg, &nonce, NULL)); + } while(!rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(&my_ctx->ecmult_gen_ctx, &sigr, &sigs, &key, &msg, &nonce, NULL)); /* try signing */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(&my_ctx->ecmult_gen_ctx, &sigr, &sigs, &key, &msg, &nonce, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(&my_ctx->ecmult_gen_ctx, &sigr, &sigs, &key, &msg, &nonce, NULL)); /* try verifying */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); /* cleanup */ if (use_prealloc) { - rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(my_ctx); + rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(my_ctx); free(my_ctx_prealloc); } else { - rustsecp256k1zkp_v0_10_0_context_destroy(my_ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(my_ctx); } - rustsecp256k1zkp_v0_10_0_context_destroy(my_ctx_fresh); + rustsecp256k1zkp_v0_10_1_context_destroy(my_ctx_fresh); /* Defined as no-op. 
*/ - rustsecp256k1zkp_v0_10_0_context_destroy(NULL); - rustsecp256k1zkp_v0_10_0_context_preallocated_destroy(NULL); + rustsecp256k1zkp_v0_10_1_context_destroy(NULL); + rustsecp256k1zkp_v0_10_1_context_preallocated_destroy(NULL); } static void run_scratch_tests(void) { @@ -524,69 +524,69 @@ static void run_scratch_tests(void) { size_t checkpoint; size_t checkpoint_2; - rustsecp256k1zkp_v0_10_0_scratch_space *scratch; - rustsecp256k1zkp_v0_10_0_scratch_space local_scratch; + rustsecp256k1zkp_v0_10_1_scratch_space *scratch; + rustsecp256k1zkp_v0_10_1_scratch_space local_scratch; /* Test public API */ - scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000); + scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000); CHECK(scratch != NULL); /* Test internal API */ - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - (ALIGNMENT - 1)); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - (ALIGNMENT - 1)); CHECK(scratch->alloc_size == 0); CHECK(scratch->alloc_size % ALIGNMENT == 0); /* Allocating 500 bytes succeeds */ - checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(&CTX->error_callback, scratch); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, 500) != NULL); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000 - adj_alloc); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - adj_alloc - (ALIGNMENT - 1)); + checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(&CTX->error_callback, scratch); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, 500) != NULL); + 
CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000 - adj_alloc); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - adj_alloc - (ALIGNMENT - 1)); CHECK(scratch->alloc_size != 0); CHECK(scratch->alloc_size % ALIGNMENT == 0); /* Allocating another 501 bytes fails */ - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, 501) == NULL); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000 - adj_alloc); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - adj_alloc - (ALIGNMENT - 1)); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, 501) == NULL); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000 - adj_alloc); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 1) == 1000 - adj_alloc - (ALIGNMENT - 1)); CHECK(scratch->alloc_size != 0); CHECK(scratch->alloc_size % ALIGNMENT == 0); /* ...but it succeeds once we apply the checkpoint to undo it */ - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); CHECK(scratch->alloc_size == 0); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000); - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, 500) != NULL); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 0) == 1000); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, 500) != NULL); CHECK(scratch->alloc_size != 0); /* try to apply a bad checkpoint */ - checkpoint_2 = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(&CTX->error_callback, scratch); - 
rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); - CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint_2)); /* checkpoint_2 is after checkpoint */ - CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, (size_t) -1)); /* this is just wildly invalid */ + checkpoint_2 = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(&CTX->error_callback, scratch); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); + CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint_2)); /* checkpoint_2 is after checkpoint */ + CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, (size_t) -1)); /* this is just wildly invalid */ /* try to use badly initialized scratch space */ - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); memset(&local_scratch, 0, sizeof(local_scratch)); scratch = &local_scratch; - CHECK_ERROR(CTX, rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, 0)); - CHECK_ERROR(CTX, rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, 500)); - CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch)); + CHECK_ERROR(CTX, rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, 0)); + CHECK_ERROR(CTX, rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, 500)); + CHECK_ERROR_VOID(CTX, rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch)); /* Test that large integers do not wrap around in a bad way */ - scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 1000); + scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 1000); /* Try max allocation with a large number of objects. 
Only makes sense if * ALIGNMENT is greater than 1 because otherwise the objects take no extra * space. */ - CHECK(ALIGNMENT <= 1 || !rustsecp256k1zkp_v0_10_0_scratch_max_allocation(&CTX->error_callback, scratch, (SIZE_MAX / (ALIGNMENT - 1)) + 1)); + CHECK(ALIGNMENT <= 1 || !rustsecp256k1zkp_v0_10_1_scratch_max_allocation(&CTX->error_callback, scratch, (SIZE_MAX / (ALIGNMENT - 1)) + 1)); /* Try allocating SIZE_MAX to test wrap around which only happens if * ALIGNMENT > 1, otherwise it returns NULL anyway because the scratch * space is too small. */ - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, SIZE_MAX) == NULL); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, SIZE_MAX) == NULL); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); /* cleanup */ - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, NULL); /* no-op */ + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, NULL); /* no-op */ } static void run_ctz_tests(void) { @@ -596,14 +596,14 @@ static void run_ctz_tests(void) { unsigned i; for (i = 0; i < sizeof(b32) / sizeof(b32[0]); ++i) { for (shift = 0; shift < 32; ++shift) { - CHECK(rustsecp256k1zkp_v0_10_0_ctz32_var_debruijn(b32[i] << shift) == shift); - CHECK(rustsecp256k1zkp_v0_10_0_ctz32_var(b32[i] << shift) == shift); + CHECK(rustsecp256k1zkp_v0_10_1_ctz32_var_debruijn(b32[i] << shift) == shift); + CHECK(rustsecp256k1zkp_v0_10_1_ctz32_var(b32[i] << shift) == shift); } } for (i = 0; i < sizeof(b64) / sizeof(b64[0]); ++i) { for (shift = 0; shift < 64; ++shift) { - CHECK(rustsecp256k1zkp_v0_10_0_ctz64_var_debruijn(b64[i] << shift) == shift); - CHECK(rustsecp256k1zkp_v0_10_0_ctz64_var(b64[i] << shift) == shift); + CHECK(rustsecp256k1zkp_v0_10_1_ctz64_var_debruijn(b64[i] << shift) == shift); + CHECK(rustsecp256k1zkp_v0_10_1_ctz64_var(b64[i] << shift) == shift); } } } @@ -640,29 +640,29 @@ static void 
run_sha256_known_output_tests(void) { for (i = 0; i < ninputs; i++) { unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_sha256 hasher; + rustsecp256k1zkp_v0_10_1_sha256 hasher; unsigned int j; /* 1. Run: simply write the input bytestrings */ j = repeat[i]; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hasher); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hasher); while (j > 0) { - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); j--; } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hasher, out); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, outputs[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hasher, out); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, outputs[i], 32) == 0); /* 2. Run: split the input bytestrings randomly before writing */ if (strlen(inputs[i]) > 0) { - int split = rustsecp256k1zkp_v0_10_0_testrand_int(strlen(inputs[i])); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hasher); + int split = rustsecp256k1zkp_v0_10_1_testrand_int(strlen(inputs[i])); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hasher); j = repeat[i]; while (j > 0) { - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); j--; } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hasher, out); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, outputs[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hasher, out); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, outputs[i], 32) == 0); } } } @@ -713,7 +713,7 @@ for x in digests: */ static void 
run_sha256_counter_tests(void) { static const char *input = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmno"; - static const rustsecp256k1zkp_v0_10_0_sha256 midstates[] = { + static const rustsecp256k1zkp_v0_10_1_sha256 midstates[] = { {{0xa2b5c8bb, 0x26c88bb3, 0x2abdc3d2, 0x9def99a3, 0xdfd21a6e, 0x41fe585b, 0x7ef2c440, 0x2b79adda}, {0x00}, 0xfffc0}, {{0xa0d29445, 0x9287de66, 0x76aabd71, 0x41acd765, 0x0c7528b4, 0x84e14906, 0x942faec6, 0xcc5a7b26}, @@ -762,22 +762,22 @@ static void run_sha256_counter_tests(void) { unsigned int i; for (i = 0; i < sizeof(midstates)/sizeof(midstates[0]); i++) { unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_sha256 hasher = midstates[i]; - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, (const unsigned char*)input, strlen(input)); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hasher, out); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, outputs[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256 hasher = midstates[i]; + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, (const unsigned char*)input, strlen(input)); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hasher, out); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, outputs[i], 32) == 0); } } /* Tests for the equality of two sha256 structs. This function only produces a * correct result if an integer multiple of 64 many bytes have been written * into the hash functions. This function is used by some module tests. */ -static void test_sha256_eq(const rustsecp256k1zkp_v0_10_0_sha256 *sha1, const rustsecp256k1zkp_v0_10_0_sha256 *sha2) { +static void test_sha256_eq(const rustsecp256k1zkp_v0_10_1_sha256 *sha1, const rustsecp256k1zkp_v0_10_1_sha256 *sha2) { /* Is buffer fully consumed? 
*/ CHECK((sha1->bytes & 0x3F) == 0); CHECK(sha1->bytes == sha2->bytes); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(sha1->s, sha2->s, sizeof(sha1->s)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(sha1->s, sha2->s, sizeof(sha1->s)) == 0); } static void run_hmac_sha256_tests(void) { @@ -807,19 +807,19 @@ static void run_hmac_sha256_tests(void) { }; int i; for (i = 0; i < 6; i++) { - rustsecp256k1zkp_v0_10_0_hmac_sha256 hasher; + rustsecp256k1zkp_v0_10_1_hmac_sha256 hasher; unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hasher, out); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, outputs[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), strlen(inputs[i])); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hasher, out); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, outputs[i], 32) == 0); if (strlen(inputs[i]) > 0) { - int split = rustsecp256k1zkp_v0_10_0_testrand_int(strlen(inputs[i])); - rustsecp256k1zkp_v0_10_0_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); - rustsecp256k1zkp_v0_10_0_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); - rustsecp256k1zkp_v0_10_0_hmac_sha256_finalize(&hasher, out); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, outputs[i], 32) == 0); + int split = rustsecp256k1zkp_v0_10_1_testrand_int(strlen(inputs[i])); + rustsecp256k1zkp_v0_10_1_hmac_sha256_initialize(&hasher, (const unsigned char*)(keys[i]), strlen(keys[i])); + 
rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i]), split); + rustsecp256k1zkp_v0_10_1_hmac_sha256_write(&hasher, (const unsigned char*)(inputs[i] + split), strlen(inputs[i]) - split); + rustsecp256k1zkp_v0_10_1_hmac_sha256_finalize(&hasher, out); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, outputs[i], 32) == 0); } } } @@ -839,30 +839,30 @@ static void run_rfc6979_hmac_sha256_tests(void) { {0x75, 0x97, 0x88, 0x7c, 0xbd, 0x76, 0x32, 0x1f, 0x32, 0xe3, 0x04, 0x40, 0x67, 0x9a, 0x22, 0xcf, 0x7f, 0x8d, 0x9d, 0x2e, 0xac, 0x39, 0x0e, 0x58, 0x1f, 0xea, 0x09, 0x1c, 0xe2, 0x02, 0xba, 0x94} }; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256 rng; + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256 rng; unsigned char out[32]; int i; - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, key1, 64); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, key1, 64); for (i = 0; i < 3; i++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, out, 32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, out1[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, out, 32); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, out1[i], 32) == 0); } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, key1, 65); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, key1, 65); for (i = 0; i < 3; i++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, out, 32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, out1[i], 32) != 0); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, out, 32); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, out1[i], 32) != 0); } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_initialize(&rng, 
key2, 64); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_initialize(&rng, key2, 64); for (i = 0; i < 3; i++) { - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_generate(&rng, out, 32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, out2[i], 32) == 0); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_generate(&rng, out, 32); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, out2[i], 32) == 0); } - rustsecp256k1zkp_v0_10_0_rfc6979_hmac_sha256_finalize(&rng); + rustsecp256k1zkp_v0_10_1_rfc6979_hmac_sha256_finalize(&rng); } static void run_tagged_sha256_tests(void) { @@ -877,16 +877,16 @@ static void run_tagged_sha256_tests(void) { }; /* API test */ - CHECK(rustsecp256k1zkp_v0_10_0_tagged_sha256(CTX, hash32, tag, sizeof(tag), msg, sizeof(msg)) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_tagged_sha256(CTX, NULL, tag, sizeof(tag), msg, sizeof(msg))); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_tagged_sha256(CTX, hash32, NULL, 0, msg, sizeof(msg))); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_tagged_sha256(CTX, hash32, tag, sizeof(tag), NULL, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_tagged_sha256(CTX, hash32, tag, sizeof(tag), msg, sizeof(msg)) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_tagged_sha256(CTX, NULL, tag, sizeof(tag), msg, sizeof(msg))); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_tagged_sha256(CTX, hash32, NULL, 0, msg, sizeof(msg))); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_tagged_sha256(CTX, hash32, tag, sizeof(tag), NULL, 0)); /* Static test vector */ memcpy(tag, "tag", 3); memcpy(msg, "msg", 3); - CHECK(rustsecp256k1zkp_v0_10_0_tagged_sha256(CTX, hash32, tag, 3, msg, 3) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(hash32, hash_expected, sizeof(hash32)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_tagged_sha256(CTX, hash32, tag, 3, msg, 3) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(hash32, hash_expected, sizeof(hash32)) == 0); } /***** MODINV TESTS *****/ @@ -993,7 +993,7 @@ static void mulmod256(uint16_t* out, const 
uint16_t* a, const uint16_t* b, const } /* Convert a 256-bit number represented as 16 uint16_t's to signed30 notation. */ -static void uint16_to_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30* out, const uint16_t* in) { +static void uint16_to_signed30(rustsecp256k1zkp_v0_10_1_modinv32_signed30* out, const uint16_t* in) { int i; memset(out->v, 0, sizeof(out->v)); for (i = 0; i < 256; ++i) { @@ -1002,7 +1002,7 @@ static void uint16_to_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30* out, } /* Convert a 256-bit number in signed30 notation to a representation as 16 uint16_t's. */ -static void signed30_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_0_modinv32_signed30* in) { +static void signed30_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_1_modinv32_signed30* in) { int i; memset(out, 0, 32); for (i = 0; i < 256; ++i) { @@ -1011,10 +1011,10 @@ static void signed30_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_0_mod } /* Randomly mutate the sign of limbs in signed30 representation, without changing the value. */ -static void mutate_sign_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30* x) { +static void mutate_sign_signed30(rustsecp256k1zkp_v0_10_1_modinv32_signed30* x) { int i; for (i = 0; i < 16; ++i) { - int pos = rustsecp256k1zkp_v0_10_0_testrand_bits(3); + int pos = rustsecp256k1zkp_v0_10_1_testrand_bits(3); if (x->v[pos] > 0 && x->v[pos + 1] <= 0x3fffffff) { x->v[pos] -= 0x40000000; x->v[pos + 1] += 1; @@ -1025,11 +1025,11 @@ static void mutate_sign_signed30(rustsecp256k1zkp_v0_10_0_modinv32_signed30* x) } } -/* Test rustsecp256k1zkp_v0_10_0_modinv32{_var}, using inputs in 16-bit limb format, and returning inverse. */ +/* Test rustsecp256k1zkp_v0_10_1_modinv32{_var}, using inputs in 16-bit limb format, and returning inverse. 
*/ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16_t* mod) { uint16_t tmp[16]; - rustsecp256k1zkp_v0_10_0_modinv32_signed30 x; - rustsecp256k1zkp_v0_10_0_modinv32_modinfo m; + rustsecp256k1zkp_v0_10_1_modinv32_signed30 x; + rustsecp256k1zkp_v0_10_1_modinv32_modinfo m; int i, vartime, nonzero; uint16_to_signed30(&x, in); @@ -1040,14 +1040,14 @@ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16 m.modulus_inv30 = modinv2p64(m.modulus.v[0]) & 0x3fffffff; CHECK(((m.modulus_inv30 * m.modulus.v[0]) & 0x3fffffff) == 1); - /* Test rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var. */ + /* Test rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var. */ if (nonzero) { int jac; uint16_t sqr[16], negone[16]; mulmod256(sqr, in, in, mod); uint16_to_signed30(&x, sqr); /* Compute jacobi symbol of in^2, which must be 1 (or uncomputable). */ - jac = rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(&x, &m); + jac = rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var(&x, &m); CHECK(jac == 0 || jac == 1); /* Then compute the jacobi symbol of -(in^2). x and -x have opposite * jacobi symbols if and only if (mod % 4) == 3. */ @@ -1055,7 +1055,7 @@ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16 for (i = 1; i < 16; ++i) negone[i] = mod[i]; mulmod256(sqr, sqr, negone, mod); uint16_to_signed30(&x, sqr); - jac = rustsecp256k1zkp_v0_10_0_jacobi32_maybe_var(&x, &m); + jac = rustsecp256k1zkp_v0_10_1_jacobi32_maybe_var(&x, &m); CHECK(jac == 0 || jac == 1 - (mod[0] & 2)); } @@ -1063,7 +1063,7 @@ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16 mutate_sign_signed30(&m.modulus); for (vartime = 0; vartime < 2; ++vartime) { /* compute inverse */ - (vartime ? rustsecp256k1zkp_v0_10_0_modinv32_var : rustsecp256k1zkp_v0_10_0_modinv32)(&x, &m); + (vartime ? 
rustsecp256k1zkp_v0_10_1_modinv32_var : rustsecp256k1zkp_v0_10_1_modinv32)(&x, &m); /* produce output */ signed30_to_uint16(out, &x); @@ -1074,7 +1074,7 @@ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16 for (i = 1; i < 16; ++i) CHECK(tmp[i] == 0); /* invert again */ - (vartime ? rustsecp256k1zkp_v0_10_0_modinv32_var : rustsecp256k1zkp_v0_10_0_modinv32)(&x, &m); + (vartime ? rustsecp256k1zkp_v0_10_1_modinv32_var : rustsecp256k1zkp_v0_10_1_modinv32)(&x, &m); /* check if the result is equal to the input */ signed30_to_uint16(tmp, &x); @@ -1084,7 +1084,7 @@ static void test_modinv32_uint16(uint16_t* out, const uint16_t* in, const uint16 #ifdef SECP256K1_WIDEMUL_INT128 /* Convert a 256-bit number represented as 16 uint16_t's to signed62 notation. */ -static void uint16_to_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62* out, const uint16_t* in) { +static void uint16_to_signed62(rustsecp256k1zkp_v0_10_1_modinv64_signed62* out, const uint16_t* in) { int i; memset(out->v, 0, sizeof(out->v)); for (i = 0; i < 256; ++i) { @@ -1093,7 +1093,7 @@ static void uint16_to_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62* out, } /* Convert a 256-bit number in signed62 notation to a representation as 16 uint16_t's. */ -static void signed62_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_0_modinv64_signed62* in) { +static void signed62_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_1_modinv64_signed62* in) { int i; memset(out, 0, 32); for (i = 0; i < 256; ++i) { @@ -1102,11 +1102,11 @@ static void signed62_to_uint16(uint16_t* out, const rustsecp256k1zkp_v0_10_0_mod } /* Randomly mutate the sign of limbs in signed62 representation, without changing the value. 
*/ -static void mutate_sign_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62* x) { +static void mutate_sign_signed62(rustsecp256k1zkp_v0_10_1_modinv64_signed62* x) { static const int64_t M62 = (int64_t)(UINT64_MAX >> 2); int i; for (i = 0; i < 8; ++i) { - int pos = rustsecp256k1zkp_v0_10_0_testrand_bits(2); + int pos = rustsecp256k1zkp_v0_10_1_testrand_bits(2); if (x->v[pos] > 0 && x->v[pos + 1] <= M62) { x->v[pos] -= (M62 + 1); x->v[pos + 1] += 1; @@ -1117,12 +1117,12 @@ static void mutate_sign_signed62(rustsecp256k1zkp_v0_10_0_modinv64_signed62* x) } } -/* Test rustsecp256k1zkp_v0_10_0_modinv64{_var}, using inputs in 16-bit limb format, and returning inverse. */ +/* Test rustsecp256k1zkp_v0_10_1_modinv64{_var}, using inputs in 16-bit limb format, and returning inverse. */ static void test_modinv64_uint16(uint16_t* out, const uint16_t* in, const uint16_t* mod) { static const int64_t M62 = (int64_t)(UINT64_MAX >> 2); uint16_t tmp[16]; - rustsecp256k1zkp_v0_10_0_modinv64_signed62 x; - rustsecp256k1zkp_v0_10_0_modinv64_modinfo m; + rustsecp256k1zkp_v0_10_1_modinv64_signed62 x; + rustsecp256k1zkp_v0_10_1_modinv64_modinfo m; int i, vartime, nonzero; uint16_to_signed62(&x, in); @@ -1133,14 +1133,14 @@ static void test_modinv64_uint16(uint16_t* out, const uint16_t* in, const uint16 m.modulus_inv62 = modinv2p64(m.modulus.v[0]) & M62; CHECK(((m.modulus_inv62 * m.modulus.v[0]) & M62) == 1); - /* Test rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var. */ + /* Test rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var. */ if (nonzero) { int jac; uint16_t sqr[16], negone[16]; mulmod256(sqr, in, in, mod); uint16_to_signed62(&x, sqr); /* Compute jacobi symbol of in^2, which must be 1 (or uncomputable). */ - jac = rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(&x, &m); + jac = rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var(&x, &m); CHECK(jac == 0 || jac == 1); /* Then compute the jacobi symbol of -(in^2). x and -x have opposite * jacobi symbols if and only if (mod % 4) == 3. 
*/ @@ -1148,7 +1148,7 @@ static void test_modinv64_uint16(uint16_t* out, const uint16_t* in, const uint16 for (i = 1; i < 16; ++i) negone[i] = mod[i]; mulmod256(sqr, sqr, negone, mod); uint16_to_signed62(&x, sqr); - jac = rustsecp256k1zkp_v0_10_0_jacobi64_maybe_var(&x, &m); + jac = rustsecp256k1zkp_v0_10_1_jacobi64_maybe_var(&x, &m); CHECK(jac == 0 || jac == 1 - (mod[0] & 2)); } @@ -1156,7 +1156,7 @@ static void test_modinv64_uint16(uint16_t* out, const uint16_t* in, const uint16 mutate_sign_signed62(&m.modulus); for (vartime = 0; vartime < 2; ++vartime) { /* compute inverse */ - (vartime ? rustsecp256k1zkp_v0_10_0_modinv64_var : rustsecp256k1zkp_v0_10_0_modinv64)(&x, &m); + (vartime ? rustsecp256k1zkp_v0_10_1_modinv64_var : rustsecp256k1zkp_v0_10_1_modinv64)(&x, &m); /* produce output */ signed62_to_uint16(out, &x); @@ -1167,7 +1167,7 @@ static void test_modinv64_uint16(uint16_t* out, const uint16_t* in, const uint16 for (i = 1; i < 16; ++i) CHECK(tmp[i] == 0); /* invert again */ - (vartime ? rustsecp256k1zkp_v0_10_0_modinv64_var : rustsecp256k1zkp_v0_10_0_modinv64)(&x, &m); + (vartime ? 
rustsecp256k1zkp_v0_10_1_modinv64_var : rustsecp256k1zkp_v0_10_1_modinv64)(&x, &m); /* check if the result is equal to the input */ signed62_to_uint16(tmp, &x); @@ -1819,8 +1819,8 @@ static void run_modinv_tests(void) { /* generate random xd and md, so that md is odd, md>1, xd> (j % 16)) uwa_bits = 1 + j; } for (j = 0; j < 128; ++j) { - CHECK(rustsecp256k1zkp_v0_10_0_u128_check_bits(&uwa, j) == (uwa_bits <= j)); + CHECK(rustsecp256k1zkp_v0_10_1_u128_check_bits(&uwa, j) == (uwa_bits <= j)); } } - /* test rustsecp256k1zkp_v0_10_0_i128_mul */ + /* test rustsecp256k1zkp_v0_10_1_i128_mul */ mulmod256(rswr, rsb, rsc, NULL); - rustsecp256k1zkp_v0_10_0_i128_mul(&swz, sb, sc); + rustsecp256k1zkp_v0_10_1_i128_mul(&swz, sb, sc); load256i128(rswz, &swz); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(rswr, rswz, 16) == 0); - /* test rustsecp256k1zkp_v0_10_0_i128_accum_mul */ + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(rswr, rswz, 16) == 0); + /* test rustsecp256k1zkp_v0_10_1_i128_accum_mul */ mulmod256(rswr, rsb, rsc, NULL); add256(rswr, rswr, rswa); if (int256is127(rswr)) { swz = swa; - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&swz, sb, sc); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&swz, sb, sc); load256i128(rswz, &swz); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(rswr, rswz, 16) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(rswr, rswz, 16) == 0); } - /* test rustsecp256k1zkp_v0_10_0_i128_det */ + /* test rustsecp256k1zkp_v0_10_1_i128_det */ { uint16_t rsd[16], rse[16], rst[32]; int64_t sd = v[0], se = v[1]; @@ -2041,25 +2041,25 @@ static void run_int128_test_case(void) { neg256(rst, rst); mulmod256(rswr, rsb, rse, NULL); add256(rswr, rswr, rst); - rustsecp256k1zkp_v0_10_0_i128_det(&swz, sb, sc, sd, se); + rustsecp256k1zkp_v0_10_1_i128_det(&swz, sb, sc, sd, se); load256i128(rswz, &swz); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(rswr, rswz, 16) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(rswr, rswz, 16) == 0); } - /* test rustsecp256k1zkp_v0_10_0_i128_rshift */ + /* 
test rustsecp256k1zkp_v0_10_1_i128_rshift */ rshift256(rswr, rswa, uc % 127, 1); swz = swa; - rustsecp256k1zkp_v0_10_0_i128_rshift(&swz, uc % 127); + rustsecp256k1zkp_v0_10_1_i128_rshift(&swz, uc % 127); load256i128(rswz, &swz); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(rswr, rswz, 16) == 0); - /* test rustsecp256k1zkp_v0_10_0_i128_to_u64 */ - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&swa) == v[0]); - /* test rustsecp256k1zkp_v0_10_0_i128_from_i64 */ - rustsecp256k1zkp_v0_10_0_i128_from_i64(&swz, sb); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(rswr, rswz, 16) == 0); + /* test rustsecp256k1zkp_v0_10_1_i128_to_u64 */ + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&swa) == v[0]); + /* test rustsecp256k1zkp_v0_10_1_i128_from_i64 */ + rustsecp256k1zkp_v0_10_1_i128_from_i64(&swz, sb); load256i128(rswz, &swz); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(rsb, rswz, 16) == 0); - /* test rustsecp256k1zkp_v0_10_0_i128_to_i64 */ - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_i64(&swz) == sb); - /* test rustsecp256k1zkp_v0_10_0_i128_eq_var */ + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(rsb, rswz, 16) == 0); + /* test rustsecp256k1zkp_v0_10_1_i128_to_i64 */ + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_i64(&swz) == sb); + /* test rustsecp256k1zkp_v0_10_1_i128_eq_var */ { int expect = (uc & 1); swz = swa; @@ -2071,11 +2071,11 @@ static void run_int128_test_case(void) { } else { v0c ^= (((uint64_t)1) << (ub & 63)); } - rustsecp256k1zkp_v0_10_0_i128_load(&swz, v1c, v0c); + rustsecp256k1zkp_v0_10_1_i128_load(&swz, v1c, v0c); } - CHECK(rustsecp256k1zkp_v0_10_0_i128_eq_var(&swa, &swz) == expect); + CHECK(rustsecp256k1zkp_v0_10_1_i128_eq_var(&swa, &swz) == expect); } - /* test rustsecp256k1zkp_v0_10_0_i128_check_pow2 (sign == 1) */ + /* test rustsecp256k1zkp_v0_10_1_i128_check_pow2 (sign == 1) */ { int expect = (uc & 1); int pos = ub % 127; @@ -2088,7 +2088,7 @@ static void run_int128_test_case(void) { } else { lo = (((uint64_t)1) << (pos & 63)); } - rustsecp256k1zkp_v0_10_0_i128_load(&swz, 
hi, lo); + rustsecp256k1zkp_v0_10_1_i128_load(&swz, hi, lo); } else { /* If expect==0, set swz = swa, but update expect=1 if swa happens to equal 2^pos. */ if (pos >= 64) { @@ -2098,9 +2098,9 @@ static void run_int128_test_case(void) { } swz = swa; } - CHECK(rustsecp256k1zkp_v0_10_0_i128_check_pow2(&swz, pos, 1) == expect); + CHECK(rustsecp256k1zkp_v0_10_1_i128_check_pow2(&swz, pos, 1) == expect); } - /* test rustsecp256k1zkp_v0_10_0_i128_check_pow2 (sign == -1) */ + /* test rustsecp256k1zkp_v0_10_1_i128_check_pow2 (sign == -1) */ { int expect = (uc & 1); int pos = ub % 127; @@ -2114,7 +2114,7 @@ static void run_int128_test_case(void) { } else { lo <<= (pos & 63); } - rustsecp256k1zkp_v0_10_0_i128_load(&swz, hi, lo); + rustsecp256k1zkp_v0_10_1_i128_load(&swz, hi, lo); } else { /* If expect==0, set swz = swa, but update expect=1 if swa happens to equal -2^pos. */ if (pos >= 64) { @@ -2124,42 +2124,42 @@ static void run_int128_test_case(void) { } swz = swa; } - CHECK(rustsecp256k1zkp_v0_10_0_i128_check_pow2(&swz, pos, -1) == expect); + CHECK(rustsecp256k1zkp_v0_10_1_i128_check_pow2(&swz, pos, -1) == expect); } } static void run_int128_tests(void) { - { /* rustsecp256k1zkp_v0_10_0_u128_accum_mul */ - rustsecp256k1zkp_v0_10_0_uint128 res; - - /* Check rustsecp256k1zkp_v0_10_0_u128_accum_mul overflow */ - rustsecp256k1zkp_v0_10_0_u128_mul(&res, UINT64_MAX, UINT64_MAX); - rustsecp256k1zkp_v0_10_0_u128_accum_mul(&res, UINT64_MAX, UINT64_MAX); - CHECK(rustsecp256k1zkp_v0_10_0_u128_to_u64(&res) == 2); - CHECK(rustsecp256k1zkp_v0_10_0_u128_hi_u64(&res) == 18446744073709551612U); - } - { /* rustsecp256k1zkp_v0_10_0_u128_accum_mul */ - rustsecp256k1zkp_v0_10_0_int128 res; - - /* Compute INT128_MAX = 2^127 - 1 with rustsecp256k1zkp_v0_10_0_i128_accum_mul */ - rustsecp256k1zkp_v0_10_0_i128_mul(&res, INT64_MAX, INT64_MAX); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&res, INT64_MAX, INT64_MAX); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&res) == 2); - 
rustsecp256k1zkp_v0_10_0_i128_accum_mul(&res, 4, 9223372036854775807); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&res, 1, 1); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&res) == UINT64_MAX); - rustsecp256k1zkp_v0_10_0_i128_rshift(&res, 64); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_i64(&res) == INT64_MAX); - - /* Compute INT128_MIN = - 2^127 with rustsecp256k1zkp_v0_10_0_i128_accum_mul */ - rustsecp256k1zkp_v0_10_0_i128_mul(&res, INT64_MAX, INT64_MIN); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&res) == (uint64_t)INT64_MIN); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&res, INT64_MAX, INT64_MIN); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&res) == 0); - rustsecp256k1zkp_v0_10_0_i128_accum_mul(&res, 2, INT64_MIN); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_u64(&res) == 0); - rustsecp256k1zkp_v0_10_0_i128_rshift(&res, 64); - CHECK(rustsecp256k1zkp_v0_10_0_i128_to_i64(&res) == INT64_MIN); + { /* rustsecp256k1zkp_v0_10_1_u128_accum_mul */ + rustsecp256k1zkp_v0_10_1_uint128 res; + + /* Check rustsecp256k1zkp_v0_10_1_u128_accum_mul overflow */ + rustsecp256k1zkp_v0_10_1_u128_mul(&res, UINT64_MAX, UINT64_MAX); + rustsecp256k1zkp_v0_10_1_u128_accum_mul(&res, UINT64_MAX, UINT64_MAX); + CHECK(rustsecp256k1zkp_v0_10_1_u128_to_u64(&res) == 2); + CHECK(rustsecp256k1zkp_v0_10_1_u128_hi_u64(&res) == 18446744073709551612U); + } + { /* rustsecp256k1zkp_v0_10_1_u128_accum_mul */ + rustsecp256k1zkp_v0_10_1_int128 res; + + /* Compute INT128_MAX = 2^127 - 1 with rustsecp256k1zkp_v0_10_1_i128_accum_mul */ + rustsecp256k1zkp_v0_10_1_i128_mul(&res, INT64_MAX, INT64_MAX); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&res, INT64_MAX, INT64_MAX); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&res) == 2); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&res, 4, 9223372036854775807); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&res, 1, 1); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&res) == UINT64_MAX); + rustsecp256k1zkp_v0_10_1_i128_rshift(&res, 64); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_i64(&res) 
== INT64_MAX); + + /* Compute INT128_MIN = - 2^127 with rustsecp256k1zkp_v0_10_1_i128_accum_mul */ + rustsecp256k1zkp_v0_10_1_i128_mul(&res, INT64_MAX, INT64_MIN); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&res) == (uint64_t)INT64_MIN); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&res, INT64_MAX, INT64_MIN); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&res) == 0); + rustsecp256k1zkp_v0_10_1_i128_accum_mul(&res, 2, INT64_MIN); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_u64(&res) == 0); + rustsecp256k1zkp_v0_10_1_i128_rshift(&res, 64); + CHECK(rustsecp256k1zkp_v0_10_1_i128_to_i64(&res) == INT64_MIN); } { /* Randomized tests. */ @@ -2172,9 +2172,9 @@ static void run_int128_tests(void) { /***** SCALAR TESTS *****/ static void scalar_test(void) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar s1; - rustsecp256k1zkp_v0_10_0_scalar s2; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s1; + rustsecp256k1zkp_v0_10_1_scalar s2; unsigned char c[32]; /* Set 's' to a random scalar, with value 'snum'. */ @@ -2185,170 +2185,170 @@ static void scalar_test(void) { /* Set 's2' to a random scalar, with value 'snum2', and byte array representation 'c'. */ random_scalar_order_test(&s2); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(c, &s2); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(c, &s2); { int i; /* Test that fetching groups of 4 bits from a scalar and recursing n(i)=16*n(i-1)+p(i) reconstructs it. 
*/ - rustsecp256k1zkp_v0_10_0_scalar n; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&n, 0); + rustsecp256k1zkp_v0_10_1_scalar n; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&n, 0); for (i = 0; i < 256; i += 4) { - rustsecp256k1zkp_v0_10_0_scalar t; + rustsecp256k1zkp_v0_10_1_scalar t; int j; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, rustsecp256k1zkp_v0_10_0_scalar_get_bits(&s, 256 - 4 - i, 4)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, rustsecp256k1zkp_v0_10_1_scalar_get_bits(&s, 256 - 4 - i, 4)); for (j = 0; j < 4; j++) { - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, &n); + rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, &n); } - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, &t); + rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, &t); } - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&n, &s)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&n, &s)); } { /* Test that fetching groups of randomly-sized bits from a scalar and recursing n(i)=b*n(i-1)+p(i) reconstructs it. */ - rustsecp256k1zkp_v0_10_0_scalar n; + rustsecp256k1zkp_v0_10_1_scalar n; int i = 0; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&n, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&n, 0); while (i < 256) { - rustsecp256k1zkp_v0_10_0_scalar t; + rustsecp256k1zkp_v0_10_1_scalar t; int j; - int now = rustsecp256k1zkp_v0_10_0_testrand_int(15) + 1; + int now = rustsecp256k1zkp_v0_10_1_testrand_int(15) + 1; if (now + i > 256) { now = 256 - i; } - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, rustsecp256k1zkp_v0_10_0_scalar_get_bits_var(&s, 256 - now - i, now)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, rustsecp256k1zkp_v0_10_1_scalar_get_bits_var(&s, 256 - now - i, now)); for (j = 0; j < now; j++) { - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, &n); + rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, &n); } - rustsecp256k1zkp_v0_10_0_scalar_add(&n, &n, &t); + rustsecp256k1zkp_v0_10_1_scalar_add(&n, &n, &t); i += now; } - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&n, &s)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&n, &s)); } { 
/* Test commutativity of add. */ - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &s1, &s2); - rustsecp256k1zkp_v0_10_0_scalar_add(&r2, &s2, &s1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &s1, &s2); + rustsecp256k1zkp_v0_10_1_scalar_add(&r2, &s2, &s1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar b; + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar b; int i; /* Test add_bit. */ - int bit = rustsecp256k1zkp_v0_10_0_testrand_bits(8); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&b, 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_one(&b)); + int bit = rustsecp256k1zkp_v0_10_1_testrand_bits(8); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&b, 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_one(&b)); for (i = 0; i < bit; i++) { - rustsecp256k1zkp_v0_10_0_scalar_add(&b, &b, &b); + rustsecp256k1zkp_v0_10_1_scalar_add(&b, &b, &b); } r1 = s1; r2 = s1; - if (!rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &r1, &b)) { + if (!rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &r1, &b)) { /* No overflow happened. */ - rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(&r2, bit, 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(&r2, bit, 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); /* cadd is a noop when flag is zero */ - rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(&r2, bit, 0); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(&r2, bit, 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } } { /* Test commutativity of mul. 
*/ - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &s1, &s2); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r2, &s2, &s1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &s1, &s2); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r2, &s2, &s1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { /* Test associativity of add. */ - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &s1, &s2); - rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &r1, &s); - rustsecp256k1zkp_v0_10_0_scalar_add(&r2, &s2, &s); - rustsecp256k1zkp_v0_10_0_scalar_add(&r2, &s1, &r2); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &s1, &s2); + rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &r1, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&r2, &s2, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&r2, &s1, &r2); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { /* Test associativity of mul. */ - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &s1, &s2); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &r1, &s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r2, &s2, &s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r2, &s1, &r2); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &s1, &s2); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &r1, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r2, &s2, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r2, &s1, &r2); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { /* Test distributitivity of mul over add. 
*/ - rustsecp256k1zkp_v0_10_0_scalar r1, r2, t; - rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &s1, &s2); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &r1, &s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r2, &s1, &s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&t, &s2, &s); - rustsecp256k1zkp_v0_10_0_scalar_add(&r2, &r2, &t); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2, t; + rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &s1, &s2); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &r1, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r2, &s1, &s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&t, &s2, &s); + rustsecp256k1zkp_v0_10_1_scalar_add(&r2, &r2, &t); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { /* Test square. */ - rustsecp256k1zkp_v0_10_0_scalar r1, r2; - rustsecp256k1zkp_v0_10_0_scalar_sqr(&r1, &s1); - rustsecp256k1zkp_v0_10_0_scalar_mul(&r2, &s1, &s1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &r2)); + rustsecp256k1zkp_v0_10_1_scalar r1, r2; + rustsecp256k1zkp_v0_10_1_scalar_sqr(&r1, &s1); + rustsecp256k1zkp_v0_10_1_scalar_mul(&r2, &s1, &s1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &r2)); } { /* Test multiplicative identity. */ - rustsecp256k1zkp_v0_10_0_scalar r1; - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &s1, &rustsecp256k1zkp_v0_10_0_scalar_one); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &s1)); + rustsecp256k1zkp_v0_10_1_scalar r1; + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &s1, &rustsecp256k1zkp_v0_10_1_scalar_one); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &s1)); } { /* Test additive identity. */ - rustsecp256k1zkp_v0_10_0_scalar r1; - rustsecp256k1zkp_v0_10_0_scalar_add(&r1, &s1, &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &s1)); + rustsecp256k1zkp_v0_10_1_scalar r1; + rustsecp256k1zkp_v0_10_1_scalar_add(&r1, &s1, &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &s1)); } { /* Test zero product property. 
*/ - rustsecp256k1zkp_v0_10_0_scalar r1; - rustsecp256k1zkp_v0_10_0_scalar_mul(&r1, &s1, &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &rustsecp256k1zkp_v0_10_0_scalar_zero)); + rustsecp256k1zkp_v0_10_1_scalar r1; + rustsecp256k1zkp_v0_10_1_scalar_mul(&r1, &s1, &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &rustsecp256k1zkp_v0_10_1_scalar_zero)); } { /* Test halving. */ - rustsecp256k1zkp_v0_10_0_scalar r; - rustsecp256k1zkp_v0_10_0_scalar_add(&r, &s, &s); - rustsecp256k1zkp_v0_10_0_scalar_half(&r, &r); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r, &s)); + rustsecp256k1zkp_v0_10_1_scalar r; + rustsecp256k1zkp_v0_10_1_scalar_add(&r, &s, &s); + rustsecp256k1zkp_v0_10_1_scalar_half(&r, &r); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r, &s)); } } static void run_scalar_set_b32_seckey_tests(void) { unsigned char b32[32]; - rustsecp256k1zkp_v0_10_0_scalar s1; - rustsecp256k1zkp_v0_10_0_scalar s2; + rustsecp256k1zkp_v0_10_1_scalar s1; + rustsecp256k1zkp_v0_10_1_scalar s2; /* Usually set_b32 and set_b32_seckey give the same result */ random_scalar_order_b32(b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&s1, b32, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&s2, b32) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&s1, &s2) == 1); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&s1, b32, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&s2, b32) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&s1, &s2) == 1); memset(b32, 0, sizeof(b32)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&s2, b32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&s2, b32) == 0); memset(b32, 0xFF, sizeof(b32)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_set_b32_seckey(&s2, b32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_set_b32_seckey(&s2, b32) == 0); } static void run_scalar_tests(void) { @@ -2361,32 +2361,32 @@ static void run_scalar_tests(void) { } { - /* 
Check that the scalar constants rustsecp256k1zkp_v0_10_0_scalar_zero and - rustsecp256k1zkp_v0_10_0_scalar_one contain the expected values. */ - rustsecp256k1zkp_v0_10_0_scalar zero, one; + /* Check that the scalar constants rustsecp256k1zkp_v0_10_1_scalar_zero and + rustsecp256k1zkp_v0_10_1_scalar_one contain the expected values. */ + rustsecp256k1zkp_v0_10_1_scalar zero, one; - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&rustsecp256k1zkp_v0_10_0_scalar_zero)); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&zero, 0); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&zero, &rustsecp256k1zkp_v0_10_0_scalar_zero)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&rustsecp256k1zkp_v0_10_1_scalar_zero)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&zero, 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&zero, &rustsecp256k1zkp_v0_10_1_scalar_zero)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_one(&rustsecp256k1zkp_v0_10_0_scalar_one)); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&one, 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&one, &rustsecp256k1zkp_v0_10_0_scalar_one)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_one(&rustsecp256k1zkp_v0_10_1_scalar_one)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&one, 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&one, &rustsecp256k1zkp_v0_10_1_scalar_one)); } { /* (-1)+1 should be zero. 
*/ - rustsecp256k1zkp_v0_10_0_scalar o; - rustsecp256k1zkp_v0_10_0_scalar_negate(&o, &rustsecp256k1zkp_v0_10_0_scalar_one); - rustsecp256k1zkp_v0_10_0_scalar_add(&o, &o, &rustsecp256k1zkp_v0_10_0_scalar_one); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&o)); - rustsecp256k1zkp_v0_10_0_scalar_negate(&o, &o); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&o)); + rustsecp256k1zkp_v0_10_1_scalar o; + rustsecp256k1zkp_v0_10_1_scalar_negate(&o, &rustsecp256k1zkp_v0_10_1_scalar_one); + rustsecp256k1zkp_v0_10_1_scalar_add(&o, &o, &rustsecp256k1zkp_v0_10_1_scalar_one); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&o)); + rustsecp256k1zkp_v0_10_1_scalar_negate(&o, &o); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&o)); } { /* Test that halving and doubling roundtrips on some fixed values. */ - static const rustsecp256k1zkp_v0_10_0_scalar HALF_TESTS[] = { + static const rustsecp256k1zkp_v0_10_1_scalar HALF_TESTS[] = { /* 0 */ SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0), /* 1 */ @@ -2406,23 +2406,23 @@ static void run_scalar_tests(void) { }; unsigned n; for (n = 0; n < sizeof(HALF_TESTS) / sizeof(HALF_TESTS[0]); ++n) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar_half(&s, &HALF_TESTS[n]); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &s); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&s, &HALF_TESTS[n])); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &s); - rustsecp256k1zkp_v0_10_0_scalar_half(&s, &s); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&s, &HALF_TESTS[n])); + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar_half(&s, &HALF_TESTS[n]); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &s); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&s, &HALF_TESTS[n])); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &s); + rustsecp256k1zkp_v0_10_1_scalar_half(&s, &s); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&s, &HALF_TESTS[n])); } } { /* Does check_overflow check catch all ones? 
*/ - static const rustsecp256k1zkp_v0_10_0_scalar overflowed = SECP256K1_SCALAR_CONST( + static const rustsecp256k1zkp_v0_10_1_scalar overflowed = SECP256K1_SCALAR_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL ); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_check_overflow(&overflowed)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_check_overflow(&overflowed)); } { @@ -2431,13 +2431,13 @@ static void run_scalar_tests(void) { * and edge-case coverage on 32-bit and 64-bit implementations. * The responses were generated with Sage 5.9. */ - rustsecp256k1zkp_v0_10_0_scalar x; - rustsecp256k1zkp_v0_10_0_scalar y; - rustsecp256k1zkp_v0_10_0_scalar z; - rustsecp256k1zkp_v0_10_0_scalar zz; - rustsecp256k1zkp_v0_10_0_scalar r1; - rustsecp256k1zkp_v0_10_0_scalar r2; - rustsecp256k1zkp_v0_10_0_scalar zzv; + rustsecp256k1zkp_v0_10_1_scalar x; + rustsecp256k1zkp_v0_10_1_scalar y; + rustsecp256k1zkp_v0_10_1_scalar z; + rustsecp256k1zkp_v0_10_1_scalar zz; + rustsecp256k1zkp_v0_10_1_scalar r1; + rustsecp256k1zkp_v0_10_1_scalar r2; + rustsecp256k1zkp_v0_10_1_scalar zzv; int overflow; unsigned char chal[33][2][32] = { {{0xff, 0xff, 0x03, 0x07, 0x00, 0x00, 0x00, 0x00, @@ -2972,52 +2972,52 @@ static void run_scalar_tests(void) { 0x5c, 0x02, 0x97, 0x1b, 0x62, 0x43, 0x86, 0xf5}} }; for (i = 0; i < 33; i++) { - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x, chal[i][0], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x, chal[i][0], &overflow); CHECK(!overflow); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&y, chal[i][1], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&y, chal[i][1], &overflow); CHECK(!overflow); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&r1, res[i][0], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&r1, res[i][0], &overflow); CHECK(!overflow); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&r2, res[i][1], &overflow); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&r2, res[i][1], &overflow); CHECK(!overflow); 
- rustsecp256k1zkp_v0_10_0_scalar_mul(&z, &x, &y); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_check_overflow(&z)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r1, &z)); - if (!rustsecp256k1zkp_v0_10_0_scalar_is_zero(&y)) { - rustsecp256k1zkp_v0_10_0_scalar_inverse(&zz, &y); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_check_overflow(&zz)); - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&zzv, &y); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&zzv, &zz)); - rustsecp256k1zkp_v0_10_0_scalar_mul(&z, &z, &zz); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_check_overflow(&z)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&x, &z)); - rustsecp256k1zkp_v0_10_0_scalar_mul(&zz, &zz, &y); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_check_overflow(&zz)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&rustsecp256k1zkp_v0_10_0_scalar_one, &zz)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&z, &x, &y); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_check_overflow(&z)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r1, &z)); + if (!rustsecp256k1zkp_v0_10_1_scalar_is_zero(&y)) { + rustsecp256k1zkp_v0_10_1_scalar_inverse(&zz, &y); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_check_overflow(&zz)); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&zzv, &y); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&zzv, &zz)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&z, &z, &zz); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_check_overflow(&z)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&x, &z)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&zz, &zz, &y); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_check_overflow(&zz)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&rustsecp256k1zkp_v0_10_1_scalar_one, &zz)); } - rustsecp256k1zkp_v0_10_0_scalar_mul(&z, &x, &x); - rustsecp256k1zkp_v0_10_0_scalar_sqr(&zz, &x); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&zz, &z)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&r2, &z)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&z, &x, &x); + rustsecp256k1zkp_v0_10_1_scalar_sqr(&zz, &x); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&zz, 
&z)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&r2, &z)); } } } /***** FIELD TESTS *****/ -static void random_fe_non_square(rustsecp256k1zkp_v0_10_0_fe *ns) { - rustsecp256k1zkp_v0_10_0_fe r; +static void random_fe_non_square(rustsecp256k1zkp_v0_10_1_fe *ns) { + rustsecp256k1zkp_v0_10_1_fe r; random_fe_non_zero(ns); - if (rustsecp256k1zkp_v0_10_0_fe_sqrt(&r, ns)) { - rustsecp256k1zkp_v0_10_0_fe_negate(ns, ns, 1); + if (rustsecp256k1zkp_v0_10_1_fe_sqrt(&r, ns)) { + rustsecp256k1zkp_v0_10_1_fe_negate(ns, ns, 1); } } -static int check_fe_equal(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { - rustsecp256k1zkp_v0_10_0_fe an = *a; - rustsecp256k1zkp_v0_10_0_fe bn = *b; - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&an); - return rustsecp256k1zkp_v0_10_0_fe_equal(&an, &bn); +static int check_fe_equal(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { + rustsecp256k1zkp_v0_10_1_fe an = *a; + rustsecp256k1zkp_v0_10_1_fe bn = *b; + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&an); + return rustsecp256k1zkp_v0_10_1_fe_equal(&an, &bn); } static void run_field_convert(void) { @@ -3027,27 +3027,27 @@ static void run_field_convert(void) { 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x40 }; - static const rustsecp256k1zkp_v0_10_0_fe_storage fes = SECP256K1_FE_STORAGE_CONST( + static const rustsecp256k1zkp_v0_10_1_fe_storage fes = SECP256K1_FE_STORAGE_CONST( 0x00010203UL, 0x04050607UL, 0x11121314UL, 0x15161718UL, 0x22232425UL, 0x26272829UL, 0x33343536UL, 0x37383940UL ); - static const rustsecp256k1zkp_v0_10_0_fe fe = SECP256K1_FE_CONST( + static const rustsecp256k1zkp_v0_10_1_fe fe = SECP256K1_FE_CONST( 0x00010203UL, 0x04050607UL, 0x11121314UL, 0x15161718UL, 0x22232425UL, 0x26272829UL, 0x33343536UL, 0x37383940UL ); - rustsecp256k1zkp_v0_10_0_fe fe2; + rustsecp256k1zkp_v0_10_1_fe fe2; unsigned char b322[32]; - rustsecp256k1zkp_v0_10_0_fe_storage fes2; + 
rustsecp256k1zkp_v0_10_1_fe_storage fes2; /* Check conversions to fe. */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fe2, b32)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&fe, &fe2)); - rustsecp256k1zkp_v0_10_0_fe_from_storage(&fe2, &fes); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&fe, &fe2)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fe2, b32)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&fe, &fe2)); + rustsecp256k1zkp_v0_10_1_fe_from_storage(&fe2, &fes); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&fe, &fe2)); /* Check conversion from fe. */ - rustsecp256k1zkp_v0_10_0_fe_get_b32(b322, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(b322, b32, 32) == 0); - rustsecp256k1zkp_v0_10_0_fe_to_storage(&fes2, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&fes2, &fes, sizeof(fes)) == 0); + rustsecp256k1zkp_v0_10_1_fe_get_b32(b322, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(b322, b32, 32) == 0); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&fes2, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&fes2, &fes, sizeof(fes)) == 0); } static void run_field_be32_overflow(void) { @@ -3060,14 +3060,14 @@ static void run_field_be32_overflow(void) { }; static const unsigned char zero[32] = { 0x00 }; unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_fe fe; - CHECK(rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fe, zero_overflow) == 0); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&fe, zero_overflow); - CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&fe) == 1); - rustsecp256k1zkp_v0_10_0_fe_normalize(&fe); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&fe) == 1); - rustsecp256k1zkp_v0_10_0_fe_get_b32(out, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, zero, 32) == 0); + rustsecp256k1zkp_v0_10_1_fe fe; + CHECK(rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fe, zero_overflow) == 0); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&fe, zero_overflow); + CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&fe) == 1); + rustsecp256k1zkp_v0_10_1_fe_normalize(&fe); + 
CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&fe) == 1); + rustsecp256k1zkp_v0_10_1_fe_get_b32(out, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, zero, 32) == 0); } { static const unsigned char one_overflow[32] = { @@ -3083,13 +3083,13 @@ static void run_field_be32_overflow(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, }; unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_fe fe; - CHECK(rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fe, one_overflow) == 0); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&fe, one_overflow); - rustsecp256k1zkp_v0_10_0_fe_normalize(&fe); - CHECK(rustsecp256k1zkp_v0_10_0_fe_cmp_var(&fe, &rustsecp256k1zkp_v0_10_0_fe_one) == 0); - rustsecp256k1zkp_v0_10_0_fe_get_b32(out, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, one, 32) == 0); + rustsecp256k1zkp_v0_10_1_fe fe; + CHECK(rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fe, one_overflow) == 0); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&fe, one_overflow); + rustsecp256k1zkp_v0_10_1_fe_normalize(&fe); + CHECK(rustsecp256k1zkp_v0_10_1_fe_cmp_var(&fe, &rustsecp256k1zkp_v0_10_1_fe_one) == 0); + rustsecp256k1zkp_v0_10_1_fe_get_b32(out, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, one, 32) == 0); } { static const unsigned char ff_overflow[32] = { @@ -3105,116 +3105,116 @@ static void run_field_be32_overflow(void) { 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x03, 0xD0, }; unsigned char out[32]; - rustsecp256k1zkp_v0_10_0_fe fe; - const rustsecp256k1zkp_v0_10_0_fe fe_ff = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0x01, 0x000003d0); - CHECK(rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(&fe, ff_overflow) == 0); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&fe, ff_overflow); - rustsecp256k1zkp_v0_10_0_fe_normalize(&fe); - CHECK(rustsecp256k1zkp_v0_10_0_fe_cmp_var(&fe, &fe_ff) == 0); - rustsecp256k1zkp_v0_10_0_fe_get_b32(out, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(out, ff, 32) == 0); + rustsecp256k1zkp_v0_10_1_fe fe; + const rustsecp256k1zkp_v0_10_1_fe fe_ff = SECP256K1_FE_CONST(0, 0, 
0, 0, 0, 0, 0x01, 0x000003d0); + CHECK(rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(&fe, ff_overflow) == 0); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&fe, ff_overflow); + rustsecp256k1zkp_v0_10_1_fe_normalize(&fe); + CHECK(rustsecp256k1zkp_v0_10_1_fe_cmp_var(&fe, &fe_ff) == 0); + rustsecp256k1zkp_v0_10_1_fe_get_b32(out, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(out, ff, 32) == 0); } } /* Returns true if two field elements have the same representation. */ -static int fe_identical(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *b) { +static int fe_identical(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *b) { int ret = 1; /* Compare the struct member that holds the limbs. */ - ret &= (rustsecp256k1zkp_v0_10_0_memcmp_var(a->n, b->n, sizeof(a->n)) == 0); + ret &= (rustsecp256k1zkp_v0_10_1_memcmp_var(a->n, b->n, sizeof(a->n)) == 0); return ret; } static void run_field_half(void) { - rustsecp256k1zkp_v0_10_0_fe t, u; + rustsecp256k1zkp_v0_10_1_fe t, u; int m; /* Check magnitude 0 input */ - rustsecp256k1zkp_v0_10_0_fe_get_bounds(&t, 0); - rustsecp256k1zkp_v0_10_0_fe_half(&t); + rustsecp256k1zkp_v0_10_1_fe_get_bounds(&t, 0); + rustsecp256k1zkp_v0_10_1_fe_half(&t); #ifdef VERIFY CHECK(t.magnitude == 1); CHECK(t.normalized == 0); #endif - CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&t)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&t)); /* Check non-zero magnitudes in the supported range */ for (m = 1; m < 32; m++) { /* Check max-value input */ - rustsecp256k1zkp_v0_10_0_fe_get_bounds(&t, m); + rustsecp256k1zkp_v0_10_1_fe_get_bounds(&t, m); u = t; - rustsecp256k1zkp_v0_10_0_fe_half(&u); + rustsecp256k1zkp_v0_10_1_fe_half(&u); #ifdef VERIFY CHECK(u.magnitude == (m >> 1) + 1); CHECK(u.normalized == 0); #endif - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&u); - rustsecp256k1zkp_v0_10_0_fe_add(&u, &u); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&u); + rustsecp256k1zkp_v0_10_1_fe_add(&u, &u); 
CHECK(check_fe_equal(&t, &u)); /* Check worst-case input: ensure the LSB is 1 so that P will be added, * which will also cause all carries to be 1, since all limbs that can * generate a carry are initially even and all limbs of P are odd in * every existing field implementation. */ - rustsecp256k1zkp_v0_10_0_fe_get_bounds(&t, m); + rustsecp256k1zkp_v0_10_1_fe_get_bounds(&t, m); CHECK(t.n[0] > 0); CHECK((t.n[0] & 1) == 0); --t.n[0]; u = t; - rustsecp256k1zkp_v0_10_0_fe_half(&u); + rustsecp256k1zkp_v0_10_1_fe_half(&u); #ifdef VERIFY CHECK(u.magnitude == (m >> 1) + 1); CHECK(u.normalized == 0); #endif - rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&u); - rustsecp256k1zkp_v0_10_0_fe_add(&u, &u); + rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&u); + rustsecp256k1zkp_v0_10_1_fe_add(&u, &u); CHECK(check_fe_equal(&t, &u)); } } static void run_field_misc(void) { - rustsecp256k1zkp_v0_10_0_fe x; - rustsecp256k1zkp_v0_10_0_fe y; - rustsecp256k1zkp_v0_10_0_fe z; - rustsecp256k1zkp_v0_10_0_fe q; + rustsecp256k1zkp_v0_10_1_fe x; + rustsecp256k1zkp_v0_10_1_fe y; + rustsecp256k1zkp_v0_10_1_fe z; + rustsecp256k1zkp_v0_10_1_fe q; int v; - rustsecp256k1zkp_v0_10_0_fe fe5 = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 5); + rustsecp256k1zkp_v0_10_1_fe fe5 = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 5); int i, j; for (i = 0; i < 1000 * COUNT; i++) { - rustsecp256k1zkp_v0_10_0_fe_storage xs, ys, zs; + rustsecp256k1zkp_v0_10_1_fe_storage xs, ys, zs; if (i & 1) { random_fe(&x); } else { random_fe_test(&x); } random_fe_non_zero(&y); - v = rustsecp256k1zkp_v0_10_0_testrand_bits(15); + v = rustsecp256k1zkp_v0_10_1_testrand_bits(15); /* Test that fe_add_int is equivalent to fe_set_int + fe_add. 
*/ - rustsecp256k1zkp_v0_10_0_fe_set_int(&q, v); /* q = v */ + rustsecp256k1zkp_v0_10_1_fe_set_int(&q, v); /* q = v */ z = x; /* z = x */ - rustsecp256k1zkp_v0_10_0_fe_add(&z, &q); /* z = x+v */ + rustsecp256k1zkp_v0_10_1_fe_add(&z, &q); /* z = x+v */ q = x; /* q = x */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&q, v); /* q = x+v */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&q, v); /* q = x+v */ CHECK(check_fe_equal(&q, &z)); /* Test the fe equality and comparison operations. */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_cmp_var(&x, &x) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&x, &x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_cmp_var(&x, &x) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&x, &x)); z = x; - rustsecp256k1zkp_v0_10_0_fe_add(&z,&y); + rustsecp256k1zkp_v0_10_1_fe_add(&z,&y); /* Test fe conditional move; z is not normalized here. */ q = x; - rustsecp256k1zkp_v0_10_0_fe_cmov(&x, &z, 0); + rustsecp256k1zkp_v0_10_1_fe_cmov(&x, &z, 0); #ifdef VERIFY CHECK(!x.normalized); CHECK((x.magnitude == q.magnitude) || (x.magnitude == z.magnitude)); CHECK((x.magnitude >= q.magnitude) && (x.magnitude >= z.magnitude)); #endif x = q; - rustsecp256k1zkp_v0_10_0_fe_cmov(&x, &x, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&x, &x, 1); CHECK(!fe_identical(&x, &z)); CHECK(fe_identical(&x, &q)); - rustsecp256k1zkp_v0_10_0_fe_cmov(&q, &z, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&q, &z, 1); #ifdef VERIFY CHECK(!q.normalized); CHECK((q.magnitude == x.magnitude) || (q.magnitude == z.magnitude)); @@ -3222,67 +3222,67 @@ static void run_field_misc(void) { #endif CHECK(fe_identical(&q, &z)); q = z; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&z); - CHECK(!rustsecp256k1zkp_v0_10_0_fe_equal(&x, &z)); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&q); - rustsecp256k1zkp_v0_10_0_fe_cmov(&q, &z, (i&1)); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&z); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_equal(&x, &z)); + 
rustsecp256k1zkp_v0_10_1_fe_normalize_var(&q); + rustsecp256k1zkp_v0_10_1_fe_cmov(&q, &z, (i&1)); #ifdef VERIFY CHECK(q.normalized && q.magnitude == 1); #endif for (j = 0; j < 6; j++) { - rustsecp256k1zkp_v0_10_0_fe_negate_unchecked(&z, &z, j+1); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&q); - rustsecp256k1zkp_v0_10_0_fe_cmov(&q, &z, (j&1)); + rustsecp256k1zkp_v0_10_1_fe_negate_unchecked(&z, &z, j+1); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&q); + rustsecp256k1zkp_v0_10_1_fe_cmov(&q, &z, (j&1)); #ifdef VERIFY CHECK(!q.normalized && q.magnitude == z.magnitude); #endif } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&z); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&z); /* Test storage conversion and conditional moves. */ - rustsecp256k1zkp_v0_10_0_fe_to_storage(&xs, &x); - rustsecp256k1zkp_v0_10_0_fe_to_storage(&ys, &y); - rustsecp256k1zkp_v0_10_0_fe_to_storage(&zs, &z); - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&zs, &xs, 0); - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&zs, &zs, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xs, &zs, sizeof(xs)) != 0); - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&ys, &xs, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&xs, &ys, sizeof(xs)) == 0); - rustsecp256k1zkp_v0_10_0_fe_from_storage(&x, &xs); - rustsecp256k1zkp_v0_10_0_fe_from_storage(&y, &ys); - rustsecp256k1zkp_v0_10_0_fe_from_storage(&z, &zs); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&xs, &x); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&ys, &y); + rustsecp256k1zkp_v0_10_1_fe_to_storage(&zs, &z); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&zs, &xs, 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&zs, &zs, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xs, &zs, sizeof(xs)) != 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&ys, &xs, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&xs, &ys, sizeof(xs)) == 0); + rustsecp256k1zkp_v0_10_1_fe_from_storage(&x, &xs); + rustsecp256k1zkp_v0_10_1_fe_from_storage(&y, &ys); + rustsecp256k1zkp_v0_10_1_fe_from_storage(&z, &zs); /* Test 
that mul_int, mul, and add agree. */ - rustsecp256k1zkp_v0_10_0_fe_add(&y, &x); - rustsecp256k1zkp_v0_10_0_fe_add(&y, &x); + rustsecp256k1zkp_v0_10_1_fe_add(&y, &x); + rustsecp256k1zkp_v0_10_1_fe_add(&y, &x); z = x; - rustsecp256k1zkp_v0_10_0_fe_mul_int(&z, 3); + rustsecp256k1zkp_v0_10_1_fe_mul_int(&z, 3); CHECK(check_fe_equal(&y, &z)); - rustsecp256k1zkp_v0_10_0_fe_add(&y, &x); - rustsecp256k1zkp_v0_10_0_fe_add(&z, &x); + rustsecp256k1zkp_v0_10_1_fe_add(&y, &x); + rustsecp256k1zkp_v0_10_1_fe_add(&z, &x); CHECK(check_fe_equal(&z, &y)); z = x; - rustsecp256k1zkp_v0_10_0_fe_mul_int(&z, 5); - rustsecp256k1zkp_v0_10_0_fe_mul(&q, &x, &fe5); + rustsecp256k1zkp_v0_10_1_fe_mul_int(&z, 5); + rustsecp256k1zkp_v0_10_1_fe_mul(&q, &x, &fe5); CHECK(check_fe_equal(&z, &q)); - rustsecp256k1zkp_v0_10_0_fe_negate(&x, &x, 1); - rustsecp256k1zkp_v0_10_0_fe_add(&z, &x); - rustsecp256k1zkp_v0_10_0_fe_add(&q, &x); + rustsecp256k1zkp_v0_10_1_fe_negate(&x, &x, 1); + rustsecp256k1zkp_v0_10_1_fe_add(&z, &x); + rustsecp256k1zkp_v0_10_1_fe_add(&q, &x); CHECK(check_fe_equal(&y, &z)); CHECK(check_fe_equal(&q, &y)); - /* Check rustsecp256k1zkp_v0_10_0_fe_half. */ + /* Check rustsecp256k1zkp_v0_10_1_fe_half. */ z = x; - rustsecp256k1zkp_v0_10_0_fe_half(&z); - rustsecp256k1zkp_v0_10_0_fe_add(&z, &z); + rustsecp256k1zkp_v0_10_1_fe_half(&z); + rustsecp256k1zkp_v0_10_1_fe_add(&z, &z); CHECK(check_fe_equal(&x, &z)); - rustsecp256k1zkp_v0_10_0_fe_add(&z, &z); - rustsecp256k1zkp_v0_10_0_fe_half(&z); + rustsecp256k1zkp_v0_10_1_fe_add(&z, &z); + rustsecp256k1zkp_v0_10_1_fe_half(&z); CHECK(check_fe_equal(&x, &z)); } } -static void test_fe_mul(const rustsecp256k1zkp_v0_10_0_fe* a, const rustsecp256k1zkp_v0_10_0_fe* b, int use_sqr) +static void test_fe_mul(const rustsecp256k1zkp_v0_10_1_fe* a, const rustsecp256k1zkp_v0_10_1_fe* b, int use_sqr) { - rustsecp256k1zkp_v0_10_0_fe c, an, bn; + rustsecp256k1zkp_v0_10_1_fe c, an, bn; /* Variables in BE 32-byte format. 
*/ unsigned char a32[32], b32[32], c32[32]; /* Variables in LE 16x uint16_t format. */ @@ -3298,20 +3298,20 @@ static void test_fe_mul(const rustsecp256k1zkp_v0_10_0_fe* a, const rustsecp256k /* Compute C = A * B in fe format. */ c = *a; if (use_sqr) { - rustsecp256k1zkp_v0_10_0_fe_sqr(&c, &c); + rustsecp256k1zkp_v0_10_1_fe_sqr(&c, &c); } else { - rustsecp256k1zkp_v0_10_0_fe_mul(&c, &c, b); + rustsecp256k1zkp_v0_10_1_fe_mul(&c, &c, b); } /* Convert A, B, C into LE 16x uint16_t format. */ an = *a; bn = *b; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&c); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&an); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&bn); - rustsecp256k1zkp_v0_10_0_fe_get_b32(a32, &an); - rustsecp256k1zkp_v0_10_0_fe_get_b32(b32, &bn); - rustsecp256k1zkp_v0_10_0_fe_get_b32(c32, &c); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&c); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&an); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&bn); + rustsecp256k1zkp_v0_10_1_fe_get_b32(a32, &an); + rustsecp256k1zkp_v0_10_1_fe_get_b32(b32, &bn); + rustsecp256k1zkp_v0_10_1_fe_get_b32(c32, &c); for (i = 0; i < 16; ++i) { a16[i] = a32[31 - 2*i] + ((uint16_t)a32[30 - 2*i] << 8); b16[i] = b32[31 - 2*i] + ((uint16_t)b32[30 - 2*i] << 8); @@ -3320,13 +3320,13 @@ static void test_fe_mul(const rustsecp256k1zkp_v0_10_0_fe* a, const rustsecp256k /* Compute T = A * B in LE 16x uint16_t format. 
*/ mulmod256(t16, a16, b16, m16); /* Compare */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(t16, c16, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(t16, c16, 32) == 0); } static void run_fe_mul(void) { int i; for (i = 0; i < 100 * COUNT; ++i) { - rustsecp256k1zkp_v0_10_0_fe a, b, c, d; + rustsecp256k1zkp_v0_10_1_fe a, b, c, d; random_fe(&a); random_fe_magnitude(&a); random_fe(&b); @@ -3345,50 +3345,50 @@ static void run_fe_mul(void) { } static void run_sqr(void) { - rustsecp256k1zkp_v0_10_0_fe x, s; + rustsecp256k1zkp_v0_10_1_fe x, s; { int i; - rustsecp256k1zkp_v0_10_0_fe_set_int(&x, 1); - rustsecp256k1zkp_v0_10_0_fe_negate(&x, &x, 1); + rustsecp256k1zkp_v0_10_1_fe_set_int(&x, 1); + rustsecp256k1zkp_v0_10_1_fe_negate(&x, &x, 1); for (i = 1; i <= 512; ++i) { - rustsecp256k1zkp_v0_10_0_fe_mul_int(&x, 2); - rustsecp256k1zkp_v0_10_0_fe_normalize(&x); - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &x); + rustsecp256k1zkp_v0_10_1_fe_mul_int(&x, 2); + rustsecp256k1zkp_v0_10_1_fe_normalize(&x); + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &x); } } } -static void test_sqrt(const rustsecp256k1zkp_v0_10_0_fe *a, const rustsecp256k1zkp_v0_10_0_fe *k) { - rustsecp256k1zkp_v0_10_0_fe r1, r2; - int v = rustsecp256k1zkp_v0_10_0_fe_sqrt(&r1, a); +static void test_sqrt(const rustsecp256k1zkp_v0_10_1_fe *a, const rustsecp256k1zkp_v0_10_1_fe *k) { + rustsecp256k1zkp_v0_10_1_fe r1, r2; + int v = rustsecp256k1zkp_v0_10_1_fe_sqrt(&r1, a); CHECK((v == 0) == (k == NULL)); if (k != NULL) { /* Check that the returned root is +/- the given known answer */ - rustsecp256k1zkp_v0_10_0_fe_negate(&r2, &r1, 1); - rustsecp256k1zkp_v0_10_0_fe_add(&r1, k); rustsecp256k1zkp_v0_10_0_fe_add(&r2, k); - rustsecp256k1zkp_v0_10_0_fe_normalize(&r1); rustsecp256k1zkp_v0_10_0_fe_normalize(&r2); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&r1) || rustsecp256k1zkp_v0_10_0_fe_is_zero(&r2)); + rustsecp256k1zkp_v0_10_1_fe_negate(&r2, &r1, 1); + rustsecp256k1zkp_v0_10_1_fe_add(&r1, k); rustsecp256k1zkp_v0_10_1_fe_add(&r2, 
k); + rustsecp256k1zkp_v0_10_1_fe_normalize(&r1); rustsecp256k1zkp_v0_10_1_fe_normalize(&r2); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&r1) || rustsecp256k1zkp_v0_10_1_fe_is_zero(&r2)); } } static void run_sqrt(void) { - rustsecp256k1zkp_v0_10_0_fe ns, x, s, t; + rustsecp256k1zkp_v0_10_1_fe ns, x, s, t; int i; /* Check sqrt(0) is 0 */ - rustsecp256k1zkp_v0_10_0_fe_set_int(&x, 0); - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &x); + rustsecp256k1zkp_v0_10_1_fe_set_int(&x, 0); + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &x); test_sqrt(&s, &x); /* Check sqrt of small squares (and their negatives) */ for (i = 1; i <= 100; i++) { - rustsecp256k1zkp_v0_10_0_fe_set_int(&x, i); - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &x); + rustsecp256k1zkp_v0_10_1_fe_set_int(&x, i); + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &x); test_sqrt(&s, &x); - rustsecp256k1zkp_v0_10_0_fe_negate(&t, &s, 1); + rustsecp256k1zkp_v0_10_1_fe_negate(&t, &s, 1); test_sqrt(&t, NULL); } @@ -3398,13 +3398,13 @@ static void run_sqrt(void) { random_fe_non_square(&ns); for (j = 0; j < COUNT; j++) { random_fe(&x); - rustsecp256k1zkp_v0_10_0_fe_sqr(&s, &x); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_square_var(&s)); + rustsecp256k1zkp_v0_10_1_fe_sqr(&s, &x); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_square_var(&s)); test_sqrt(&s, &x); - rustsecp256k1zkp_v0_10_0_fe_negate(&t, &s, 1); - CHECK(!rustsecp256k1zkp_v0_10_0_fe_is_square_var(&t)); + rustsecp256k1zkp_v0_10_1_fe_negate(&t, &s, 1); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_is_square_var(&t)); test_sqrt(&t, NULL); - rustsecp256k1zkp_v0_10_0_fe_mul(&t, &s, &ns); + rustsecp256k1zkp_v0_10_1_fe_mul(&t, &s, &ns); test_sqrt(&t, NULL); } } @@ -3412,12 +3412,12 @@ static void run_sqrt(void) { /***** FIELD/SCALAR INVERSE TESTS *****/ -static const rustsecp256k1zkp_v0_10_0_scalar scalar_minus_one = SECP256K1_SCALAR_CONST( +static const rustsecp256k1zkp_v0_10_1_scalar scalar_minus_one = SECP256K1_SCALAR_CONST( 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE, 0xBAAEDCE6, 0xAF48A03B, 0xBFD25E8C, 
0xD0364140 ); -static const rustsecp256k1zkp_v0_10_0_fe fe_minus_one = SECP256K1_FE_CONST( +static const rustsecp256k1zkp_v0_10_1_fe fe_minus_one = SECP256K1_FE_CONST( 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFE, 0xFFFFFC2E ); @@ -3429,57 +3429,57 @@ static const rustsecp256k1zkp_v0_10_0_fe fe_minus_one = SECP256K1_FE_CONST( * for x!=0 and x!=1: 1/(1/x - 1) + 1 == -1/(x-1) */ -static void test_inverse_scalar(rustsecp256k1zkp_v0_10_0_scalar* out, const rustsecp256k1zkp_v0_10_0_scalar* x, int var) +static void test_inverse_scalar(rustsecp256k1zkp_v0_10_1_scalar* out, const rustsecp256k1zkp_v0_10_1_scalar* x, int var) { - rustsecp256k1zkp_v0_10_0_scalar l, r, t; + rustsecp256k1zkp_v0_10_1_scalar l, r, t; - (var ? rustsecp256k1zkp_v0_10_0_scalar_inverse_var : rustsecp256k1zkp_v0_10_0_scalar_inverse)(&l, x); /* l = 1/x */ + (var ? rustsecp256k1zkp_v0_10_1_scalar_inverse_var : rustsecp256k1zkp_v0_10_1_scalar_inverse)(&l, x); /* l = 1/x */ if (out) *out = l; - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(x)) { - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&l)); + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(x)) { + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&l)); return; } - rustsecp256k1zkp_v0_10_0_scalar_mul(&t, x, &l); /* t = x*(1/x) */ - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_one(&t)); /* x*(1/x) == 1 */ - rustsecp256k1zkp_v0_10_0_scalar_add(&r, x, &scalar_minus_one); /* r = x-1 */ - if (rustsecp256k1zkp_v0_10_0_scalar_is_zero(&r)) return; - (var ? rustsecp256k1zkp_v0_10_0_scalar_inverse_var : rustsecp256k1zkp_v0_10_0_scalar_inverse)(&r, &r); /* r = 1/(x-1) */ - rustsecp256k1zkp_v0_10_0_scalar_add(&l, &scalar_minus_one, &l); /* l = 1/x-1 */ - (var ? 
rustsecp256k1zkp_v0_10_0_scalar_inverse_var : rustsecp256k1zkp_v0_10_0_scalar_inverse)(&l, &l); /* l = 1/(1/x-1) */ - rustsecp256k1zkp_v0_10_0_scalar_add(&l, &l, &rustsecp256k1zkp_v0_10_0_scalar_one); /* l = 1/(1/x-1)+1 */ - rustsecp256k1zkp_v0_10_0_scalar_add(&l, &r, &l); /* l = 1/(1/x-1)+1 + 1/(x-1) */ - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&l)); /* l == 0 */ + rustsecp256k1zkp_v0_10_1_scalar_mul(&t, x, &l); /* t = x*(1/x) */ + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_one(&t)); /* x*(1/x) == 1 */ + rustsecp256k1zkp_v0_10_1_scalar_add(&r, x, &scalar_minus_one); /* r = x-1 */ + if (rustsecp256k1zkp_v0_10_1_scalar_is_zero(&r)) return; + (var ? rustsecp256k1zkp_v0_10_1_scalar_inverse_var : rustsecp256k1zkp_v0_10_1_scalar_inverse)(&r, &r); /* r = 1/(x-1) */ + rustsecp256k1zkp_v0_10_1_scalar_add(&l, &scalar_minus_one, &l); /* l = 1/x-1 */ + (var ? rustsecp256k1zkp_v0_10_1_scalar_inverse_var : rustsecp256k1zkp_v0_10_1_scalar_inverse)(&l, &l); /* l = 1/(1/x-1) */ + rustsecp256k1zkp_v0_10_1_scalar_add(&l, &l, &rustsecp256k1zkp_v0_10_1_scalar_one); /* l = 1/(1/x-1)+1 */ + rustsecp256k1zkp_v0_10_1_scalar_add(&l, &r, &l); /* l = 1/(1/x-1)+1 + 1/(x-1) */ + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&l)); /* l == 0 */ } -static void test_inverse_field(rustsecp256k1zkp_v0_10_0_fe* out, const rustsecp256k1zkp_v0_10_0_fe* x, int var) +static void test_inverse_field(rustsecp256k1zkp_v0_10_1_fe* out, const rustsecp256k1zkp_v0_10_1_fe* x, int var) { - rustsecp256k1zkp_v0_10_0_fe l, r, t; + rustsecp256k1zkp_v0_10_1_fe l, r, t; - (var ? rustsecp256k1zkp_v0_10_0_fe_inv_var : rustsecp256k1zkp_v0_10_0_fe_inv)(&l, x) ; /* l = 1/x */ + (var ? 
rustsecp256k1zkp_v0_10_1_fe_inv_var : rustsecp256k1zkp_v0_10_1_fe_inv)(&l, x) ; /* l = 1/x */ if (out) *out = l; t = *x; /* t = x */ - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&t)) { - CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&l)); + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&t)) { + CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&l)); return; } - rustsecp256k1zkp_v0_10_0_fe_mul(&t, x, &l); /* t = x*(1/x) */ - rustsecp256k1zkp_v0_10_0_fe_add(&t, &fe_minus_one); /* t = x*(1/x)-1 */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero(&t)); /* x*(1/x)-1 == 0 */ + rustsecp256k1zkp_v0_10_1_fe_mul(&t, x, &l); /* t = x*(1/x) */ + rustsecp256k1zkp_v0_10_1_fe_add(&t, &fe_minus_one); /* t = x*(1/x)-1 */ + CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero(&t)); /* x*(1/x)-1 == 0 */ r = *x; /* r = x */ - rustsecp256k1zkp_v0_10_0_fe_add(&r, &fe_minus_one); /* r = x-1 */ - if (rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&r)) return; - (var ? rustsecp256k1zkp_v0_10_0_fe_inv_var : rustsecp256k1zkp_v0_10_0_fe_inv)(&r, &r); /* r = 1/(x-1) */ - rustsecp256k1zkp_v0_10_0_fe_add(&l, &fe_minus_one); /* l = 1/x-1 */ - (var ? rustsecp256k1zkp_v0_10_0_fe_inv_var : rustsecp256k1zkp_v0_10_0_fe_inv)(&l, &l); /* l = 1/(1/x-1) */ - rustsecp256k1zkp_v0_10_0_fe_add_int(&l, 1); /* l = 1/(1/x-1)+1 */ - rustsecp256k1zkp_v0_10_0_fe_add(&l, &r); /* l = 1/(1/x-1)+1 + 1/(x-1) */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&l)); /* l == 0 */ + rustsecp256k1zkp_v0_10_1_fe_add(&r, &fe_minus_one); /* r = x-1 */ + if (rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&r)) return; + (var ? rustsecp256k1zkp_v0_10_1_fe_inv_var : rustsecp256k1zkp_v0_10_1_fe_inv)(&r, &r); /* r = 1/(x-1) */ + rustsecp256k1zkp_v0_10_1_fe_add(&l, &fe_minus_one); /* l = 1/x-1 */ + (var ? 
rustsecp256k1zkp_v0_10_1_fe_inv_var : rustsecp256k1zkp_v0_10_1_fe_inv)(&l, &l); /* l = 1/(1/x-1) */ + rustsecp256k1zkp_v0_10_1_fe_add_int(&l, 1); /* l = 1/(1/x-1)+1 */ + rustsecp256k1zkp_v0_10_1_fe_add(&l, &r); /* l = 1/(1/x-1)+1 + 1/(x-1) */ + CHECK(rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&l)); /* l == 0 */ } static void run_inverse_tests(void) { /* Fixed test cases for field inverses: pairs of (x, 1/x) mod p. */ - static const rustsecp256k1zkp_v0_10_0_fe fe_cases[][2] = { + static const rustsecp256k1zkp_v0_10_1_fe fe_cases[][2] = { /* 0 */ {SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0)}, @@ -3584,7 +3584,7 @@ static void run_inverse_tests(void) SECP256K1_FE_CONST(0x9a94b9b5, 0x57eb71ee, 0x4c975b8b, 0xac5262a8, 0x077b0595, 0xe12a6b1f, 0xd728edef, 0x1a6bf956)} }; /* Fixed test cases for scalar inverses: pairs of (x, 1/x) mod n. */ - static const rustsecp256k1zkp_v0_10_0_scalar scalar_cases[][2] = { + static const rustsecp256k1zkp_v0_10_1_scalar scalar_cases[][2] = { /* 0 */ {SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0), SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0)}, @@ -3671,8 +3671,8 @@ static void run_inverse_tests(void) }; int i, var, testrand; unsigned char b32[32]; - rustsecp256k1zkp_v0_10_0_fe x_fe; - rustsecp256k1zkp_v0_10_0_scalar x_scalar; + rustsecp256k1zkp_v0_10_1_fe x_fe; + rustsecp256k1zkp_v0_10_1_scalar x_scalar; memset(b32, 0, sizeof(b32)); /* Test fixed test cases through test_inverse_{scalar,field}, both ways. 
*/ for (i = 0; (size_t)i < sizeof(fe_cases)/sizeof(fe_cases[0]); ++i) { @@ -3686,23 +3686,23 @@ static void run_inverse_tests(void) for (i = 0; (size_t)i < sizeof(scalar_cases)/sizeof(scalar_cases[0]); ++i) { for (var = 0; var <= 1; ++var) { test_inverse_scalar(&x_scalar, &scalar_cases[i][0], var); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&x_scalar, &scalar_cases[i][1])); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&x_scalar, &scalar_cases[i][1])); test_inverse_scalar(&x_scalar, &scalar_cases[i][1], var); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&x_scalar, &scalar_cases[i][0])); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&x_scalar, &scalar_cases[i][0])); } } /* Test inputs 0..999 and their respective negations. */ for (i = 0; i < 1000; ++i) { b32[31] = i & 0xff; b32[30] = (i >> 8) & 0xff; - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x_scalar, b32, NULL); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&x_fe, b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x_scalar, b32, NULL); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&x_fe, b32); for (var = 0; var <= 1; ++var) { test_inverse_scalar(NULL, &x_scalar, var); test_inverse_field(NULL, &x_fe, var); } - rustsecp256k1zkp_v0_10_0_scalar_negate(&x_scalar, &x_scalar); - rustsecp256k1zkp_v0_10_0_fe_negate(&x_fe, &x_fe, 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&x_scalar, &x_scalar); + rustsecp256k1zkp_v0_10_1_fe_negate(&x_fe, &x_fe, 1); for (var = 0; var <= 1; ++var) { test_inverse_scalar(NULL, &x_scalar, var); test_inverse_field(NULL, &x_fe, var); @@ -3711,9 +3711,9 @@ static void run_inverse_tests(void) /* test 128*count random inputs; half with testrand256_test, half with testrand256 */ for (testrand = 0; testrand <= 1; ++testrand) { for (i = 0; i < 64 * COUNT; ++i) { - (testrand ? rustsecp256k1zkp_v0_10_0_testrand256_test : rustsecp256k1zkp_v0_10_0_testrand256)(b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x_scalar, b32, NULL); - rustsecp256k1zkp_v0_10_0_fe_set_b32_mod(&x_fe, b32); + (testrand ? 
rustsecp256k1zkp_v0_10_1_testrand256_test : rustsecp256k1zkp_v0_10_1_testrand256)(b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x_scalar, b32, NULL); + rustsecp256k1zkp_v0_10_1_fe_set_b32_mod(&x_fe, b32); for (var = 0; var <= 1; ++var) { test_inverse_scalar(NULL, &x_scalar, var); test_inverse_field(NULL, &x_fe, var); @@ -3725,23 +3725,23 @@ static void run_inverse_tests(void) /***** GROUP TESTS *****/ /* This compares jacobian points including their Z, not just their geometric meaning. */ -static int gej_xyz_equals_gej(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b) { - rustsecp256k1zkp_v0_10_0_gej a2; - rustsecp256k1zkp_v0_10_0_gej b2; +static int gej_xyz_equals_gej(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b) { + rustsecp256k1zkp_v0_10_1_gej a2; + rustsecp256k1zkp_v0_10_1_gej b2; int ret = 1; ret &= a->infinity == b->infinity; if (ret && !a->infinity) { a2 = *a; b2 = *b; - rustsecp256k1zkp_v0_10_0_fe_normalize(&a2.x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&a2.y); - rustsecp256k1zkp_v0_10_0_fe_normalize(&a2.z); - rustsecp256k1zkp_v0_10_0_fe_normalize(&b2.x); - rustsecp256k1zkp_v0_10_0_fe_normalize(&b2.y); - rustsecp256k1zkp_v0_10_0_fe_normalize(&b2.z); - ret &= rustsecp256k1zkp_v0_10_0_fe_cmp_var(&a2.x, &b2.x) == 0; - ret &= rustsecp256k1zkp_v0_10_0_fe_cmp_var(&a2.y, &b2.y) == 0; - ret &= rustsecp256k1zkp_v0_10_0_fe_cmp_var(&a2.z, &b2.z) == 0; + rustsecp256k1zkp_v0_10_1_fe_normalize(&a2.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&a2.y); + rustsecp256k1zkp_v0_10_1_fe_normalize(&a2.z); + rustsecp256k1zkp_v0_10_1_fe_normalize(&b2.x); + rustsecp256k1zkp_v0_10_1_fe_normalize(&b2.y); + rustsecp256k1zkp_v0_10_1_fe_normalize(&b2.z); + ret &= rustsecp256k1zkp_v0_10_1_fe_cmp_var(&a2.x, &b2.x) == 0; + ret &= rustsecp256k1zkp_v0_10_1_fe_cmp_var(&a2.y, &b2.y) == 0; + ret &= rustsecp256k1zkp_v0_10_1_fe_cmp_var(&a2.z, &b2.z) == 0; } return ret; } @@ -3755,32 +3755,32 @@ static void test_ge(void) { * negation, 
and then those two again but with randomized Z coordinate. * - The same is then done for lambda*p1 and lambda^2*p1. */ - rustsecp256k1zkp_v0_10_0_ge *ge = (rustsecp256k1zkp_v0_10_0_ge *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_0_ge) * (1 + 4 * runs)); - rustsecp256k1zkp_v0_10_0_gej *gej = (rustsecp256k1zkp_v0_10_0_gej *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_0_gej) * (1 + 4 * runs)); - rustsecp256k1zkp_v0_10_0_fe zf, r; - rustsecp256k1zkp_v0_10_0_fe zfi2, zfi3; - - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&gej[0]); - rustsecp256k1zkp_v0_10_0_ge_clear(&ge[0]); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&ge[0], &gej[0]); + rustsecp256k1zkp_v0_10_1_ge *ge = (rustsecp256k1zkp_v0_10_1_ge *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_1_ge) * (1 + 4 * runs)); + rustsecp256k1zkp_v0_10_1_gej *gej = (rustsecp256k1zkp_v0_10_1_gej *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_1_gej) * (1 + 4 * runs)); + rustsecp256k1zkp_v0_10_1_fe zf, r; + rustsecp256k1zkp_v0_10_1_fe zfi2, zfi3; + + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&gej[0]); + rustsecp256k1zkp_v0_10_1_ge_clear(&ge[0]); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&ge[0], &gej[0]); for (i = 0; i < runs; i++) { int j, k; - rustsecp256k1zkp_v0_10_0_ge g; + rustsecp256k1zkp_v0_10_1_ge g; random_group_element_test(&g); if (i >= runs - 2) { - rustsecp256k1zkp_v0_10_0_ge_mul_lambda(&g, &ge[1]); - CHECK(!rustsecp256k1zkp_v0_10_0_ge_eq_var(&g, &ge[1])); + rustsecp256k1zkp_v0_10_1_ge_mul_lambda(&g, &ge[1]); + CHECK(!rustsecp256k1zkp_v0_10_1_ge_eq_var(&g, &ge[1])); } if (i >= runs - 1) { - rustsecp256k1zkp_v0_10_0_ge_mul_lambda(&g, &g); + rustsecp256k1zkp_v0_10_1_ge_mul_lambda(&g, &g); } ge[1 + 4 * i] = g; ge[2 + 4 * i] = g; - rustsecp256k1zkp_v0_10_0_ge_neg(&ge[3 + 4 * i], &g); - rustsecp256k1zkp_v0_10_0_ge_neg(&ge[4 + 4 * i], &g); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gej[1 + 4 * i], &ge[1 + 4 * i]); + 
rustsecp256k1zkp_v0_10_1_ge_neg(&ge[3 + 4 * i], &g); + rustsecp256k1zkp_v0_10_1_ge_neg(&ge[4 + 4 * i], &g); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gej[1 + 4 * i], &ge[1 + 4 * i]); random_group_element_jacobian_test(&gej[2 + 4 * i], &ge[2 + 4 * i]); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gej[3 + 4 * i], &ge[3 + 4 * i]); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gej[3 + 4 * i], &ge[3 + 4 * i]); random_group_element_jacobian_test(&gej[4 + 4 * i], &ge[4 + 4 * i]); for (j = 0; j < 4; j++) { random_ge_x_magnitude(&ge[1 + j + 4 * i]); @@ -3793,10 +3793,10 @@ static void test_ge(void) { for (j = 0; j < 4; ++j) { for (k = 0; k < 4; ++k) { int expect_equal = (j >> 1) == (k >> 1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&ge[1 + j + 4 * i], &ge[1 + k + 4 * i]) == expect_equal); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&gej[1 + j + 4 * i], &gej[1 + k + 4 * i]) == expect_equal); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[1 + j + 4 * i], &ge[1 + k + 4 * i]) == expect_equal); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[1 + k + 4 * i], &ge[1 + j + 4 * i]) == expect_equal); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&ge[1 + j + 4 * i], &ge[1 + k + 4 * i]) == expect_equal); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&gej[1 + j + 4 * i], &gej[1 + k + 4 * i]) == expect_equal); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[1 + j + 4 * i], &ge[1 + k + 4 * i]) == expect_equal); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[1 + k + 4 * i], &ge[1 + j + 4 * i]) == expect_equal); } } } @@ -3804,9 +3804,9 @@ static void test_ge(void) { /* Generate random zf, and zfi2 = 1/zf^2, zfi3 = 1/zf^3 */ random_fe_non_zero_test(&zf); random_fe_magnitude(&zf); - rustsecp256k1zkp_v0_10_0_fe_inv_var(&zfi3, &zf); - rustsecp256k1zkp_v0_10_0_fe_sqr(&zfi2, &zfi3); - rustsecp256k1zkp_v0_10_0_fe_mul(&zfi3, &zfi3, &zfi2); + rustsecp256k1zkp_v0_10_1_fe_inv_var(&zfi3, &zf); + rustsecp256k1zkp_v0_10_1_fe_sqr(&zfi2, &zfi3); + rustsecp256k1zkp_v0_10_1_fe_mul(&zfi3, &zfi3, &zfi2); /* 
Generate random r */ random_fe_non_zero_test(&r); @@ -3815,165 +3815,165 @@ static void test_ge(void) { int i2; for (i2 = 0; i2 < 1 + 4 * runs; i2++) { /* Compute reference result using gej + gej (var). */ - rustsecp256k1zkp_v0_10_0_gej refj, resj; - rustsecp256k1zkp_v0_10_0_ge ref; - rustsecp256k1zkp_v0_10_0_fe zr; - rustsecp256k1zkp_v0_10_0_gej_add_var(&refj, &gej[i1], &gej[i2], rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i1]) ? NULL : &zr); + rustsecp256k1zkp_v0_10_1_gej refj, resj; + rustsecp256k1zkp_v0_10_1_ge ref; + rustsecp256k1zkp_v0_10_1_fe zr; + rustsecp256k1zkp_v0_10_1_gej_add_var(&refj, &gej[i1], &gej[i2], rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i1]) ? NULL : &zr); /* Check Z ratio. */ - if (!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i1]) && !rustsecp256k1zkp_v0_10_0_gej_is_infinity(&refj)) { - rustsecp256k1zkp_v0_10_0_fe zrz; rustsecp256k1zkp_v0_10_0_fe_mul(&zrz, &zr, &gej[i1].z); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&zrz, &refj.z)); + if (!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i1]) && !rustsecp256k1zkp_v0_10_1_gej_is_infinity(&refj)) { + rustsecp256k1zkp_v0_10_1_fe zrz; rustsecp256k1zkp_v0_10_1_fe_mul(&zrz, &zr, &gej[i1].z); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&zrz, &refj.z)); } - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&ref, &refj); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&ref, &refj); /* Test gej + ge with Z ratio result (var). */ - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&resj, &gej[i1], &ge[i2], rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i1]) ? NULL : &zr); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); - if (!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i1]) && !rustsecp256k1zkp_v0_10_0_gej_is_infinity(&resj)) { - rustsecp256k1zkp_v0_10_0_fe zrz; rustsecp256k1zkp_v0_10_0_fe_mul(&zrz, &zr, &gej[i1].z); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&zrz, &resj.z)); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&resj, &gej[i1], &ge[i2], rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i1]) ? 
NULL : &zr); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); + if (!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i1]) && !rustsecp256k1zkp_v0_10_1_gej_is_infinity(&resj)) { + rustsecp256k1zkp_v0_10_1_fe zrz; rustsecp256k1zkp_v0_10_1_fe_mul(&zrz, &zr, &gej[i1].z); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&zrz, &resj.z)); } /* Test gej + ge (var, with additional Z factor). */ { - rustsecp256k1zkp_v0_10_0_ge ge2_zfi = ge[i2]; /* the second term with x and y rescaled for z = 1/zf */ - rustsecp256k1zkp_v0_10_0_fe_mul(&ge2_zfi.x, &ge2_zfi.x, &zfi2); - rustsecp256k1zkp_v0_10_0_fe_mul(&ge2_zfi.y, &ge2_zfi.y, &zfi3); + rustsecp256k1zkp_v0_10_1_ge ge2_zfi = ge[i2]; /* the second term with x and y rescaled for z = 1/zf */ + rustsecp256k1zkp_v0_10_1_fe_mul(&ge2_zfi.x, &ge2_zfi.x, &zfi2); + rustsecp256k1zkp_v0_10_1_fe_mul(&ge2_zfi.y, &ge2_zfi.y, &zfi3); random_ge_x_magnitude(&ge2_zfi); random_ge_y_magnitude(&ge2_zfi); - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(&resj, &gej[i1], &ge2_zfi, &zf); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(&resj, &gej[i1], &ge2_zfi, &zf); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); } /* Test gej + ge (const). */ if (i2 != 0) { - /* rustsecp256k1zkp_v0_10_0_gej_add_ge does not support its second argument being infinity. */ - rustsecp256k1zkp_v0_10_0_gej_add_ge(&resj, &gej[i1], &ge[i2]); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); + /* rustsecp256k1zkp_v0_10_1_gej_add_ge does not support its second argument being infinity. */ + rustsecp256k1zkp_v0_10_1_gej_add_ge(&resj, &gej[i1], &ge[i2]); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); } /* Test doubling (var). */ if ((i1 == 0 && i2 == 0) || ((i1 + 3)/4 == (i2 + 3)/4 && ((i1 + 3)%4)/2 == ((i2 + 3)%4)/2)) { - rustsecp256k1zkp_v0_10_0_fe zr2; + rustsecp256k1zkp_v0_10_1_fe zr2; /* Normal doubling with Z ratio result. 
*/ - rustsecp256k1zkp_v0_10_0_gej_double_var(&resj, &gej[i1], &zr2); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); + rustsecp256k1zkp_v0_10_1_gej_double_var(&resj, &gej[i1], &zr2); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); /* Check Z ratio. */ - rustsecp256k1zkp_v0_10_0_fe_mul(&zr2, &zr2, &gej[i1].z); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&zr2, &resj.z)); + rustsecp256k1zkp_v0_10_1_fe_mul(&zr2, &zr2, &gej[i1].z); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&zr2, &resj.z)); /* Normal doubling. */ - rustsecp256k1zkp_v0_10_0_gej_double_var(&resj, &gej[i2], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); + rustsecp256k1zkp_v0_10_1_gej_double_var(&resj, &gej[i2], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); /* Constant-time doubling. */ - rustsecp256k1zkp_v0_10_0_gej_double(&resj, &gej[i2]); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&resj, &ref)); + rustsecp256k1zkp_v0_10_1_gej_double(&resj, &gej[i2]); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&resj, &ref)); } /* Test adding opposites. */ if ((i1 == 0 && i2 == 0) || ((i1 + 3)/4 == (i2 + 3)/4 && ((i1 + 3)%4)/2 != ((i2 + 3)%4)/2)) { - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&ref)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&ref)); } /* Test adding infinity. 
*/ if (i1 == 0) { - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&ge[i1])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i1])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[i2], &ref)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&ge[i1])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i1])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[i2], &ref)); } if (i2 == 0) { - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&ge[i2])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&gej[i2])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[i1], &ref)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&ge[i2])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&gej[i2])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[i1], &ref)); } } } /* Test adding all points together in random order equals infinity. */ { - rustsecp256k1zkp_v0_10_0_gej sum = SECP256K1_GEJ_CONST_INFINITY; - rustsecp256k1zkp_v0_10_0_gej *gej_shuffled = (rustsecp256k1zkp_v0_10_0_gej *)checked_malloc(&CTX->error_callback, (4 * runs + 1) * sizeof(rustsecp256k1zkp_v0_10_0_gej)); + rustsecp256k1zkp_v0_10_1_gej sum = SECP256K1_GEJ_CONST_INFINITY; + rustsecp256k1zkp_v0_10_1_gej *gej_shuffled = (rustsecp256k1zkp_v0_10_1_gej *)checked_malloc(&CTX->error_callback, (4 * runs + 1) * sizeof(rustsecp256k1zkp_v0_10_1_gej)); for (i = 0; i < 4 * runs + 1; i++) { gej_shuffled[i] = gej[i]; } for (i = 0; i < 4 * runs + 1; i++) { - int swap = i + rustsecp256k1zkp_v0_10_0_testrand_int(4 * runs + 1 - i); + int swap = i + rustsecp256k1zkp_v0_10_1_testrand_int(4 * runs + 1 - i); if (swap != i) { - rustsecp256k1zkp_v0_10_0_gej t = gej_shuffled[i]; + rustsecp256k1zkp_v0_10_1_gej t = gej_shuffled[i]; gej_shuffled[i] = gej_shuffled[swap]; gej_shuffled[swap] = t; } } for (i = 0; i < 4 * runs + 1; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_var(&sum, &sum, &gej_shuffled[i], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&sum, &sum, &gej_shuffled[i], NULL); } - 
CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&sum)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&sum)); free(gej_shuffled); } /* Test batch gej -> ge conversion without known z ratios. */ { - rustsecp256k1zkp_v0_10_0_ge *ge_set_all = (rustsecp256k1zkp_v0_10_0_ge *)checked_malloc(&CTX->error_callback, (4 * runs + 1) * sizeof(rustsecp256k1zkp_v0_10_0_ge)); - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(ge_set_all, gej, 4 * runs + 1); + rustsecp256k1zkp_v0_10_1_ge *ge_set_all = (rustsecp256k1zkp_v0_10_1_ge *)checked_malloc(&CTX->error_callback, (4 * runs + 1) * sizeof(rustsecp256k1zkp_v0_10_1_ge)); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(ge_set_all, gej, 4 * runs + 1); for (i = 0; i < 4 * runs + 1; i++) { - rustsecp256k1zkp_v0_10_0_fe s; + rustsecp256k1zkp_v0_10_1_fe s; random_fe_non_zero(&s); - rustsecp256k1zkp_v0_10_0_gej_rescale(&gej[i], &s); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[i], &ge_set_all[i])); + rustsecp256k1zkp_v0_10_1_gej_rescale(&gej[i], &s); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[i], &ge_set_all[i])); } free(ge_set_all); } /* Test that all elements have X coordinates on the curve. */ for (i = 1; i < 4 * runs + 1; i++) { - rustsecp256k1zkp_v0_10_0_fe n; - CHECK(rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&ge[i].x)); + rustsecp256k1zkp_v0_10_1_fe n; + CHECK(rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&ge[i].x)); /* And the same holds after random rescaling. */ - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &zf, &ge[i].x); - CHECK(rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(&n, &zf)); + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &zf, &ge[i].x); + CHECK(rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(&n, &zf)); } - /* Test correspondence of rustsecp256k1zkp_v0_10_0_ge_x{,_frac}_on_curve_var with ge_set_xo. */ + /* Test correspondence of rustsecp256k1zkp_v0_10_1_ge_x{,_frac}_on_curve_var with ge_set_xo. 
*/ { - rustsecp256k1zkp_v0_10_0_fe n; - rustsecp256k1zkp_v0_10_0_ge q; + rustsecp256k1zkp_v0_10_1_fe n; + rustsecp256k1zkp_v0_10_1_ge q; int ret_on_curve, ret_frac_on_curve, ret_set_xo; - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &zf, &r); - ret_on_curve = rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&r); - ret_frac_on_curve = rustsecp256k1zkp_v0_10_0_ge_x_frac_on_curve_var(&n, &zf); - ret_set_xo = rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&q, &r, 0); + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &zf, &r); + ret_on_curve = rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&r); + ret_frac_on_curve = rustsecp256k1zkp_v0_10_1_ge_x_frac_on_curve_var(&n, &zf); + ret_set_xo = rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&q, &r, 0); CHECK(ret_on_curve == ret_frac_on_curve); CHECK(ret_on_curve == ret_set_xo); - if (ret_set_xo) CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&r, &q.x)); + if (ret_set_xo) CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&r, &q.x)); } /* Test batch gej -> ge conversion with many infinities. */ for (i = 0; i < 4 * runs + 1; i++) { int odd; random_group_element_test(&ge[i]); - odd = rustsecp256k1zkp_v0_10_0_fe_is_odd(&ge[i].x); + odd = rustsecp256k1zkp_v0_10_1_fe_is_odd(&ge[i].x); CHECK(odd == 0 || odd == 1); /* randomly set half the points to infinity */ if (odd == i % 2) { - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&ge[i]); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&ge[i]); } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gej[i], &ge[i]); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gej[i], &ge[i]); } /* batch convert */ - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(ge, gej, 4 * runs + 1); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(ge, gej, 4 * runs + 1); /* check result */ for (i = 0; i < 4 * runs + 1; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&gej[i], &ge[i])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&gej[i], &ge[i])); } /* Test batch gej -> ge conversion with all infinities. 
*/ for (i = 0; i < 4 * runs + 1; i++) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&gej[i]); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&gej[i]); } /* batch convert */ - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(ge, gej, 4 * runs + 1); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(ge, gej, 4 * runs + 1); /* check result */ for (i = 0; i < 4 * runs + 1; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&ge[i])); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&ge[i])); } free(ge); @@ -3981,33 +3981,33 @@ static void test_ge(void) { } static void test_intialized_inf(void) { - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_gej pj, npj, infj1, infj2, infj3; - rustsecp256k1zkp_v0_10_0_fe zinv; + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_gej pj, npj, infj1, infj2, infj3; + rustsecp256k1zkp_v0_10_1_fe zinv; /* Test that adding P+(-P) results in a fully initialized infinity*/ random_group_element_test(&p); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pj, &p); - rustsecp256k1zkp_v0_10_0_gej_neg(&npj, &pj); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pj, &p); + rustsecp256k1zkp_v0_10_1_gej_neg(&npj, &pj); - rustsecp256k1zkp_v0_10_0_gej_add_var(&infj1, &pj, &npj, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&infj1)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj1.x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj1.y)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj1.z)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&infj1, &pj, &npj, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&infj1)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj1.x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj1.y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj1.z)); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&infj2, &npj, &p, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&infj2)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj2.x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj2.y)); - 
CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj2.z)); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&infj2, &npj, &p, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&infj2)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj2.x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj2.y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj2.z)); - rustsecp256k1zkp_v0_10_0_fe_set_int(&zinv, 1); - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(&infj3, &npj, &p, &zinv); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&infj3)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj3.x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj3.y)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_zero(&infj3.z)); + rustsecp256k1zkp_v0_10_1_fe_set_int(&zinv, 1); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(&infj3, &npj, &p, &zinv); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&infj3)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj3.x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj3.y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_zero(&infj3.z)); } @@ -4027,7 +4027,7 @@ static void test_add_neg_y_diff_x(void) { * * These points were generated in sage as * - * load("rustsecp256k1zkp_v0_10_0_params.sage") + * load("rustsecp256k1zkp_v0_10_1_params.sage") * * # random "bad pair" * P = C.random_element() @@ -4036,40 +4036,40 @@ static void test_add_neg_y_diff_x(void) { * print(" Q: %x %x" % Q.xy()) * print("P + Q: %x %x" % (P + Q).xy()) */ - rustsecp256k1zkp_v0_10_0_gej aj = SECP256K1_GEJ_CONST( + rustsecp256k1zkp_v0_10_1_gej aj = SECP256K1_GEJ_CONST( 0x8d24cd95, 0x0a355af1, 0x3c543505, 0x44238d30, 0x0643d79f, 0x05a59614, 0x2f8ec030, 0xd58977cb, 0x001e337a, 0x38093dcd, 0x6c0f386d, 0x0b1293a8, 0x4d72c879, 0xd7681924, 0x44e6d2f3, 0x9190117d ); - rustsecp256k1zkp_v0_10_0_gej bj = SECP256K1_GEJ_CONST( + rustsecp256k1zkp_v0_10_1_gej bj = SECP256K1_GEJ_CONST( 0xc7b74206, 0x1f788cd9, 0xabd0937d, 0x164a0d86, 0x95f6ff75, 0xf19a4ce9, 0xd013bd7b, 0xbf92d2a7, 0xffe1cc85, 0xc7f6c232, 0x93f0c792, 
0xf4ed6c57, 0xb28d3786, 0x2897e6db, 0xbb192d0b, 0x6e6feab2 ); - rustsecp256k1zkp_v0_10_0_gej sumj = SECP256K1_GEJ_CONST( + rustsecp256k1zkp_v0_10_1_gej sumj = SECP256K1_GEJ_CONST( 0x671a63c0, 0x3efdad4c, 0x389a7798, 0x24356027, 0xb3d69010, 0x278625c3, 0x5c86d390, 0x184a8f7a, 0x5f6409c2, 0x2ce01f2b, 0x511fd375, 0x25071d08, 0xda651801, 0x70e95caf, 0x8f0d893c, 0xbed8fbbe ); - rustsecp256k1zkp_v0_10_0_ge b; - rustsecp256k1zkp_v0_10_0_gej resj; - rustsecp256k1zkp_v0_10_0_ge res; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&b, &bj); + rustsecp256k1zkp_v0_10_1_ge b; + rustsecp256k1zkp_v0_10_1_gej resj; + rustsecp256k1zkp_v0_10_1_ge res; + rustsecp256k1zkp_v0_10_1_ge_set_gej(&b, &bj); - rustsecp256k1zkp_v0_10_0_gej_add_var(&resj, &aj, &bj, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res, &resj); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&sumj, &res)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&resj, &aj, &bj, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res, &resj); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&sumj, &res)); - rustsecp256k1zkp_v0_10_0_gej_add_ge(&resj, &aj, &b); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res, &resj); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&sumj, &res)); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&resj, &aj, &b); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res, &resj); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&sumj, &res)); - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&resj, &aj, &b, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res, &resj); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&sumj, &res)); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&resj, &aj, &b, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res, &resj); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&sumj, &res)); } static void test_ge_bytes(void) { @@ -4077,12 +4077,12 @@ static void test_ge_bytes(void) { for (i = 0; i < COUNT; i++) { unsigned char buf[64]; - rustsecp256k1zkp_v0_10_0_ge p, q; + rustsecp256k1zkp_v0_10_1_ge p, q; random_group_element_test(&p); - 
rustsecp256k1zkp_v0_10_0_ge_to_bytes(buf, &p); - rustsecp256k1zkp_v0_10_0_ge_from_bytes(&q, buf); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&p, &q)); + rustsecp256k1zkp_v0_10_1_ge_to_bytes(buf, &p); + rustsecp256k1zkp_v0_10_1_ge_from_bytes(&q, buf); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&p, &q)); } } @@ -4096,22 +4096,22 @@ static void run_ge(void) { test_ge_bytes(); } -static void test_gej_cmov(const rustsecp256k1zkp_v0_10_0_gej *a, const rustsecp256k1zkp_v0_10_0_gej *b) { - rustsecp256k1zkp_v0_10_0_gej t = *a; - rustsecp256k1zkp_v0_10_0_gej_cmov(&t, b, 0); +static void test_gej_cmov(const rustsecp256k1zkp_v0_10_1_gej *a, const rustsecp256k1zkp_v0_10_1_gej *b) { + rustsecp256k1zkp_v0_10_1_gej t = *a; + rustsecp256k1zkp_v0_10_1_gej_cmov(&t, b, 0); CHECK(gej_xyz_equals_gej(&t, a)); - rustsecp256k1zkp_v0_10_0_gej_cmov(&t, b, 1); + rustsecp256k1zkp_v0_10_1_gej_cmov(&t, b, 1); CHECK(gej_xyz_equals_gej(&t, b)); } static void run_gej(void) { int i; - rustsecp256k1zkp_v0_10_0_gej a, b; + rustsecp256k1zkp_v0_10_1_gej a, b; - /* Tests for rustsecp256k1zkp_v0_10_0_gej_cmov */ + /* Tests for rustsecp256k1zkp_v0_10_1_gej_cmov */ for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&a); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&b); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&a); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&b); test_gej_cmov(&a, &b); random_gej_test(&a); @@ -4126,42 +4126,42 @@ static void run_gej(void) { test_gej_cmov(&b, &a); } - /* Tests for rustsecp256k1zkp_v0_10_0_gej_eq_var */ + /* Tests for rustsecp256k1zkp_v0_10_1_gej_eq_var */ for (i = 0; i < COUNT; i++) { - rustsecp256k1zkp_v0_10_0_fe fe; + rustsecp256k1zkp_v0_10_1_fe fe; random_gej_test(&a); random_gej_test(&b); - CHECK(!rustsecp256k1zkp_v0_10_0_gej_eq_var(&a, &b)); + CHECK(!rustsecp256k1zkp_v0_10_1_gej_eq_var(&a, &b)); b = a; random_fe_non_zero_test(&fe); - rustsecp256k1zkp_v0_10_0_gej_rescale(&a, &fe); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&a, &b)); + 
rustsecp256k1zkp_v0_10_1_gej_rescale(&a, &fe); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&a, &b)); } } static void test_ec_combine(void) { - rustsecp256k1zkp_v0_10_0_scalar sum = rustsecp256k1zkp_v0_10_0_scalar_zero; - rustsecp256k1zkp_v0_10_0_pubkey data[6]; - const rustsecp256k1zkp_v0_10_0_pubkey* d[6]; - rustsecp256k1zkp_v0_10_0_pubkey sd; - rustsecp256k1zkp_v0_10_0_pubkey sd2; - rustsecp256k1zkp_v0_10_0_gej Qj; - rustsecp256k1zkp_v0_10_0_ge Q; + rustsecp256k1zkp_v0_10_1_scalar sum = rustsecp256k1zkp_v0_10_1_scalar_zero; + rustsecp256k1zkp_v0_10_1_pubkey data[6]; + const rustsecp256k1zkp_v0_10_1_pubkey* d[6]; + rustsecp256k1zkp_v0_10_1_pubkey sd; + rustsecp256k1zkp_v0_10_1_pubkey sd2; + rustsecp256k1zkp_v0_10_1_gej Qj; + rustsecp256k1zkp_v0_10_1_ge Q; int i; for (i = 1; i <= 6; i++) { - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; random_scalar_order_test(&s); - rustsecp256k1zkp_v0_10_0_scalar_add(&sum, &sum, &s); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &Qj, &s); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&Q, &Qj); - rustsecp256k1zkp_v0_10_0_pubkey_save(&data[i - 1], &Q); + rustsecp256k1zkp_v0_10_1_scalar_add(&sum, &sum, &s); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &Qj, &s); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&Q, &Qj); + rustsecp256k1zkp_v0_10_1_pubkey_save(&data[i - 1], &Q); d[i - 1] = &data[i - 1]; - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &Qj, &sum); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&Q, &Qj); - rustsecp256k1zkp_v0_10_0_pubkey_save(&sd, &Q); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &sd2, d, i) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&sd, &sd2, sizeof(sd)) == 0); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &Qj, &sum); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&Q, &Qj); + rustsecp256k1zkp_v0_10_1_pubkey_save(&sd, &Q); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &sd2, d, i) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&sd, &sd2, sizeof(sd)) == 0); } } @@ -4173,72 +4173,72 @@ static void run_ec_combine(void) { } static void test_ec_commit(void) { - rustsecp256k1zkp_v0_10_0_scalar seckey_s; - rustsecp256k1zkp_v0_10_0_ge pubkey; - rustsecp256k1zkp_v0_10_0_gej pubkeyj; - rustsecp256k1zkp_v0_10_0_ge commitment; + rustsecp256k1zkp_v0_10_1_scalar seckey_s; + rustsecp256k1zkp_v0_10_1_ge pubkey; + rustsecp256k1zkp_v0_10_1_gej pubkeyj; + rustsecp256k1zkp_v0_10_1_ge commitment; unsigned char data[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; /* Create random keypair and data */ random_scalar_order_test(&seckey_s); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pubkey, &pubkeyj); - rustsecp256k1zkp_v0_10_0_testrand256_test(data); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pubkey, &pubkeyj); + rustsecp256k1zkp_v0_10_1_testrand256_test(data); /* Commit to data and verify */ - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit(&commitment, &pubkey, &sha, data, 32) == 1); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 1); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 1); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); - rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&pubkeyj, &commitment); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit(&commitment, &pubkey, &sha, data, 32) == 1); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 1); + 
rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 1); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); + rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&pubkeyj, &commitment); /* Check that verification fails with different data */ - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_verify(&commitment, &pubkey, &sha, data, 31) == 0); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_verify(&commitment, &pubkey, &sha, data, 31) == 0); /* Check that commmitting fails when the inner pubkey is the point at * infinity */ - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&pubkey); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit(&commitment, &pubkey, &sha, data, 32) == 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&seckey_s, 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&pubkey); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit(&commitment, &pubkey, &sha, data, 32) == 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&seckey_s, 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_seckey(&seckey_s, &pubkey, &sha, data, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_verify(&commitment, &pubkey, &sha, data, 32) == 0); } static void test_ec_commit_api(void) { unsigned char seckey[32]; - rustsecp256k1zkp_v0_10_0_scalar seckey_s; - rustsecp256k1zkp_v0_10_0_ge pubkey; - rustsecp256k1zkp_v0_10_0_gej pubkeyj; - rustsecp256k1zkp_v0_10_0_ge commitment; + rustsecp256k1zkp_v0_10_1_scalar seckey_s; + rustsecp256k1zkp_v0_10_1_ge pubkey; + rustsecp256k1zkp_v0_10_1_gej pubkeyj; + rustsecp256k1zkp_v0_10_1_ge commitment; 
unsigned char data[32]; - rustsecp256k1zkp_v0_10_0_sha256 sha; + rustsecp256k1zkp_v0_10_1_sha256 sha; memset(data, 23, sizeof(data)); /* Create random keypair */ random_scalar_order_test(&seckey_s); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(seckey, &seckey_s); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pubkey, &pubkeyj); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(seckey, &seckey_s); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubkeyj, &seckey_s); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pubkey, &pubkeyj); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit(&commitment, &pubkey, &sha, data, 1) == 1); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit(&commitment, &pubkey, &sha, data, 1) == 1); /* The same pubkey can be both input and output of the function */ { - rustsecp256k1zkp_v0_10_0_ge pubkey_tmp = pubkey; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit(&pubkey_tmp, &pubkey_tmp, &sha, data, 1) == 1); - rustsecp256k1zkp_v0_10_0_ge_eq_var(&commitment, &pubkey_tmp); + rustsecp256k1zkp_v0_10_1_ge pubkey_tmp = pubkey; + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit(&pubkey_tmp, &pubkey_tmp, &sha, data, 1) == 1); + rustsecp256k1zkp_v0_10_1_ge_eq_var(&commitment, &pubkey_tmp); } - rustsecp256k1zkp_v0_10_0_sha256_initialize(&sha); - CHECK(rustsecp256k1zkp_v0_10_0_ec_commit_verify(&commitment, &pubkey, &sha, data, 1) == 1); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&sha); + CHECK(rustsecp256k1zkp_v0_10_1_ec_commit_verify(&commitment, &pubkey, &sha, data, 1) == 1); } static void run_ec_commit(void) { @@ -4249,32 +4249,32 @@ static void run_ec_commit(void) { test_ec_commit_api(); } -static void test_group_decompress(const rustsecp256k1zkp_v0_10_0_fe* x) { +static void test_group_decompress(const 
rustsecp256k1zkp_v0_10_1_fe* x) { /* The input itself, normalized. */ - rustsecp256k1zkp_v0_10_0_fe fex = *x; - rustsecp256k1zkp_v0_10_0_fe fez; + rustsecp256k1zkp_v0_10_1_fe fex = *x; + rustsecp256k1zkp_v0_10_1_fe fez; /* Results of set_xquad_var, set_xo_var(..., 0), set_xo_var(..., 1). */ - rustsecp256k1zkp_v0_10_0_ge ge_quad, ge_even, ge_odd; - rustsecp256k1zkp_v0_10_0_gej gej_quad; + rustsecp256k1zkp_v0_10_1_ge ge_quad, ge_even, ge_odd; + rustsecp256k1zkp_v0_10_1_gej gej_quad; /* Return values of the above calls. */ int res_quad, res_even, res_odd; - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&fex); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&fex); - res_quad = rustsecp256k1zkp_v0_10_0_ge_set_xquad(&ge_quad, &fex); - res_even = rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&ge_even, &fex, 0); - res_odd = rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&ge_odd, &fex, 1); + res_quad = rustsecp256k1zkp_v0_10_1_ge_set_xquad(&ge_quad, &fex); + res_even = rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&ge_even, &fex, 0); + res_odd = rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&ge_odd, &fex, 1); CHECK(res_quad == res_even); CHECK(res_quad == res_odd); if (res_quad) { - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_quad.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_odd.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_even.x); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_quad.y); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_odd.y); - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&ge_even.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_quad.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_odd.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_even.x); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_quad.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_odd.y); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&ge_even.y); /* No infinity allowed. 
*/ CHECK(!ge_quad.infinity); @@ -4282,41 +4282,41 @@ static void test_group_decompress(const rustsecp256k1zkp_v0_10_0_fe* x) { CHECK(!ge_odd.infinity); /* Check that the x coordinates check out. */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&ge_quad.x, x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&ge_even.x, x)); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&ge_odd.x, x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&ge_quad.x, x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&ge_even.x, x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&ge_odd.x, x)); /* Check that the Y coordinate result in ge_quad is a square. */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_square_var(&ge_quad.y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_square_var(&ge_quad.y)); /* Check odd/even Y in ge_odd, ge_even. */ - CHECK(rustsecp256k1zkp_v0_10_0_fe_is_odd(&ge_odd.y)); - CHECK(!rustsecp256k1zkp_v0_10_0_fe_is_odd(&ge_even.y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_is_odd(&ge_odd.y)); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_is_odd(&ge_even.y)); - /* Check rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gej_quad, &ge_quad); - CHECK(rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&gej_quad)); + /* Check rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var. 
*/ + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gej_quad, &ge_quad); + CHECK(rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&gej_quad)); do { random_fe_test(&fez); - } while (rustsecp256k1zkp_v0_10_0_fe_is_zero(&fez)); - rustsecp256k1zkp_v0_10_0_gej_rescale(&gej_quad, &fez); - CHECK(rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&gej_quad)); - rustsecp256k1zkp_v0_10_0_gej_neg(&gej_quad, &gej_quad); - CHECK(!rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&gej_quad)); + } while (rustsecp256k1zkp_v0_10_1_fe_is_zero(&fez)); + rustsecp256k1zkp_v0_10_1_gej_rescale(&gej_quad, &fez); + CHECK(rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&gej_quad)); + rustsecp256k1zkp_v0_10_1_gej_neg(&gej_quad, &gej_quad); + CHECK(!rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&gej_quad)); do { random_fe_test(&fez); - } while (rustsecp256k1zkp_v0_10_0_fe_is_zero(&fez)); - rustsecp256k1zkp_v0_10_0_gej_rescale(&gej_quad, &fez); - CHECK(!rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&gej_quad)); - rustsecp256k1zkp_v0_10_0_gej_neg(&gej_quad, &gej_quad); - CHECK(rustsecp256k1zkp_v0_10_0_gej_has_quad_y_var(&gej_quad)); + } while (rustsecp256k1zkp_v0_10_1_fe_is_zero(&fez)); + rustsecp256k1zkp_v0_10_1_gej_rescale(&gej_quad, &fez); + CHECK(!rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&gej_quad)); + rustsecp256k1zkp_v0_10_1_gej_neg(&gej_quad, &gej_quad); + CHECK(rustsecp256k1zkp_v0_10_1_gej_has_quad_y_var(&gej_quad)); } } static void run_group_decompress(void) { int i; for (i = 0; i < COUNT * 4; i++) { - rustsecp256k1zkp_v0_10_0_fe fe; + rustsecp256k1zkp_v0_10_1_fe fe; random_fe_test(&fe); test_group_decompress(&fe); } @@ -4324,7 +4324,7 @@ static void run_group_decompress(void) { /***** ECMULT TESTS *****/ -static void test_pre_g_table(const rustsecp256k1zkp_v0_10_0_ge_storage * pre_g, size_t n) { +static void test_pre_g_table(const rustsecp256k1zkp_v0_10_1_ge_storage * pre_g, size_t n) { /* Tests the pre_g / pre_g_128 tables for consistency. 
* For independent verification we take a "geometric" approach to verification. * We check that every entry is on-curve. @@ -4335,168 +4335,168 @@ static void test_pre_g_table(const rustsecp256k1zkp_v0_10_0_ge_storage * pre_g, * * Checking the table's generators are correct is done in run_ecmult_pre_g. */ - rustsecp256k1zkp_v0_10_0_gej g2; - rustsecp256k1zkp_v0_10_0_ge p, q, gg; - rustsecp256k1zkp_v0_10_0_fe dpx, dpy, dqx, dqy; + rustsecp256k1zkp_v0_10_1_gej g2; + rustsecp256k1zkp_v0_10_1_ge p, q, gg; + rustsecp256k1zkp_v0_10_1_fe dpx, dpy, dqx, dqy; size_t i; CHECK(0 < n); - rustsecp256k1zkp_v0_10_0_ge_from_storage(&p, &pre_g[0]); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&p)); + rustsecp256k1zkp_v0_10_1_ge_from_storage(&p, &pre_g[0]); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&p)); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&g2, &p); - rustsecp256k1zkp_v0_10_0_gej_double_var(&g2, &g2, NULL); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&gg, &g2); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&g2, &p); + rustsecp256k1zkp_v0_10_1_gej_double_var(&g2, &g2, NULL); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&gg, &g2); for (i = 1; i < n; ++i) { - rustsecp256k1zkp_v0_10_0_fe_negate(&dpx, &p.x, 1); rustsecp256k1zkp_v0_10_0_fe_add(&dpx, &gg.x); rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&dpx); - rustsecp256k1zkp_v0_10_0_fe_negate(&dpy, &p.y, 1); rustsecp256k1zkp_v0_10_0_fe_add(&dpy, &gg.y); rustsecp256k1zkp_v0_10_0_fe_normalize_weak(&dpy); + rustsecp256k1zkp_v0_10_1_fe_negate(&dpx, &p.x, 1); rustsecp256k1zkp_v0_10_1_fe_add(&dpx, &gg.x); rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&dpx); + rustsecp256k1zkp_v0_10_1_fe_negate(&dpy, &p.y, 1); rustsecp256k1zkp_v0_10_1_fe_add(&dpy, &gg.y); rustsecp256k1zkp_v0_10_1_fe_normalize_weak(&dpy); /* Check that p is not equal to gg */ - CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&dpx) || !rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&dpy)); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&dpx) || 
!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&dpy)); - rustsecp256k1zkp_v0_10_0_ge_from_storage(&q, &pre_g[i]); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&q)); + rustsecp256k1zkp_v0_10_1_ge_from_storage(&q, &pre_g[i]); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&q)); - rustsecp256k1zkp_v0_10_0_fe_negate(&dqx, &q.x, 1); rustsecp256k1zkp_v0_10_0_fe_add(&dqx, &gg.x); - dqy = q.y; rustsecp256k1zkp_v0_10_0_fe_add(&dqy, &gg.y); + rustsecp256k1zkp_v0_10_1_fe_negate(&dqx, &q.x, 1); rustsecp256k1zkp_v0_10_1_fe_add(&dqx, &gg.x); + dqy = q.y; rustsecp256k1zkp_v0_10_1_fe_add(&dqy, &gg.y); /* Check that -q is not equal to gg */ - CHECK(!rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&dqx) || !rustsecp256k1zkp_v0_10_0_fe_normalizes_to_zero_var(&dqy)); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&dqx) || !rustsecp256k1zkp_v0_10_1_fe_normalizes_to_zero_var(&dqy)); /* Check that -q is not equal to p */ - CHECK(!rustsecp256k1zkp_v0_10_0_fe_equal(&dpx, &dqx) || !rustsecp256k1zkp_v0_10_0_fe_equal(&dpy, &dqy)); + CHECK(!rustsecp256k1zkp_v0_10_1_fe_equal(&dpx, &dqx) || !rustsecp256k1zkp_v0_10_1_fe_equal(&dpy, &dqy)); /* Check that p, -q and gg are colinear */ - rustsecp256k1zkp_v0_10_0_fe_mul(&dpx, &dpx, &dqy); - rustsecp256k1zkp_v0_10_0_fe_mul(&dpy, &dpy, &dqx); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&dpx, &dpy)); + rustsecp256k1zkp_v0_10_1_fe_mul(&dpx, &dpx, &dqy); + rustsecp256k1zkp_v0_10_1_fe_mul(&dpy, &dpy, &dqx); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&dpx, &dpy)); p = q; } } static void run_ecmult_pre_g(void) { - rustsecp256k1zkp_v0_10_0_ge_storage gs; - rustsecp256k1zkp_v0_10_0_gej gj; - rustsecp256k1zkp_v0_10_0_ge g; + rustsecp256k1zkp_v0_10_1_ge_storage gs; + rustsecp256k1zkp_v0_10_1_gej gj; + rustsecp256k1zkp_v0_10_1_ge g; size_t i; /* Check that the pre_g and pre_g_128 tables are consistent. 
*/ - test_pre_g_table(rustsecp256k1zkp_v0_10_0_pre_g, ECMULT_TABLE_SIZE(WINDOW_G)); - test_pre_g_table(rustsecp256k1zkp_v0_10_0_pre_g_128, ECMULT_TABLE_SIZE(WINDOW_G)); + test_pre_g_table(rustsecp256k1zkp_v0_10_1_pre_g, ECMULT_TABLE_SIZE(WINDOW_G)); + test_pre_g_table(rustsecp256k1zkp_v0_10_1_pre_g_128, ECMULT_TABLE_SIZE(WINDOW_G)); /* Check the first entry from the pre_g table. */ - rustsecp256k1zkp_v0_10_0_ge_to_storage(&gs, &rustsecp256k1zkp_v0_10_0_ge_const_g); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&gs, &rustsecp256k1zkp_v0_10_0_pre_g[0], sizeof(gs)) == 0); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&gs, &rustsecp256k1zkp_v0_10_1_ge_const_g); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&gs, &rustsecp256k1zkp_v0_10_1_pre_g[0], sizeof(gs)) == 0); /* Check the first entry from the pre_g_128 table. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gj, &rustsecp256k1zkp_v0_10_0_ge_const_g); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gj, &rustsecp256k1zkp_v0_10_1_ge_const_g); for (i = 0; i < 128; ++i) { - rustsecp256k1zkp_v0_10_0_gej_double_var(&gj, &gj, NULL); + rustsecp256k1zkp_v0_10_1_gej_double_var(&gj, &gj, NULL); } - rustsecp256k1zkp_v0_10_0_ge_set_gej(&g, &gj); - rustsecp256k1zkp_v0_10_0_ge_to_storage(&gs, &g); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&gs, &rustsecp256k1zkp_v0_10_0_pre_g_128[0], sizeof(gs)) == 0); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&g, &gj); + rustsecp256k1zkp_v0_10_1_ge_to_storage(&gs, &g); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&gs, &rustsecp256k1zkp_v0_10_1_pre_g_128[0], sizeof(gs)) == 0); } static void run_ecmult_chain(void) { /* random starting point A (on the curve) */ - rustsecp256k1zkp_v0_10_0_gej a = SECP256K1_GEJ_CONST( + rustsecp256k1zkp_v0_10_1_gej a = SECP256K1_GEJ_CONST( 0x8b30bbe9, 0xae2a9906, 0x96b22f67, 0x0709dff3, 0x727fd8bc, 0x04d3362c, 0x6c7bf458, 0xe2846004, 0xa357ae91, 0x5c4a6528, 0x1309edf2, 0x0504740f, 0x0eb33439, 0x90216b4f, 0x81063cb6, 0x5f2f7e0f ); /* two random initial factors xn and gn */ - 
rustsecp256k1zkp_v0_10_0_scalar xn = SECP256K1_SCALAR_CONST( + rustsecp256k1zkp_v0_10_1_scalar xn = SECP256K1_SCALAR_CONST( 0x84cc5452, 0xf7fde1ed, 0xb4d38a8c, 0xe9b1b84c, 0xcef31f14, 0x6e569be9, 0x705d357a, 0x42985407 ); - rustsecp256k1zkp_v0_10_0_scalar gn = SECP256K1_SCALAR_CONST( + rustsecp256k1zkp_v0_10_1_scalar gn = SECP256K1_SCALAR_CONST( 0xa1e58d22, 0x553dcd42, 0xb2398062, 0x5d4c57a9, 0x6e9323d4, 0x2b3152e5, 0xca2c3990, 0xedc7c9de ); /* two small multipliers to be applied to xn and gn in every iteration: */ - static const rustsecp256k1zkp_v0_10_0_scalar xf = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0x1337); - static const rustsecp256k1zkp_v0_10_0_scalar gf = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0x7113); + static const rustsecp256k1zkp_v0_10_1_scalar xf = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0x1337); + static const rustsecp256k1zkp_v0_10_1_scalar gf = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0x7113); /* accumulators with the resulting coefficients to A and G */ - rustsecp256k1zkp_v0_10_0_scalar ae = rustsecp256k1zkp_v0_10_0_scalar_one; - rustsecp256k1zkp_v0_10_0_scalar ge = rustsecp256k1zkp_v0_10_0_scalar_zero; + rustsecp256k1zkp_v0_10_1_scalar ae = rustsecp256k1zkp_v0_10_1_scalar_one; + rustsecp256k1zkp_v0_10_1_scalar ge = rustsecp256k1zkp_v0_10_1_scalar_zero; /* actual points */ - rustsecp256k1zkp_v0_10_0_gej x; - rustsecp256k1zkp_v0_10_0_gej x2; + rustsecp256k1zkp_v0_10_1_gej x; + rustsecp256k1zkp_v0_10_1_gej x2; int i; /* the point being computed */ x = a; for (i = 0; i < 200*COUNT; i++) { /* in each iteration, compute X = xn*X + gn*G; */ - rustsecp256k1zkp_v0_10_0_ecmult(&x, &x, &xn, &gn); + rustsecp256k1zkp_v0_10_1_ecmult(&x, &x, &xn, &gn); /* also compute ae and ge: the actual accumulated factors for A and G */ /* if X was (ae*A+ge*G), xn*X + gn*G results in (xn*ae*A + (xn*ge+gn)*G) */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&ae, &ae, &xn); - rustsecp256k1zkp_v0_10_0_scalar_mul(&ge, &ge, &xn); - 
rustsecp256k1zkp_v0_10_0_scalar_add(&ge, &ge, &gn); + rustsecp256k1zkp_v0_10_1_scalar_mul(&ae, &ae, &xn); + rustsecp256k1zkp_v0_10_1_scalar_mul(&ge, &ge, &xn); + rustsecp256k1zkp_v0_10_1_scalar_add(&ge, &ge, &gn); /* modify xn and gn */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&xn, &xn, &xf); - rustsecp256k1zkp_v0_10_0_scalar_mul(&gn, &gn, &gf); + rustsecp256k1zkp_v0_10_1_scalar_mul(&xn, &xn, &xf); + rustsecp256k1zkp_v0_10_1_scalar_mul(&gn, &gn, &gf); /* verify */ if (i == 19999) { /* expected result after 19999 iterations */ - rustsecp256k1zkp_v0_10_0_gej rp = SECP256K1_GEJ_CONST( + rustsecp256k1zkp_v0_10_1_gej rp = SECP256K1_GEJ_CONST( 0xD6E96687, 0xF9B10D09, 0x2A6F3543, 0x9D86CEBE, 0xA4535D0D, 0x409F5358, 0x6440BD74, 0xB933E830, 0xB95CBCA2, 0xC77DA786, 0x539BE8FD, 0x53354D2D, 0x3B4F566A, 0xE6580454, 0x07ED6015, 0xEE1B2A88 ); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&rp, &x)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&rp, &x)); } } /* redo the computation, but directly with the resulting ae and ge coefficients: */ - rustsecp256k1zkp_v0_10_0_ecmult(&x2, &a, &ae, &ge); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&x, &x2)); + rustsecp256k1zkp_v0_10_1_ecmult(&x2, &a, &ae, &ge); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&x, &x2)); } -static void test_point_times_order(const rustsecp256k1zkp_v0_10_0_gej *point) { +static void test_point_times_order(const rustsecp256k1zkp_v0_10_1_gej *point) { /* X * (point + G) + (order-X) * (pointer + G) = 0 */ - rustsecp256k1zkp_v0_10_0_scalar x; - rustsecp256k1zkp_v0_10_0_scalar nx; - rustsecp256k1zkp_v0_10_0_gej res1, res2; - rustsecp256k1zkp_v0_10_0_ge res3; + rustsecp256k1zkp_v0_10_1_scalar x; + rustsecp256k1zkp_v0_10_1_scalar nx; + rustsecp256k1zkp_v0_10_1_gej res1, res2; + rustsecp256k1zkp_v0_10_1_ge res3; unsigned char pub[65]; size_t psize = 65; random_scalar_order_test(&x); - rustsecp256k1zkp_v0_10_0_scalar_negate(&nx, &x); - rustsecp256k1zkp_v0_10_0_ecmult(&res1, point, &x, &x); /* calc res1 = x * point + x * G; */ - 
rustsecp256k1zkp_v0_10_0_ecmult(&res2, point, &nx, &nx); /* calc res2 = (order - x) * point + (order - x) * G; */ - rustsecp256k1zkp_v0_10_0_gej_add_var(&res1, &res1, &res2, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&res1)); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res3, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&res3)); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&res3) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&res3, pub, &psize, 0) == 0); + rustsecp256k1zkp_v0_10_1_scalar_negate(&nx, &x); + rustsecp256k1zkp_v0_10_1_ecmult(&res1, point, &x, &x); /* calc res1 = x * point + x * G; */ + rustsecp256k1zkp_v0_10_1_ecmult(&res2, point, &nx, &nx); /* calc res2 = (order - x) * point + (order - x) * G; */ + rustsecp256k1zkp_v0_10_1_gej_add_var(&res1, &res1, &res2, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&res1)); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res3, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&res3)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&res3) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&res3, pub, &psize, 0) == 0); psize = 65; - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&res3, pub, &psize, 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&res3, pub, &psize, 1) == 0); /* check zero/one edge cases */ - rustsecp256k1zkp_v0_10_0_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_0_scalar_zero, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res3, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&res3)); - rustsecp256k1zkp_v0_10_0_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_0_scalar_one, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res3, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(point, &res3)); - rustsecp256k1zkp_v0_10_0_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_0_scalar_zero, &rustsecp256k1zkp_v0_10_0_scalar_one); - 
rustsecp256k1zkp_v0_10_0_ge_set_gej(&res3, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&rustsecp256k1zkp_v0_10_0_ge_const_g, &res3)); -} - -/* These scalars reach large (in absolute value) outputs when fed to rustsecp256k1zkp_v0_10_0_scalar_split_lambda. + rustsecp256k1zkp_v0_10_1_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_1_scalar_zero, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res3, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&res3)); + rustsecp256k1zkp_v0_10_1_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_1_scalar_one, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res3, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(point, &res3)); + rustsecp256k1zkp_v0_10_1_ecmult(&res1, point, &rustsecp256k1zkp_v0_10_1_scalar_zero, &rustsecp256k1zkp_v0_10_1_scalar_one); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res3, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&rustsecp256k1zkp_v0_10_1_ge_const_g, &res3)); +} + +/* These scalars reach large (in absolute value) outputs when fed to rustsecp256k1zkp_v0_10_1_scalar_split_lambda. 
* * They are computed as: * - For a in [-2, -1, 0, 1, 2]: * - For b in [-3, -1, 1, 3]: * - Output (a*LAMBDA + (ORDER+b)/2) % ORDER */ -static const rustsecp256k1zkp_v0_10_0_scalar scalars_near_split_bounds[20] = { +static const rustsecp256k1zkp_v0_10_1_scalar scalars_near_split_bounds[20] = { SECP256K1_SCALAR_CONST(0xd938a566, 0x7f479e3e, 0xb5b3c7fa, 0xefdb3749, 0x3aa0585c, 0xc5ea2367, 0xe1b660db, 0x0209e6fc), SECP256K1_SCALAR_CONST(0xd938a566, 0x7f479e3e, 0xb5b3c7fa, 0xefdb3749, 0x3aa0585c, 0xc5ea2367, 0xe1b660db, 0x0209e6fd), SECP256K1_SCALAR_CONST(0xd938a566, 0x7f479e3e, 0xb5b3c7fa, 0xefdb3749, 0x3aa0585c, 0xc5ea2367, 0xe1b660db, 0x0209e6fe), @@ -4519,42 +4519,42 @@ static const rustsecp256k1zkp_v0_10_0_scalar scalars_near_split_bounds[20] = { SECP256K1_SCALAR_CONST(0x26c75a99, 0x80b861c1, 0x4a4c3805, 0x1024c8b4, 0x704d760e, 0xe95e7cd3, 0xde1bfdb1, 0xce2c5a45) }; -static void test_ecmult_target(const rustsecp256k1zkp_v0_10_0_scalar* target, int mode) { +static void test_ecmult_target(const rustsecp256k1zkp_v0_10_1_scalar* target, int mode) { /* Mode: 0=ecmult_gen, 1=ecmult, 2=ecmult_const */ - rustsecp256k1zkp_v0_10_0_scalar n1, n2; - rustsecp256k1zkp_v0_10_0_ge p; - rustsecp256k1zkp_v0_10_0_gej pj, p1j, p2j, ptj; + rustsecp256k1zkp_v0_10_1_scalar n1, n2; + rustsecp256k1zkp_v0_10_1_ge p; + rustsecp256k1zkp_v0_10_1_gej pj, p1j, p2j, ptj; /* Generate random n1,n2 such that n1+n2 = -target. */ random_scalar_order_test(&n1); - rustsecp256k1zkp_v0_10_0_scalar_add(&n2, &n1, target); - rustsecp256k1zkp_v0_10_0_scalar_negate(&n2, &n2); + rustsecp256k1zkp_v0_10_1_scalar_add(&n2, &n1, target); + rustsecp256k1zkp_v0_10_1_scalar_negate(&n2, &n2); /* Generate a random input point. 
*/ if (mode != 0) { random_group_element_test(&p); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pj, &p); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pj, &p); } /* EC multiplications */ if (mode == 0) { - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &p1j, &n1); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &p2j, &n2); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &ptj, target); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &p1j, &n1); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &p2j, &n2); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &ptj, target); } else if (mode == 1) { - rustsecp256k1zkp_v0_10_0_ecmult(&p1j, &pj, &n1, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ecmult(&p2j, &pj, &n2, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ecmult(&ptj, &pj, target, &rustsecp256k1zkp_v0_10_0_scalar_zero); + rustsecp256k1zkp_v0_10_1_ecmult(&p1j, &pj, &n1, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ecmult(&p2j, &pj, &n2, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ecmult(&ptj, &pj, target, &rustsecp256k1zkp_v0_10_1_scalar_zero); } else { - rustsecp256k1zkp_v0_10_0_ecmult_const(&p1j, &p, &n1); - rustsecp256k1zkp_v0_10_0_ecmult_const(&p2j, &p, &n2); - rustsecp256k1zkp_v0_10_0_ecmult_const(&ptj, &p, target); + rustsecp256k1zkp_v0_10_1_ecmult_const(&p1j, &p, &n1); + rustsecp256k1zkp_v0_10_1_ecmult_const(&p2j, &p, &n2); + rustsecp256k1zkp_v0_10_1_ecmult_const(&ptj, &p, target); } /* Add them all up: n1*P + n2*P + target*P = (n1+n2+target)*P = (n1+n1-n1-n2)*P = 0. 
*/ - rustsecp256k1zkp_v0_10_0_gej_add_var(&ptj, &ptj, &p1j, NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(&ptj, &ptj, &p2j, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&ptj)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&ptj, &ptj, &p1j, NULL); + rustsecp256k1zkp_v0_10_1_gej_add_var(&ptj, &ptj, &p2j, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&ptj)); } static void run_ecmult_near_split_bound(void) { @@ -4571,119 +4571,119 @@ static void run_ecmult_near_split_bound(void) { static void run_point_times_order(void) { int i; - rustsecp256k1zkp_v0_10_0_fe x = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 2); - static const rustsecp256k1zkp_v0_10_0_fe xr = SECP256K1_FE_CONST( + rustsecp256k1zkp_v0_10_1_fe x = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 2); + static const rustsecp256k1zkp_v0_10_1_fe xr = SECP256K1_FE_CONST( 0x7603CB59, 0xB0EF6C63, 0xFE608479, 0x2A0C378C, 0xDB3233A8, 0x0F8A9A09, 0xA877DEAD, 0x31B38C45 ); for (i = 0; i < 500; i++) { - rustsecp256k1zkp_v0_10_0_ge p; - if (rustsecp256k1zkp_v0_10_0_ge_set_xo_var(&p, &x, 1)) { - rustsecp256k1zkp_v0_10_0_gej j; - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&p)); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&j, &p); + rustsecp256k1zkp_v0_10_1_ge p; + if (rustsecp256k1zkp_v0_10_1_ge_set_xo_var(&p, &x, 1)) { + rustsecp256k1zkp_v0_10_1_gej j; + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&p)); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&j, &p); test_point_times_order(&j); } - rustsecp256k1zkp_v0_10_0_fe_sqr(&x, &x); + rustsecp256k1zkp_v0_10_1_fe_sqr(&x, &x); } - rustsecp256k1zkp_v0_10_0_fe_normalize_var(&x); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&x, &xr)); + rustsecp256k1zkp_v0_10_1_fe_normalize_var(&x); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&x, &xr)); } static void ecmult_const_random_mult(void) { /* random starting point A (on the curve) */ - rustsecp256k1zkp_v0_10_0_ge a = SECP256K1_GE_CONST( + rustsecp256k1zkp_v0_10_1_ge a = SECP256K1_GE_CONST( 0x6d986544, 0x57ff52b8, 0xcf1b8126, 0x5b802a5b, 
0xa97f9263, 0xb1e88044, 0x93351325, 0x91bc450a, 0x535c59f7, 0x325e5d2b, 0xc391fbe8, 0x3c12787c, 0x337e4a98, 0xe82a9011, 0x0123ba37, 0xdd769c7d ); /* random initial factor xn */ - rustsecp256k1zkp_v0_10_0_scalar xn = SECP256K1_SCALAR_CONST( + rustsecp256k1zkp_v0_10_1_scalar xn = SECP256K1_SCALAR_CONST( 0x649d4f77, 0xc4242df7, 0x7f2079c9, 0x14530327, 0xa31b876a, 0xd2d8ce2a, 0x2236d5c6, 0xd7b2029b ); /* expected xn * A (from sage) */ - rustsecp256k1zkp_v0_10_0_ge expected_b = SECP256K1_GE_CONST( + rustsecp256k1zkp_v0_10_1_ge expected_b = SECP256K1_GE_CONST( 0x23773684, 0x4d209dc7, 0x098a786f, 0x20d06fcd, 0x070a38bf, 0xc11ac651, 0x03004319, 0x1e2a8786, 0xed8c3b8e, 0xc06dd57b, 0xd06ea66e, 0x45492b0f, 0xb84e4e1b, 0xfb77e21f, 0x96baae2a, 0x63dec956 ); - rustsecp256k1zkp_v0_10_0_gej b; - rustsecp256k1zkp_v0_10_0_ecmult_const(&b, &a, &xn); + rustsecp256k1zkp_v0_10_1_gej b; + rustsecp256k1zkp_v0_10_1_ecmult_const(&b, &a, &xn); - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_valid_var(&a)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&b, &expected_b)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_valid_var(&a)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&b, &expected_b)); } static void ecmult_const_commutativity(void) { - rustsecp256k1zkp_v0_10_0_scalar a; - rustsecp256k1zkp_v0_10_0_scalar b; - rustsecp256k1zkp_v0_10_0_gej res1; - rustsecp256k1zkp_v0_10_0_gej res2; - rustsecp256k1zkp_v0_10_0_ge mid1; - rustsecp256k1zkp_v0_10_0_ge mid2; + rustsecp256k1zkp_v0_10_1_scalar a; + rustsecp256k1zkp_v0_10_1_scalar b; + rustsecp256k1zkp_v0_10_1_gej res1; + rustsecp256k1zkp_v0_10_1_gej res2; + rustsecp256k1zkp_v0_10_1_ge mid1; + rustsecp256k1zkp_v0_10_1_ge mid2; random_scalar_order_test(&a); random_scalar_order_test(&b); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &rustsecp256k1zkp_v0_10_0_ge_const_g, &a); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res2, &rustsecp256k1zkp_v0_10_0_ge_const_g, &b); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&mid1, &res1); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&mid2, 
&res2); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &mid1, &b); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res2, &mid2, &a); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&mid1, &res1); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&mid2, &res2); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&mid1, &mid2)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &rustsecp256k1zkp_v0_10_1_ge_const_g, &a); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res2, &rustsecp256k1zkp_v0_10_1_ge_const_g, &b); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&mid1, &res1); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&mid2, &res2); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &mid1, &b); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res2, &mid2, &a); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&mid1, &res1); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&mid2, &res2); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&mid1, &mid2)); } static void ecmult_const_mult_zero_one(void) { - rustsecp256k1zkp_v0_10_0_scalar s; - rustsecp256k1zkp_v0_10_0_scalar negone; - rustsecp256k1zkp_v0_10_0_gej res1; - rustsecp256k1zkp_v0_10_0_ge res2; - rustsecp256k1zkp_v0_10_0_ge point; - rustsecp256k1zkp_v0_10_0_ge inf; + rustsecp256k1zkp_v0_10_1_scalar s; + rustsecp256k1zkp_v0_10_1_scalar negone; + rustsecp256k1zkp_v0_10_1_gej res1; + rustsecp256k1zkp_v0_10_1_ge res2; + rustsecp256k1zkp_v0_10_1_ge point; + rustsecp256k1zkp_v0_10_1_ge inf; random_scalar_order_test(&s); - rustsecp256k1zkp_v0_10_0_scalar_negate(&negone, &rustsecp256k1zkp_v0_10_0_scalar_one); + rustsecp256k1zkp_v0_10_1_scalar_negate(&negone, &rustsecp256k1zkp_v0_10_1_scalar_one); random_group_element_test(&point); - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&inf); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&inf); /* 0*point */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &point, &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&res1)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &point, &rustsecp256k1zkp_v0_10_1_scalar_zero); + 
CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&res1)); /* s*inf */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &inf, &s); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&res1)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &inf, &s); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&res1)); /* 1*point */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &point, &rustsecp256k1zkp_v0_10_0_scalar_one); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res2, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&res2, &point)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &point, &rustsecp256k1zkp_v0_10_1_scalar_one); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res2, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&res2, &point)); /* -1*point */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&res1, &point, &negone); - rustsecp256k1zkp_v0_10_0_gej_neg(&res1, &res1); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res2, &res1); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&res2, &point)); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res1, &point, &negone); + rustsecp256k1zkp_v0_10_1_gej_neg(&res1, &res1); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res2, &res1); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&res2, &point)); } -static void ecmult_const_check_result(const rustsecp256k1zkp_v0_10_0_ge *A, const rustsecp256k1zkp_v0_10_0_scalar* q, const rustsecp256k1zkp_v0_10_0_gej *res) { - rustsecp256k1zkp_v0_10_0_gej pointj, res2j; - rustsecp256k1zkp_v0_10_0_ge res2; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&pointj, A); - rustsecp256k1zkp_v0_10_0_ecmult(&res2j, &pointj, q, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res2, &res2j); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(res, &res2)); +static void ecmult_const_check_result(const rustsecp256k1zkp_v0_10_1_ge *A, const rustsecp256k1zkp_v0_10_1_scalar* q, const rustsecp256k1zkp_v0_10_1_gej *res) { + rustsecp256k1zkp_v0_10_1_gej pointj, res2j; + rustsecp256k1zkp_v0_10_1_ge res2; + rustsecp256k1zkp_v0_10_1_gej_set_ge(&pointj, A); 
+ rustsecp256k1zkp_v0_10_1_ecmult(&res2j, &pointj, q, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res2, &res2j); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(res, &res2)); } static void ecmult_const_edges(void) { - rustsecp256k1zkp_v0_10_0_scalar q; - rustsecp256k1zkp_v0_10_0_ge point; - rustsecp256k1zkp_v0_10_0_gej res; + rustsecp256k1zkp_v0_10_1_scalar q; + rustsecp256k1zkp_v0_10_1_ge point; + rustsecp256k1zkp_v0_10_1_gej res; size_t i; size_t cases = 1 + sizeof(scalars_near_split_bounds) / sizeof(scalars_near_split_bounds[0]); @@ -4696,13 +4696,13 @@ static void ecmult_const_edges(void) { * <=> q = 2*scalars_near_split_bounds[i] - K */ for (i = 0; i < cases; ++i) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&q, &rustsecp256k1zkp_v0_10_0_ecmult_const_K); + rustsecp256k1zkp_v0_10_1_scalar_negate(&q, &rustsecp256k1zkp_v0_10_1_ecmult_const_K); if (i > 0) { - rustsecp256k1zkp_v0_10_0_scalar_add(&q, &q, &scalars_near_split_bounds[i - 1]); - rustsecp256k1zkp_v0_10_0_scalar_add(&q, &q, &scalars_near_split_bounds[i - 1]); + rustsecp256k1zkp_v0_10_1_scalar_add(&q, &q, &scalars_near_split_bounds[i - 1]); + rustsecp256k1zkp_v0_10_1_scalar_add(&q, &q, &scalars_near_split_bounds[i - 1]); } random_group_element_test(&point); - rustsecp256k1zkp_v0_10_0_ecmult_const(&res, &point, &q); + rustsecp256k1zkp_v0_10_1_ecmult_const(&res, &point, &q); ecmult_const_check_result(&point, &q, &res); } } @@ -4710,12 +4710,12 @@ static void ecmult_const_edges(void) { static void ecmult_const_mult_xonly(void) { int i; - /* Test correspondence between rustsecp256k1zkp_v0_10_0_ecmult_const and rustsecp256k1zkp_v0_10_0_ecmult_const_xonly. */ + /* Test correspondence between rustsecp256k1zkp_v0_10_1_ecmult_const and rustsecp256k1zkp_v0_10_1_ecmult_const_xonly. 
*/ for (i = 0; i < 2*COUNT; ++i) { - rustsecp256k1zkp_v0_10_0_ge base; - rustsecp256k1zkp_v0_10_0_gej basej, resj; - rustsecp256k1zkp_v0_10_0_fe n, d, resx, v; - rustsecp256k1zkp_v0_10_0_scalar q; + rustsecp256k1zkp_v0_10_1_ge base; + rustsecp256k1zkp_v0_10_1_gej basej, resj; + rustsecp256k1zkp_v0_10_1_fe n, d, resx, v; + rustsecp256k1zkp_v0_10_1_scalar q; int res; /* Random base point. */ random_group_element_test(&base); @@ -4724,68 +4724,68 @@ static void ecmult_const_mult_xonly(void) { /* If i is odd, n=d*base.x for random non-zero d */ if (i & 1) { random_fe_non_zero_test(&d); - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &base.x, &d); + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &base.x, &d); } else { n = base.x; } /* Perform x-only multiplication. */ - res = rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(&resx, &n, (i & 1) ? &d : NULL, &q, i & 2); + res = rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(&resx, &n, (i & 1) ? &d : NULL, &q, i & 2); CHECK(res); /* Perform normal multiplication. */ - rustsecp256k1zkp_v0_10_0_gej_set_ge(&basej, &base); - rustsecp256k1zkp_v0_10_0_ecmult(&resj, &basej, &q, NULL); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&basej, &base); + rustsecp256k1zkp_v0_10_1_ecmult(&resj, &basej, &q, NULL); /* Check that resj's X coordinate corresponds with resx. */ - rustsecp256k1zkp_v0_10_0_fe_sqr(&v, &resj.z); - rustsecp256k1zkp_v0_10_0_fe_mul(&v, &v, &resx); + rustsecp256k1zkp_v0_10_1_fe_sqr(&v, &resj.z); + rustsecp256k1zkp_v0_10_1_fe_mul(&v, &v, &resx); CHECK(check_fe_equal(&v, &resj.x)); } - /* Test that rustsecp256k1zkp_v0_10_0_ecmult_const_xonly correctly rejects X coordinates not on curve. */ + /* Test that rustsecp256k1zkp_v0_10_1_ecmult_const_xonly correctly rejects X coordinates not on curve. 
*/ for (i = 0; i < 2*COUNT; ++i) { - rustsecp256k1zkp_v0_10_0_fe x, n, d, r; + rustsecp256k1zkp_v0_10_1_fe x, n, d, r; int res; - rustsecp256k1zkp_v0_10_0_scalar q; + rustsecp256k1zkp_v0_10_1_scalar q; random_scalar_order_test(&q); /* Generate random X coordinate not on the curve. */ do { random_fe_test(&x); - } while (rustsecp256k1zkp_v0_10_0_ge_x_on_curve_var(&x)); + } while (rustsecp256k1zkp_v0_10_1_ge_x_on_curve_var(&x)); /* If i is odd, n=d*x for random non-zero d. */ if (i & 1) { random_fe_non_zero_test(&d); - rustsecp256k1zkp_v0_10_0_fe_mul(&n, &x, &d); + rustsecp256k1zkp_v0_10_1_fe_mul(&n, &x, &d); } else { n = x; } - res = rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(&r, &n, (i & 1) ? &d : NULL, &q, 0); + res = rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(&r, &n, (i & 1) ? &d : NULL, &q, 0); CHECK(res == 0); } } static void ecmult_const_chain_multiply(void) { /* Check known result (randomly generated test problem from sage) */ - const rustsecp256k1zkp_v0_10_0_scalar scalar = SECP256K1_SCALAR_CONST( + const rustsecp256k1zkp_v0_10_1_scalar scalar = SECP256K1_SCALAR_CONST( 0x4968d524, 0x2abf9b7a, 0x466abbcf, 0x34b11b6d, 0xcd83d307, 0x827bed62, 0x05fad0ce, 0x18fae63b ); - const rustsecp256k1zkp_v0_10_0_gej expected_point = SECP256K1_GEJ_CONST( + const rustsecp256k1zkp_v0_10_1_gej expected_point = SECP256K1_GEJ_CONST( 0x5494c15d, 0x32099706, 0xc2395f94, 0x348745fd, 0x757ce30e, 0x4e8c90fb, 0xa2bad184, 0xf883c69f, 0x5d195d20, 0xe191bf7f, 0x1be3e55f, 0x56a80196, 0x6071ad01, 0xf1462f66, 0xc997fa94, 0xdb858435 ); - rustsecp256k1zkp_v0_10_0_gej point; - rustsecp256k1zkp_v0_10_0_ge res; + rustsecp256k1zkp_v0_10_1_gej point; + rustsecp256k1zkp_v0_10_1_ge res; int i; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&point, &rustsecp256k1zkp_v0_10_0_ge_const_g); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&point, &rustsecp256k1zkp_v0_10_1_ge_const_g); for (i = 0; i < 100; ++i) { - rustsecp256k1zkp_v0_10_0_ge tmp; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&tmp, &point); - 
rustsecp256k1zkp_v0_10_0_ecmult_const(&point, &tmp, &scalar); + rustsecp256k1zkp_v0_10_1_ge tmp; + rustsecp256k1zkp_v0_10_1_ge_set_gej(&tmp, &point); + rustsecp256k1zkp_v0_10_1_ecmult_const(&point, &tmp, &scalar); } - rustsecp256k1zkp_v0_10_0_ge_set_gej(&res, &point); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&expected_point, &res)); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&res, &point); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&expected_point, &res)); } static void run_ecmult_const_tests(void) { @@ -4798,18 +4798,18 @@ static void run_ecmult_const_tests(void) { } typedef struct { - rustsecp256k1zkp_v0_10_0_scalar *sc; - rustsecp256k1zkp_v0_10_0_ge *pt; + rustsecp256k1zkp_v0_10_1_scalar *sc; + rustsecp256k1zkp_v0_10_1_ge *pt; } ecmult_multi_data; -static int ecmult_multi_callback(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_multi_callback(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ecmult_multi_data *data = (ecmult_multi_data*) cbdata; *sc = data->sc[idx]; *pt = data->pt[idx]; return 1; } -static int ecmult_multi_false_callback(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_multi_false_callback(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { (void)sc; (void)pt; (void)idx; @@ -4817,12 +4817,12 @@ static int ecmult_multi_false_callback(rustsecp256k1zkp_v0_10_0_scalar *sc, rust return 0; } -static void test_ecmult_multi(rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsecp256k1zkp_v0_10_0_ecmult_multi_func ecmult_multi) { +static void test_ecmult_multi(rustsecp256k1zkp_v0_10_1_scratch *scratch, rustsecp256k1zkp_v0_10_1_ecmult_multi_func ecmult_multi) { int ncount; - rustsecp256k1zkp_v0_10_0_scalar sc[32]; - rustsecp256k1zkp_v0_10_0_ge pt[32]; - rustsecp256k1zkp_v0_10_0_gej r; - rustsecp256k1zkp_v0_10_0_gej r2; + 
rustsecp256k1zkp_v0_10_1_scalar sc[32]; + rustsecp256k1zkp_v0_10_1_ge pt[32]; + rustsecp256k1zkp_v0_10_1_gej r; + rustsecp256k1zkp_v0_10_1_gej r2; ecmult_multi_data data; data.sc = sc; @@ -4833,76 +4833,76 @@ static void test_ecmult_multi(rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsec /* Check 1- and 2-point multiplies against ecmult */ for (ncount = 0; ncount < COUNT; ncount++) { - rustsecp256k1zkp_v0_10_0_ge ptg; - rustsecp256k1zkp_v0_10_0_gej ptgj; + rustsecp256k1zkp_v0_10_1_ge ptg; + rustsecp256k1zkp_v0_10_1_gej ptgj; random_scalar_order(&sc[0]); random_scalar_order(&sc[1]); random_group_element_test(&ptg); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ptgj, &ptg); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ptgj, &ptg); pt[0] = ptg; - pt[1] = rustsecp256k1zkp_v0_10_0_ge_const_g; + pt[1] = rustsecp256k1zkp_v0_10_1_ge_const_g; /* only G scalar */ - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &ptgj, &rustsecp256k1zkp_v0_10_0_scalar_zero, &sc[0]); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &ptgj, &rustsecp256k1zkp_v0_10_1_scalar_zero, &sc[0]); CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &sc[0], ecmult_multi_callback, &data, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); /* 1-point */ - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &ptgj, &sc[0], &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &ptgj, &sc[0], &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); /* Try to multiply 1 point, but callback returns false */ - CHECK(!ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, 
ecmult_multi_false_callback, &data, 1)); + CHECK(!ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_false_callback, &data, 1)); /* 2-point */ - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &ptgj, &sc[0], &sc[1]); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 2)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &ptgj, &sc[0], &sc[1]); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); /* 2-point with G scalar */ - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &ptgj, &sc[0], &sc[1]); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &ptgj, &sc[0], &sc[1]); CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &sc[1], ecmult_multi_callback, &data, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); } /* Check infinite outputs of various forms */ for (ncount = 0; ncount < COUNT; ncount++) { - rustsecp256k1zkp_v0_10_0_ge ptg; + rustsecp256k1zkp_v0_10_1_ge ptg; size_t i, j; size_t sizes[] = { 2, 10, 32 }; for (j = 0; j < 3; j++) { for (i = 0; i < 32; i++) { random_scalar_order(&sc[i]); - rustsecp256k1zkp_v0_10_0_ge_set_infinity(&pt[i]); + rustsecp256k1zkp_v0_10_1_ge_set_infinity(&pt[i]); } - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, sizes[j])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, sizes[j])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); } for (j = 0; j < 3; j++) { for (i = 0; i < 32; i++) { random_group_element_test(&ptg); pt[i] = ptg; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sc[i], 0); + 
rustsecp256k1zkp_v0_10_1_scalar_set_int(&sc[i], 0); } - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, sizes[j])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, sizes[j])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); } for (j = 0; j < 3; j++) { random_group_element_test(&ptg); for (i = 0; i < 16; i++) { random_scalar_order(&sc[2*i]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&sc[2*i + 1], &sc[2*i]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sc[2*i + 1], &sc[2*i]); pt[2 * i] = ptg; pt[2 * i + 1] = ptg; } - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, sizes[j])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, sizes[j])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); random_scalar_order(&sc[0]); for (i = 0; i < 16; i++) { @@ -4911,66 +4911,66 @@ static void test_ecmult_multi(rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsec sc[2*i] = sc[0]; sc[2*i+1] = sc[0]; pt[2 * i] = ptg; - rustsecp256k1zkp_v0_10_0_ge_neg(&pt[2*i+1], &pt[2*i]); + rustsecp256k1zkp_v0_10_1_ge_neg(&pt[2*i+1], &pt[2*i]); } - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, sizes[j])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, sizes[j])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); } random_group_element_test(&ptg); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sc[0], 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sc[0], 0); pt[0] = ptg; for (i = 1; i < 
32; i++) { pt[i] = ptg; random_scalar_order(&sc[i]); - rustsecp256k1zkp_v0_10_0_scalar_add(&sc[0], &sc[0], &sc[i]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&sc[i], &sc[i]); + rustsecp256k1zkp_v0_10_1_scalar_add(&sc[0], &sc[0], &sc[i]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sc[i], &sc[i]); } - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 32)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 32)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); } /* Check random points, constant scalar */ for (ncount = 0; ncount < COUNT; ncount++) { size_t i; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&r); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&r); random_scalar_order(&sc[0]); for (i = 0; i < 20; i++) { - rustsecp256k1zkp_v0_10_0_ge ptg; + rustsecp256k1zkp_v0_10_1_ge ptg; sc[i] = sc[0]; random_group_element_test(&ptg); pt[i] = ptg; - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&r, &r, &pt[i], NULL); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&r, &r, &pt[i], NULL); } - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &r, &sc[0], &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 20)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &r, &sc[0], &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 20)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); } /* Check random scalars, constant point */ for (ncount = 0; ncount < COUNT; ncount++) { size_t i; - rustsecp256k1zkp_v0_10_0_ge ptg; - rustsecp256k1zkp_v0_10_0_gej p0j; - rustsecp256k1zkp_v0_10_0_scalar rs; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&rs, 0); 
+ rustsecp256k1zkp_v0_10_1_ge ptg; + rustsecp256k1zkp_v0_10_1_gej p0j; + rustsecp256k1zkp_v0_10_1_scalar rs; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&rs, 0); random_group_element_test(&ptg); for (i = 0; i < 20; i++) { random_scalar_order(&sc[i]); pt[i] = ptg; - rustsecp256k1zkp_v0_10_0_scalar_add(&rs, &rs, &sc[i]); + rustsecp256k1zkp_v0_10_1_scalar_add(&rs, &rs, &sc[i]); } - rustsecp256k1zkp_v0_10_0_gej_set_ge(&p0j, &pt[0]); - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &p0j, &rs, &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 20)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&r, &r2)); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&p0j, &pt[0]); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &p0j, &rs, &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 20)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&r, &r2)); } /* Sanity check that zero scalars don't cause problems */ @@ -4979,60 +4979,60 @@ static void test_ecmult_multi(rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsec random_group_element_test(&pt[ncount]); } - rustsecp256k1zkp_v0_10_0_scalar_clear(&sc[0]); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 20)); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sc[1]); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sc[2]); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sc[3]); - rustsecp256k1zkp_v0_10_0_scalar_clear(&sc[4]); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 6)); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 5)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sc[0]); + 
CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 20)); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sc[1]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sc[2]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sc[3]); + rustsecp256k1zkp_v0_10_1_scalar_clear(&sc[4]); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 6)); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 5)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); /* Run through s0*(t0*P) + s1*(t1*P) exhaustively for many small values of s0, s1, t0, t1 */ { const size_t TOP = 8; size_t s0i, s1i; size_t t0i, t1i; - rustsecp256k1zkp_v0_10_0_ge ptg; - rustsecp256k1zkp_v0_10_0_gej ptgj; + rustsecp256k1zkp_v0_10_1_ge ptg; + rustsecp256k1zkp_v0_10_1_gej ptgj; random_group_element_test(&ptg); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ptgj, &ptg); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ptgj, &ptg); for(t0i = 0; t0i < TOP; t0i++) { for(t1i = 0; t1i < TOP; t1i++) { - rustsecp256k1zkp_v0_10_0_gej t0p, t1p; - rustsecp256k1zkp_v0_10_0_scalar t0, t1; + rustsecp256k1zkp_v0_10_1_gej t0p, t1p; + rustsecp256k1zkp_v0_10_1_scalar t0, t1; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t0, (t0i + 1) / 2); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&t0, t0i & 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t1, (t1i + 1) / 2); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&t1, t1i & 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t0, (t0i + 1) / 2); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&t0, t0i & 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t1, (t1i + 1) / 2); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&t1, t1i & 1); - rustsecp256k1zkp_v0_10_0_ecmult(&t0p, &ptgj, &t0, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ecmult(&t1p, &ptgj, &t1, &rustsecp256k1zkp_v0_10_0_scalar_zero); + 
rustsecp256k1zkp_v0_10_1_ecmult(&t0p, &ptgj, &t0, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ecmult(&t1p, &ptgj, &t1, &rustsecp256k1zkp_v0_10_1_scalar_zero); for(s0i = 0; s0i < TOP; s0i++) { for(s1i = 0; s1i < TOP; s1i++) { - rustsecp256k1zkp_v0_10_0_scalar tmp1, tmp2; - rustsecp256k1zkp_v0_10_0_gej expected, actual; + rustsecp256k1zkp_v0_10_1_scalar tmp1, tmp2; + rustsecp256k1zkp_v0_10_1_gej expected, actual; - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pt[0], &t0p); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pt[1], &t1p); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pt[0], &t0p); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pt[1], &t1p); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sc[0], (s0i + 1) / 2); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&sc[0], s0i & 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sc[1], (s1i + 1) / 2); - rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&sc[1], s1i & 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sc[0], (s0i + 1) / 2); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&sc[0], s0i & 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sc[1], (s1i + 1) / 2); + rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&sc[1], s1i & 1); - rustsecp256k1zkp_v0_10_0_scalar_mul(&tmp1, &t0, &sc[0]); - rustsecp256k1zkp_v0_10_0_scalar_mul(&tmp2, &t1, &sc[1]); - rustsecp256k1zkp_v0_10_0_scalar_add(&tmp1, &tmp1, &tmp2); + rustsecp256k1zkp_v0_10_1_scalar_mul(&tmp1, &t0, &sc[0]); + rustsecp256k1zkp_v0_10_1_scalar_mul(&tmp2, &t1, &sc[1]); + rustsecp256k1zkp_v0_10_1_scalar_add(&tmp1, &tmp1, &tmp2); - rustsecp256k1zkp_v0_10_0_ecmult(&expected, &ptgj, &tmp1, &rustsecp256k1zkp_v0_10_0_scalar_zero); - CHECK(ecmult_multi(&CTX->error_callback, scratch, &actual, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 2)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&actual, &expected)); + rustsecp256k1zkp_v0_10_1_ecmult(&expected, &ptgj, &tmp1, &rustsecp256k1zkp_v0_10_1_scalar_zero); + CHECK(ecmult_multi(&CTX->error_callback, scratch, &actual, 
&rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 2)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&actual, &expected)); } } } @@ -5040,7 +5040,7 @@ static void test_ecmult_multi(rustsecp256k1zkp_v0_10_0_scratch *scratch, rustsec } } -static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { +static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_1_scratch *scratch) { /* Large random test for ecmult_multi_* functions which exercises: * - Few or many inputs (0 up to 128, roughly exponentially distributed). * - Few or many 0*P or a*INF inputs (roughly uniformly distributed). @@ -5054,48 +5054,48 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { * scalars[0..filled-1] and gejs[0..filled-1] are the scalars and points * which form its normal inputs. */ int filled = 0; - rustsecp256k1zkp_v0_10_0_scalar g_scalar = rustsecp256k1zkp_v0_10_0_scalar_zero; - rustsecp256k1zkp_v0_10_0_scalar scalars[128]; - rustsecp256k1zkp_v0_10_0_gej gejs[128]; + rustsecp256k1zkp_v0_10_1_scalar g_scalar = rustsecp256k1zkp_v0_10_1_scalar_zero; + rustsecp256k1zkp_v0_10_1_scalar scalars[128]; + rustsecp256k1zkp_v0_10_1_gej gejs[128]; /* The expected result, and the computed result. */ - rustsecp256k1zkp_v0_10_0_gej expected, computed; + rustsecp256k1zkp_v0_10_1_gej expected, computed; /* Temporaries. */ - rustsecp256k1zkp_v0_10_0_scalar sc_tmp; - rustsecp256k1zkp_v0_10_0_ge ge_tmp; + rustsecp256k1zkp_v0_10_1_scalar sc_tmp; + rustsecp256k1zkp_v0_10_1_ge ge_tmp; /* Variables needed for the actual input to ecmult_multi. */ - rustsecp256k1zkp_v0_10_0_ge ges[128]; + rustsecp256k1zkp_v0_10_1_ge ges[128]; ecmult_multi_data data; int i; /* Which multiplication function to use */ - int fn = rustsecp256k1zkp_v0_10_0_testrand_int(3); - rustsecp256k1zkp_v0_10_0_ecmult_multi_func ecmult_multi = fn == 0 ? rustsecp256k1zkp_v0_10_0_ecmult_multi_var : - fn == 1 ? 
rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch_single : - rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch_single; + int fn = rustsecp256k1zkp_v0_10_1_testrand_int(3); + rustsecp256k1zkp_v0_10_1_ecmult_multi_func ecmult_multi = fn == 0 ? rustsecp256k1zkp_v0_10_1_ecmult_multi_var : + fn == 1 ? rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch_single : + rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch_single; /* Simulate exponentially distributed num. */ - int num_bits = 2 + rustsecp256k1zkp_v0_10_0_testrand_int(6); + int num_bits = 2 + rustsecp256k1zkp_v0_10_1_testrand_int(6); /* Number of (scalar, point) inputs (excluding g). */ - int num = rustsecp256k1zkp_v0_10_0_testrand_int((1 << num_bits) + 1); + int num = rustsecp256k1zkp_v0_10_1_testrand_int((1 << num_bits) + 1); /* Number of those which are nonzero. */ - int num_nonzero = rustsecp256k1zkp_v0_10_0_testrand_int(num + 1); + int num_nonzero = rustsecp256k1zkp_v0_10_1_testrand_int(num + 1); /* Whether we're aiming to create an input with nonzero expected result. */ - int nonzero_result = rustsecp256k1zkp_v0_10_0_testrand_bits(1); + int nonzero_result = rustsecp256k1zkp_v0_10_1_testrand_bits(1); /* Whether we will provide nonzero g multiplicand. In some cases our hand * is forced here based on num_nonzero and nonzero_result. */ int g_nonzero = num_nonzero == 0 ? nonzero_result : num_nonzero == 1 && !nonzero_result ? 1 : - (int)rustsecp256k1zkp_v0_10_0_testrand_bits(1); + (int)rustsecp256k1zkp_v0_10_1_testrand_bits(1); /* Which g_scalar pointer to pass into ecmult_multi(). */ - const rustsecp256k1zkp_v0_10_0_scalar* g_scalar_ptr = (g_nonzero || rustsecp256k1zkp_v0_10_0_testrand_bits(1)) ? &g_scalar : NULL; + const rustsecp256k1zkp_v0_10_1_scalar* g_scalar_ptr = (g_nonzero || rustsecp256k1zkp_v0_10_1_testrand_bits(1)) ? &g_scalar : NULL; /* How many EC multiplications were performed in this function. */ int mults = 0; /* How many randomization steps to apply to the input list. 
*/ - int rands = (int)rustsecp256k1zkp_v0_10_0_testrand_bits(3); + int rands = (int)rustsecp256k1zkp_v0_10_1_testrand_bits(3); if (rands > num_nonzero) rands = num_nonzero; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&expected); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&gejs[0]); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalars[0], 0); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&expected); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&gejs[0]); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalars[0], 0); if (g_nonzero) { /* If g_nonzero, set g_scalar to nonzero value r. */ @@ -5104,10 +5104,10 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { /* If expected=0 is desired, add a (a*r, -(1/a)*g) term to compensate. */ CHECK(num_nonzero > filled); random_scalar_order_test(&sc_tmp); - rustsecp256k1zkp_v0_10_0_scalar_mul(&scalars[filled], &sc_tmp, &g_scalar); - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&sc_tmp, &sc_tmp); - rustsecp256k1zkp_v0_10_0_scalar_negate(&sc_tmp, &sc_tmp); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &gejs[filled], &sc_tmp); + rustsecp256k1zkp_v0_10_1_scalar_mul(&scalars[filled], &sc_tmp, &g_scalar); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&sc_tmp, &sc_tmp); + rustsecp256k1zkp_v0_10_1_scalar_negate(&sc_tmp, &sc_tmp); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &gejs[filled], &sc_tmp); ++filled; ++mults; } @@ -5117,14 +5117,14 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { /* If a nonzero result is desired, and there is space, add a random nonzero term. */ random_scalar_order_test(&scalars[filled]); random_group_element_test(&ge_tmp); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gejs[filled], &ge_tmp); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gejs[filled], &ge_tmp); ++filled; } if (nonzero_result) { /* Compute the expected result using normal ecmult. 
*/ CHECK(filled <= 1); - rustsecp256k1zkp_v0_10_0_ecmult(&expected, &gejs[0], &scalars[0], &g_scalar); + rustsecp256k1zkp_v0_10_1_ecmult(&expected, &gejs[0], &scalars[0], &g_scalar); mults += filled + g_nonzero; } @@ -5135,13 +5135,13 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { /* Add entries to scalars,gejs so that there are num of them. All the added entries * either have scalar=0 or point=infinity, so these do not change the expected result. */ while (filled < num) { - if (rustsecp256k1zkp_v0_10_0_testrand_bits(1)) { - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&gejs[filled]); + if (rustsecp256k1zkp_v0_10_1_testrand_bits(1)) { + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&gejs[filled]); random_scalar_order_test(&scalars[filled]); } else { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalars[filled], 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalars[filled], 0); random_group_element_test(&ge_tmp); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gejs[filled], &ge_tmp); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gejs[filled], &ge_tmp); } ++filled; } @@ -5151,13 +5151,13 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { * convert some of them to be both non-0-scalar and non-infinity-point. */ for (i = 0; i < rands; ++i) { int j; - rustsecp256k1zkp_v0_10_0_scalar v, iv; + rustsecp256k1zkp_v0_10_1_scalar v, iv; /* Shuffle the entries. 
*/ for (j = 0; j < num_nonzero; ++j) { - int k = rustsecp256k1zkp_v0_10_0_testrand_int(num_nonzero - j); + int k = rustsecp256k1zkp_v0_10_1_testrand_int(num_nonzero - j); if (k != 0) { - rustsecp256k1zkp_v0_10_0_gej gej = gejs[j]; - rustsecp256k1zkp_v0_10_0_scalar sc = scalars[j]; + rustsecp256k1zkp_v0_10_1_gej gej = gejs[j]; + rustsecp256k1zkp_v0_10_1_scalar sc = scalars[j]; gejs[j] = gejs[j + k]; scalars[j] = scalars[j + k]; gejs[j + k] = gej; @@ -5167,26 +5167,26 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { /* Perturb all consecutive pairs of inputs: * a*P + b*Q -> (a+b)*P + b*(Q-P). */ for (j = 0; j + 1 < num_nonzero; j += 2) { - rustsecp256k1zkp_v0_10_0_gej gej; - rustsecp256k1zkp_v0_10_0_scalar_add(&scalars[j], &scalars[j], &scalars[j+1]); - rustsecp256k1zkp_v0_10_0_gej_neg(&gej, &gejs[j]); - rustsecp256k1zkp_v0_10_0_gej_add_var(&gejs[j+1], &gejs[j+1], &gej, NULL); + rustsecp256k1zkp_v0_10_1_gej gej; + rustsecp256k1zkp_v0_10_1_scalar_add(&scalars[j], &scalars[j], &scalars[j+1]); + rustsecp256k1zkp_v0_10_1_gej_neg(&gej, &gejs[j]); + rustsecp256k1zkp_v0_10_1_gej_add_var(&gejs[j+1], &gejs[j+1], &gej, NULL); } /* Transform the last input: a*P -> (v*a) * ((1/v)*P). */ CHECK(num_nonzero >= 1); random_scalar_order_test(&v); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&iv, &v); - rustsecp256k1zkp_v0_10_0_scalar_mul(&scalars[num_nonzero - 1], &scalars[num_nonzero - 1], &v); - rustsecp256k1zkp_v0_10_0_ecmult(&gejs[num_nonzero - 1], &gejs[num_nonzero - 1], &iv, NULL); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&iv, &v); + rustsecp256k1zkp_v0_10_1_scalar_mul(&scalars[num_nonzero - 1], &scalars[num_nonzero - 1], &v); + rustsecp256k1zkp_v0_10_1_ecmult(&gejs[num_nonzero - 1], &gejs[num_nonzero - 1], &iv, NULL); ++mults; } /* Shuffle all entries (0..num-1). 
*/ for (i = 0; i < num; ++i) { - int j = rustsecp256k1zkp_v0_10_0_testrand_int(num - i); + int j = rustsecp256k1zkp_v0_10_1_testrand_int(num - i); if (j != 0) { - rustsecp256k1zkp_v0_10_0_gej gej = gejs[i]; - rustsecp256k1zkp_v0_10_0_scalar sc = scalars[i]; + rustsecp256k1zkp_v0_10_1_gej gej = gejs[i]; + rustsecp256k1zkp_v0_10_1_scalar sc = scalars[i]; gejs[i] = gejs[i + j]; scalars[i] = scalars[i + j]; gejs[i + j] = gej; @@ -5195,23 +5195,23 @@ static int test_ecmult_multi_random(rustsecp256k1zkp_v0_10_0_scratch *scratch) { } /* Compute affine versions of all inputs. */ - rustsecp256k1zkp_v0_10_0_ge_set_all_gej_var(ges, gejs, filled); + rustsecp256k1zkp_v0_10_1_ge_set_all_gej_var(ges, gejs, filled); /* Invoke ecmult_multi code. */ data.sc = scalars; data.pt = ges; CHECK(ecmult_multi(&CTX->error_callback, scratch, &computed, g_scalar_ptr, ecmult_multi_callback, &data, filled)); mults += num_nonzero + g_nonzero; /* Compare with expected result. */ - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_var(&computed, &expected)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_var(&computed, &expected)); return mults; } -static void test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_0_ecmult_multi_func ecmult_multi) { - rustsecp256k1zkp_v0_10_0_scalar sc; - rustsecp256k1zkp_v0_10_0_ge pt; - rustsecp256k1zkp_v0_10_0_gej r; +static void test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_1_ecmult_multi_func ecmult_multi) { + rustsecp256k1zkp_v0_10_1_scalar sc; + rustsecp256k1zkp_v0_10_1_ge pt; + rustsecp256k1zkp_v0_10_1_gej r; ecmult_multi_data data; - rustsecp256k1zkp_v0_10_0_scratch *scratch_empty; + rustsecp256k1zkp_v0_10_1_scratch *scratch_empty; random_group_element_test(&pt); random_scalar_order(&sc); @@ -5219,23 +5219,23 @@ static void test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_0_ecmult_multi data.pt = &pt; /* Try to multiply 1 point, but scratch space is empty.*/ - scratch_empty = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, 0); - 
CHECK(!ecmult_multi(&CTX->error_callback, scratch_empty, &r, &rustsecp256k1zkp_v0_10_0_scalar_zero, ecmult_multi_callback, &data, 1)); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch_empty); + scratch_empty = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, 0); + CHECK(!ecmult_multi(&CTX->error_callback, scratch_empty, &r, &rustsecp256k1zkp_v0_10_1_scalar_zero, ecmult_multi_callback, &data, 1)); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch_empty); } -static void test_rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(void) { +static void test_rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(void) { int i; - CHECK(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(0) == 0); for(i = 1; i <= PIPPENGER_MAX_BUCKET_WINDOW; i++) { /* Bucket_window of 8 is not used with endo */ if (i == 8) { continue; } - CHECK(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(i)) == i); + CHECK(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(i)) == i); if (i != PIPPENGER_MAX_BUCKET_WINDOW) { - CHECK(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(i)+1) > i); + CHECK(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(i)+1) > i); } } } @@ -5245,9 +5245,9 @@ static void test_rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(void) { * for a given scratch space. 
*/ static void test_ecmult_multi_pippenger_max_points(void) { - size_t scratch_size = rustsecp256k1zkp_v0_10_0_testrand_bits(8); - size_t max_size = rustsecp256k1zkp_v0_10_0_pippenger_scratch_size(rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(PIPPENGER_MAX_BUCKET_WINDOW-1)+512, 12); - rustsecp256k1zkp_v0_10_0_scratch *scratch; + size_t scratch_size = rustsecp256k1zkp_v0_10_1_testrand_bits(8); + size_t max_size = rustsecp256k1zkp_v0_10_1_pippenger_scratch_size(rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(PIPPENGER_MAX_BUCKET_WINDOW-1)+512, 12); + rustsecp256k1zkp_v0_10_1_scratch *scratch; size_t n_points_supported; int bucket_window = 0; @@ -5255,24 +5255,24 @@ static void test_ecmult_multi_pippenger_max_points(void) { size_t i; size_t total_alloc; size_t checkpoint; - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, scratch_size); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, scratch_size); CHECK(scratch != NULL); - checkpoint = rustsecp256k1zkp_v0_10_0_scratch_checkpoint(&CTX->error_callback, scratch); - n_points_supported = rustsecp256k1zkp_v0_10_0_pippenger_max_points(&CTX->error_callback, scratch); + checkpoint = rustsecp256k1zkp_v0_10_1_scratch_checkpoint(&CTX->error_callback, scratch); + n_points_supported = rustsecp256k1zkp_v0_10_1_pippenger_max_points(&CTX->error_callback, scratch); if (n_points_supported == 0) { - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); continue; } - bucket_window = rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(n_points_supported); + bucket_window = rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(n_points_supported); /* allocate `total_alloc` bytes over `PIPPENGER_SCRATCH_OBJECTS` many allocations */ - total_alloc = rustsecp256k1zkp_v0_10_0_pippenger_scratch_size(n_points_supported, bucket_window); + total_alloc = 
rustsecp256k1zkp_v0_10_1_pippenger_scratch_size(n_points_supported, bucket_window); for (i = 0; i < PIPPENGER_SCRATCH_OBJECTS - 1; i++) { - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, 1)); total_alloc--; } - CHECK(rustsecp256k1zkp_v0_10_0_scratch_alloc(&CTX->error_callback, scratch, total_alloc)); - rustsecp256k1zkp_v0_10_0_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + CHECK(rustsecp256k1zkp_v0_10_1_scratch_alloc(&CTX->error_callback, scratch, total_alloc)); + rustsecp256k1zkp_v0_10_1_scratch_apply_checkpoint(&CTX->error_callback, scratch, checkpoint); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); } CHECK(bucket_window == PIPPENGER_MAX_BUCKET_WINDOW); } @@ -5282,154 +5282,154 @@ static void test_ecmult_multi_batch_size_helper(void) { max_n_batch_points = 0; n = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 0); max_n_batch_points = 1; n = 0; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == 0); CHECK(n_batch_points == 0); max_n_batch_points = 2; n = 5; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == 3); CHECK(n_batch_points == 2); max_n_batch_points = ECMULT_MAX_POINTS_PER_BATCH; n = 
ECMULT_MAX_POINTS_PER_BATCH; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == 1); CHECK(n_batch_points == ECMULT_MAX_POINTS_PER_BATCH); max_n_batch_points = ECMULT_MAX_POINTS_PER_BATCH + 1; n = ECMULT_MAX_POINTS_PER_BATCH + 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == 2); CHECK(n_batch_points == ECMULT_MAX_POINTS_PER_BATCH/2 + 1); max_n_batch_points = 1; n = SIZE_MAX; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == SIZE_MAX); CHECK(n_batch_points == 1); max_n_batch_points = 2; n = SIZE_MAX; - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_batch_size_helper(&n_batches, &n_batch_points, max_n_batch_points, n) == 1); CHECK(n_batches == SIZE_MAX/2 + 1); CHECK(n_batch_points == 2); } /** - * Run rustsecp256k1zkp_v0_10_0_ecmult_multi_var with num points and a scratch space restricted to + * Run rustsecp256k1zkp_v0_10_1_ecmult_multi_var with num points and a scratch space restricted to * 1 <= i <= num points. 
*/ static void test_ecmult_multi_batching(void) { static const int n_points = 2*ECMULT_PIPPENGER_THRESHOLD; - rustsecp256k1zkp_v0_10_0_scalar scG; - rustsecp256k1zkp_v0_10_0_scalar *sc = (rustsecp256k1zkp_v0_10_0_scalar *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_0_scalar) * n_points); - rustsecp256k1zkp_v0_10_0_ge *pt = (rustsecp256k1zkp_v0_10_0_ge *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_0_ge) * n_points); - rustsecp256k1zkp_v0_10_0_gej r; - rustsecp256k1zkp_v0_10_0_gej r2; + rustsecp256k1zkp_v0_10_1_scalar scG; + rustsecp256k1zkp_v0_10_1_scalar *sc = (rustsecp256k1zkp_v0_10_1_scalar *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_1_scalar) * n_points); + rustsecp256k1zkp_v0_10_1_ge *pt = (rustsecp256k1zkp_v0_10_1_ge *)checked_malloc(&CTX->error_callback, sizeof(rustsecp256k1zkp_v0_10_1_ge) * n_points); + rustsecp256k1zkp_v0_10_1_gej r; + rustsecp256k1zkp_v0_10_1_gej r2; ecmult_multi_data data; int i; - rustsecp256k1zkp_v0_10_0_scratch *scratch; + rustsecp256k1zkp_v0_10_1_scratch *scratch; - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&r2); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&r2); /* Get random scalars and group elements and compute result */ random_scalar_order(&scG); - rustsecp256k1zkp_v0_10_0_ecmult(&r2, &r2, &rustsecp256k1zkp_v0_10_0_scalar_zero, &scG); + rustsecp256k1zkp_v0_10_1_ecmult(&r2, &r2, &rustsecp256k1zkp_v0_10_1_scalar_zero, &scG); for(i = 0; i < n_points; i++) { - rustsecp256k1zkp_v0_10_0_ge ptg; - rustsecp256k1zkp_v0_10_0_gej ptgj; + rustsecp256k1zkp_v0_10_1_ge ptg; + rustsecp256k1zkp_v0_10_1_gej ptgj; random_group_element_test(&ptg); - rustsecp256k1zkp_v0_10_0_gej_set_ge(&ptgj, &ptg); + rustsecp256k1zkp_v0_10_1_gej_set_ge(&ptgj, &ptg); pt[i] = ptg; random_scalar_order(&sc[i]); - rustsecp256k1zkp_v0_10_0_ecmult(&ptgj, &ptgj, &sc[i], NULL); - rustsecp256k1zkp_v0_10_0_gej_add_var(&r2, &r2, &ptgj, NULL); + rustsecp256k1zkp_v0_10_1_ecmult(&ptgj, &ptgj, &sc[i], NULL); + 
rustsecp256k1zkp_v0_10_1_gej_add_var(&r2, &r2, &ptgj, NULL); } data.sc = sc; data.pt = pt; - rustsecp256k1zkp_v0_10_0_gej_neg(&r2, &r2); + rustsecp256k1zkp_v0_10_1_gej_neg(&r2, &r2); /* Test with empty scratch space. It should compute the correct result using * ecmult_mult_simple algorithm which doesn't require a scratch space. */ - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); - rustsecp256k1zkp_v0_10_0_gej_add_var(&r, &r, &r2, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&r, &r, &r2, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); /* Test with space for 1 point in pippenger. That's not enough because * ecmult_multi selects strauss which requires more memory. It should * therefore select the simple algorithm. 
*/ - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, rustsecp256k1zkp_v0_10_0_pippenger_scratch_size(1, 1) + PIPPENGER_SCRATCH_OBJECTS*ALIGNMENT); - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); - rustsecp256k1zkp_v0_10_0_gej_add_var(&r, &r, &r2, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, rustsecp256k1zkp_v0_10_1_pippenger_scratch_size(1, 1) + PIPPENGER_SCRATCH_OBJECTS*ALIGNMENT); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&r, &r, &r2, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); for(i = 1; i <= n_points; i++) { if (i > ECMULT_PIPPENGER_THRESHOLD) { - int bucket_window = rustsecp256k1zkp_v0_10_0_pippenger_bucket_window(i); - size_t scratch_size = rustsecp256k1zkp_v0_10_0_pippenger_scratch_size(i, bucket_window); - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, scratch_size + PIPPENGER_SCRATCH_OBJECTS*ALIGNMENT); + int bucket_window = rustsecp256k1zkp_v0_10_1_pippenger_bucket_window(i); + size_t scratch_size = rustsecp256k1zkp_v0_10_1_pippenger_scratch_size(i, bucket_window); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, scratch_size + PIPPENGER_SCRATCH_OBJECTS*ALIGNMENT); } else { - size_t scratch_size = rustsecp256k1zkp_v0_10_0_strauss_scratch_size(i); - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, scratch_size + STRAUSS_SCRATCH_OBJECTS*ALIGNMENT); + size_t scratch_size = rustsecp256k1zkp_v0_10_1_strauss_scratch_size(i); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, 
scratch_size + STRAUSS_SCRATCH_OBJECTS*ALIGNMENT); } - CHECK(rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); - rustsecp256k1zkp_v0_10_0_gej_add_var(&r, &r, &r2, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&r)); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + CHECK(rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&CTX->error_callback, scratch, &r, &scG, ecmult_multi_callback, &data, n_points)); + rustsecp256k1zkp_v0_10_1_gej_add_var(&r, &r, &r2, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&r)); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); } free(sc); free(pt); } static void run_ecmult_multi_tests(void) { - rustsecp256k1zkp_v0_10_0_scratch *scratch; + rustsecp256k1zkp_v0_10_1_scratch *scratch; int64_t todo = (int64_t)320 * COUNT; - test_rustsecp256k1zkp_v0_10_0_pippenger_bucket_window_inv(); + test_rustsecp256k1zkp_v0_10_1_pippenger_bucket_window_inv(); test_ecmult_multi_pippenger_max_points(); - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, 819200); - test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_0_ecmult_multi_var); - test_ecmult_multi(NULL, rustsecp256k1zkp_v0_10_0_ecmult_multi_var); - test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch_single); - test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_0_ecmult_pippenger_batch_single); - test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch_single); - test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_0_ecmult_strauss_batch_single); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, 819200); + test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_1_ecmult_multi_var); + test_ecmult_multi(NULL, rustsecp256k1zkp_v0_10_1_ecmult_multi_var); + test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch_single); + 
test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_1_ecmult_pippenger_batch_single); + test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch_single); + test_ecmult_multi_batch_single(rustsecp256k1zkp_v0_10_1_ecmult_strauss_batch_single); while (todo > 0) { todo -= test_ecmult_multi_random(scratch); } - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); /* Run test_ecmult_multi with space for exactly one point */ - scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&CTX->error_callback, rustsecp256k1zkp_v0_10_0_strauss_scratch_size(1) + STRAUSS_SCRATCH_OBJECTS*ALIGNMENT); - test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_0_ecmult_multi_var); - rustsecp256k1zkp_v0_10_0_scratch_destroy(&CTX->error_callback, scratch); + scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&CTX->error_callback, rustsecp256k1zkp_v0_10_1_strauss_scratch_size(1) + STRAUSS_SCRATCH_OBJECTS*ALIGNMENT); + test_ecmult_multi(scratch, rustsecp256k1zkp_v0_10_1_ecmult_multi_var); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&CTX->error_callback, scratch); test_ecmult_multi_batch_size_helper(); test_ecmult_multi_batching(); } -static void test_wnaf(const rustsecp256k1zkp_v0_10_0_scalar *number, int w) { - rustsecp256k1zkp_v0_10_0_scalar x, two, t; +static void test_wnaf(const rustsecp256k1zkp_v0_10_1_scalar *number, int w) { + rustsecp256k1zkp_v0_10_1_scalar x, two, t; int wnaf[256]; int zeroes = -1; int i; int bits; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&two, 2); - bits = rustsecp256k1zkp_v0_10_0_ecmult_wnaf(wnaf, 256, number, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&two, 2); + bits = rustsecp256k1zkp_v0_10_1_ecmult_wnaf(wnaf, 256, number, w); CHECK(bits <= 256); for (i = bits-1; i >= 0; i--) { int v = wnaf[i]; - rustsecp256k1zkp_v0_10_0_scalar_mul(&x, &x, &two); + 
rustsecp256k1zkp_v0_10_1_scalar_mul(&x, &x, &two); if (v) { CHECK(zeroes == -1 || zeroes >= w-1); /* check that distance between non-zero elements is at least w-1 */ zeroes=0; @@ -5441,48 +5441,48 @@ static void test_wnaf(const rustsecp256k1zkp_v0_10_0_scalar *number, int w) { zeroes++; } if (v >= 0) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, v); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, v); } else { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, -v); - rustsecp256k1zkp_v0_10_0_scalar_negate(&t, &t); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, -v); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); } - rustsecp256k1zkp_v0_10_0_scalar_add(&x, &x, &t); + rustsecp256k1zkp_v0_10_1_scalar_add(&x, &x, &t); } - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&x, number)); /* check that wnaf represents number */ + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&x, number)); /* check that wnaf represents number */ } -static void test_fixed_wnaf(const rustsecp256k1zkp_v0_10_0_scalar *number, int w) { - rustsecp256k1zkp_v0_10_0_scalar x, shift; +static void test_fixed_wnaf(const rustsecp256k1zkp_v0_10_1_scalar *number, int w) { + rustsecp256k1zkp_v0_10_1_scalar x, shift; int wnaf[256] = {0}; int i; int skew; - rustsecp256k1zkp_v0_10_0_scalar num, unused; + rustsecp256k1zkp_v0_10_1_scalar num, unused; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&shift, 1 << w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&shift, 1 << w); /* Make num a 128-bit scalar. 
*/ - rustsecp256k1zkp_v0_10_0_scalar_split_128(&num, &unused, number); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_split_128(&num, &unused, number); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); for (i = WNAF_SIZE(w)-1; i >= 0; --i) { - rustsecp256k1zkp_v0_10_0_scalar t; + rustsecp256k1zkp_v0_10_1_scalar t; int v = wnaf[i]; CHECK(v == 0 || v & 1); /* check parity */ CHECK(v > -(1 << w)); /* check range above */ CHECK(v < (1 << w)); /* check range below */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&x, &x, &shift); + rustsecp256k1zkp_v0_10_1_scalar_mul(&x, &x, &shift); if (v >= 0) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, v); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, v); } else { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&t, -v); - rustsecp256k1zkp_v0_10_0_scalar_negate(&t, &t); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&t, -v); + rustsecp256k1zkp_v0_10_1_scalar_negate(&t, &t); } - rustsecp256k1zkp_v0_10_0_scalar_add(&x, &x, &t); + rustsecp256k1zkp_v0_10_1_scalar_add(&x, &x, &t); } /* If skew is 1 then add 1 to num */ - rustsecp256k1zkp_v0_10_0_scalar_cadd_bit(&num, 0, skew == 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&x, &num)); + rustsecp256k1zkp_v0_10_1_scalar_cadd_bit(&num, 0, skew == 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&x, &num)); } /* Checks that the first 8 elements of wnaf are equal to wnaf_expected and the @@ -5502,18 +5502,18 @@ static void test_fixed_wnaf_small(void) { int wnaf[256] = {0}; int i; int skew; - rustsecp256k1zkp_v0_10_0_scalar num; + rustsecp256k1zkp_v0_10_1_scalar num; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 0); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 0); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); for (i = WNAF_SIZE(w)-1; i >= 0; --i) { int v = wnaf[i]; CHECK(v == 0); } CHECK(skew == 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 1); - skew = 
rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 1); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); for (i = WNAF_SIZE(w)-1; i >= 1; --i) { int v = wnaf[i]; CHECK(v == 0); @@ -5523,29 +5523,29 @@ static void test_fixed_wnaf_small(void) { { int wnaf_expected[8] = { 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf }; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 0xffffffff); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 0xffffffff); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); test_fixed_wnaf_small_helper(wnaf, wnaf_expected, w); CHECK(skew == 0); } { int wnaf_expected[8] = { -1, -1, -1, -1, -1, -1, -1, 0xf }; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 0xeeeeeeee); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 0xeeeeeeee); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); test_fixed_wnaf_small_helper(wnaf, wnaf_expected, w); CHECK(skew == 1); } { int wnaf_expected[8] = { 1, 0, 1, 0, 1, 0, 1, 0 }; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 0x01010101); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 0x01010101); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); test_fixed_wnaf_small_helper(wnaf, wnaf_expected, w); CHECK(skew == 0); } { int wnaf_expected[8] = { -0xf, 0, 0xf, -0xf, 0, 0xf, 1, 0 }; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&num, 0x01ef1ef1); - skew = rustsecp256k1zkp_v0_10_0_wnaf_fixed(wnaf, &num, w); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&num, 0x01ef1ef1); + skew = rustsecp256k1zkp_v0_10_1_wnaf_fixed(wnaf, &num, w); test_fixed_wnaf_small_helper(wnaf, wnaf_expected, w); CHECK(skew == 0); } @@ -5553,7 +5553,7 @@ static void test_fixed_wnaf_small(void) { static void run_wnaf(void) { int i; - rustsecp256k1zkp_v0_10_0_scalar n; + rustsecp256k1zkp_v0_10_1_scalar n; /* 
Test 0 for fixed wnaf */ test_fixed_wnaf_small(); @@ -5563,50 +5563,50 @@ static void run_wnaf(void) { test_wnaf(&n, 4+(i%10)); test_fixed_wnaf(&n, 4 + (i % 10)); } - rustsecp256k1zkp_v0_10_0_scalar_set_int(&n, 0); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&n, 1) == -1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&n)); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_cond_negate(&n, 0) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_is_zero(&n)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&n, 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&n, 1) == -1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&n)); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_cond_negate(&n, 0) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_is_zero(&n)); } -static int test_ecmult_accumulate_cb(rustsecp256k1zkp_v0_10_0_scalar* sc, rustsecp256k1zkp_v0_10_0_ge* pt, size_t idx, void* data) { - const rustsecp256k1zkp_v0_10_0_scalar* indata = (const rustsecp256k1zkp_v0_10_0_scalar*)data; +static int test_ecmult_accumulate_cb(rustsecp256k1zkp_v0_10_1_scalar* sc, rustsecp256k1zkp_v0_10_1_ge* pt, size_t idx, void* data) { + const rustsecp256k1zkp_v0_10_1_scalar* indata = (const rustsecp256k1zkp_v0_10_1_scalar*)data; *sc = *indata; - *pt = rustsecp256k1zkp_v0_10_0_ge_const_g; + *pt = rustsecp256k1zkp_v0_10_1_ge_const_g; CHECK(idx == 0); return 1; } -static void test_ecmult_accumulate(rustsecp256k1zkp_v0_10_0_sha256* acc, const rustsecp256k1zkp_v0_10_0_scalar* x, rustsecp256k1zkp_v0_10_0_scratch* scratch) { +static void test_ecmult_accumulate(rustsecp256k1zkp_v0_10_1_sha256* acc, const rustsecp256k1zkp_v0_10_1_scalar* x, rustsecp256k1zkp_v0_10_1_scratch* scratch) { /* Compute x*G in 6 different ways, serialize it uncompressed, and feed it into acc. 
*/ - rustsecp256k1zkp_v0_10_0_gej rj1, rj2, rj3, rj4, rj5, rj6, gj, infj; - rustsecp256k1zkp_v0_10_0_ge r; + rustsecp256k1zkp_v0_10_1_gej rj1, rj2, rj3, rj4, rj5, rj6, gj, infj; + rustsecp256k1zkp_v0_10_1_ge r; unsigned char bytes[65]; size_t size = 65; - rustsecp256k1zkp_v0_10_0_gej_set_ge(&gj, &rustsecp256k1zkp_v0_10_0_ge_const_g); - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&infj); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &rj1, x); - rustsecp256k1zkp_v0_10_0_ecmult(&rj2, &gj, x, &rustsecp256k1zkp_v0_10_0_scalar_zero); - rustsecp256k1zkp_v0_10_0_ecmult(&rj3, &infj, &rustsecp256k1zkp_v0_10_0_scalar_zero, x); - rustsecp256k1zkp_v0_10_0_ecmult_multi_var(NULL, scratch, &rj4, x, NULL, NULL, 0); - rustsecp256k1zkp_v0_10_0_ecmult_multi_var(NULL, scratch, &rj5, &rustsecp256k1zkp_v0_10_0_scalar_zero, test_ecmult_accumulate_cb, (void*)x, 1); - rustsecp256k1zkp_v0_10_0_ecmult_const(&rj6, &rustsecp256k1zkp_v0_10_0_ge_const_g, x); - rustsecp256k1zkp_v0_10_0_ge_set_gej_var(&r, &rj1); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&rj2, &r)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&rj3, &r)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&rj4, &r)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&rj5, &r)); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&rj6, &r)); - if (rustsecp256k1zkp_v0_10_0_ge_is_infinity(&r)) { + rustsecp256k1zkp_v0_10_1_gej_set_ge(&gj, &rustsecp256k1zkp_v0_10_1_ge_const_g); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&infj); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &rj1, x); + rustsecp256k1zkp_v0_10_1_ecmult(&rj2, &gj, x, &rustsecp256k1zkp_v0_10_1_scalar_zero); + rustsecp256k1zkp_v0_10_1_ecmult(&rj3, &infj, &rustsecp256k1zkp_v0_10_1_scalar_zero, x); + rustsecp256k1zkp_v0_10_1_ecmult_multi_var(NULL, scratch, &rj4, x, NULL, NULL, 0); + rustsecp256k1zkp_v0_10_1_ecmult_multi_var(NULL, scratch, &rj5, &rustsecp256k1zkp_v0_10_1_scalar_zero, test_ecmult_accumulate_cb, (void*)x, 1); + 
rustsecp256k1zkp_v0_10_1_ecmult_const(&rj6, &rustsecp256k1zkp_v0_10_1_ge_const_g, x); + rustsecp256k1zkp_v0_10_1_ge_set_gej_var(&r, &rj1); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&rj2, &r)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&rj3, &r)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&rj4, &r)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&rj5, &r)); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&rj6, &r)); + if (rustsecp256k1zkp_v0_10_1_ge_is_infinity(&r)) { /* Store infinity as 0x00 */ const unsigned char zerobyte[1] = {0}; - rustsecp256k1zkp_v0_10_0_sha256_write(acc, zerobyte, 1); + rustsecp256k1zkp_v0_10_1_sha256_write(acc, zerobyte, 1); } else { /* Store other points using their uncompressed serialization. */ - rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&r, bytes, &size, 0); + rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&r, bytes, &size, 0); CHECK(size == 65); - rustsecp256k1zkp_v0_10_0_sha256_write(acc, bytes, size); + rustsecp256k1zkp_v0_10_1_sha256_write(acc, bytes, size); } } @@ -5619,11 +5619,11 @@ static void test_ecmult_constants_2bit(void) { * - For j in 1..255 (only odd values): * - Key (j*2^i) mod order */ - rustsecp256k1zkp_v0_10_0_scalar x; - rustsecp256k1zkp_v0_10_0_sha256 acc; + rustsecp256k1zkp_v0_10_1_scalar x; + rustsecp256k1zkp_v0_10_1_sha256 acc; unsigned char b32[32]; int i, j; - rustsecp256k1zkp_v0_10_0_scratch_space *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 65536); + rustsecp256k1zkp_v0_10_1_scratch_space *scratch = rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 65536); /* Expected hash of all the computed points; created with an independent * implementation. 
*/ @@ -5633,25 +5633,25 @@ static void test_ecmult_constants_2bit(void) { 0x3a, 0x75, 0x87, 0x60, 0x1a, 0xf9, 0x63, 0x60, 0xd0, 0xcb, 0x1f, 0xaa, 0x85, 0x9a, 0xb7, 0xb4 }; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&acc); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&acc); for (i = 0; i <= 36; ++i) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, i); test_ecmult_accumulate(&acc, &x, scratch); - rustsecp256k1zkp_v0_10_0_scalar_negate(&x, &x); + rustsecp256k1zkp_v0_10_1_scalar_negate(&x, &x); test_ecmult_accumulate(&acc, &x, scratch); }; for (i = 0; i < 256; ++i) { for (j = 1; j < 256; j += 2) { int k; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, j); - for (k = 0; k < i; ++k) rustsecp256k1zkp_v0_10_0_scalar_add(&x, &x, &x); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, j); + for (k = 0; k < i; ++k) rustsecp256k1zkp_v0_10_1_scalar_add(&x, &x, &x); test_ecmult_accumulate(&acc, &x, scratch); } } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&acc, b32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(b32, expected32, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&acc, b32); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(b32, expected32, 32) == 0); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } static void test_ecmult_constants_sha(uint32_t prefix, size_t iter, const unsigned char* expected32) { @@ -5662,39 +5662,39 @@ static void test_ecmult_constants_sha(uint32_t prefix, size_t iter, const unsign * - For i in range(iter): * - Key SHA256(LE32(prefix) || LE16(i)) */ - rustsecp256k1zkp_v0_10_0_scalar x; - rustsecp256k1zkp_v0_10_0_sha256 acc; + rustsecp256k1zkp_v0_10_1_scalar x; + rustsecp256k1zkp_v0_10_1_sha256 acc; unsigned char b32[32]; unsigned char inp[6]; size_t i; - rustsecp256k1zkp_v0_10_0_scratch_space *scratch = rustsecp256k1zkp_v0_10_0_scratch_space_create(CTX, 65536); + rustsecp256k1zkp_v0_10_1_scratch_space *scratch = 
rustsecp256k1zkp_v0_10_1_scratch_space_create(CTX, 65536); inp[0] = prefix & 0xFF; inp[1] = (prefix >> 8) & 0xFF; inp[2] = (prefix >> 16) & 0xFF; inp[3] = (prefix >> 24) & 0xFF; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&acc); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, 0); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&acc); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, 0); test_ecmult_accumulate(&acc, &x, scratch); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&x, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&x, 1); test_ecmult_accumulate(&acc, &x, scratch); - rustsecp256k1zkp_v0_10_0_scalar_negate(&x, &x); + rustsecp256k1zkp_v0_10_1_scalar_negate(&x, &x); test_ecmult_accumulate(&acc, &x, scratch); for (i = 0; i < iter; ++i) { - rustsecp256k1zkp_v0_10_0_sha256 gen; + rustsecp256k1zkp_v0_10_1_sha256 gen; inp[4] = i & 0xff; inp[5] = (i >> 8) & 0xff; - rustsecp256k1zkp_v0_10_0_sha256_initialize(&gen); - rustsecp256k1zkp_v0_10_0_sha256_write(&gen, inp, sizeof(inp)); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&gen, b32); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&x, b32, NULL); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&gen); + rustsecp256k1zkp_v0_10_1_sha256_write(&gen, inp, sizeof(inp)); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&gen, b32); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&x, b32, NULL); test_ecmult_accumulate(&acc, &x, scratch); } - rustsecp256k1zkp_v0_10_0_sha256_finalize(&acc, b32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(b32, expected32, 32) == 0); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&acc, b32); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(b32, expected32, 32) == 0); - rustsecp256k1zkp_v0_10_0_scratch_space_destroy(CTX, scratch); + rustsecp256k1zkp_v0_10_1_scratch_space_destroy(CTX, scratch); } static void run_ecmult_constants(void) { @@ -5733,36 +5733,36 @@ static void run_ecmult_constants(void) { static void test_ecmult_gen_blind(void) { /* Test ecmult_gen() blinding and confirm that the blinding changes, the affine points match, and 
the z's don't match. */ - rustsecp256k1zkp_v0_10_0_scalar key; - rustsecp256k1zkp_v0_10_0_scalar b; + rustsecp256k1zkp_v0_10_1_scalar key; + rustsecp256k1zkp_v0_10_1_scalar b; unsigned char seed32[32]; - rustsecp256k1zkp_v0_10_0_gej pgej; - rustsecp256k1zkp_v0_10_0_gej pgej2; - rustsecp256k1zkp_v0_10_0_gej i; - rustsecp256k1zkp_v0_10_0_ge pge; + rustsecp256k1zkp_v0_10_1_gej pgej; + rustsecp256k1zkp_v0_10_1_gej pgej2; + rustsecp256k1zkp_v0_10_1_gej i; + rustsecp256k1zkp_v0_10_1_ge pge; random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pgej, &key); - rustsecp256k1zkp_v0_10_0_testrand256(seed32); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pgej, &key); + rustsecp256k1zkp_v0_10_1_testrand256(seed32); b = CTX->ecmult_gen_ctx.blind; i = CTX->ecmult_gen_ctx.initial; - rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(&CTX->ecmult_gen_ctx, seed32); - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&b, &CTX->ecmult_gen_ctx.blind)); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pgej2, &key); + rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, seed32); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&b, &CTX->ecmult_gen_ctx.blind)); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pgej2, &key); CHECK(!gej_xyz_equals_gej(&pgej, &pgej2)); CHECK(!gej_xyz_equals_gej(&i, &CTX->ecmult_gen_ctx.initial)); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pge, &pgej); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&pgej2, &pge)); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pge, &pgej); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&pgej2, &pge)); } static void test_ecmult_gen_blind_reset(void) { /* Test ecmult_gen() blinding reset and confirm that the blinding is consistent. 
*/ - rustsecp256k1zkp_v0_10_0_scalar b; - rustsecp256k1zkp_v0_10_0_gej initial; - rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); + rustsecp256k1zkp_v0_10_1_scalar b; + rustsecp256k1zkp_v0_10_1_gej initial; + rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); b = CTX->ecmult_gen_ctx.blind; initial = CTX->ecmult_gen_ctx.initial; - rustsecp256k1zkp_v0_10_0_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&b, &CTX->ecmult_gen_ctx.blind)); + rustsecp256k1zkp_v0_10_1_ecmult_gen_blind(&CTX->ecmult_gen_ctx, 0); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&b, &CTX->ecmult_gen_ctx.blind)); CHECK(gej_xyz_equals_gej(&initial, &CTX->ecmult_gen_ctx.initial)); } @@ -5775,46 +5775,46 @@ static void run_ecmult_gen_blind(void) { } /***** ENDOMORPHISH TESTS *****/ -static void test_scalar_split(const rustsecp256k1zkp_v0_10_0_scalar* full) { - rustsecp256k1zkp_v0_10_0_scalar s, s1, slam; +static void test_scalar_split(const rustsecp256k1zkp_v0_10_1_scalar* full) { + rustsecp256k1zkp_v0_10_1_scalar s, s1, slam; const unsigned char zero[32] = {0}; unsigned char tmp[32]; - rustsecp256k1zkp_v0_10_0_scalar_split_lambda(&s1, &slam, full); + rustsecp256k1zkp_v0_10_1_scalar_split_lambda(&s1, &slam, full); /* check slam*lambda + s1 == full */ - rustsecp256k1zkp_v0_10_0_scalar_mul(&s, &rustsecp256k1zkp_v0_10_0_const_lambda, &slam); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &s, &s1); - CHECK(rustsecp256k1zkp_v0_10_0_scalar_eq(&s, full)); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s, &rustsecp256k1zkp_v0_10_1_const_lambda, &slam); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &s, &s1); + CHECK(rustsecp256k1zkp_v0_10_1_scalar_eq(&s, full)); /* check that both are <= 128 bits in size */ - if (rustsecp256k1zkp_v0_10_0_scalar_is_high(&s1)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&s1, &s1); + if (rustsecp256k1zkp_v0_10_1_scalar_is_high(&s1)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&s1, &s1); } - if 
(rustsecp256k1zkp_v0_10_0_scalar_is_high(&slam)) { - rustsecp256k1zkp_v0_10_0_scalar_negate(&slam, &slam); + if (rustsecp256k1zkp_v0_10_1_scalar_is_high(&slam)) { + rustsecp256k1zkp_v0_10_1_scalar_negate(&slam, &slam); } - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tmp, &s1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zero, tmp, 16) == 0); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(tmp, &slam); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zero, tmp, 16) == 0); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tmp, &s1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zero, tmp, 16) == 0); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(tmp, &slam); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zero, tmp, 16) == 0); } static void run_endomorphism_tests(void) { unsigned i; - static rustsecp256k1zkp_v0_10_0_scalar s; - test_scalar_split(&rustsecp256k1zkp_v0_10_0_scalar_zero); - test_scalar_split(&rustsecp256k1zkp_v0_10_0_scalar_one); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s,&rustsecp256k1zkp_v0_10_0_scalar_one); + static rustsecp256k1zkp_v0_10_1_scalar s; + test_scalar_split(&rustsecp256k1zkp_v0_10_1_scalar_zero); + test_scalar_split(&rustsecp256k1zkp_v0_10_1_scalar_one); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s,&rustsecp256k1zkp_v0_10_1_scalar_one); test_scalar_split(&s); - test_scalar_split(&rustsecp256k1zkp_v0_10_0_const_lambda); - rustsecp256k1zkp_v0_10_0_scalar_add(&s, &rustsecp256k1zkp_v0_10_0_const_lambda, &rustsecp256k1zkp_v0_10_0_scalar_one); + test_scalar_split(&rustsecp256k1zkp_v0_10_1_const_lambda); + rustsecp256k1zkp_v0_10_1_scalar_add(&s, &rustsecp256k1zkp_v0_10_1_const_lambda, &rustsecp256k1zkp_v0_10_1_scalar_one); test_scalar_split(&s); for (i = 0; i < 100U * COUNT; ++i) { - rustsecp256k1zkp_v0_10_0_scalar full; + rustsecp256k1zkp_v0_10_1_scalar full; random_scalar_order_test(&full); test_scalar_split(&full); } @@ -5825,8 +5825,8 @@ static void run_endomorphism_tests(void) { static void ec_pubkey_parse_pointtest(const unsigned char *input, int xvalid, int yvalid) { 
unsigned char pubkeyc[65]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_ge ge; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ge ge; size_t pubkeyclen; for (pubkeyclen = 3; pubkeyclen <= 65; pubkeyclen++) { @@ -5853,38 +5853,38 @@ static void ec_pubkey_parse_pointtest(const unsigned char *input, int xvalid, in size_t outl; memset(&pubkey, 0, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 1); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); outl = 65; SECP256K1_CHECKMEM_UNDEFINE(pubkeyo, 65); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, pubkeyo, &outl, &pubkey, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, pubkeyo, &outl, &pubkey, SECP256K1_EC_COMPRESSED) == 1); SECP256K1_CHECKMEM_CHECK(pubkeyo, outl); CHECK(outl == 33); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkeyo[1], &pubkeyc[1], 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkeyo[1], &pubkeyc[1], 32) == 0); CHECK((pubkeyclen != 33) || (pubkeyo[0] == pubkeyc[0])); if (ypass) { /* This test isn't always done because we decode with alternative signs, so the y won't match. 
*/ CHECK(pubkeyo[0] == ysign); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey) == 1); memset(&pubkey, 0, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - rustsecp256k1zkp_v0_10_0_pubkey_save(&pubkey, &ge); + rustsecp256k1zkp_v0_10_1_pubkey_save(&pubkey, &ge); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); outl = 65; SECP256K1_CHECKMEM_UNDEFINE(pubkeyo, 65); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, pubkeyo, &outl, &pubkey, SECP256K1_EC_UNCOMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, pubkeyo, &outl, &pubkey, SECP256K1_EC_UNCOMPRESSED) == 1); SECP256K1_CHECKMEM_CHECK(pubkeyo, outl); CHECK(outl == 65); CHECK(pubkeyo[0] == 4); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkeyo[1], input, 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkeyo[1], input, 64) == 0); } } else { /* These cases must fail to parse. */ memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); } } } @@ -6071,8 +6071,8 @@ static void run_ec_pubkey_parse_test(void) { }; unsigned char sout[65]; unsigned char shortkey[2] = { 0 }; - rustsecp256k1zkp_v0_10_0_ge ge; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ge ge; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; size_t len; int32_t i; @@ -6082,18 +6082,18 @@ static void run_ec_pubkey_parse_test(void) { memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(shortkey, 2); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - 
CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, shortkey, 0) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, shortkey, 0) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); /* Length one claimed, fail, zeroize, no illegal arg error. */ for (i = 0; i < 256 ; i++) { memset(&pubkey, 0xfe, sizeof(pubkey)); shortkey[0] = i; SECP256K1_CHECKMEM_UNDEFINE(&shortkey[1], 1); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, shortkey, 1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, shortkey, 1) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); } /* Length two claimed, fail, zeroize, no illegal arg error. */ for (i = 0; i < 65536 ; i++) { @@ -6101,68 +6101,68 @@ static void run_ec_pubkey_parse_test(void) { shortkey[0] = i & 255; shortkey[1] = i >> 8; SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, shortkey, 2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, shortkey, 2) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); } memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); /* 33 bytes claimed on otherwise valid input starting with 0x04, fail, zeroize output, no illegal arg error. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 33) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); /* NULL pubkey, illegal arg error. Pubkey isn't rewritten before this step, since it's NULL into the parser. */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, NULL, pubkeyc, 65)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, NULL, pubkeyc, 65)); /* NULL input string. Illegal arg and zeroize output. */ memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, NULL, 65)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, NULL, 65)); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); /* 64 bytes claimed on input starting with 0x04, fail, zeroize output, no illegal arg error. */ memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 64) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); /* 66 bytes claimed, fail, zeroize output, no illegal arg error. 
*/ memset(&pubkey, 0xfe, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 66) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 66) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey)); /* Valid parse. */ memset(&pubkey, 0, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 65) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(rustsecp256k1zkp_v0_10_0_context_static, &pubkey, pubkeyc, 65) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, 65) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(rustsecp256k1zkp_v0_10_1_context_static, &pubkey, pubkeyc, 65) == 1); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&ge, sizeof(ge)); - CHECK(rustsecp256k1zkp_v0_10_0_pubkey_load(CTX, &ge, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_pubkey_load(CTX, &ge, &pubkey) == 1); SECP256K1_CHECKMEM_CHECK(&ge.x, sizeof(ge.x)); SECP256K1_CHECKMEM_CHECK(&ge.y, sizeof(ge.y)); SECP256K1_CHECKMEM_CHECK(&ge.infinity, sizeof(ge.infinity)); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&ge, &rustsecp256k1zkp_v0_10_0_ge_const_g)); - /* rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize illegal args. */ + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&ge, &rustsecp256k1zkp_v0_10_1_ge_const_g)); + /* rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize illegal args. 
*/ len = 65; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, NULL, &len, &pubkey, SECP256K1_EC_UNCOMPRESSED)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, NULL, &len, &pubkey, SECP256K1_EC_UNCOMPRESSED)); CHECK(len == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, sout, NULL, &pubkey, SECP256K1_EC_UNCOMPRESSED)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, sout, NULL, &pubkey, SECP256K1_EC_UNCOMPRESSED)); len = 65; SECP256K1_CHECKMEM_UNDEFINE(sout, 65); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, sout, &len, NULL, SECP256K1_EC_UNCOMPRESSED)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, sout, &len, NULL, SECP256K1_EC_UNCOMPRESSED)); SECP256K1_CHECKMEM_CHECK(sout, 65); CHECK(len == 0); len = 65; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, sout, &len, &pubkey, ~0)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, sout, &len, &pubkey, ~0)); CHECK(len == 0); len = 65; SECP256K1_CHECKMEM_UNDEFINE(sout, 65); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, sout, &len, &pubkey, SECP256K1_EC_UNCOMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, sout, &len, &pubkey, SECP256K1_EC_UNCOMPRESSED) == 1); SECP256K1_CHECKMEM_CHECK(sout, 65); CHECK(len == 65); /* Multiple illegal args. Should still set arg error only once. */ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, NULL, NULL, 65)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, NULL, NULL, 65)); /* Try a bunch of prefabbed points with all possible encodings. 
*/ for (i = 0; i < SECP256K1_EC_PARSE_TEST_NVALID; i++) { ec_pubkey_parse_pointtest(valid[i], 1, 1); @@ -6182,219 +6182,219 @@ static void run_eckey_edge_case_test(void) { 0xba, 0xae, 0xdc, 0xe6, 0xaf, 0x48, 0xa0, 0x3b, 0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x41 }; - const unsigned char zeros[sizeof(rustsecp256k1zkp_v0_10_0_pubkey)] = {0x00}; + const unsigned char zeros[sizeof(rustsecp256k1zkp_v0_10_1_pubkey)] = {0x00}; unsigned char ctmp[33]; unsigned char ctmp2[33]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey pubkey2; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_one; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_negone; - const rustsecp256k1zkp_v0_10_0_pubkey *pubkeys[3]; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey2; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_one; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_negone; + const rustsecp256k1zkp_v0_10_1_pubkey *pubkeys[3]; size_t len; /* Group order is too large, reject. */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, orderc) == 0); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, orderc) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); /* Maximum value is too large, reject. 
*/ memset(ctmp, 255, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 0); memset(&pubkey, 1, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); /* Zero is too small, reject. */ memset(ctmp, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 0); memset(&pubkey, 1, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); /* One must be accepted. 
*/ ctmp[31] = 0x01; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 1); memset(&pubkey, 0, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) > 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) > 0); pubkey_one = pubkey; /* Group order + 1 is too large, reject. */ memcpy(ctmp, orderc, 32); ctmp[31] = 0x42; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 0); memset(&pubkey, 1, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 0); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); /* -1 must be accepted. 
*/ ctmp[31] = 0x40; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 1); memset(&pubkey, 0, sizeof(pubkey)); SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, ctmp) == 1); SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) > 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) > 0); pubkey_negone = pubkey; /* Tweak of zero leaves the value unchanged. */ memset(ctmp2, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp, ctmp2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(orderc, ctmp, 31) == 0 && ctmp[31] == 0x40); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(orderc, ctmp, 31) == 0 && ctmp[31] == 0x40); memcpy(&pubkey2, &pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); /* Multiply tweak of zero zeroizes the output. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, ctmp, ctmp2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp, 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey, ctmp2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, ctmp, ctmp2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey, ctmp2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); memcpy(&pubkey, &pubkey2, sizeof(pubkey)); /* If seckey_tweak_add or seckey_tweak_mul are called with an overflowing seckey, the seckey is zeroized. */ memcpy(ctmp, orderc, 32); memset(ctmp2, 0, 32); ctmp2[31] = 0x01; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp, ctmp2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp, ctmp2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp, 32) == 0); memcpy(ctmp, orderc, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, ctmp, ctmp2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, ctmp, ctmp2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp, 32) == 0); /* If seckey_tweak_add or seckey_tweak_mul are called with an overflowing tweak, the seckey is zeroized. 
*/ memcpy(ctmp, orderc, 32); ctmp[31] = 0x40; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp, orderc) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp, 32) == 0); memcpy(ctmp, orderc, 32); ctmp[31] = 0x40; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, ctmp, orderc) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, ctmp, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp, 32) == 0); memcpy(ctmp, orderc, 32); ctmp[31] = 0x40; /* If pubkey_tweak_add or pubkey_tweak_mul are called with an overflowing tweak, the pubkey is zeroized. */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, orderc) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); memcpy(&pubkey, &pubkey2, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey, orderc) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey, orderc) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); memcpy(&pubkey, &pubkey2, sizeof(pubkey)); - /* If the resulting key in rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add and - * rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add is 0 the functions fail and in the latter + /* If the resulting key in rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add and + * rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add is 0 the functions fail and in the latter * case the pubkey is zeroized. 
*/ memcpy(ctmp, orderc, 32); ctmp[31] = 0x40; memset(ctmp2, 0, 32); ctmp2[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp2, ctmp) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(zeros, ctmp2, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp2, ctmp) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(zeros, ctmp2, 32) == 0); ctmp2[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); memcpy(&pubkey, &pubkey2, sizeof(pubkey)); /* Tweak computation wraps and results in a key of 1. */ ctmp2[31] = 2; - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp2, ctmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(ctmp2, zeros, 31) == 0 && ctmp2[31] == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp2, ctmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(ctmp2, zeros, 31) == 0 && ctmp2[31] == 1); ctmp2[31] = 2; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); ctmp2[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey2, ctmp2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey2, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); /* Tweak mul * 2 = 1+1. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2) == 1); ctmp2[31] = 2; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey2, ctmp2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey2, ctmp2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); /* Zeroize pubkey on parse error. */ memset(&pubkey, 0, 32); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, ctmp2)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(pubkey)) == 0); memcpy(&pubkey, &pubkey2, sizeof(pubkey)); memset(&pubkey2, 0, 32); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey2, ctmp2)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey2, zeros, sizeof(pubkey2)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey2, ctmp2)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey2, zeros, sizeof(pubkey2)) == 0); /* Plain argument errors. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, ctmp) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, ctmp) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, NULL)); memset(ctmp2, 0, 32); ctmp2[31] = 4; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, NULL, ctmp2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, NULL, ctmp2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, NULL)); memset(ctmp2, 0, 32); ctmp2[31] = 4; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, NULL, ctmp2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, NULL, ctmp2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey, NULL)); memset(ctmp2, 0, 32); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, NULL, ctmp2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, ctmp, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, NULL, ctmp2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, ctmp, NULL)); memset(ctmp2, 0, 32); ctmp2[31] = 1; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, NULL, ctmp2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, ctmp, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, NULL, ctmp)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, NULL, ctmp2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, ctmp, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, NULL, ctmp)); memset(&pubkey, 1, 
sizeof(pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); - /* rustsecp256k1zkp_v0_10_0_ec_pubkey_combine tests. */ + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); + /* rustsecp256k1zkp_v0_10_1_ec_pubkey_combine tests. */ pubkeys[0] = &pubkey_one; - SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[0], sizeof(rustsecp256k1zkp_v0_10_0_pubkey *)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[1], sizeof(rustsecp256k1zkp_v0_10_0_pubkey *)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[2], sizeof(rustsecp256k1zkp_v0_10_0_pubkey *)); - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, pubkeys, 0)); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, NULL, pubkeys, 1)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, NULL, 1)); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[0], sizeof(rustsecp256k1zkp_v0_10_1_pubkey *)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[1], 
sizeof(rustsecp256k1zkp_v0_10_1_pubkey *)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkeys[2], sizeof(rustsecp256k1zkp_v0_10_1_pubkey *)); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, pubkeys, 0)); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, NULL, pubkeys, 1)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, NULL, 1)); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); pubkeys[0] = &pubkey_negone; - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, pubkeys, 1) == 1); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) > 0); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, pubkeys, 1) == 1); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, 
sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) > 0); len = 33; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, ctmp, &len, &pubkey, SECP256K1_EC_COMPRESSED) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, ctmp2, &len, &pubkey_negone, SECP256K1_EC_COMPRESSED) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(ctmp, ctmp2, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, ctmp, &len, &pubkey, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, ctmp2, &len, &pubkey_negone, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(ctmp, ctmp2, 33) == 0); /* Result is infinity. */ pubkeys[0] = &pubkey_one; pubkeys[1] = &pubkey_negone; - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, pubkeys, 2) == 0); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) == 0); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, pubkeys, 2) == 0); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) == 0); /* Passes through infinity but comes out one. 
*/ pubkeys[2] = &pubkey_one; - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, pubkeys, 3) == 1); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) > 0); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, pubkeys, 3) == 1); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) > 0); len = 33; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, ctmp, &len, &pubkey, SECP256K1_EC_COMPRESSED) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, ctmp2, &len, &pubkey_one, SECP256K1_EC_COMPRESSED) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(ctmp, ctmp2, 33) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, ctmp, &len, &pubkey, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, ctmp2, &len, &pubkey_one, SECP256K1_EC_COMPRESSED) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(ctmp, ctmp2, 33) == 0); /* Adds to two. 
*/ pubkeys[1] = &pubkey_one; - memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_combine(CTX, &pubkey, pubkeys, 2) == 1); - SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_0_pubkey)) > 0); + memset(&pubkey, 255, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + SECP256K1_CHECKMEM_UNDEFINE(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_combine(CTX, &pubkey, pubkeys, 2) == 1); + SECP256K1_CHECKMEM_CHECK(&pubkey, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, zeros, sizeof(rustsecp256k1zkp_v0_10_1_pubkey)) > 0); } static void run_eckey_negate_test(void) { @@ -6405,22 +6405,22 @@ static void run_eckey_negate_test(void) { memcpy(seckey_tmp, seckey, 32); /* Verify negation changes the key and changes it back */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_negate(CTX, seckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(seckey, seckey_tmp, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_negate(CTX, seckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(seckey, seckey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_negate(CTX, seckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(seckey, seckey_tmp, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_negate(CTX, seckey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(seckey, seckey_tmp, 32) == 0); /* Check that privkey alias gives same result */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_negate(CTX, seckey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_privkey_negate(CTX, seckey_tmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(seckey, seckey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_negate(CTX, seckey) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ec_privkey_negate(CTX, seckey_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(seckey, seckey_tmp, 32) == 0); /* Negating all 0s fails */ memset(seckey, 0, 32); memset(seckey_tmp, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_negate(CTX, seckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_negate(CTX, seckey) == 0); /* Check that seckey is not modified */ - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(seckey, seckey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(seckey, seckey_tmp, 32) == 0); /* Negating an overflowing seckey fails and the seckey is zeroed. In this * test, the seckey has 16 random bytes to ensure that ec_seckey_negate @@ -6428,30 +6428,30 @@ static void run_eckey_negate_test(void) { random_scalar_order_b32(seckey); memset(seckey, 0xFF, 16); memset(seckey_tmp, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_negate(CTX, seckey) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(seckey, seckey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_negate(CTX, seckey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(seckey, seckey_tmp, 32) == 0); } -static void random_sign(rustsecp256k1zkp_v0_10_0_scalar *sigr, rustsecp256k1zkp_v0_10_0_scalar *sigs, const rustsecp256k1zkp_v0_10_0_scalar *key, const rustsecp256k1zkp_v0_10_0_scalar *msg, int *recid) { - rustsecp256k1zkp_v0_10_0_scalar nonce; +static void random_sign(rustsecp256k1zkp_v0_10_1_scalar *sigr, rustsecp256k1zkp_v0_10_1_scalar *sigs, const rustsecp256k1zkp_v0_10_1_scalar *key, const rustsecp256k1zkp_v0_10_1_scalar *msg, int *recid) { + rustsecp256k1zkp_v0_10_1_scalar nonce; do { random_scalar_order_test(&nonce); - } while(!rustsecp256k1zkp_v0_10_0_ecdsa_sig_sign(&CTX->ecmult_gen_ctx, sigr, sigs, key, msg, &nonce, recid)); + } while(!rustsecp256k1zkp_v0_10_1_ecdsa_sig_sign(&CTX->ecmult_gen_ctx, sigr, sigs, key, msg, &nonce, recid)); } static void test_ecdsa_sign_verify(void) { - rustsecp256k1zkp_v0_10_0_gej pubj; - 
rustsecp256k1zkp_v0_10_0_ge pub; - rustsecp256k1zkp_v0_10_0_scalar one; - rustsecp256k1zkp_v0_10_0_scalar msg, key; - rustsecp256k1zkp_v0_10_0_scalar sigr, sigs; + rustsecp256k1zkp_v0_10_1_gej pubj; + rustsecp256k1zkp_v0_10_1_ge pub; + rustsecp256k1zkp_v0_10_1_scalar one; + rustsecp256k1zkp_v0_10_1_scalar msg, key; + rustsecp256k1zkp_v0_10_1_scalar sigr, sigs; int getrec; int recid; random_scalar_order_test(&msg); random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &pubj, &key); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&pub, &pubj); - getrec = rustsecp256k1zkp_v0_10_0_testrand_bits(1); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &pubj, &key); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&pub, &pubj); + getrec = rustsecp256k1zkp_v0_10_1_testrand_bits(1); /* The specific way in which this conditional is written sidesteps a potential bug in clang. See the commit messages of the commit that introduced this comment for details. */ if (getrec) { @@ -6460,10 +6460,10 @@ static void test_ecdsa_sign_verify(void) { } else { random_sign(&sigr, &sigs, &key, &msg, NULL); } - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&one, 1); - rustsecp256k1zkp_v0_10_0_scalar_add(&msg, &msg, &one); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&one, 1); + rustsecp256k1zkp_v0_10_1_scalar_add(&msg, &msg, &one); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sigr, &sigs, &pub, &msg)); } static void run_ecdsa_sign_verify(void) { @@ -6520,9 +6520,9 @@ static int nonce_function_test_retry(unsigned char *nonce32, const unsigned char return nonce_function_rfc6979(nonce32, msg32, key32, algo16, data, counter - 5); } -static int is_empty_signature(const rustsecp256k1zkp_v0_10_0_ecdsa_signature *sig) { - static const unsigned char 
res[sizeof(rustsecp256k1zkp_v0_10_0_ecdsa_signature)] = {0}; - return rustsecp256k1zkp_v0_10_0_memcmp_var(sig, res, sizeof(rustsecp256k1zkp_v0_10_0_ecdsa_signature)) == 0; +static int is_empty_signature(const rustsecp256k1zkp_v0_10_1_ecdsa_signature *sig) { + static const unsigned char res[sizeof(rustsecp256k1zkp_v0_10_1_ecdsa_signature)] = {0}; + return rustsecp256k1zkp_v0_10_1_memcmp_var(sig, res, sizeof(rustsecp256k1zkp_v0_10_1_ecdsa_signature)) == 0; } static void test_ecdsa_end_to_end(void) { @@ -6530,191 +6530,191 @@ static void test_ecdsa_end_to_end(void) { unsigned char privkey[32]; unsigned char message[32]; unsigned char privkey2[32]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature[6]; - rustsecp256k1zkp_v0_10_0_scalar r, s; + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature[6]; + rustsecp256k1zkp_v0_10_1_scalar r, s; unsigned char sig[74]; size_t siglen = 74; unsigned char pubkeyc[65]; size_t pubkeyclen = 65; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; - rustsecp256k1zkp_v0_10_0_pubkey pubkey_tmp; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey_tmp; unsigned char seckey[300]; size_t seckeylen = 300; /* Generate a random key and message. */ { - rustsecp256k1zkp_v0_10_0_scalar msg, key; + rustsecp256k1zkp_v0_10_1_scalar msg, key; random_scalar_order_test(&msg); random_scalar_order_test(&key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(privkey, &key); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(message, &msg); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(privkey, &key); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(message, &msg); } /* Construct and verify corresponding public key. */ - CHECK(rustsecp256k1zkp_v0_10_0_ec_seckey_verify(CTX, privkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_seckey_verify(CTX, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, privkey) == 1); /* Verify exporting and importing public key. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_serialize(CTX, pubkeyc, &pubkeyclen, &pubkey, rustsecp256k1zkp_v0_10_0_testrand_bits(1) == 1 ? SECP256K1_EC_COMPRESSED : SECP256K1_EC_UNCOMPRESSED)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_serialize(CTX, pubkeyc, &pubkeyclen, &pubkey, rustsecp256k1zkp_v0_10_1_testrand_bits(1) == 1 ? SECP256K1_EC_COMPRESSED : SECP256K1_EC_UNCOMPRESSED)); memset(&pubkey, 0, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pubkeyc, pubkeyclen) == 1); /* Verify negation changes the key and changes it back */ memcpy(&pubkey_tmp, &pubkey, sizeof(pubkey)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(CTX, &pubkey_tmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey_tmp, &pubkey, sizeof(pubkey)) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_negate(CTX, &pubkey_tmp) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey_tmp, &pubkey, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(CTX, &pubkey_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey_tmp, &pubkey, sizeof(pubkey)) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_negate(CTX, &pubkey_tmp) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey_tmp, &pubkey, sizeof(pubkey)) == 0); /* Verify private key import and export. */ - CHECK(ec_privkey_export_der(CTX, seckey, &seckeylen, privkey, rustsecp256k1zkp_v0_10_0_testrand_bits(1) == 1)); + CHECK(ec_privkey_export_der(CTX, seckey, &seckeylen, privkey, rustsecp256k1zkp_v0_10_1_testrand_bits(1) == 1)); CHECK(ec_privkey_import_der(CTX, privkey2, seckey, seckeylen) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(privkey, privkey2, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(privkey, privkey2, 32) == 0); /* Optionally tweak the keys using addition. 
*/ - if (rustsecp256k1zkp_v0_10_0_testrand_int(3) == 0) { + if (rustsecp256k1zkp_v0_10_1_testrand_int(3) == 0) { int ret1; int ret2; int ret3; unsigned char rnd[32]; unsigned char privkey_tmp[32]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey2; - rustsecp256k1zkp_v0_10_0_testrand256_test(rnd); + rustsecp256k1zkp_v0_10_1_pubkey pubkey2; + rustsecp256k1zkp_v0_10_1_testrand256_test(rnd); memcpy(privkey_tmp, privkey, 32); - ret1 = rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_add(CTX, privkey, rnd); - ret2 = rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_add(CTX, &pubkey, rnd); + ret1 = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_add(CTX, privkey, rnd); + ret2 = rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_add(CTX, &pubkey, rnd); /* Check that privkey alias gives same result */ - ret3 = rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_add(CTX, privkey_tmp, rnd); + ret3 = rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_add(CTX, privkey_tmp, rnd); CHECK(ret1 == ret2); CHECK(ret2 == ret3); if (ret1 == 0) { return; } - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(privkey, privkey_tmp, 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey2, privkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(privkey, privkey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey2, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); } /* Optionally tweak the keys using multiplication. 
*/ - if (rustsecp256k1zkp_v0_10_0_testrand_int(3) == 0) { + if (rustsecp256k1zkp_v0_10_1_testrand_int(3) == 0) { int ret1; int ret2; int ret3; unsigned char rnd[32]; unsigned char privkey_tmp[32]; - rustsecp256k1zkp_v0_10_0_pubkey pubkey2; - rustsecp256k1zkp_v0_10_0_testrand256_test(rnd); + rustsecp256k1zkp_v0_10_1_pubkey pubkey2; + rustsecp256k1zkp_v0_10_1_testrand256_test(rnd); memcpy(privkey_tmp, privkey, 32); - ret1 = rustsecp256k1zkp_v0_10_0_ec_seckey_tweak_mul(CTX, privkey, rnd); - ret2 = rustsecp256k1zkp_v0_10_0_ec_pubkey_tweak_mul(CTX, &pubkey, rnd); + ret1 = rustsecp256k1zkp_v0_10_1_ec_seckey_tweak_mul(CTX, privkey, rnd); + ret2 = rustsecp256k1zkp_v0_10_1_ec_pubkey_tweak_mul(CTX, &pubkey, rnd); /* Check that privkey alias gives same result */ - ret3 = rustsecp256k1zkp_v0_10_0_ec_privkey_tweak_mul(CTX, privkey_tmp, rnd); + ret3 = rustsecp256k1zkp_v0_10_1_ec_privkey_tweak_mul(CTX, privkey_tmp, rnd); CHECK(ret1 == ret2); CHECK(ret2 == ret3); if (ret1 == 0) { return; } - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(privkey, privkey_tmp, 32) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey2, privkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(privkey, privkey_tmp, 32) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey2, privkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&pubkey, &pubkey2, sizeof(pubkey)) == 0); } /* Sign. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[0], message, privkey, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[4], message, privkey, NULL, NULL) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[1], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[0], message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[4], message, privkey, NULL, NULL) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[1], message, privkey, NULL, extra) == 1); extra[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[2], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[2], message, privkey, NULL, extra) == 1); extra[31] = 0; extra[0] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &signature[3], message, privkey, NULL, extra) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[0], &signature[4], sizeof(signature[0])) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[0], &signature[1], sizeof(signature[0])) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[0], &signature[2], sizeof(signature[0])) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[0], &signature[3], sizeof(signature[0])) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[1], &signature[2], sizeof(signature[0])) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[1], &signature[3], sizeof(signature[0])) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[2], &signature[3], sizeof(signature[0])) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &signature[3], message, privkey, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[0], &signature[4], sizeof(signature[0])) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[0], &signature[1], sizeof(signature[0])) != 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[0], &signature[2], sizeof(signature[0])) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[0], &signature[3], sizeof(signature[0])) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[1], &signature[2], sizeof(signature[0])) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[1], &signature[3], sizeof(signature[0])) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[2], &signature[3], sizeof(signature[0])) != 0); /* Verify. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[1], message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[2], message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[3], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[1], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[2], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[3], message, &pubkey) == 1); /* Test lower-S form, malleate, verify and fail, test again, malleate again */ - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, NULL, &signature[0])); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(CTX, &r, &s, &signature[0]); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(&signature[5], &r, &s); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, NULL, &signature[5])); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, &signature[5], &signature[5])); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, NULL, &signature[5])); - 
CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, &signature[5], &signature[5])); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&s, &s); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(&signature[5], &r, &s); - CHECK(!rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, NULL, &signature[5])); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&signature[5], &signature[0], 64) == 0); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, NULL, &signature[0])); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(CTX, &r, &s, &signature[0]); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(&signature[5], &r, &s); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, NULL, &signature[5])); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, &signature[5], &signature[5])); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, NULL, &signature[5])); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, &signature[5], &signature[5])); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&s, &s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(&signature[5], &r, &s); + CHECK(!rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, NULL, &signature[5])); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[5], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&signature[5], &signature[0], 64) == 0); /* Serialize/parse DER and verify again */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, sig, &siglen, &signature[0]) == 1); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, sig, &siglen, &signature[0]) == 1); memset(&signature[0], 0, sizeof(signature[0])); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &signature[0], sig, siglen) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &signature[0], sig, siglen) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 1); /* Serialize/destroy/parse DER and verify again. */ siglen = 74; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, sig, &siglen, &signature[0]) == 1); - sig[rustsecp256k1zkp_v0_10_0_testrand_int(siglen)] += 1 + rustsecp256k1zkp_v0_10_0_testrand_int(255); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &signature[0], sig, siglen) == 0 || - rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, sig, &siglen, &signature[0]) == 1); + sig[rustsecp256k1zkp_v0_10_1_testrand_int(siglen)] += 1 + rustsecp256k1zkp_v0_10_1_testrand_int(255); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &signature[0], sig, siglen) == 0 || + rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &signature[0], message, &pubkey) == 0); } static void test_random_pubkeys(void) { - rustsecp256k1zkp_v0_10_0_ge elem; - rustsecp256k1zkp_v0_10_0_ge elem2; + rustsecp256k1zkp_v0_10_1_ge elem; + rustsecp256k1zkp_v0_10_1_ge elem2; unsigned char in[65]; /* Generate some randomly sized pubkeys. */ - size_t len = rustsecp256k1zkp_v0_10_0_testrand_bits(2) == 0 ? 65 : 33; - if (rustsecp256k1zkp_v0_10_0_testrand_bits(2) == 0) { - len = rustsecp256k1zkp_v0_10_0_testrand_bits(6); + size_t len = rustsecp256k1zkp_v0_10_1_testrand_bits(2) == 0 ? 
65 : 33; + if (rustsecp256k1zkp_v0_10_1_testrand_bits(2) == 0) { + len = rustsecp256k1zkp_v0_10_1_testrand_bits(6); } if (len == 65) { - in[0] = rustsecp256k1zkp_v0_10_0_testrand_bits(1) ? 4 : (rustsecp256k1zkp_v0_10_0_testrand_bits(1) ? 6 : 7); + in[0] = rustsecp256k1zkp_v0_10_1_testrand_bits(1) ? 4 : (rustsecp256k1zkp_v0_10_1_testrand_bits(1) ? 6 : 7); } else { - in[0] = rustsecp256k1zkp_v0_10_0_testrand_bits(1) ? 2 : 3; + in[0] = rustsecp256k1zkp_v0_10_1_testrand_bits(1) ? 2 : 3; } - if (rustsecp256k1zkp_v0_10_0_testrand_bits(3) == 0) { - in[0] = rustsecp256k1zkp_v0_10_0_testrand_bits(8); + if (rustsecp256k1zkp_v0_10_1_testrand_bits(3) == 0) { + in[0] = rustsecp256k1zkp_v0_10_1_testrand_bits(8); } if (len > 1) { - rustsecp256k1zkp_v0_10_0_testrand256(&in[1]); + rustsecp256k1zkp_v0_10_1_testrand256(&in[1]); } if (len > 33) { - rustsecp256k1zkp_v0_10_0_testrand256(&in[33]); + rustsecp256k1zkp_v0_10_1_testrand256(&in[33]); } - if (rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&elem, in, len)) { + if (rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&elem, in, len)) { unsigned char out[65]; unsigned char firstb; int res; size_t size = len; firstb = in[0]; /* If the pubkey can be parsed, it should round-trip... */ - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&elem, out, &size, len == 33)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&elem, out, &size, len == 33)); CHECK(size == len); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&in[1], &out[1], len-1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&in[1], &out[1], len-1) == 0); /* ... except for the type of hybrid inputs. 
*/ if ((in[0] != 6) && (in[0] != 7)) { CHECK(in[0] == out[0]); } size = 65; - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&elem, in, &size, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&elem, in, &size, 0)); CHECK(size == 65); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&elem2, in, size)); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&elem2, &elem)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&elem2, in, size)); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&elem2, &elem)); /* Check that the X9.62 hybrid type is checked. */ - in[0] = rustsecp256k1zkp_v0_10_0_testrand_bits(1) ? 6 : 7; - res = rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&elem2, in, size); + in[0] = rustsecp256k1zkp_v0_10_1_testrand_bits(1) ? 6 : 7; + res = rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&elem2, in, size); if (firstb == 2 || firstb == 3) { if (in[0] == firstb + 4) { CHECK(res); @@ -6723,9 +6723,9 @@ static void test_random_pubkeys(void) { } } if (res) { - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&elem, &elem2)); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_serialize(&elem, out, &size, 0)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&in[1], &out[1], 64) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&elem, &elem2)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_serialize(&elem, out, &size, 0)); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&in[1], &out[1], 64) == 0); } } } @@ -6741,38 +6741,38 @@ static void run_pubkey_comparison(void) { 0xde, 0x36, 0x0e, 0x87, 0x59, 0x8f, 0x3c, 0x01, 0x36, 0x2a, 0x2a, 0xb8, 0xc6, 0xf4, 0x5e, 0x4d, 0xb2, 0xc2, 0xd5, 0x03, 0xa7, 0xf9, 0xf1, 0x4f, 0xa8, 0xfa, 0x95, 0xa8, 0xe9, 0x69, 0x76, 0x1c }; - rustsecp256k1zkp_v0_10_0_pubkey pk1; - rustsecp256k1zkp_v0_10_0_pubkey pk2; - - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk1, pk1_ser, sizeof(pk1_ser)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk2, pk2_ser, sizeof(pk2_ser)) == 1); - - CHECK_ILLEGAL_VOID(CTX, 
CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, NULL, &pk2) < 0)); - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk1, NULL) > 0)); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk1, &pk2) < 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk2, &pk1) > 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk1, &pk1) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk2, &pk2) == 0); + rustsecp256k1zkp_v0_10_1_pubkey pk1; + rustsecp256k1zkp_v0_10_1_pubkey pk2; + + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk1, pk1_ser, sizeof(pk1_ser)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk2, pk2_ser, sizeof(pk2_ser)) == 1); + + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, NULL, &pk2) < 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk1, NULL) > 0)); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk1, &pk2) < 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk2, &pk1) > 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk1, &pk1) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk2, &pk2) == 0); { - rustsecp256k1zkp_v0_10_0_pubkey pk_tmp; + rustsecp256k1zkp_v0_10_1_pubkey pk_tmp; memset(&pk_tmp, 0, sizeof(pk_tmp)); /* illegal pubkey */ - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk_tmp, &pk2) < 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk_tmp, &pk2) < 0)); { int32_t ecount = 0; - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, counting_callback_fn, &ecount); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk_tmp, &pk_tmp) == 0); + rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, counting_callback_fn, &ecount); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk_tmp, &pk_tmp) == 0); CHECK(ecount == 2); - rustsecp256k1zkp_v0_10_0_context_set_illegal_callback(CTX, NULL, NULL); + 
rustsecp256k1zkp_v0_10_1_context_set_illegal_callback(CTX, NULL, NULL); } - CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk2, &pk_tmp) > 0)); + CHECK_ILLEGAL_VOID(CTX, CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk2, &pk_tmp) > 0)); } /* Make pk2 the same as pk1 but with 3 rather than 2. Note that in * an uncompressed encoding, these would have the opposite ordering */ pk1_ser[0] = 3; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pk2, pk1_ser, sizeof(pk1_ser)) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk1, &pk2) < 0); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_cmp(CTX, &pk2, &pk1) > 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pk2, pk1_ser, sizeof(pk1_ser)) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk1, &pk2) < 0); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_cmp(CTX, &pk2, &pk1) > 0); } static void run_random_pubkeys(void) { @@ -6794,36 +6794,36 @@ static int test_ecdsa_der_parse(const unsigned char *sig, size_t siglen, int cer int ret = 0; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig_der; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig_der; unsigned char roundtrip_der[2048]; unsigned char compact_der[64]; size_t len_der = 2048; int parsed_der = 0, valid_der = 0, roundtrips_der = 0; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig_der_lax; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig_der_lax; unsigned char roundtrip_der_lax[2048]; unsigned char compact_der_lax[64]; size_t len_der_lax = 2048; int parsed_der_lax = 0, valid_der_lax = 0, roundtrips_der_lax = 0; - parsed_der = rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig_der, sig, siglen); + parsed_der = rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig_der, sig, siglen); if (parsed_der) { - ret |= (!rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, compact_der, &sig_der)) << 0; - valid_der = (rustsecp256k1zkp_v0_10_0_memcmp_var(compact_der, zeroes, 32) != 0) && 
(rustsecp256k1zkp_v0_10_0_memcmp_var(compact_der + 32, zeroes, 32) != 0); + ret |= (!rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, compact_der, &sig_der)) << 0; + valid_der = (rustsecp256k1zkp_v0_10_1_memcmp_var(compact_der, zeroes, 32) != 0) && (rustsecp256k1zkp_v0_10_1_memcmp_var(compact_der + 32, zeroes, 32) != 0); } if (valid_der) { - ret |= (!rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, roundtrip_der, &len_der, &sig_der)) << 1; - roundtrips_der = (len_der == siglen) && rustsecp256k1zkp_v0_10_0_memcmp_var(roundtrip_der, sig, siglen) == 0; + ret |= (!rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, roundtrip_der, &len_der, &sig_der)) << 1; + roundtrips_der = (len_der == siglen) && rustsecp256k1zkp_v0_10_1_memcmp_var(roundtrip_der, sig, siglen) == 0; } - parsed_der_lax = rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der_lax(CTX, &sig_der_lax, sig, siglen); + parsed_der_lax = rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der_lax(CTX, &sig_der_lax, sig, siglen); if (parsed_der_lax) { - ret |= (!rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, compact_der_lax, &sig_der_lax)) << 10; - valid_der_lax = (rustsecp256k1zkp_v0_10_0_memcmp_var(compact_der_lax, zeroes, 32) != 0) && (rustsecp256k1zkp_v0_10_0_memcmp_var(compact_der_lax + 32, zeroes, 32) != 0); + ret |= (!rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, compact_der_lax, &sig_der_lax)) << 10; + valid_der_lax = (rustsecp256k1zkp_v0_10_1_memcmp_var(compact_der_lax, zeroes, 32) != 0) && (rustsecp256k1zkp_v0_10_1_memcmp_var(compact_der_lax + 32, zeroes, 32) != 0); } if (valid_der_lax) { - ret |= (!rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, roundtrip_der_lax, &len_der_lax, &sig_der_lax)) << 11; - roundtrips_der_lax = (len_der_lax == siglen) && rustsecp256k1zkp_v0_10_0_memcmp_var(roundtrip_der_lax, sig, siglen) == 0; + ret |= (!rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, roundtrip_der_lax, &len_der_lax, 
&sig_der_lax)) << 11; + roundtrips_der_lax = (len_der_lax == siglen) && rustsecp256k1zkp_v0_10_1_memcmp_var(roundtrip_der_lax, sig, siglen) == 0; } if (certainly_der) { @@ -6839,7 +6839,7 @@ static int test_ecdsa_der_parse(const unsigned char *sig, size_t siglen, int cer if (valid_der) { ret |= (!roundtrips_der_lax) << 12; ret |= (len_der != len_der_lax) << 13; - ret |= ((len_der != len_der_lax) || (rustsecp256k1zkp_v0_10_0_memcmp_var(roundtrip_der_lax, roundtrip_der, len_der) != 0)) << 14; + ret |= ((len_der != len_der_lax) || (rustsecp256k1zkp_v0_10_1_memcmp_var(roundtrip_der_lax, roundtrip_der, len_der) != 0)) << 14; } ret |= (roundtrips_der != roundtrips_der_lax) << 15; if (parsed_der) { @@ -6863,27 +6863,27 @@ static void assign_big_endian(unsigned char *ptr, size_t ptrlen, uint32_t val) { static void damage_array(unsigned char *sig, size_t *len) { int pos; - int action = rustsecp256k1zkp_v0_10_0_testrand_bits(3); + int action = rustsecp256k1zkp_v0_10_1_testrand_bits(3); if (action < 1 && *len > 3) { /* Delete a byte. */ - pos = rustsecp256k1zkp_v0_10_0_testrand_int(*len); + pos = rustsecp256k1zkp_v0_10_1_testrand_int(*len); memmove(sig + pos, sig + pos + 1, *len - pos - 1); (*len)--; return; } else if (action < 2 && *len < 2048) { /* Insert a byte. */ - pos = rustsecp256k1zkp_v0_10_0_testrand_int(1 + *len); + pos = rustsecp256k1zkp_v0_10_1_testrand_int(1 + *len); memmove(sig + pos + 1, sig + pos, *len - pos); - sig[pos] = rustsecp256k1zkp_v0_10_0_testrand_bits(8); + sig[pos] = rustsecp256k1zkp_v0_10_1_testrand_bits(8); (*len)++; return; } else if (action < 4) { /* Modify a byte. */ - sig[rustsecp256k1zkp_v0_10_0_testrand_int(*len)] += 1 + rustsecp256k1zkp_v0_10_0_testrand_int(255); + sig[rustsecp256k1zkp_v0_10_1_testrand_int(*len)] += 1 + rustsecp256k1zkp_v0_10_1_testrand_int(255); return; } else { /* action < 8 */ /* Modify a bit. 
*/ - sig[rustsecp256k1zkp_v0_10_0_testrand_int(*len)] ^= 1 << rustsecp256k1zkp_v0_10_0_testrand_bits(3); + sig[rustsecp256k1zkp_v0_10_1_testrand_int(*len)] ^= 1 << rustsecp256k1zkp_v0_10_1_testrand_bits(3); return; } } @@ -6896,23 +6896,23 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly int n; *len = 0; - der = rustsecp256k1zkp_v0_10_0_testrand_bits(2) == 0; + der = rustsecp256k1zkp_v0_10_1_testrand_bits(2) == 0; *certainly_der = der; *certainly_not_der = 0; - indet = der ? 0 : rustsecp256k1zkp_v0_10_0_testrand_int(10) == 0; + indet = der ? 0 : rustsecp256k1zkp_v0_10_1_testrand_int(10) == 0; for (n = 0; n < 2; n++) { /* We generate two classes of numbers: nlow==1 "low" ones (up to 32 bytes), nlow==0 "high" ones (32 bytes with 129 top bits set, or larger than 32 bytes) */ - nlow[n] = der ? 1 : (rustsecp256k1zkp_v0_10_0_testrand_bits(3) != 0); + nlow[n] = der ? 1 : (rustsecp256k1zkp_v0_10_1_testrand_bits(3) != 0); /* The length of the number in bytes (the first byte of which will always be nonzero) */ - nlen[n] = nlow[n] ? rustsecp256k1zkp_v0_10_0_testrand_int(33) : 32 + rustsecp256k1zkp_v0_10_0_testrand_int(200) * rustsecp256k1zkp_v0_10_0_testrand_bits(3) / 8; + nlen[n] = nlow[n] ? rustsecp256k1zkp_v0_10_1_testrand_int(33) : 32 + rustsecp256k1zkp_v0_10_1_testrand_int(200) * rustsecp256k1zkp_v0_10_1_testrand_bits(3) / 8; CHECK(nlen[n] <= 232); /* The top bit of the number. */ - nhbit[n] = (nlow[n] == 0 && nlen[n] == 32) ? 1 : (nlen[n] == 0 ? 0 : rustsecp256k1zkp_v0_10_0_testrand_bits(1)); + nhbit[n] = (nlow[n] == 0 && nlen[n] == 32) ? 1 : (nlen[n] == 0 ? 0 : rustsecp256k1zkp_v0_10_1_testrand_bits(1)); /* The top byte of the number (after the potential hardcoded 16 0xFF characters for "high" 32 bytes numbers) */ - nhbyte[n] = nlen[n] == 0 ? 0 : (nhbit[n] ? 128 + rustsecp256k1zkp_v0_10_0_testrand_bits(7) : 1 + rustsecp256k1zkp_v0_10_0_testrand_int(127)); + nhbyte[n] = nlen[n] == 0 ? 0 : (nhbit[n] ? 
128 + rustsecp256k1zkp_v0_10_1_testrand_bits(7) : 1 + rustsecp256k1zkp_v0_10_1_testrand_int(127)); /* The number of zero bytes in front of the number (which is 0 or 1 in case of DER, otherwise we extend up to 300 bytes) */ - nzlen[n] = der ? ((nlen[n] == 0 || nhbit[n]) ? 1 : 0) : (nlow[n] ? rustsecp256k1zkp_v0_10_0_testrand_int(3) : rustsecp256k1zkp_v0_10_0_testrand_int(300 - nlen[n]) * rustsecp256k1zkp_v0_10_0_testrand_bits(3) / 8); + nzlen[n] = der ? ((nlen[n] == 0 || nhbit[n]) ? 1 : 0) : (nlow[n] ? rustsecp256k1zkp_v0_10_1_testrand_int(3) : rustsecp256k1zkp_v0_10_1_testrand_int(300 - nlen[n]) * rustsecp256k1zkp_v0_10_1_testrand_bits(3) / 8); if (nzlen[n] > ((nlen[n] == 0 || nhbit[n]) ? 1 : 0)) { *certainly_not_der = 1; } @@ -6921,7 +6921,7 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly nlenlen[n] = nlen[n] + nzlen[n] < 128 ? 0 : (nlen[n] + nzlen[n] < 256 ? 1 : 2); if (!der) { /* nlenlen[n] max 127 bytes */ - int add = rustsecp256k1zkp_v0_10_0_testrand_int(127 - nlenlen[n]) * rustsecp256k1zkp_v0_10_0_testrand_bits(4) * rustsecp256k1zkp_v0_10_0_testrand_bits(4) / 256; + int add = rustsecp256k1zkp_v0_10_1_testrand_int(127 - nlenlen[n]) * rustsecp256k1zkp_v0_10_1_testrand_bits(4) * rustsecp256k1zkp_v0_10_1_testrand_bits(4) / 256; nlenlen[n] += add; if (add != 0) { *certainly_not_der = 1; @@ -6935,7 +6935,7 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly CHECK(tlen <= 856); /* The length of the garbage inside the tuple. */ - elen = (der || indet) ? 0 : rustsecp256k1zkp_v0_10_0_testrand_int(980 - tlen) * rustsecp256k1zkp_v0_10_0_testrand_bits(3) / 8; + elen = (der || indet) ? 0 : rustsecp256k1zkp_v0_10_1_testrand_int(980 - tlen) * rustsecp256k1zkp_v0_10_1_testrand_bits(3) / 8; if (elen != 0) { *certainly_not_der = 1; } @@ -6943,7 +6943,7 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly CHECK(tlen <= 980); /* The length of the garbage after the end of the tuple. 
*/ - glen = der ? 0 : rustsecp256k1zkp_v0_10_0_testrand_int(990 - tlen) * rustsecp256k1zkp_v0_10_0_testrand_bits(3) / 8; + glen = der ? 0 : rustsecp256k1zkp_v0_10_1_testrand_int(990 - tlen) * rustsecp256k1zkp_v0_10_1_testrand_bits(3) / 8; if (glen != 0) { *certainly_not_der = 1; } @@ -6958,7 +6958,7 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly } else { int tlenlen = tlen < 128 ? 0 : (tlen < 256 ? 1 : 2); if (!der) { - int add = rustsecp256k1zkp_v0_10_0_testrand_int(127 - tlenlen) * rustsecp256k1zkp_v0_10_0_testrand_bits(4) * rustsecp256k1zkp_v0_10_0_testrand_bits(4) / 256; + int add = rustsecp256k1zkp_v0_10_1_testrand_int(127 - tlenlen) * rustsecp256k1zkp_v0_10_1_testrand_bits(4) * rustsecp256k1zkp_v0_10_1_testrand_bits(4) / 256; tlenlen += add; if (add != 0) { *certainly_not_der = 1; @@ -7009,13 +7009,13 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly nlen[n]--; } /* Generate remaining random bytes of number */ - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(sig + *len, nlen[n]); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(sig + *len, nlen[n]); *len += nlen[n]; nlen[n] = 0; } /* Generate random garbage inside tuple. */ - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(sig + *len, elen); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(sig + *len, elen); *len += elen; /* Generate end-of-contents bytes. */ @@ -7027,7 +7027,7 @@ static void random_ber_signature(unsigned char *sig, size_t *len, int* certainly CHECK(tlen + glen <= 1121); /* Generate random garbage outside tuple. */ - rustsecp256k1zkp_v0_10_0_testrand_bytes_test(sig + *len, glen); + rustsecp256k1zkp_v0_10_1_testrand_bytes_test(sig + *len, glen); *len += glen; tlen += glen; CHECK(tlen <= 1121); @@ -7068,22 +7068,22 @@ static void run_ecdsa_der_parse(void) { /* Tests several edge cases. 
*/ static void test_ecdsa_edge_cases(void) { int t; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; /* Test the case where ECDSA recomputes a point that is infinity. */ { - rustsecp256k1zkp_v0_10_0_gej keyj; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&ss, &ss); - rustsecp256k1zkp_v0_10_0_scalar_inverse(&ss, &ss); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sr, 1); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&CTX->ecmult_gen_ctx, &keyj, &sr); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&key, &keyj); + rustsecp256k1zkp_v0_10_1_gej keyj; + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&ss, &ss); + rustsecp256k1zkp_v0_10_1_scalar_inverse(&ss, &ss); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sr, 1); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&CTX->ecmult_gen_ctx, &keyj, &sr); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&key, &keyj); msg = ss; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); } /* Verify signature with r of zero fails. 
*/ @@ -7095,14 +7095,14 @@ static void test_ecdsa_edge_cases(void) { 0x3b, 0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x41 }; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sr, 0); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key, pubkey_mods_zero, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify( &sr, &ss, &key, &msg) == 0); + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sr, 0); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key, pubkey_mods_zero, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify( &sr, &ss, &key, &msg) == 0); } /* Verify signature with s of zero fails. 
*/ @@ -7114,14 +7114,14 @@ static void test_ecdsa_edge_cases(void) { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 }; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sr, 1); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key, pubkey, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sr, 1); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key, pubkey, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); } /* Verify signature with message 0 passes. 
*/ @@ -7140,23 +7140,23 @@ static void test_ecdsa_edge_cases(void) { 0x3b, 0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x43 }; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_ge key2; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 2); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, 0); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sr, 2); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key, pubkey, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key2, pubkey2, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&ss, &ss); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 0); + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_ge key2; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 2); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, 0); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sr, 2); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key, pubkey, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key2, pubkey2, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&ss, &ss); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); + 
rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 0); } /* Verify signature with message 1 passes. */ @@ -7181,24 +7181,24 @@ static void test_ecdsa_edge_cases(void) { 0x45, 0x51, 0x23, 0x19, 0x50, 0xb7, 0x5f, 0xc4, 0x40, 0x2d, 0xa1, 0x72, 0x2f, 0xc9, 0xba, 0xeb }; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_ge key2; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, 1); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sr, csr, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key, pubkey, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key2, pubkey2, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&ss, &ss); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 2); - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&ss, &ss); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 0); + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_ge key2; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sr, csr, NULL); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key, pubkey, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key2, pubkey2, 33)); + 
CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&ss, &ss); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 2); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&ss, &ss); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key2, &msg) == 0); } /* Verify signature with message -1 passes. */ @@ -7216,25 +7216,25 @@ static void test_ecdsa_edge_cases(void) { 0x45, 0x51, 0x23, 0x19, 0x50, 0xb7, 0x5f, 0xc4, 0x40, 0x2d, 0xa1, 0x72, 0x2f, 0xc9, 0xba, 0xee }; - rustsecp256k1zkp_v0_10_0_ge key; - rustsecp256k1zkp_v0_10_0_scalar msg; - rustsecp256k1zkp_v0_10_0_scalar sr, ss; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&msg, &msg); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(&sr, csr, NULL); - CHECK(rustsecp256k1zkp_v0_10_0_eckey_pubkey_parse(&key, pubkey, 33)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_negate(&ss, &ss); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ss, 3); - rustsecp256k1zkp_v0_10_0_scalar_inverse_var(&ss, &ss); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); + rustsecp256k1zkp_v0_10_1_ge key; + rustsecp256k1zkp_v0_10_1_scalar msg; + rustsecp256k1zkp_v0_10_1_scalar sr, ss; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&msg, &msg); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(&sr, csr, NULL); + 
CHECK(rustsecp256k1zkp_v0_10_1_eckey_pubkey_parse(&key, pubkey, 33)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + rustsecp256k1zkp_v0_10_1_scalar_negate(&ss, &ss); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 1); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ss, 3); + rustsecp256k1zkp_v0_10_1_scalar_inverse_var(&ss, &ss); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sig_verify(&sr, &ss, &key, &msg) == 0); } /* Signature where s would be zero. */ { - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; size_t siglen; unsigned char signature[72]; static const unsigned char nonce[32] = { @@ -7261,42 +7261,42 @@ static void test_ecdsa_edge_cases(void) { 0xb8, 0x12, 0xe0, 0x0b, 0x81, 0x7a, 0x77, 0x62, 0x65, 0xdf, 0xdd, 0x31, 0xb9, 0x3e, 0x29, 0xa9, }; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce) == 0); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce2) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce2) == 0); msg[31] = 0xaa; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, NULL, msg, key, precomputed_nonce_function, nonce2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, NULL, key, precomputed_nonce_function, nonce2)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, NULL, precomputed_nonce_function, nonce2)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce2) == 1); - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, key) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, 
NULL, msg, &pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, NULL, &pubkey)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg, &pubkey) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ec_pubkey_create(CTX, &pubkey, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, NULL, msg, key, precomputed_nonce_function, nonce2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, NULL, key, precomputed_nonce_function, nonce2)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, NULL, precomputed_nonce_function, nonce2)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, precomputed_nonce_function, nonce2) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, key) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, NULL, msg, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, NULL, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg, &pubkey) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ec_pubkey_create(CTX, &pubkey, NULL)); /* That pubkeyload fails via an ARGCHECK is a little odd but makes sense because pubkeys are an opaque data type. 
*/ - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, &sig, msg, &pubkey)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, &sig, msg, &pubkey)); siglen = 72; - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, NULL, &siglen, &sig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, signature, NULL, &sig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, signature, &siglen, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, signature, &siglen, &sig) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, NULL, signature, siglen)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, NULL, siglen)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &sig, signature, siglen) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, NULL, &siglen, &sig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, signature, NULL, &sig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, signature, &siglen, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, signature, &siglen, &sig) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, NULL, signature, siglen)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, NULL, siglen)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &sig, signature, siglen) == 1); siglen = 10; /* Too little room for a signature does not fail via ARGCHECK. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_der(CTX, signature, &siglen, &sig) == 0); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_normalize(CTX, NULL, NULL)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, NULL, &sig)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, signature, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_serialize_compact(CTX, signature, &sig) == 1); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, NULL, signature)); - CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &sig, NULL)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &sig, signature) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_der(CTX, signature, &siglen, &sig) == 0); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_normalize(CTX, NULL, NULL)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, NULL, &sig)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, signature, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_serialize_compact(CTX, signature, &sig) == 1); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, NULL, signature)); + CHECK_ILLEGAL(CTX, rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &sig, NULL)); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &sig, signature) == 1); memset(signature, 255, 64); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_compact(CTX, &sig, signature) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_compact(CTX, &sig, signature) == 0); } /* Nonce function corner cases. 
*/ @@ -7305,43 +7305,43 @@ static void test_ecdsa_edge_cases(void) { int i; unsigned char key[32]; unsigned char msg[32]; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig2; - rustsecp256k1zkp_v0_10_0_scalar sr[512], ss; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig2; + rustsecp256k1zkp_v0_10_1_scalar sr[512], ss; const unsigned char *extra; extra = t == 0 ? NULL : zero; memset(msg, 0, 32); msg[31] = 1; /* High key results in signature failure. */ memset(key, 0xFF, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, NULL, extra) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, NULL, extra) == 0); CHECK(is_empty_signature(&sig)); /* Zero key results in signature failure. */ memset(key, 0, 32); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, NULL, extra) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, NULL, extra) == 0); CHECK(is_empty_signature(&sig)); /* Nonce function failure results in signature failure. */ key[31] = 1; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, nonce_function_test_fail, extra) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, nonce_function_test_fail, extra) == 0); CHECK(is_empty_signature(&sig)); /* The retry loop successfully makes its way to the first good value. 
*/ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig, msg, key, nonce_function_test_retry, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig, msg, key, nonce_function_test_retry, extra) == 1); CHECK(!is_empty_signature(&sig)); - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig2, msg, key, nonce_function_rfc6979, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig2, msg, key, nonce_function_rfc6979, extra) == 1); CHECK(!is_empty_signature(&sig2)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&sig, &sig2, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&sig, &sig2, sizeof(sig)) == 0); /* The default nonce function is deterministic. */ - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); CHECK(!is_empty_signature(&sig2)); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&sig, &sig2, sizeof(sig)) == 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&sig, &sig2, sizeof(sig)) == 0); /* The default nonce function changes output with different messages. 
*/ for(i = 0; i < 256; i++) { int j; msg[0] = i; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); CHECK(!is_empty_signature(&sig2)); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(CTX, &sr[i], &ss, &sig2); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(CTX, &sr[i], &ss, &sig2); for (j = 0; j < i; j++) { - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&sr[i], &sr[j])); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&sr[i], &sr[j])); } } msg[0] = 0; @@ -7350,11 +7350,11 @@ static void test_ecdsa_edge_cases(void) { for(i = 256; i < 512; i++) { int j; key[0] = i - 256; - CHECK(rustsecp256k1zkp_v0_10_0_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ecdsa_sign(CTX, &sig2, msg, key, NULL, extra) == 1); CHECK(!is_empty_signature(&sig2)); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(CTX, &sr[i], &ss, &sig2); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(CTX, &sr[i], &ss, &sig2); for (j = 0; j < i; j++) { - CHECK(!rustsecp256k1zkp_v0_10_0_scalar_eq(&sr[i], &sr[j])); + CHECK(!rustsecp256k1zkp_v0_10_1_scalar_eq(&sr[i], &sr[j])); } } key[0] = 0; @@ -7379,12 +7379,12 @@ static void test_ecdsa_edge_cases(void) { SECP256K1_CHECKMEM_CHECK(nonce3,32); CHECK(nonce_function_rfc6979(nonce4, zeros, zeros, zeros, (void *)zeros, 0) == 1); SECP256K1_CHECKMEM_CHECK(nonce4,32); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce2, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce3, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce, nonce4, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce2, nonce3, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce2, nonce4, 32) != 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(nonce3, nonce4, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce2, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce3, 32) != 0); + 
CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce, nonce4, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce2, nonce3, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce2, nonce4, 32) != 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(nonce3, nonce4, 32) != 0); } @@ -7413,29 +7413,29 @@ static void run_ecdsa_edge_cases(void) { The tests check for known attacks (range checks in (r,s), arithmetic errors, malleability). */ static void test_ecdsa_wycheproof(void) { - #include "wycheproof/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.h" + #include "wycheproof/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.h" int t; for (t = 0; t < SECP256K1_ECDSA_WYCHEPROOF_NUMBER_TESTVECTORS; t++) { - rustsecp256k1zkp_v0_10_0_ecdsa_signature signature; - rustsecp256k1zkp_v0_10_0_sha256 hasher; - rustsecp256k1zkp_v0_10_0_pubkey pubkey; + rustsecp256k1zkp_v0_10_1_ecdsa_signature signature; + rustsecp256k1zkp_v0_10_1_sha256 hasher; + rustsecp256k1zkp_v0_10_1_pubkey pubkey; const unsigned char *msg, *sig, *pk; unsigned char out[32] = {0}; int actual_verify = 0; memset(&pubkey, 0, sizeof(pubkey)); pk = &wycheproof_ecdsa_public_keys[testvectors[t].pk_offset]; - CHECK(rustsecp256k1zkp_v0_10_0_ec_pubkey_parse(CTX, &pubkey, pk, 65) == 1); + CHECK(rustsecp256k1zkp_v0_10_1_ec_pubkey_parse(CTX, &pubkey, pk, 65) == 1); - rustsecp256k1zkp_v0_10_0_sha256_initialize(&hasher); + rustsecp256k1zkp_v0_10_1_sha256_initialize(&hasher); msg = &wycheproof_ecdsa_messages[testvectors[t].msg_offset]; - rustsecp256k1zkp_v0_10_0_sha256_write(&hasher, msg, testvectors[t].msg_len); - rustsecp256k1zkp_v0_10_0_sha256_finalize(&hasher, out); + rustsecp256k1zkp_v0_10_1_sha256_write(&hasher, msg, testvectors[t].msg_len); + rustsecp256k1zkp_v0_10_1_sha256_finalize(&hasher, out); sig = &wycheproof_ecdsa_signatures[testvectors[t].sig_offset]; - if (rustsecp256k1zkp_v0_10_0_ecdsa_signature_parse_der(CTX, &signature, sig, testvectors[t].sig_len) == 1) { - actual_verify = 
rustsecp256k1zkp_v0_10_0_ecdsa_verify(CTX, (const rustsecp256k1zkp_v0_10_0_ecdsa_signature *)&signature, out, &pubkey); + if (rustsecp256k1zkp_v0_10_1_ecdsa_signature_parse_der(CTX, &signature, sig, testvectors[t].sig_len) == 1) { + actual_verify = rustsecp256k1zkp_v0_10_1_ecdsa_verify(CTX, (const rustsecp256k1zkp_v0_10_1_ecdsa_signature *)&signature, out, &pubkey); } CHECK(testvectors[t].expected_verify == actual_verify); } @@ -7490,6 +7490,10 @@ static void run_ecdsa_wycheproof(void) { #include "modules/schnorrsig/tests_impl.h" #endif +#ifdef ENABLE_MODULE_SCHNORR_ADAPTOR +#include "modules/schnorr_adaptor/tests_impl.h" +#endif + #ifdef ENABLE_MODULE_ELLSWIFT #include "modules/ellswift/tests_impl.h" #endif @@ -7502,32 +7506,32 @@ static void run_ecdsa_wycheproof(void) { #include "modules/ecdsa_adaptor/tests_impl.h" #endif -static void run_rustsecp256k1zkp_v0_10_0_memczero_test(void) { +static void run_rustsecp256k1zkp_v0_10_1_memczero_test(void) { unsigned char buf1[6] = {1, 2, 3, 4, 5, 6}; unsigned char buf2[sizeof(buf1)]; - /* rustsecp256k1zkp_v0_10_0_memczero(..., ..., 0) is a noop. */ + /* rustsecp256k1zkp_v0_10_1_memczero(..., ..., 0) is a noop. */ memcpy(buf2, buf1, sizeof(buf1)); - rustsecp256k1zkp_v0_10_0_memczero(buf1, sizeof(buf1), 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf1, buf2, sizeof(buf1)) == 0); + rustsecp256k1zkp_v0_10_1_memczero(buf1, sizeof(buf1), 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf1, buf2, sizeof(buf1)) == 0); - /* rustsecp256k1zkp_v0_10_0_memczero(..., ..., 1) zeros the buffer. */ + /* rustsecp256k1zkp_v0_10_1_memczero(..., ..., 1) zeros the buffer. 
*/ memset(buf2, 0, sizeof(buf2)); - rustsecp256k1zkp_v0_10_0_memczero(buf1, sizeof(buf1) , 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf1, buf2, sizeof(buf1)) == 0); + rustsecp256k1zkp_v0_10_1_memczero(buf1, sizeof(buf1) , 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf1, buf2, sizeof(buf1)) == 0); } -static void run_rustsecp256k1zkp_v0_10_0_byteorder_tests(void) { +static void run_rustsecp256k1zkp_v0_10_1_byteorder_tests(void) { { const uint32_t x = 0xFF03AB45; const unsigned char x_be[4] = {0xFF, 0x03, 0xAB, 0x45}; unsigned char buf[4]; uint32_t x_; - rustsecp256k1zkp_v0_10_0_write_be32(buf, x); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf, x_be, sizeof(buf)) == 0); + rustsecp256k1zkp_v0_10_1_write_be32(buf, x); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf, x_be, sizeof(buf)) == 0); - x_ = rustsecp256k1zkp_v0_10_0_read_be32(buf); + x_ = rustsecp256k1zkp_v0_10_1_read_be32(buf); CHECK(x == x_); } @@ -7537,10 +7541,10 @@ static void run_rustsecp256k1zkp_v0_10_0_byteorder_tests(void) { unsigned char buf[8]; uint64_t x_; - rustsecp256k1zkp_v0_10_0_write_be64(buf, x); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(buf, x_be, sizeof(buf)) == 0); + rustsecp256k1zkp_v0_10_1_write_be64(buf, x); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(buf, x_be, sizeof(buf)) == 0); - x_ = rustsecp256k1zkp_v0_10_0_read_be64(buf); + x_ = rustsecp256k1zkp_v0_10_1_read_be64(buf); CHECK(x == x_); } } @@ -7549,145 +7553,145 @@ static void int_cmov_test(void) { int r = INT_MAX; int a = 0; - rustsecp256k1zkp_v0_10_0_int_cmov(&r, &a, 0); + rustsecp256k1zkp_v0_10_1_int_cmov(&r, &a, 0); CHECK(r == INT_MAX); r = 0; a = INT_MAX; - rustsecp256k1zkp_v0_10_0_int_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_int_cmov(&r, &a, 1); CHECK(r == INT_MAX); a = 0; - rustsecp256k1zkp_v0_10_0_int_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_int_cmov(&r, &a, 1); CHECK(r == 0); a = 1; - rustsecp256k1zkp_v0_10_0_int_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_int_cmov(&r, &a, 1); CHECK(r == 1); r = 1; a = 
0; - rustsecp256k1zkp_v0_10_0_int_cmov(&r, &a, 0); + rustsecp256k1zkp_v0_10_1_int_cmov(&r, &a, 0); CHECK(r == 1); } static void fe_cmov_test(void) { - static const rustsecp256k1zkp_v0_10_0_fe zero = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0); - static const rustsecp256k1zkp_v0_10_0_fe one = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1); - static const rustsecp256k1zkp_v0_10_0_fe max = SECP256K1_FE_CONST( + static const rustsecp256k1zkp_v0_10_1_fe zero = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 0); + static const rustsecp256k1zkp_v0_10_1_fe one = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 1); + static const rustsecp256k1zkp_v0_10_1_fe max = SECP256K1_FE_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL ); - rustsecp256k1zkp_v0_10_0_fe r = max; - rustsecp256k1zkp_v0_10_0_fe a = zero; + rustsecp256k1zkp_v0_10_1_fe r = max; + rustsecp256k1zkp_v0_10_1_fe a = zero; - rustsecp256k1zkp_v0_10_0_fe_cmov(&r, &a, 0); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r, &a, 0); CHECK(fe_identical(&r, &max)); r = zero; a = max; - rustsecp256k1zkp_v0_10_0_fe_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r, &a, 1); CHECK(fe_identical(&r, &max)); a = zero; - rustsecp256k1zkp_v0_10_0_fe_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r, &a, 1); CHECK(fe_identical(&r, &zero)); a = one; - rustsecp256k1zkp_v0_10_0_fe_cmov(&r, &a, 1); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r, &a, 1); CHECK(fe_identical(&r, &one)); r = one; a = zero; - rustsecp256k1zkp_v0_10_0_fe_cmov(&r, &a, 0); + rustsecp256k1zkp_v0_10_1_fe_cmov(&r, &a, 0); CHECK(fe_identical(&r, &one)); } static void fe_storage_cmov_test(void) { - static const rustsecp256k1zkp_v0_10_0_fe_storage zero = SECP256K1_FE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 0); - static const rustsecp256k1zkp_v0_10_0_fe_storage one = SECP256K1_FE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 1); - static const rustsecp256k1zkp_v0_10_0_fe_storage max = SECP256K1_FE_STORAGE_CONST( + static const 
rustsecp256k1zkp_v0_10_1_fe_storage zero = SECP256K1_FE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 0); + static const rustsecp256k1zkp_v0_10_1_fe_storage one = SECP256K1_FE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 1); + static const rustsecp256k1zkp_v0_10_1_fe_storage max = SECP256K1_FE_STORAGE_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL ); - rustsecp256k1zkp_v0_10_0_fe_storage r = max; - rustsecp256k1zkp_v0_10_0_fe_storage a = zero; + rustsecp256k1zkp_v0_10_1_fe_storage r = max; + rustsecp256k1zkp_v0_10_1_fe_storage a = zero; - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); r = zero; a = max; - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); a = zero; - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &zero, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &zero, sizeof(r)) == 0); a = one; - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &one, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &one, sizeof(r)) == 0); r = one; a = zero; - rustsecp256k1zkp_v0_10_0_fe_storage_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &one, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_fe_storage_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &one, sizeof(r)) == 0); } static void scalar_cmov_test(void) { - static const 
rustsecp256k1zkp_v0_10_0_scalar max = SECP256K1_SCALAR_CONST( + static const rustsecp256k1zkp_v0_10_1_scalar max = SECP256K1_SCALAR_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFEUL, 0xBAAEDCE6UL, 0xAF48A03BUL, 0xBFD25E8CUL, 0xD0364140UL ); - rustsecp256k1zkp_v0_10_0_scalar r = max; - rustsecp256k1zkp_v0_10_0_scalar a = rustsecp256k1zkp_v0_10_0_scalar_zero; + rustsecp256k1zkp_v0_10_1_scalar r = max; + rustsecp256k1zkp_v0_10_1_scalar a = rustsecp256k1zkp_v0_10_1_scalar_zero; - rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); - r = rustsecp256k1zkp_v0_10_0_scalar_zero; a = max; - rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + r = rustsecp256k1zkp_v0_10_1_scalar_zero; a = max; + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); - a = rustsecp256k1zkp_v0_10_0_scalar_zero; - rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &rustsecp256k1zkp_v0_10_0_scalar_zero, sizeof(r)) == 0); + a = rustsecp256k1zkp_v0_10_1_scalar_zero; + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &rustsecp256k1zkp_v0_10_1_scalar_zero, sizeof(r)) == 0); - a = rustsecp256k1zkp_v0_10_0_scalar_one; - rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &rustsecp256k1zkp_v0_10_0_scalar_one, sizeof(r)) == 0); + a = rustsecp256k1zkp_v0_10_1_scalar_one; + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &rustsecp256k1zkp_v0_10_1_scalar_one, sizeof(r)) == 0); - r = rustsecp256k1zkp_v0_10_0_scalar_one; a = rustsecp256k1zkp_v0_10_0_scalar_zero; - 
rustsecp256k1zkp_v0_10_0_scalar_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &rustsecp256k1zkp_v0_10_0_scalar_one, sizeof(r)) == 0); + r = rustsecp256k1zkp_v0_10_1_scalar_one; a = rustsecp256k1zkp_v0_10_1_scalar_zero; + rustsecp256k1zkp_v0_10_1_scalar_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &rustsecp256k1zkp_v0_10_1_scalar_one, sizeof(r)) == 0); } static void ge_storage_cmov_test(void) { - static const rustsecp256k1zkp_v0_10_0_ge_storage zero = SECP256K1_GE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); - static const rustsecp256k1zkp_v0_10_0_ge_storage one = SECP256K1_GE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1); - static const rustsecp256k1zkp_v0_10_0_ge_storage max = SECP256K1_GE_STORAGE_CONST( + static const rustsecp256k1zkp_v0_10_1_ge_storage zero = SECP256K1_GE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + static const rustsecp256k1zkp_v0_10_1_ge_storage one = SECP256K1_GE_STORAGE_CONST(0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1); + static const rustsecp256k1zkp_v0_10_1_ge_storage max = SECP256K1_GE_STORAGE_CONST( 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL, 0xFFFFFFFFUL ); - rustsecp256k1zkp_v0_10_0_ge_storage r = max; - rustsecp256k1zkp_v0_10_0_ge_storage a = zero; + rustsecp256k1zkp_v0_10_1_ge_storage r = max; + rustsecp256k1zkp_v0_10_1_ge_storage a = zero; - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); r = zero; a = max; - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &max, sizeof(r)) == 0); + 
rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &max, sizeof(r)) == 0); a = zero; - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &zero, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &zero, sizeof(r)) == 0); a = one; - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&r, &a, 1); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &one, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&r, &a, 1); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &one, sizeof(r)) == 0); r = one; a = zero; - rustsecp256k1zkp_v0_10_0_ge_storage_cmov(&r, &a, 0); - CHECK(rustsecp256k1zkp_v0_10_0_memcmp_var(&r, &one, sizeof(r)) == 0); + rustsecp256k1zkp_v0_10_1_ge_storage_cmov(&r, &a, 0); + CHECK(rustsecp256k1zkp_v0_10_1_memcmp_var(&r, &one, sizeof(r)) == 0); } static void run_cmov_tests(void) { @@ -7726,28 +7730,28 @@ int main(int argc, char **argv) { run_xoshiro256pp_tests(); /* find random seed */ - rustsecp256k1zkp_v0_10_0_testrand_init(argc > 2 ? argv[2] : NULL); + rustsecp256k1zkp_v0_10_1_testrand_init(argc > 2 ? argv[2] : NULL); /*** Setup test environment ***/ /* Create a global context available to all tests */ - CTX = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); + CTX = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); /* Randomize the context only with probability 15/16 to make sure we test without context randomization from time to time. TODO Reconsider this when recalibrating the tests. 
*/ - if (rustsecp256k1zkp_v0_10_0_testrand_bits(4)) { + if (rustsecp256k1zkp_v0_10_1_testrand_bits(4)) { unsigned char rand32[32]; - rustsecp256k1zkp_v0_10_0_testrand256(rand32); - CHECK(rustsecp256k1zkp_v0_10_0_context_randomize(CTX, rand32)); + rustsecp256k1zkp_v0_10_1_testrand256(rand32); + CHECK(rustsecp256k1zkp_v0_10_1_context_randomize(CTX, rand32)); } - /* Make a writable copy of rustsecp256k1zkp_v0_10_0_context_static in order to test the effect of API functions + /* Make a writable copy of rustsecp256k1zkp_v0_10_1_context_static in order to test the effect of API functions that write to the context. The API does not support cloning the static context, so we use memcpy instead. The user is not supposed to copy a context but we should still ensure that the API functions handle copies of the static context gracefully. */ - STATIC_CTX = malloc(sizeof(*rustsecp256k1zkp_v0_10_0_context_static)); + STATIC_CTX = malloc(sizeof(*rustsecp256k1zkp_v0_10_1_context_static)); CHECK(STATIC_CTX != NULL); - memcpy(STATIC_CTX, rustsecp256k1zkp_v0_10_0_context_static, sizeof(rustsecp256k1zkp_v0_10_0_context)); - CHECK(!rustsecp256k1zkp_v0_10_0_context_is_proper(STATIC_CTX)); + memcpy(STATIC_CTX, rustsecp256k1zkp_v0_10_1_context_static, sizeof(rustsecp256k1zkp_v0_10_1_context)); + CHECK(!rustsecp256k1zkp_v0_10_1_context_is_proper(STATIC_CTX)); /*** Run actual tests ***/ @@ -7879,6 +7883,10 @@ int main(int argc, char **argv) { run_schnorrsig_tests(); #endif +#ifdef ENABLE_MODULE_SCHNORR_ADAPTOR + run_schnorr_adaptor_tests(); +#endif + #ifdef ENABLE_MODULE_ELLSWIFT run_ellswift_tests(); #endif @@ -7893,16 +7901,16 @@ int main(int argc, char **argv) { #endif /* util tests */ - run_rustsecp256k1zkp_v0_10_0_memczero_test(); - run_rustsecp256k1zkp_v0_10_0_byteorder_tests(); + run_rustsecp256k1zkp_v0_10_1_memczero_test(); + run_rustsecp256k1zkp_v0_10_1_byteorder_tests(); run_cmov_tests(); /*** Tear down test environment ***/ free(STATIC_CTX); - 
rustsecp256k1zkp_v0_10_0_context_destroy(CTX); + rustsecp256k1zkp_v0_10_1_context_destroy(CTX); - rustsecp256k1zkp_v0_10_0_testrand_finish(); + rustsecp256k1zkp_v0_10_1_testrand_finish(); printf("no problems found\n"); return 0; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/tests_exhaustive.c b/secp256k1-zkp-sys/depend/secp256k1/src/tests_exhaustive.c index 1c22a9ed..34f0e521 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/tests_exhaustive.c +++ b/secp256k1-zkp-sys/depend/secp256k1/src/tests_exhaustive.c @@ -42,10 +42,10 @@ SECP256K1_INLINE static int skip_section(uint64_t* iter) { return ((((uint32_t)*iter ^ (*iter >> 32)) * num_cores) >> 32) != this_core; } -static int rustsecp256k1zkp_v0_10_0_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg32, +static int rustsecp256k1zkp_v0_10_1_nonce_function_smallint(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int attempt) { - rustsecp256k1zkp_v0_10_0_scalar s; + rustsecp256k1zkp_v0_10_1_scalar s; int *idata = data; (void)msg32; (void)key32; @@ -57,94 +57,94 @@ static int rustsecp256k1zkp_v0_10_0_nonce_function_smallint(unsigned char *nonce if (attempt > 0) { *idata = (*idata + 1) % EXHAUSTIVE_TEST_ORDER; } - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s, *idata); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(nonce32, &s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s, *idata); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(nonce32, &s); return 1; } -static void test_exhaustive_endomorphism(const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_endomorphism(const rustsecp256k1zkp_v0_10_1_ge *group) { int i; for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_ge res; - rustsecp256k1zkp_v0_10_0_ge_mul_lambda(&res, &group[i]); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&group[i * EXHAUSTIVE_TEST_LAMBDA % EXHAUSTIVE_TEST_ORDER], &res)); + rustsecp256k1zkp_v0_10_1_ge res; + 
rustsecp256k1zkp_v0_10_1_ge_mul_lambda(&res, &group[i]); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&group[i * EXHAUSTIVE_TEST_LAMBDA % EXHAUSTIVE_TEST_ORDER], &res)); } } -static void test_exhaustive_addition(const rustsecp256k1zkp_v0_10_0_ge *group, const rustsecp256k1zkp_v0_10_0_gej *groupj) { +static void test_exhaustive_addition(const rustsecp256k1zkp_v0_10_1_ge *group, const rustsecp256k1zkp_v0_10_1_gej *groupj) { int i, j; uint64_t iter = 0; /* Sanity-check (and check infinity functions) */ - CHECK(rustsecp256k1zkp_v0_10_0_ge_is_infinity(&group[0])); - CHECK(rustsecp256k1zkp_v0_10_0_gej_is_infinity(&groupj[0])); + CHECK(rustsecp256k1zkp_v0_10_1_ge_is_infinity(&group[0])); + CHECK(rustsecp256k1zkp_v0_10_1_gej_is_infinity(&groupj[0])); for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) { - CHECK(!rustsecp256k1zkp_v0_10_0_ge_is_infinity(&group[i])); - CHECK(!rustsecp256k1zkp_v0_10_0_gej_is_infinity(&groupj[i])); + CHECK(!rustsecp256k1zkp_v0_10_1_ge_is_infinity(&group[i])); + CHECK(!rustsecp256k1zkp_v0_10_1_gej_is_infinity(&groupj[i])); } /* Check all addition formulae */ for (j = 0; j < EXHAUSTIVE_TEST_ORDER; j++) { - rustsecp256k1zkp_v0_10_0_fe fe_inv; + rustsecp256k1zkp_v0_10_1_fe fe_inv; if (skip_section(&iter)) continue; - rustsecp256k1zkp_v0_10_0_fe_inv(&fe_inv, &groupj[j].z); + rustsecp256k1zkp_v0_10_1_fe_inv(&fe_inv, &groupj[j].z); for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_ge zless_gej; - rustsecp256k1zkp_v0_10_0_gej tmp; + rustsecp256k1zkp_v0_10_1_ge zless_gej; + rustsecp256k1zkp_v0_10_1_gej tmp; /* add_var */ - rustsecp256k1zkp_v0_10_0_gej_add_var(&tmp, &groupj[i], &groupj[j], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej_add_var(&tmp, &groupj[i], &groupj[j], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); /* add_ge */ if (j > 0) { - rustsecp256k1zkp_v0_10_0_gej_add_ge(&tmp, &groupj[i], 
&group[j]); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&tmp, &groupj[i], &group[j]); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); } /* add_ge_var */ - rustsecp256k1zkp_v0_10_0_gej_add_ge_var(&tmp, &groupj[i], &group[j], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej_add_ge_var(&tmp, &groupj[i], &group[j], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); /* add_zinv_var */ zless_gej.infinity = groupj[j].infinity; zless_gej.x = groupj[j].x; zless_gej.y = groupj[j].y; - rustsecp256k1zkp_v0_10_0_gej_add_zinv_var(&tmp, &groupj[i], &zless_gej, &fe_inv); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej_add_zinv_var(&tmp, &groupj[i], &zless_gej, &fe_inv); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i + j) % EXHAUSTIVE_TEST_ORDER])); } } /* Check doubling */ for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_gej_double(&tmp, &groupj[i]); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(2 * i) % EXHAUSTIVE_TEST_ORDER])); - rustsecp256k1zkp_v0_10_0_gej_double_var(&tmp, &groupj[i], NULL); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(2 * i) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_gej_double(&tmp, &groupj[i]); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(2 * i) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_gej_double_var(&tmp, &groupj[i], NULL); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(2 * i) % EXHAUSTIVE_TEST_ORDER])); } /* Check negation */ for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_ge tmp; - 
rustsecp256k1zkp_v0_10_0_gej tmpj; - rustsecp256k1zkp_v0_10_0_ge_neg(&tmp, &group[i]); - CHECK(rustsecp256k1zkp_v0_10_0_ge_eq_var(&tmp, &group[EXHAUSTIVE_TEST_ORDER - i])); - rustsecp256k1zkp_v0_10_0_gej_neg(&tmpj, &groupj[i]); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmpj, &group[EXHAUSTIVE_TEST_ORDER - i])); + rustsecp256k1zkp_v0_10_1_ge tmp; + rustsecp256k1zkp_v0_10_1_gej tmpj; + rustsecp256k1zkp_v0_10_1_ge_neg(&tmp, &group[i]); + CHECK(rustsecp256k1zkp_v0_10_1_ge_eq_var(&tmp, &group[EXHAUSTIVE_TEST_ORDER - i])); + rustsecp256k1zkp_v0_10_1_gej_neg(&tmpj, &groupj[i]); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmpj, &group[EXHAUSTIVE_TEST_ORDER - i])); } } -static void test_exhaustive_ecmult(const rustsecp256k1zkp_v0_10_0_ge *group, const rustsecp256k1zkp_v0_10_0_gej *groupj) { +static void test_exhaustive_ecmult(const rustsecp256k1zkp_v0_10_1_ge *group, const rustsecp256k1zkp_v0_10_1_gej *groupj) { int i, j, r_log; uint64_t iter = 0; for (r_log = 1; r_log < EXHAUSTIVE_TEST_ORDER; r_log++) { for (j = 0; j < EXHAUSTIVE_TEST_ORDER; j++) { if (skip_section(&iter)) continue; for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_scalar na, ng; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&na, i); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ng, j); + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_scalar na, ng; + rustsecp256k1zkp_v0_10_1_scalar_set_int(&na, i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ng, j); - rustsecp256k1zkp_v0_10_0_ecmult(&tmp, &groupj[r_log], &na, &ng); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i * r_log + j) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_ecmult(&tmp, &groupj[r_log], &na, &ng); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i * r_log + j) % EXHAUSTIVE_TEST_ORDER])); } } } @@ -152,141 +152,141 @@ static void test_exhaustive_ecmult(const rustsecp256k1zkp_v0_10_0_ge *group, con for (j = 0; j < EXHAUSTIVE_TEST_ORDER; 
j++) { for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { int ret; - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_fe xn, xd, tmpf; - rustsecp256k1zkp_v0_10_0_scalar ng; + rustsecp256k1zkp_v0_10_1_gej tmp; + rustsecp256k1zkp_v0_10_1_fe xn, xd, tmpf; + rustsecp256k1zkp_v0_10_1_scalar ng; if (skip_section(&iter)) continue; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&ng, j); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&ng, j); - /* Test rustsecp256k1zkp_v0_10_0_ecmult_const. */ - rustsecp256k1zkp_v0_10_0_ecmult_const(&tmp, &group[i], &ng); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i * j) % EXHAUSTIVE_TEST_ORDER])); + /* Test rustsecp256k1zkp_v0_10_1_ecmult_const. */ + rustsecp256k1zkp_v0_10_1_ecmult_const(&tmp, &group[i], &ng); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i * j) % EXHAUSTIVE_TEST_ORDER])); if (i != 0 && j != 0) { - /* Test rustsecp256k1zkp_v0_10_0_ecmult_const_xonly with all curve X coordinates, and xd=NULL. */ - ret = rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(&tmpf, &group[i].x, NULL, &ng, 0); + /* Test rustsecp256k1zkp_v0_10_1_ecmult_const_xonly with all curve X coordinates, and xd=NULL. */ + ret = rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(&tmpf, &group[i].x, NULL, &ng, 0); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&tmpf, &group[(i * j) % EXHAUSTIVE_TEST_ORDER].x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&tmpf, &group[(i * j) % EXHAUSTIVE_TEST_ORDER].x)); - /* Test rustsecp256k1zkp_v0_10_0_ecmult_const_xonly with all curve X coordinates, with random xd. */ + /* Test rustsecp256k1zkp_v0_10_1_ecmult_const_xonly with all curve X coordinates, with random xd. 
*/ random_fe_non_zero(&xd); - rustsecp256k1zkp_v0_10_0_fe_mul(&xn, &xd, &group[i].x); - ret = rustsecp256k1zkp_v0_10_0_ecmult_const_xonly(&tmpf, &xn, &xd, &ng, 0); + rustsecp256k1zkp_v0_10_1_fe_mul(&xn, &xd, &group[i].x); + ret = rustsecp256k1zkp_v0_10_1_ecmult_const_xonly(&tmpf, &xn, &xd, &ng, 0); CHECK(ret); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&tmpf, &group[(i * j) % EXHAUSTIVE_TEST_ORDER].x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&tmpf, &group[(i * j) % EXHAUSTIVE_TEST_ORDER].x)); } } } } typedef struct { - rustsecp256k1zkp_v0_10_0_scalar sc[2]; - rustsecp256k1zkp_v0_10_0_ge pt[2]; + rustsecp256k1zkp_v0_10_1_scalar sc[2]; + rustsecp256k1zkp_v0_10_1_ge pt[2]; } ecmult_multi_data; -static int ecmult_multi_callback(rustsecp256k1zkp_v0_10_0_scalar *sc, rustsecp256k1zkp_v0_10_0_ge *pt, size_t idx, void *cbdata) { +static int ecmult_multi_callback(rustsecp256k1zkp_v0_10_1_scalar *sc, rustsecp256k1zkp_v0_10_1_ge *pt, size_t idx, void *cbdata) { ecmult_multi_data *data = (ecmult_multi_data*) cbdata; *sc = data->sc[idx]; *pt = data->pt[idx]; return 1; } -static void test_exhaustive_ecmult_multi(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_ecmult_multi(const rustsecp256k1zkp_v0_10_1_context *ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { int i, j, k, x, y; uint64_t iter = 0; - rustsecp256k1zkp_v0_10_0_scratch *scratch = rustsecp256k1zkp_v0_10_0_scratch_create(&ctx->error_callback, 4096); + rustsecp256k1zkp_v0_10_1_scratch *scratch = rustsecp256k1zkp_v0_10_1_scratch_create(&ctx->error_callback, 4096); for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) { for (j = 0; j < EXHAUSTIVE_TEST_ORDER; j++) { for (k = 0; k < EXHAUSTIVE_TEST_ORDER; k++) { for (x = 0; x < EXHAUSTIVE_TEST_ORDER; x++) { if (skip_section(&iter)) continue; for (y = 0; y < EXHAUSTIVE_TEST_ORDER; y++) { - rustsecp256k1zkp_v0_10_0_gej tmp; - rustsecp256k1zkp_v0_10_0_scalar g_sc; + rustsecp256k1zkp_v0_10_1_gej tmp; + 
rustsecp256k1zkp_v0_10_1_scalar g_sc; ecmult_multi_data data; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&data.sc[0], i); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&data.sc[1], j); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&g_sc, k); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&data.sc[0], i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&data.sc[1], j); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&g_sc, k); data.pt[0] = group[x]; data.pt[1] = group[y]; - rustsecp256k1zkp_v0_10_0_ecmult_multi_var(&ctx->error_callback, scratch, &tmp, &g_sc, ecmult_multi_callback, &data, 2); - CHECK(rustsecp256k1zkp_v0_10_0_gej_eq_ge_var(&tmp, &group[(i * x + j * y + k) % EXHAUSTIVE_TEST_ORDER])); + rustsecp256k1zkp_v0_10_1_ecmult_multi_var(&ctx->error_callback, scratch, &tmp, &g_sc, ecmult_multi_callback, &data, 2); + CHECK(rustsecp256k1zkp_v0_10_1_gej_eq_ge_var(&tmp, &group[(i * x + j * y + k) % EXHAUSTIVE_TEST_ORDER])); } } } } } - rustsecp256k1zkp_v0_10_0_scratch_destroy(&ctx->error_callback, scratch); + rustsecp256k1zkp_v0_10_1_scratch_destroy(&ctx->error_callback, scratch); } -static void r_from_k(rustsecp256k1zkp_v0_10_0_scalar *r, const rustsecp256k1zkp_v0_10_0_ge *group, int k, int* overflow) { - rustsecp256k1zkp_v0_10_0_fe x; +static void r_from_k(rustsecp256k1zkp_v0_10_1_scalar *r, const rustsecp256k1zkp_v0_10_1_ge *group, int k, int* overflow) { + rustsecp256k1zkp_v0_10_1_fe x; unsigned char x_bin[32]; k %= EXHAUSTIVE_TEST_ORDER; x = group[k].x; - rustsecp256k1zkp_v0_10_0_fe_normalize(&x); - rustsecp256k1zkp_v0_10_0_fe_get_b32(x_bin, &x); - rustsecp256k1zkp_v0_10_0_scalar_set_b32(r, x_bin, overflow); + rustsecp256k1zkp_v0_10_1_fe_normalize(&x); + rustsecp256k1zkp_v0_10_1_fe_get_b32(x_bin, &x); + rustsecp256k1zkp_v0_10_1_scalar_set_b32(r, x_bin, overflow); } -static void test_exhaustive_verify(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_verify(const rustsecp256k1zkp_v0_10_1_context *ctx, const 
rustsecp256k1zkp_v0_10_1_ge *group) { int s, r, msg, key; uint64_t iter = 0; for (s = 1; s < EXHAUSTIVE_TEST_ORDER; s++) { for (r = 1; r < EXHAUSTIVE_TEST_ORDER; r++) { for (msg = 1; msg < EXHAUSTIVE_TEST_ORDER; msg++) { for (key = 1; key < EXHAUSTIVE_TEST_ORDER; key++) { - rustsecp256k1zkp_v0_10_0_ge nonconst_ge; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; - rustsecp256k1zkp_v0_10_0_pubkey pk; - rustsecp256k1zkp_v0_10_0_scalar sk_s, msg_s, r_s, s_s; - rustsecp256k1zkp_v0_10_0_scalar s_times_k_s, msg_plus_r_times_sk_s; + rustsecp256k1zkp_v0_10_1_ge nonconst_ge; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_pubkey pk; + rustsecp256k1zkp_v0_10_1_scalar sk_s, msg_s, r_s, s_s; + rustsecp256k1zkp_v0_10_1_scalar s_times_k_s, msg_plus_r_times_sk_s; int k, should_verify; unsigned char msg32[32]; if (skip_section(&iter)) continue; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s_s, s); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&r_s, r); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg_s, msg); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sk_s, key); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s_s, s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&r_s, r); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg_s, msg); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sk_s, key); /* Verify by hand */ /* Run through every k value that gives us this r and check that *one* works. * Note there could be none, there could be multiple, ECDSA is weird. 
*/ should_verify = 0; for (k = 0; k < EXHAUSTIVE_TEST_ORDER; k++) { - rustsecp256k1zkp_v0_10_0_scalar check_x_s; + rustsecp256k1zkp_v0_10_1_scalar check_x_s; r_from_k(&check_x_s, group, k, NULL); if (r_s == check_x_s) { - rustsecp256k1zkp_v0_10_0_scalar_set_int(&s_times_k_s, k); - rustsecp256k1zkp_v0_10_0_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s); - rustsecp256k1zkp_v0_10_0_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s); - rustsecp256k1zkp_v0_10_0_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s); - should_verify |= rustsecp256k1zkp_v0_10_0_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&s_times_k_s, k); + rustsecp256k1zkp_v0_10_1_scalar_mul(&s_times_k_s, &s_times_k_s, &s_s); + rustsecp256k1zkp_v0_10_1_scalar_mul(&msg_plus_r_times_sk_s, &r_s, &sk_s); + rustsecp256k1zkp_v0_10_1_scalar_add(&msg_plus_r_times_sk_s, &msg_plus_r_times_sk_s, &msg_s); + should_verify |= rustsecp256k1zkp_v0_10_1_scalar_eq(&s_times_k_s, &msg_plus_r_times_sk_s); } } /* nb we have a "high s" rule */ - should_verify &= !rustsecp256k1zkp_v0_10_0_scalar_is_high(&s_s); + should_verify &= !rustsecp256k1zkp_v0_10_1_scalar_is_high(&s_s); /* Verify by calling verify */ - rustsecp256k1zkp_v0_10_0_ecdsa_signature_save(&sig, &r_s, &s_s); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_save(&sig, &r_s, &s_s); memcpy(&nonconst_ge, &group[sk_s], sizeof(nonconst_ge)); - rustsecp256k1zkp_v0_10_0_pubkey_save(&pk, &nonconst_ge); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msg32, &msg_s); + rustsecp256k1zkp_v0_10_1_pubkey_save(&pk, &nonconst_ge); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(msg32, &msg_s); CHECK(should_verify == - rustsecp256k1zkp_v0_10_0_ecdsa_verify(ctx, &sig, msg32, &pk)); + rustsecp256k1zkp_v0_10_1_ecdsa_verify(ctx, &sig, msg32, &pk)); } } } } } -static void test_exhaustive_sign(const rustsecp256k1zkp_v0_10_0_context *ctx, const rustsecp256k1zkp_v0_10_0_ge *group) { +static void test_exhaustive_sign(const rustsecp256k1zkp_v0_10_1_context 
*ctx, const rustsecp256k1zkp_v0_10_1_ge *group) { int i, j, k; uint64_t iter = 0; @@ -297,18 +297,18 @@ static void test_exhaustive_sign(const rustsecp256k1zkp_v0_10_0_context *ctx, co for (k = 1; k < EXHAUSTIVE_TEST_ORDER; k++) { /* nonce */ const int starting_k = k; int ret; - rustsecp256k1zkp_v0_10_0_ecdsa_signature sig; - rustsecp256k1zkp_v0_10_0_scalar sk, msg, r, s, expected_r; + rustsecp256k1zkp_v0_10_1_ecdsa_signature sig; + rustsecp256k1zkp_v0_10_1_scalar sk, msg, r, s, expected_r; unsigned char sk32[32], msg32[32]; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&msg, i); - rustsecp256k1zkp_v0_10_0_scalar_set_int(&sk, j); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(sk32, &sk); - rustsecp256k1zkp_v0_10_0_scalar_get_b32(msg32, &msg); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&msg, i); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&sk, j); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(sk32, &sk); + rustsecp256k1zkp_v0_10_1_scalar_get_b32(msg32, &msg); - ret = rustsecp256k1zkp_v0_10_0_ecdsa_sign(ctx, &sig, msg32, sk32, rustsecp256k1zkp_v0_10_0_nonce_function_smallint, &k); + ret = rustsecp256k1zkp_v0_10_1_ecdsa_sign(ctx, &sig, msg32, sk32, rustsecp256k1zkp_v0_10_1_nonce_function_smallint, &k); CHECK(ret == 1); - rustsecp256k1zkp_v0_10_0_ecdsa_signature_load(ctx, &r, &s, &sig); + rustsecp256k1zkp_v0_10_1_ecdsa_signature_load(ctx, &r, &s, &sig); /* Note that we compute expected_r *after* signing -- this is important * because our nonce-computing function function might change k during * signing. 
*/ @@ -353,10 +353,10 @@ static void test_exhaustive_sign(const rustsecp256k1zkp_v0_10_0_context *ctx, co int main(int argc, char** argv) { int i; - rustsecp256k1zkp_v0_10_0_gej groupj[EXHAUSTIVE_TEST_ORDER]; - rustsecp256k1zkp_v0_10_0_ge group[EXHAUSTIVE_TEST_ORDER]; + rustsecp256k1zkp_v0_10_1_gej groupj[EXHAUSTIVE_TEST_ORDER]; + rustsecp256k1zkp_v0_10_1_ge group[EXHAUSTIVE_TEST_ORDER]; unsigned char rand32[32]; - rustsecp256k1zkp_v0_10_0_context *ctx; + rustsecp256k1zkp_v0_10_1_context *ctx; /* Disable buffering for stdout to improve reliability of getting * diagnostic information. Happens right at the start of main because @@ -375,7 +375,7 @@ int main(int argc, char** argv) { printf("test count = %i\n", count); /* find random seed */ - rustsecp256k1zkp_v0_10_0_testrand_init(argc > 2 ? argv[2] : NULL); + rustsecp256k1zkp_v0_10_1_testrand_init(argc > 2 ? argv[2] : NULL); /* set up split processing */ if (argc > 4) { @@ -389,43 +389,43 @@ int main(int argc, char** argv) { } /* Recreate the ecmult{,_gen} tables using the right generator (as selected via EXHAUSTIVE_TEST_ORDER) */ - rustsecp256k1zkp_v0_10_0_ecmult_gen_compute_table(&rustsecp256k1zkp_v0_10_0_ecmult_gen_prec_table[0][0], &rustsecp256k1zkp_v0_10_0_ge_const_g, ECMULT_GEN_PREC_BITS); - rustsecp256k1zkp_v0_10_0_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_0_pre_g, rustsecp256k1zkp_v0_10_0_pre_g_128, WINDOW_G, &rustsecp256k1zkp_v0_10_0_ge_const_g); + rustsecp256k1zkp_v0_10_1_ecmult_gen_compute_table(&rustsecp256k1zkp_v0_10_1_ecmult_gen_prec_table[0][0], &rustsecp256k1zkp_v0_10_1_ge_const_g, ECMULT_GEN_PREC_BITS); + rustsecp256k1zkp_v0_10_1_ecmult_compute_two_tables(rustsecp256k1zkp_v0_10_1_pre_g, rustsecp256k1zkp_v0_10_1_pre_g_128, WINDOW_G, &rustsecp256k1zkp_v0_10_1_ge_const_g); while (count--) { /* Build context */ - ctx = rustsecp256k1zkp_v0_10_0_context_create(SECP256K1_CONTEXT_NONE); - rustsecp256k1zkp_v0_10_0_testrand256(rand32); - CHECK(rustsecp256k1zkp_v0_10_0_context_randomize(ctx, rand32)); + 
ctx = rustsecp256k1zkp_v0_10_1_context_create(SECP256K1_CONTEXT_NONE); + rustsecp256k1zkp_v0_10_1_testrand256(rand32); + CHECK(rustsecp256k1zkp_v0_10_1_context_randomize(ctx, rand32)); /* Generate the entire group */ - rustsecp256k1zkp_v0_10_0_gej_set_infinity(&groupj[0]); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&group[0], &groupj[0]); + rustsecp256k1zkp_v0_10_1_gej_set_infinity(&groupj[0]); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&group[0], &groupj[0]); for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) { - rustsecp256k1zkp_v0_10_0_gej_add_ge(&groupj[i], &groupj[i - 1], &rustsecp256k1zkp_v0_10_0_ge_const_g); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&group[i], &groupj[i]); + rustsecp256k1zkp_v0_10_1_gej_add_ge(&groupj[i], &groupj[i - 1], &rustsecp256k1zkp_v0_10_1_ge_const_g); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&group[i], &groupj[i]); if (count != 0) { /* Set a different random z-value for each Jacobian point, except z=1 is used in the last iteration. */ - rustsecp256k1zkp_v0_10_0_fe z; + rustsecp256k1zkp_v0_10_1_fe z; random_fe(&z); - rustsecp256k1zkp_v0_10_0_gej_rescale(&groupj[i], &z); + rustsecp256k1zkp_v0_10_1_gej_rescale(&groupj[i], &z); } /* Verify against ecmult_gen */ { - rustsecp256k1zkp_v0_10_0_scalar scalar_i; - rustsecp256k1zkp_v0_10_0_gej generatedj; - rustsecp256k1zkp_v0_10_0_ge generated; + rustsecp256k1zkp_v0_10_1_scalar scalar_i; + rustsecp256k1zkp_v0_10_1_gej generatedj; + rustsecp256k1zkp_v0_10_1_ge generated; - rustsecp256k1zkp_v0_10_0_scalar_set_int(&scalar_i, i); - rustsecp256k1zkp_v0_10_0_ecmult_gen(&ctx->ecmult_gen_ctx, &generatedj, &scalar_i); - rustsecp256k1zkp_v0_10_0_ge_set_gej(&generated, &generatedj); + rustsecp256k1zkp_v0_10_1_scalar_set_int(&scalar_i, i); + rustsecp256k1zkp_v0_10_1_ecmult_gen(&ctx->ecmult_gen_ctx, &generatedj, &scalar_i); + rustsecp256k1zkp_v0_10_1_ge_set_gej(&generated, &generatedj); CHECK(group[i].infinity == 0); CHECK(generated.infinity == 0); - CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&generated.x, &group[i].x)); - 
CHECK(rustsecp256k1zkp_v0_10_0_fe_equal(&generated.y, &group[i].y)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&generated.x, &group[i].x)); + CHECK(rustsecp256k1zkp_v0_10_1_fe_equal(&generated.y, &group[i].y)); } } @@ -456,10 +456,10 @@ int main(int argc, char** argv) { #endif #endif - rustsecp256k1zkp_v0_10_0_context_destroy(ctx); + rustsecp256k1zkp_v0_10_1_context_destroy(ctx); } - rustsecp256k1zkp_v0_10_0_testrand_finish(); + rustsecp256k1zkp_v0_10_1_testrand_finish(); printf("no problems found\n"); return 0; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/testutil.h b/secp256k1-zkp-sys/depend/secp256k1/src/testutil.h index a967f747..3054e59f 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/testutil.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/testutil.h @@ -10,20 +10,24 @@ #include "testrand.h" #include "util.h" -static void random_fe(rustsecp256k1zkp_v0_10_0_fe *x) { +static void random_fe(rustsecp256k1zkp_v0_10_1_fe *x) { unsigned char bin[32]; do { - rustsecp256k1zkp_v0_10_0_testrand256(bin); - if (rustsecp256k1zkp_v0_10_0_fe_set_b32_limit(x, bin)) { + rustsecp256k1zkp_v0_10_1_testrand256(bin); + if (rustsecp256k1zkp_v0_10_1_fe_set_b32_limit(x, bin)) { return; } } while(1); } -static void random_fe_non_zero(rustsecp256k1zkp_v0_10_0_fe *nz) { +static void random_fe_non_zero(rustsecp256k1zkp_v0_10_1_fe *nz) { do { random_fe(nz); - } while (rustsecp256k1zkp_v0_10_0_fe_is_zero(nz)); + } while (rustsecp256k1zkp_v0_10_1_fe_is_zero(nz)); +} + +static void rand_flip_bit(unsigned char *array, size_t n) { + array[rustsecp256k1zkp_v0_10_1_testrand_int(n)] ^= 1 << rustsecp256k1zkp_v0_10_1_testrand_int(8); } #endif /* SECP256K1_TESTUTIL_H */ diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/util.h b/secp256k1-zkp-sys/depend/secp256k1/src/util.h index 65a0cd30..66a92039 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/util.h +++ b/secp256k1-zkp-sys/depend/secp256k1/src/util.h @@ -61,35 +61,35 @@ typedef struct { void (*fn)(const char *text, void* data); 
const void* data; -} rustsecp256k1zkp_v0_10_0_callback; +} rustsecp256k1zkp_v0_10_1_callback; -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_callback_call(const rustsecp256k1zkp_v0_10_0_callback * const cb, const char * const text) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_callback_call(const rustsecp256k1zkp_v0_10_1_callback * const cb, const char * const text) { cb->fn(text, (void*)cb->data); } #ifndef USE_EXTERNAL_DEFAULT_CALLBACKS -static void rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn(const char* str, void* data) { +static void rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn(const char* str, void* data) { (void)data; fprintf(stderr, "[libsecp256k1] illegal argument: %s\n", str); abort(); } -static void rustsecp256k1zkp_v0_10_0_default_error_callback_fn(const char* str, void* data) { +static void rustsecp256k1zkp_v0_10_1_default_error_callback_fn(const char* str, void* data) { (void)data; fprintf(stderr, "[libsecp256k1] internal consistency check failed: %s\n", str); abort(); } #else -void rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn(const char* str, void* data); -void rustsecp256k1zkp_v0_10_0_default_error_callback_fn(const char* str, void* data); +void rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn(const char* str, void* data); +void rustsecp256k1zkp_v0_10_1_default_error_callback_fn(const char* str, void* data); #endif -static const rustsecp256k1zkp_v0_10_0_callback default_illegal_callback = { - rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn, +static const rustsecp256k1zkp_v0_10_1_callback default_illegal_callback = { + rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn, NULL }; -static const rustsecp256k1zkp_v0_10_0_callback default_error_callback = { - rustsecp256k1zkp_v0_10_0_default_error_callback_fn, +static const rustsecp256k1zkp_v0_10_1_callback default_error_callback = { + rustsecp256k1zkp_v0_10_1_default_error_callback_fn, NULL }; @@ -145,7 +145,7 @@ static const 
rustsecp256k1zkp_v0_10_0_callback default_error_callback = { #define ROUND_TO_ALIGN(size) ((((size) + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT) /* Extract the sign of an int64, take the abs and return a uint64, constant time. */ -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_sign_and_abs64(uint64_t *out, int64_t in) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_sign_and_abs64(uint64_t *out, int64_t in) { uint64_t mask0, mask1; int ret; ret = in < 0; @@ -156,7 +156,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_sign_and_abs64(uint64_t *ou return ret; } -SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_clz64_var(uint64_t x) { +SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_1_clz64_var(uint64_t x) { int ret; if (!x) { return 64; @@ -202,7 +202,7 @@ SECP256K1_INLINE static int rustsecp256k1zkp_v0_10_0_clz64_var(uint64_t x) { #endif /* Zero memory if flag == 1. Flag must be 0 or 1. Constant time. */ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_memczero(void *s, size_t len, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_memczero(void *s, size_t len, int flag) { unsigned char *p = (unsigned char *)s; /* Access flag with a volatile-qualified lvalue. This prevents clang from figuring out (after inlining) that flag can @@ -221,7 +221,7 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_memczero(void *s, size_t l * We use this to avoid possible compiler bugs with memcmp, e.g. * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95189 */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_memcmp_var(const void *s1, const void *s2, size_t n) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_memcmp_var(const void *s1, const void *s2, size_t n) { const unsigned char *p1 = s1, *p2 = s2; size_t i; @@ -235,7 +235,7 @@ static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_memcmp_var(const void *s1, } /** If flag is true, set *r equal to *a; otherwise leave it. Constant-time. 
Both *r and *a must be initialized and non-negative.*/ -static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_int_cmov(int *r, const int *a, int flag) { +static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_1_int_cmov(int *r, const int *a, int flag) { unsigned int mask0, mask1, r_masked, a_masked; /* Access flag with a volatile-qualified lvalue. This prevents clang from figuring out (after inlining) that flag can @@ -290,8 +290,8 @@ static SECP256K1_INLINE void rustsecp256k1zkp_v0_10_0_int_cmov(int *r, const int /* Determine the number of trailing zero bits in a (non-zero) 32-bit x. * This function is only intended to be used as fallback for - * rustsecp256k1zkp_v0_10_0_ctz32_var, but permits it to be tested separately. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz32_var_debruijn(uint32_t x) { + * rustsecp256k1zkp_v0_10_1_ctz32_var, but permits it to be tested separately. */ +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_ctz32_var_debruijn(uint32_t x) { static const uint8_t debruijn[32] = { 0x00, 0x01, 0x02, 0x18, 0x03, 0x13, 0x06, 0x19, 0x16, 0x04, 0x14, 0x0A, 0x10, 0x07, 0x0C, 0x1A, 0x1F, 0x17, 0x12, 0x05, 0x15, 0x09, 0x0F, 0x0B, @@ -302,8 +302,8 @@ static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz32_var_debruijn(uint32_t /* Determine the number of trailing zero bits in a (non-zero) 64-bit x. * This function is only intended to be used as fallback for - * rustsecp256k1zkp_v0_10_0_ctz64_var, but permits it to be tested separately. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz64_var_debruijn(uint64_t x) { + * rustsecp256k1zkp_v0_10_1_ctz64_var, but permits it to be tested separately. 
*/ +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_ctz64_var_debruijn(uint64_t x) { static const uint8_t debruijn[64] = { 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28, 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11, @@ -314,7 +314,7 @@ static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz64_var_debruijn(uint64_t } /* Determine the number of trailing zero bits in a (non-zero) 32-bit x. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz32_var(uint32_t x) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_ctz32_var(uint32_t x) { VERIFY_CHECK(x != 0); #if (__has_builtin(__builtin_ctz) || SECP256K1_GNUC_PREREQ(3,4)) /* If the unsigned type is sufficient to represent the largest uint32_t, consider __builtin_ctz. */ @@ -327,12 +327,12 @@ static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz32_var(uint32_t x) { return __builtin_ctzl(x); #else /* If no suitable CTZ builtin is available, use a (variable time) software emulation. */ - return rustsecp256k1zkp_v0_10_0_ctz32_var_debruijn(x); + return rustsecp256k1zkp_v0_10_1_ctz32_var_debruijn(x); #endif } /* Determine the number of trailing zero bits in a (non-zero) 64-bit x. */ -static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz64_var(uint64_t x) { +static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_1_ctz64_var(uint64_t x) { VERIFY_CHECK(x != 0); #if (__has_builtin(__builtin_ctzl) || SECP256K1_GNUC_PREREQ(3,4)) /* If the unsigned long type is sufficient to represent the largest uint64_t, consider __builtin_ctzl. */ @@ -345,12 +345,12 @@ static SECP256K1_INLINE int rustsecp256k1zkp_v0_10_0_ctz64_var(uint64_t x) { return __builtin_ctzll(x); #else /* If no suitable CTZ builtin is available, use a (variable time) software emulation. 
*/ - return rustsecp256k1zkp_v0_10_0_ctz64_var_debruijn(x); + return rustsecp256k1zkp_v0_10_1_ctz64_var_debruijn(x); #endif } /* Read a uint32_t in big endian */ -SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_0_read_be32(const unsigned char* p) { +SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_1_read_be32(const unsigned char* p) { return (uint32_t)p[0] << 24 | (uint32_t)p[1] << 16 | (uint32_t)p[2] << 8 | @@ -358,7 +358,7 @@ SECP256K1_INLINE static uint32_t rustsecp256k1zkp_v0_10_0_read_be32(const unsign } /* Write a uint32_t in big endian */ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_write_be32(unsigned char* p, uint32_t x) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_write_be32(unsigned char* p, uint32_t x) { p[3] = x; p[2] = x >> 8; p[1] = x >> 16; @@ -366,7 +366,7 @@ SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_write_be32(unsigned char* } /* Read a uint64_t in big endian */ -SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_read_be64(const unsigned char* p) { +SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_1_read_be64(const unsigned char* p) { return (uint64_t)p[0] << 56 | (uint64_t)p[1] << 48 | (uint64_t)p[2] << 40 | @@ -378,7 +378,7 @@ SECP256K1_INLINE static uint64_t rustsecp256k1zkp_v0_10_0_read_be64(const unsign } /* Write a uint64_t in big endian */ -SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_0_write_be64(unsigned char* p, uint64_t x) { +SECP256K1_INLINE static void rustsecp256k1zkp_v0_10_1_write_be64(unsigned char* p, uint64_t x) { p[7] = x; p[6] = x >> 8; p[5] = x >> 16; diff --git a/secp256k1-zkp-sys/depend/secp256k1/src/wycheproof/WYCHEPROOF_COPYING b/secp256k1-zkp-sys/depend/secp256k1/src/wycheproof/WYCHEPROOF_COPYING index 5f6a097d..6c8e6f72 100644 --- a/secp256k1-zkp-sys/depend/secp256k1/src/wycheproof/WYCHEPROOF_COPYING +++ b/secp256k1-zkp-sys/depend/secp256k1/src/wycheproof/WYCHEPROOF_COPYING @@ -1,10 +1,10 @@ -* The file 
`ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.json` in this directory +* The file `ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.json` in this directory comes from Google's project Wycheproof with git commit `b063b4aedae951c69df014cd25fa6d69ae9e8cb9`, see - https://github.com/google/wycheproof/blob/b063b4aedae951c69df014cd25fa6d69ae9e8cb9/testvectors_v1/ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.json + https://github.com/google/wycheproof/blob/b063b4aedae951c69df014cd25fa6d69ae9e8cb9/testvectors_v1/ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.json -* The file `ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.h` is generated from - `ecdsa_rustsecp256k1zkp_v0_10_0_sha256_bitcoin_test.json` using the script +* The file `ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.h` is generated from + `ecdsa_rustsecp256k1zkp_v0_10_1_sha256_bitcoin_test.json` using the script `tests_wycheproof_generate.py`. ------------------------------------------------------------------------------- diff --git a/secp256k1-zkp-sys/src/error_callbacks.rs b/secp256k1-zkp-sys/src/error_callbacks.rs index 7b3e4dc4..42a34933 100644 --- a/secp256k1-zkp-sys/src/error_callbacks.rs +++ b/secp256k1-zkp-sys/src/error_callbacks.rs @@ -9,7 +9,7 @@ use secp256k1_sys::types::{c_char, c_void}; #[no_mangle] #[cfg(not(rust_secp_zkp_no_symbol_renaming))] -pub unsafe extern "C" fn rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn( +pub unsafe extern "C" fn rustsecp256k1zkp_v0_10_1_default_illegal_callback_fn( _: *const c_char, _data: *mut c_void, ) { @@ -18,7 +18,7 @@ pub unsafe extern "C" fn rustsecp256k1zkp_v0_10_0_default_illegal_callback_fn( #[no_mangle] #[cfg(not(rust_secp_zkp_no_symbol_renaming))] -pub unsafe extern "C" fn rustsecp256k1zkp_v0_10_0_default_error_callback_fn( +pub unsafe extern "C" fn rustsecp256k1zkp_v0_10_1_default_error_callback_fn( _: *const c_char, _data: *mut c_void, ) { diff --git a/secp256k1-zkp-sys/src/zkp.rs b/secp256k1-zkp-sys/src/zkp.rs index 
d3b31f34..1838a8b2 100644 --- a/secp256k1-zkp-sys/src/zkp.rs +++ b/secp256k1-zkp-sys/src/zkp.rs @@ -2,11 +2,12 @@ use core::{ fmt, hash::{self, Hash}, }; -use {types::*, Context, PublicKey, Signature}; +use {types::*, Context, PublicKey, Signature, Keypair, XOnlyPublicKey}; /// Rangeproof maximum length pub const RANGEPROOF_MAX_LENGTH: size_t = 5134; pub const ECDSA_ADAPTOR_SIGNATURE_LENGTH: size_t = 162; +pub const SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH: size_t = 65; /// The maximum number of whitelist keys. pub const WHITELIST_MAX_N_KEYS: size_t = 255; @@ -14,7 +15,7 @@ pub const WHITELIST_MAX_N_KEYS: size_t = 255; extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_pedersen_commitment_parse" + link_name = "rustsecp256k1zkp_v0_10_1_pedersen_commitment_parse" )] // Parse a 33-byte commitment into 64 byte internal commitment object pub fn secp256k1_pedersen_commitment_parse( @@ -25,7 +26,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_pedersen_commitment_serialize" + link_name = "rustsecp256k1zkp_v0_10_1_pedersen_commitment_serialize" )] // Serialize a 64-byte commit object into a 33 byte serialized byte sequence pub fn secp256k1_pedersen_commitment_serialize( @@ -36,7 +37,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_pedersen_commit" + link_name = "rustsecp256k1zkp_v0_10_1_pedersen_commit" )] // Generates a pedersen commitment: *commit = blind * G + value * G2. // The commitment is 33 bytes, the blinding factor is 32 bytes. @@ -50,7 +51,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_pedersen_blind_generator_blind_sum" + link_name = "rustsecp256k1zkp_v0_10_1_pedersen_blind_generator_blind_sum" )] /// Sets the final Pedersen blinding factor correctly when the generators themselves /// have blinding factors. 
@@ -92,7 +93,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_pedersen_verify_tally" + link_name = "rustsecp256k1zkp_v0_10_1_pedersen_verify_tally" )] // Takes two list of 64-byte commitments and sums the first set and // subtracts the second and verifies that they sum to 0. @@ -107,7 +108,7 @@ extern "C" { #[cfg(feature = "std")] #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_rangeproof_info" + link_name = "rustsecp256k1zkp_v0_10_1_rangeproof_info" )] pub fn secp256k1_rangeproof_info( ctx: *const Context, @@ -122,7 +123,7 @@ extern "C" { #[cfg(feature = "std")] #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_rangeproof_rewind" + link_name = "rustsecp256k1zkp_v0_10_1_rangeproof_rewind" )] pub fn secp256k1_rangeproof_rewind( ctx: *const Context, @@ -144,7 +145,7 @@ extern "C" { #[cfg(feature = "std")] #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_rangeproof_verify" + link_name = "rustsecp256k1zkp_v0_10_1_rangeproof_verify" )] pub fn secp256k1_rangeproof_verify( ctx: *const Context, @@ -161,7 +162,7 @@ extern "C" { #[cfg(feature = "std")] #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_rangeproof_sign" + link_name = "rustsecp256k1zkp_v0_10_1_rangeproof_sign" )] pub fn secp256k1_rangeproof_sign( ctx: *const Context, @@ -183,7 +184,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_initialize" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_initialize" )] pub fn secp256k1_surjectionproof_initialize( ctx: *const Context, @@ -199,7 +200,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_serialize" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_serialize" )] pub fn 
secp256k1_surjectionproof_serialize( ctx: *const Context, @@ -210,7 +211,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_serialized_size" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_serialized_size" )] pub fn secp256k1_surjectionproof_serialized_size( ctx: *const Context, @@ -219,7 +220,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_parse" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_parse" )] pub fn secp256k1_surjectionproof_parse( ctx: *const Context, @@ -230,7 +231,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_generate" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_generate" )] pub fn secp256k1_surjectionproof_generate( ctx: *const Context, @@ -245,7 +246,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_surjectionproof_verify" + link_name = "rustsecp256k1zkp_v0_10_1_surjectionproof_verify" )] pub fn secp256k1_surjectionproof_verify( ctx: *const Context, @@ -257,7 +258,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_generator_generate_blinded" + link_name = "rustsecp256k1zkp_v0_10_1_generator_generate_blinded" )] pub fn secp256k1_generator_generate_blinded( ctx: *const Context, @@ -268,7 +269,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_generator_serialize" + link_name = "rustsecp256k1zkp_v0_10_1_generator_serialize" )] pub fn secp256k1_generator_serialize( ctx: *const Context, @@ -278,7 +279,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_generator_parse" + link_name = "rustsecp256k1zkp_v0_10_1_generator_parse" )] pub fn secp256k1_generator_parse( ctx: *const 
Context, @@ -288,13 +289,13 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_nonce_function_ecdsa_adaptor" + link_name = "rustsecp256k1zkp_v0_10_1_nonce_function_ecdsa_adaptor" )] pub static secp256k1_nonce_function_ecdsa_adaptor: EcdsaAdaptorNonceFn; #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_encrypt" + link_name = "rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_encrypt" )] pub fn secp256k1_ecdsa_adaptor_encrypt( cx: *const Context, @@ -308,7 +309,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_verify" + link_name = "rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_verify" )] pub fn secp256k1_ecdsa_adaptor_verify( cx: *const Context, @@ -320,7 +321,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_decrypt" + link_name = "rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_decrypt" )] pub fn secp256k1_ecdsa_adaptor_decrypt( cx: *const Context, @@ -331,7 +332,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_ecdsa_adaptor_recover" + link_name = "rustsecp256k1zkp_v0_10_1_ecdsa_adaptor_recover" )] pub fn secp256k1_ecdsa_adaptor_recover( cx: *const Context, @@ -343,7 +344,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_whitelist_signature_parse" + link_name = "rustsecp256k1zkp_v0_10_1_whitelist_signature_parse" )] pub fn secp256k1_whitelist_signature_parse( cx: *const Context, @@ -354,7 +355,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_whitelist_signature_serialize" + link_name = "rustsecp256k1zkp_v0_10_1_whitelist_signature_serialize" )] pub fn secp256k1_whitelist_signature_serialize( ctx: *const Context, @@ -365,7 +366,7 @@ extern "C" { #[cfg_attr( 
not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_whitelist_sign" + link_name = "rustsecp256k1zkp_v0_10_1_whitelist_sign" )] pub fn secp256k1_whitelist_sign( ctx: *const Context, @@ -381,7 +382,7 @@ extern "C" { #[cfg_attr( not(rust_secp_zkp_no_symbol_renaming), - link_name = "rustsecp256k1zkp_v0_10_0_whitelist_verify" + link_name = "rustsecp256k1zkp_v0_10_1_whitelist_verify" )] pub fn secp256k1_whitelist_verify( ctx: *const Context, @@ -391,6 +392,59 @@ extern "C" { n_keys: size_t, sub_pubkey: *const PublicKey, ) -> c_int; + + #[cfg_attr( + not(rust_secp_zkp_no_symbol_renaming), + link_name = "rustsecp256k1zkp_v0_10_1_nonce_function_schnorr_adaptor" + )] + pub static secp256k1_nonce_function_schnorr_adaptor: SchnorrAdaptorNonceFn; + + #[cfg_attr( + not(rust_secp_zkp_no_symbol_renaming), + link_name = "rustsecp256k1zkp_v0_10_1_schnorr_adaptor_presign" + )] + pub fn secp256k1_schnorr_adaptor_presign( + ctx: *const Context, + pre_sig65: *mut SchnorrAdaptorPreSignature, + msg32: *const c_uchar, + keypair: *const Keypair, + adaptor: *const PublicKey, + aux_rand32: *const c_uchar, + ) -> c_int; + + #[cfg_attr( + not(rust_secp_zkp_no_symbol_renaming), + link_name = "rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract" + )] + pub fn secp256k1_schnorr_adaptor_extract( + ctx: *const Context, + adaptor: *mut PublicKey, + pre_sig65: *const SchnorrAdaptorPreSignature, + msg32: *const c_uchar, + pubkey: *const XOnlyPublicKey, + ) -> c_int; + + #[cfg_attr( + not(rust_secp_zkp_no_symbol_renaming), + link_name = "rustsecp256k1zkp_v0_10_1_schnorr_adaptor_adapt" + )] + pub fn secp256k1_schnorr_adaptor_adapt( + ctx: *const Context, + sig64: *mut c_uchar, + pre_sig65: *const SchnorrAdaptorPreSignature, + sec_adaptor32: *const c_uchar, + ) -> c_int; + + #[cfg_attr( + not(rust_secp_zkp_no_symbol_renaming), + link_name = "rustsecp256k1zkp_v0_10_1_schnorr_adaptor_extract_sec" + )] + pub fn secp256k1_schnorr_adaptor_extract_sec( + ctx: *const Context, + 
sec_adaptor32: *mut c_uchar, + pre_sig65: *const SchnorrAdaptorPreSignature, + sig64: *const c_uchar, + ) -> c_int; } #[repr(C)] @@ -649,3 +703,58 @@ impl PartialEq for EcdsaAdaptorSignature { } impl Eq for EcdsaAdaptorSignature {} + +/// Same as secp256k1_nonce_function_hardened, but introduces +/// an extra argument for a compressed 33-byte adaptor point. +pub type SchnorrAdaptorNonceFn = Option< + unsafe extern "C" fn( + nonce32: *mut c_uchar, + msg32: *const c_uchar, + key32: *const c_uchar, + adaptor33: *const c_uchar, + xonly_pk32: *const c_uchar, + algo: *const c_uchar, + algo_len: size_t, + data: *mut c_void, + ) -> c_int, +>; + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct SchnorrAdaptorPreSignature([u8; SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]); +impl_array_newtype!(SchnorrAdaptorPreSignature, u8, SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH); +impl_raw_debug!(SchnorrAdaptorPreSignature); + +impl Default for SchnorrAdaptorPreSignature { + fn default() -> SchnorrAdaptorPreSignature { + SchnorrAdaptorPreSignature::new() + } +} + +impl SchnorrAdaptorPreSignature { + /// Create a new (zeroed) Schnorr adaptor signature usable for the FFI interface + pub fn new() -> Self { + SchnorrAdaptorPreSignature([0u8; SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]) + } + + /// Create a new Schnorr adaptor signature usable for the FFI interface from raw bytes + /// + /// # Safety + /// + /// Does not check the validity of the underlying representation. If it is + /// invalid the result may be assertion failures (and process aborts) from + /// the underlying library. You should not use this method except with data + /// that you obtained from the FFI interface of the same version of this + /// library. + pub unsafe fn from_array_unchecked(data: [c_uchar; SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]) -> Self { + Self(data) + } +} + +impl PartialEq for SchnorrAdaptorPreSignature { + fn eq(&self, other: &Self) -> bool { + self.0[..] == other.0[..]
+ } +} + +impl Eq for SchnorrAdaptorPreSignature {} \ No newline at end of file diff --git a/src/lib.rs b/src/lib.rs index 7a6d4bda..f4d23b36 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -74,7 +74,7 @@ pub use crate::zkp::*; pub use secp256k1::Error as UpstreamError; -/// An ECDSA error +/// The main error type for this library. #[derive(Copy, PartialEq, Eq, Clone, Debug)] pub enum Error { /// Calling through to `secp256k1` resulted in an error. @@ -111,6 +111,14 @@ pub enum Error { CannotCreateWhitelistSignature, /// The given whitelist signature doesn't correctly prove inclusion in the whitelist. InvalidWhitelistProof, + /// Given bytes don't represent a valid Schnorr adaptor pre-signature + InvalidSchnorrAdaptorPreSignature, + /// Failed to extract the adaptor point from pre-signature due to an error within `libsecp256k1-zkp` + CannotExtractAdaptorPoint, + /// Failed to adapt the schnorr adaptor pre-signature due to an error within `libsecp256k1-zkp` + CannotAdaptPreSignature, + /// Failed to extract the secret from pre-signature due to an error within `libsecp256k1-zkp` + CannotExtractSecretAdaptor, } // Passthrough Debug to Display, since errors should be user-visible @@ -134,10 +142,14 @@ impl fmt::Display for Error { Error::InvalidPakList => "invalid PAK list", Error::CannotCreateWhitelistSignature => { "cannot create whitelist signature with the given data" - } + }, Error::InvalidWhitelistProof => { "given whitelist signature doesn't correctly prove inclusion in the whitelist" - } + }, + Error::InvalidSchnorrAdaptorPreSignature => "malformed Schnorr adaptor pre-signature", + Error::CannotExtractAdaptorPoint => "failed to extract adaptor point from the pre-signature", + Error::CannotAdaptPreSignature => "failed to adapt the pre-signature into a BIP340 signature", + Error::CannotExtractSecretAdaptor => "failed to extract secret adaptor from the pre-signature", }; f.write_str(str) diff --git a/src/zkp/mod.rs b/src/zkp/mod.rs index d16ad2aa..25419fc7 100644 --- 
a/src/zkp/mod.rs +++ b/src/zkp/mod.rs @@ -8,6 +8,7 @@ mod rangeproof; mod surjection_proof; mod tag; mod whitelist; +mod schnorr_adaptor; pub use self::ecdsa_adaptor::*; pub use self::generator::*; @@ -19,3 +20,4 @@ pub use self::rangeproof::*; pub use self::surjection_proof::*; pub use self::tag::*; pub use self::whitelist::*; +pub use self::schnorr_adaptor::*; diff --git a/src/zkp/schnorr_adaptor.rs b/src/zkp/schnorr_adaptor.rs new file mode 100644 index 00000000..bc6abfc7 --- /dev/null +++ b/src/zkp/schnorr_adaptor.rs @@ -0,0 +1,298 @@ +//! Bindings for Schnorr based adaptor signatures in secp256k1-zkp. + +use crate::ffi::{self, CPtr, SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH}; +#[cfg(feature = "rand-std")] +use crate::rand::thread_rng; +#[cfg(feature = "actual-rand")] +use crate::rand::{CryptoRng, Rng}; +use crate::{PublicKey, Secp256k1, SecretKey, Keypair, XOnlyPublicKey}; +use crate::constants::{SECRET_KEY_SIZE,SCHNORR_SIGNATURE_SIZE}; +use crate::schnorr::Signature as SchnorrSignature; +use crate::{Message, Signing}; +use crate::{from_hex, Error}; +use core::{fmt, ptr, str}; + +/// Represents an adaptor signature +#[derive(Debug, PartialEq, Clone, Copy, Eq)] +pub struct SchnorrAdaptorPreSignature(ffi::SchnorrAdaptorPreSignature); + +impl fmt::LowerHex for SchnorrAdaptorPreSignature { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for ch in self.0.as_ref().iter() { + write!(f, "{:02x}", ch)?; + } + Ok(()) + } +} + +impl fmt::Display for SchnorrAdaptorPreSignature { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::LowerHex::fmt(self, f) + } +} + +impl str::FromStr for SchnorrAdaptorPreSignature { + type Err = Error; + fn from_str(s: &str) -> Result<SchnorrAdaptorPreSignature, Error> { + let mut res = [0; SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]; + match from_hex(s, &mut res) { + Ok(SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH) => { + SchnorrAdaptorPreSignature::from_slice(&res[0..SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]) + } + _ => Err(Error::InvalidSchnorrAdaptorPreSignature), + } + } +} + 
+#[cfg(feature = "serde")] +impl ::serde::Serialize for SchnorrAdaptorPreSignature { + fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { + if s.is_human_readable() { + s.collect_str(self) + } else { + s.serialize_bytes(self.0.as_ref()) + } + } +} + +#[cfg(feature = "serde")] +impl<'de> ::serde::Deserialize<'de> for SchnorrAdaptorPreSignature { + fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> { + use crate::serde_util; + + if d.is_human_readable() { + d.deserialize_str(serde_util::FromStrVisitor::new("an ASCII hex string")) + } else { + d.deserialize_bytes(serde_util::BytesVisitor::new( + "a bytestring", + SchnorrAdaptorPreSignature::from_slice, + )) + } + } +} + +impl CPtr for SchnorrAdaptorPreSignature { + type Target = ffi::SchnorrAdaptorPreSignature; + fn as_c_ptr(&self) -> *const Self::Target { + self.as_ptr() + } + + fn as_mut_c_ptr(&mut self) -> *mut Self::Target { + self.as_mut_ptr() + } +} + +impl AsRef<[u8]> for SchnorrAdaptorPreSignature { + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } +} + +impl SchnorrAdaptorPreSignature { + /// Creates a [`SchnorrAdaptorPreSignature`] directly from a slice + #[inline] + pub fn from_slice(data: &[u8]) -> Result<SchnorrAdaptorPreSignature, Error> { + match data.len() { + SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH => { + let mut ret = [0; SCHNORR_ADAPTOR_PRESIGNATURE_LENGTH]; + ret[..].copy_from_slice(data); + unsafe { + Ok(SchnorrAdaptorPreSignature( + ffi::SchnorrAdaptorPreSignature::from_array_unchecked(ret), + )) + } + } + _ => Err(Error::InvalidSchnorrAdaptorPreSignature), + } + } + + /// Obtains a raw const pointer suitable for use with FFI functions + #[inline] + pub fn as_ptr(&self) -> *const ffi::SchnorrAdaptorPreSignature { + &self.0 + } + + /// Obtains a raw mutable pointer suitable for use with FFI functions + #[inline] + pub fn as_mut_ptr(&mut self) -> *mut ffi::SchnorrAdaptorPreSignature { + &mut self.0 + } +} + +impl SchnorrAdaptorPreSignature { + /// Creates a Schnorr adaptor pre-signature.
The auxiliary randomness is + /// generated using the ThreadRng random number generator. + /// Requires compilation with "rand-std" feature. + #[cfg(feature = "rand-std")] + pub fn presign<C: Signing>( + secp: &Secp256k1<C>, + msg: &Message, + keypair: &Keypair, + adaptor: &PublicKey, + ) -> SchnorrAdaptorPreSignature { + let mut rng = thread_rng(); + SchnorrAdaptorPreSignature::presign_with_rng(secp, msg, keypair, adaptor, &mut rng) + } + + /// Creates an adaptor signature along with a proof to verify the adaptor signature, + /// This function derives a nonce using a similar process as described in BIP-340. + /// The nonce derivation process is strengthened against side channel + /// attacks by providing auxiliary randomness using the provided random number generator. + /// Requires compilation with "rand" feature. + #[cfg(feature = "actual-rand")] + pub fn presign_with_rng<C: Signing, R: Rng + CryptoRng>( + secp: &Secp256k1<C>, + msg: &Message, + keypair: &Keypair, + adaptor: &PublicKey, + rng: &mut R, + ) -> SchnorrAdaptorPreSignature { + let mut aux = [0u8; 32]; + rng.fill_bytes(&mut aux); + SchnorrAdaptorPreSignature::presign_with_aux_rand(secp, msg, keypair, adaptor, &aux) + } + + /// Creates a Schnorr adaptor pre-signature without using any auxiliary + /// random data. Note that using this function is still considered safe. + pub fn presign_no_aux_rand<C: Signing>( + secp: &Secp256k1<C>, + msg: &Message, + keypair: &Keypair, + adaptor: &PublicKey, + ) -> SchnorrAdaptorPreSignature { + let mut pre_sig = ffi::SchnorrAdaptorPreSignature::new(); + + let res = unsafe { + ffi::secp256k1_schnorr_adaptor_presign( + secp.ctx().as_ptr(), + &mut pre_sig, + msg.as_c_ptr(), + keypair.as_c_ptr(), + adaptor.as_c_ptr(), + ptr::null_mut(), + ) + }; + debug_assert_eq!(res, 1); + + SchnorrAdaptorPreSignature(pre_sig) + } + + /// Creates a Schnorr adaptor pre-signature given an auxiliary random + /// data. Note that using this function is still considered safe. 
+ pub fn presign_with_aux_rand<C: Signing>( + secp: &Secp256k1<C>, + msg: &Message, + keypair: &Keypair, + adaptor: &PublicKey, + aux_rand: &[u8; 32], + ) -> SchnorrAdaptorPreSignature { + let mut pre_sig = ffi::SchnorrAdaptorPreSignature::new(); + + let res = unsafe { + ffi::secp256k1_schnorr_adaptor_presign( + secp.ctx().as_ptr(), + &mut pre_sig, + msg.as_c_ptr(), + keypair.as_c_ptr(), + adaptor.as_c_ptr(), + aux_rand.as_c_ptr() as *const ffi::types::c_uchar, + ) + }; + debug_assert_eq!(res, 1); + + SchnorrAdaptorPreSignature(pre_sig) + } + + /// Extracts the adaptor point from a Schnorr adaptor pre-signature. + pub fn extract_adaptor(&self, msg: &Message, pubkey: &XOnlyPublicKey) -> Result<PublicKey, Error> { + unsafe { + let mut adaptor = ffi::PublicKey::new(); + let res = ffi::secp256k1_schnorr_adaptor_extract( + ffi::secp256k1_context_no_precomp, + &mut adaptor, + self.as_c_ptr(), + msg.as_c_ptr(), + pubkey.as_c_ptr(), + ); + + if res != 1 { + return Err(Error::CannotExtractAdaptorPoint); + } + + Ok(adaptor.into()) + } + } + + /// Adapts the Schnorr adaptor pre-signature to produce a BIP-340 Schnorr signature + pub fn adapt(&self, sec_adaptor: &SecretKey) -> Result<SchnorrSignature, Error> { + let mut sig = [0u8; SCHNORR_SIGNATURE_SIZE]; + + let res = unsafe { + ffi::secp256k1_schnorr_adaptor_adapt( + ffi::secp256k1_context_no_precomp, + sig.as_mut_c_ptr(), + self.as_c_ptr(), + sec_adaptor.as_c_ptr(), + ) + }; + + if res != 1 { + return Err(Error::CannotAdaptPreSignature); + } + + Ok(SchnorrSignature::from_slice(&sig)?) 
+ } + + /// Extract a secret adaptor from Schnorr adaptor pre-signature and BIP340 Schnorr signature + pub fn extract_secadaptor(&self, sig: &SchnorrSignature) -> Result<SecretKey, Error> { + let mut sec_adaptor = [0u8; SECRET_KEY_SIZE]; + + let res = unsafe { + ffi::secp256k1_schnorr_adaptor_extract_sec( + ffi::secp256k1_context_no_precomp, + sec_adaptor.as_mut_c_ptr(), + self.as_c_ptr(), + sig.as_c_ptr(), + ) + }; + + if res != 1 { + return Err(Error::CannotExtractSecretAdaptor); + } + + Ok(SecretKey::from_slice(&sec_adaptor)?) + } +} + +#[cfg(test)] +#[allow(unused_imports)] +mod tests { + use super::*; + + #[test] + #[cfg(feature = "rand-std")] + fn test_schnorr_adaptor_correctness() { + let secp = Secp256k1::new(); + let mut rng = rand::thread_rng(); + + let msg:[u8; 32] = rng.gen(); + let msg = Message::from_digest(msg); + + let keypair = Keypair::new(&secp, &mut rng); + let (pubkey, _parity) = keypair.x_only_public_key(); + + let (secret_adaptor, adaptor) = secp.generate_keypair(&mut rng); + + let pre_sig = SchnorrAdaptorPreSignature::presign(&secp, &msg, &keypair, &adaptor); + + let extracted_adaptor = pre_sig.extract_adaptor(&msg, &pubkey).unwrap(); + assert_eq!(adaptor, extracted_adaptor); + + let sig = pre_sig.adapt(&secret_adaptor).unwrap(); + let res = secp.verify_schnorr(&sig, &msg, &pubkey); + assert_eq!(res, Ok(())); + + let extracted_secadaptor = pre_sig.extract_secadaptor(&sig).unwrap(); + assert_eq!(secret_adaptor, extracted_secadaptor); + } +}