csecp256k1

Haskell FFI bindings to bitcoin-core/secp256k1 (docs.ppad.tech/csecp256k1).
git clone git://git.ppad.tech/csecp256k1.git

scalar_low_impl.h (6629B): the reduced "low" scalar implementation used in exhaustive-test builds, where a scalar is a single uint32_t taken modulo the small group order EXHAUSTIVE_TEST_ORDER.


/***********************************************************************
 * Copyright (c) 2015 Andrew Poelstra                                  *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_SCALAR_REPR_IMPL_H
#define SECP256K1_SCALAR_REPR_IMPL_H

#include "checkmem.h"
#include "scalar.h"
#include "util.h"

#include <string.h>

SECP256K1_INLINE static int haskellsecp256k1_v0_1_0_scalar_is_even(const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return !(*a & 1);
}

SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_clear(haskellsecp256k1_v0_1_0_scalar *r) { *r = 0; }

SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_set_int(haskellsecp256k1_v0_1_0_scalar *r, unsigned int v) {
    *r = v % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
}

SECP256K1_INLINE static unsigned int haskellsecp256k1_v0_1_0_scalar_get_bits(const haskellsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
    SECP256K1_SCALAR_VERIFY(a);

    if (offset < 32)
        return ((*a >> offset) & ((((uint32_t)1) << count) - 1));
    else
        return 0;
}
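
/* A minimal usage sketch (hypothetical helper, not part of the upstream
 * file): bit windows are read straight off the uint32_t representation.
 * With *a == 3 (binary 11), two bits at offset 0 read 3 and one bit at
 * offset 1 reads 1; any offset at or beyond bit 32 reads 0. Assumes the
 * test order exceeds 3, as in the usual exhaustive-test configurations. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_get_bits_example(void) {
    haskellsecp256k1_v0_1_0_scalar a;
    haskellsecp256k1_v0_1_0_scalar_set_int(&a, 3);
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_get_bits(&a, 0, 2) == 3);
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_get_bits(&a, 1, 1) == 1);
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_get_bits(&a, 32, 2) == 0);
}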

SECP256K1_INLINE static unsigned int haskellsecp256k1_v0_1_0_scalar_get_bits_var(const haskellsecp256k1_v0_1_0_scalar *a, unsigned int offset, unsigned int count) {
    SECP256K1_SCALAR_VERIFY(a);

    return haskellsecp256k1_v0_1_0_scalar_get_bits(a, offset, count);
}

SECP256K1_INLINE static int haskellsecp256k1_v0_1_0_scalar_check_overflow(const haskellsecp256k1_v0_1_0_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }

static int haskellsecp256k1_v0_1_0_scalar_add(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *a, const haskellsecp256k1_v0_1_0_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
    return *r < *b;
}
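
/* A usage sketch (hypothetical helper, not part of the upstream file): the
 * return value flags a wrap modulo the group order. (n-1) + 2 reduces to 1,
 * and 1 < 2, so the first call reports an overflow; 1 + 2 == 3 stays in
 * range and reports none. Assumes EXHAUSTIVE_TEST_ORDER > 3, as in the
 * usual exhaustive-test configurations. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_add_example(void) {
    haskellsecp256k1_v0_1_0_scalar a, b, r;
    haskellsecp256k1_v0_1_0_scalar_set_int(&a, EXHAUSTIVE_TEST_ORDER - 1);
    haskellsecp256k1_v0_1_0_scalar_set_int(&b, 2);
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_add(&r, &a, &b) == 1); /* wrapped */
    haskellsecp256k1_v0_1_0_scalar_set_int(&a, 1);
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_add(&r, &a, &b) == 0); /* in range */
}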

static void haskellsecp256k1_v0_1_0_scalar_cadd_bit(haskellsecp256k1_v0_1_0_scalar *r, unsigned int bit, int flag) {
    SECP256K1_SCALAR_VERIFY(r);

    if (flag && bit < 32)
        *r += ((uint32_t)1 << bit);

    SECP256K1_SCALAR_VERIFY(r);
    VERIFY_CHECK(bit < 32);
    /* Verify that adding (1 << bit) will not overflow any in-range scalar *r by overflowing the underlying uint32_t. */
    VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER);
}
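
/* A usage sketch (hypothetical helper, not part of the upstream file): the
 * bit is added only when the flag is set. Because EXHAUSTIVE_TEST_ORDER is
 * far below 2^32, adding 1 << bit to an in-range scalar can never wrap the
 * underlying uint32_t, which is exactly what the final VERIFY_CHECK above
 * asserts. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_cadd_bit_example(void) {
    haskellsecp256k1_v0_1_0_scalar r;
    haskellsecp256k1_v0_1_0_scalar_set_int(&r, 0);
    haskellsecp256k1_v0_1_0_scalar_cadd_bit(&r, 1, 0); /* flag clear: no-op */
    VERIFY_CHECK(r == 0);
    haskellsecp256k1_v0_1_0_scalar_cadd_bit(&r, 1, 1); /* adds 1 << 1 */
    VERIFY_CHECK(r == 2);
}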

static void haskellsecp256k1_v0_1_0_scalar_set_b32(haskellsecp256k1_v0_1_0_scalar *r, const unsigned char *b32, int *overflow) {
    int i;
    int over = 0;
    *r = 0;
    for (i = 0; i < 32; i++) {
        *r = (*r * 0x100) + b32[i];
        if (*r >= EXHAUSTIVE_TEST_ORDER) {
            over = 1;
            *r %= EXHAUSTIVE_TEST_ORDER;
        }
    }
    if (overflow) *overflow = over;

    SECP256K1_SCALAR_VERIFY(r);
}
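
/* A usage sketch (hypothetical helper, not part of the upstream file):
 * parsing is big-endian over all 32 bytes, reducing as it goes. A buffer
 * ending in 0x01 parses to 1 with no overflow; one ending in 0xff exceeds
 * the tiny test order along the way, so the overflow flag is set and the
 * result comes back reduced. Assumes EXHAUSTIVE_TEST_ORDER <= 255, as in
 * the usual exhaustive-test configurations. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_set_b32_example(void) {
    unsigned char b32[32] = {0};
    haskellsecp256k1_v0_1_0_scalar r;
    int overflow;
    b32[31] = 0x01;
    haskellsecp256k1_v0_1_0_scalar_set_b32(&r, b32, &overflow);
    VERIFY_CHECK(r == 1 && overflow == 0);
    b32[31] = 0xff;
    haskellsecp256k1_v0_1_0_scalar_set_b32(&r, b32, &overflow);
    VERIFY_CHECK(r == 255 % EXHAUSTIVE_TEST_ORDER && overflow == 1);
}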

static void haskellsecp256k1_v0_1_0_scalar_get_b32(unsigned char *bin, const haskellsecp256k1_v0_1_0_scalar* a) {
    SECP256K1_SCALAR_VERIFY(a);

    memset(bin, 0, 32);
    bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
}
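
/* A round-trip sketch (hypothetical helper, not part of the upstream file):
 * serialization writes the value big-endian into the last four bytes and
 * zero-fills the rest, so parsing it back recovers the scalar exactly.
 * Assumes the test order exceeds 5. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_get_b32_example(void) {
    unsigned char bin[32];
    haskellsecp256k1_v0_1_0_scalar a, back;
    haskellsecp256k1_v0_1_0_scalar_set_int(&a, 5);
    haskellsecp256k1_v0_1_0_scalar_get_b32(bin, &a);
    haskellsecp256k1_v0_1_0_scalar_set_b32(&back, bin, NULL);
    VERIFY_CHECK(back == a);
}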

SECP256K1_INLINE static int haskellsecp256k1_v0_1_0_scalar_is_zero(const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a == 0;
}

static void haskellsecp256k1_v0_1_0_scalar_negate(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    if (*a == 0) {
        *r = 0;
    } else {
        *r = EXHAUSTIVE_TEST_ORDER - *a;
    }

    SECP256K1_SCALAR_VERIFY(r);
}

SECP256K1_INLINE static int haskellsecp256k1_v0_1_0_scalar_is_one(const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a == 1;
}

static int haskellsecp256k1_v0_1_0_scalar_is_high(const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a > EXHAUSTIVE_TEST_ORDER / 2;
}

static int haskellsecp256k1_v0_1_0_scalar_cond_negate(haskellsecp256k1_v0_1_0_scalar *r, int flag) {
    SECP256K1_SCALAR_VERIFY(r);

    if (flag) haskellsecp256k1_v0_1_0_scalar_negate(r, r);

    SECP256K1_SCALAR_VERIFY(r);
    return flag ? -1 : 1;
}

static void haskellsecp256k1_v0_1_0_scalar_mul(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *a, const haskellsecp256k1_v0_1_0_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    *r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
}

static void haskellsecp256k1_v0_1_0_scalar_split_128(haskellsecp256k1_v0_1_0_scalar *r1, haskellsecp256k1_v0_1_0_scalar *r2, const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    *r1 = *a;
    *r2 = 0;

    SECP256K1_SCALAR_VERIFY(r1);
    SECP256K1_SCALAR_VERIFY(r2);
}

SECP256K1_INLINE static int haskellsecp256k1_v0_1_0_scalar_eq(const haskellsecp256k1_v0_1_0_scalar *a, const haskellsecp256k1_v0_1_0_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    return *a == *b;
}

static SECP256K1_INLINE void haskellsecp256k1_v0_1_0_scalar_cmov(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *a, int flag) {
    uint32_t mask0, mask1;
    volatile int vflag = flag;
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_CHECKMEM_CHECK_VERIFY(r, sizeof(*r));

    mask0 = vflag + ~((uint32_t)0);
    mask1 = ~mask0;
    *r = (*r & mask0) | (*a & mask1);

    SECP256K1_SCALAR_VERIFY(r);
}
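
/* A worked sketch of the mask arithmetic (hypothetical helper, not part of
 * the upstream file): ~((uint32_t)0) is 0xffffffff, i.e. -1, so mask0 is
 * vflag - 1. flag == 0 gives mask0 == 0xffffffff (keep *r, mask1 == 0);
 * flag == 1 gives mask0 == 0 (take *a). The volatile read of the flag
 * discourages the compiler from turning the branch-free select back into
 * a branch. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_cmov_example(void) {
    haskellsecp256k1_v0_1_0_scalar r, a;
    haskellsecp256k1_v0_1_0_scalar_set_int(&r, 1);
    haskellsecp256k1_v0_1_0_scalar_set_int(&a, 2);
    haskellsecp256k1_v0_1_0_scalar_cmov(&r, &a, 0); /* flag clear: r keeps 1 */
    VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_is_one(&r));
    haskellsecp256k1_v0_1_0_scalar_cmov(&r, &a, 1); /* flag set: r becomes 2 */
    VERIFY_CHECK(r == 2);
}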

static void haskellsecp256k1_v0_1_0_scalar_inverse(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *x) {
    int i;
    *r = 0;
    SECP256K1_SCALAR_VERIFY(x);

    for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++)
        if ((i * *x) % EXHAUSTIVE_TEST_ORDER == 1)
            *r = i;

    SECP256K1_SCALAR_VERIFY(r);
    /* If this VERIFY_CHECK triggers we were given a noninvertible scalar (and thus
     * have a composite group order; fix it in exhaustive_tests.c). */
    VERIFY_CHECK(*r != 0);
}
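
/* A usage sketch (hypothetical helper, not part of the upstream file): the
 * search above is O(n), which is fine for the tiny test order. For example,
 * with EXHAUSTIVE_TEST_ORDER == 13 the inverse of 5 is 8, since
 * 5 * 8 == 40 == 3 * 13 + 1. The loop below checks x * x^-1 == 1 for every
 * nonzero x. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_inverse_example(void) {
    haskellsecp256k1_v0_1_0_scalar x, xi, prod;
    unsigned int i;
    for (i = 1; i < EXHAUSTIVE_TEST_ORDER; i++) {
        haskellsecp256k1_v0_1_0_scalar_set_int(&x, i);
        haskellsecp256k1_v0_1_0_scalar_inverse(&xi, &x);
        haskellsecp256k1_v0_1_0_scalar_mul(&prod, &x, &xi);
        VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_is_one(&prod));
    }
}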

static void haskellsecp256k1_v0_1_0_scalar_inverse_var(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *x) {
    SECP256K1_SCALAR_VERIFY(x);

    haskellsecp256k1_v0_1_0_scalar_inverse(r, x);

    SECP256K1_SCALAR_VERIFY(r);
}

static void haskellsecp256k1_v0_1_0_scalar_half(haskellsecp256k1_v0_1_0_scalar *r, const haskellsecp256k1_v0_1_0_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    *r = (*a + ((-(uint32_t)(*a & 1)) & EXHAUSTIVE_TEST_ORDER)) >> 1;

    SECP256K1_SCALAR_VERIFY(r);
}
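
/* A worked sketch of the halving trick (hypothetical helper, not part of
 * the upstream file): for even a the masked term is zero and the shift
 * simply divides by two; for odd a the odd group order n is added first,
 * making the sum even. With n == 13 and a == 5: (5 + 13) >> 1 == 9, and
 * indeed 9 + 9 == 18 == 5 (mod 13). Doubling the result recovers a in
 * every case, which the loop below checks exhaustively. */
SECP256K1_INLINE static void haskellsecp256k1_v0_1_0_scalar_half_example(void) {
    haskellsecp256k1_v0_1_0_scalar a, h, d;
    unsigned int i;
    for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) {
        haskellsecp256k1_v0_1_0_scalar_set_int(&a, i);
        haskellsecp256k1_v0_1_0_scalar_half(&h, &a);
        haskellsecp256k1_v0_1_0_scalar_add(&d, &h, &h);
        VERIFY_CHECK(haskellsecp256k1_v0_1_0_scalar_eq(&d, &a));
    }
}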

#endif /* SECP256K1_SCALAR_REPR_IMPL_H */