Coverage Report

Created: 2024-10-21 15:10

/root/bitcoin/src/secp256k1/src/field_5x52_int128_impl.h

Every executable line in this file has an execution count of 0: nothing below was exercised by the covered test run.
/***********************************************************************
 * Copyright (c) 2013, 2014 Pieter Wuille                              *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
#define SECP256K1_FIELD_INNER5X52_IMPL_H

#include <stdint.h>

#include "int128.h"
#include "util.h"

#define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))

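/* Field elements are represented by five uint64_t limbs, value =
 * sum(a[i] * 2^(52*i)), modulo the secp256k1 prime p = 2^256 - 0x1000003D1.
 * M = 2^52 - 1 below is the limb mask; inputs may be unnormalized, with up
 * to 56 bits in limbs 0..3 and 52 bits in limb 4 (checked at function
 * entry). Since 2^256 mod p = 0x1000003D1, an overflow limb x at weight
 * 2^260 = 2^4 * 2^256 contributes x * 0x1000003D10 = x * R (mod p), which
 * is how the code below folds high limbs back into low ones. */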
SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
    secp256k1_uint128 c, d;
    uint64_t t3, t4, tx, u0;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;

    VERIFY_BITS(a[0], 56);
    VERIFY_BITS(a[1], 56);
    VERIFY_BITS(a[2], 56);
    VERIFY_BITS(a[3], 56);
    VERIFY_BITS(a[4], 52);
    VERIFY_BITS(b[0], 56);
    VERIFY_BITS(b[1], 56);
    VERIFY_BITS(b[2], 56);
    VERIFY_BITS(b[3], 56);
    VERIFY_BITS(b[4], 52);
    VERIFY_CHECK(r != b);
    VERIFY_CHECK(a != b);

    /*  [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
     *  for 0 <= x <= 4, px is a shorthand for sum(a[i]*b[x-i], i=0..x).
     *  for 4 <= x <= 8, px is a shorthand for sum(a[i]*b[x-i], i=(x-4)..4)
     *  Note that [x 0 0 0 0 0] = [x*R].
     */
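    /* The last identity holds because a value x in limb position 5 has
     * weight 2^(5*52) = 2^260, and 2^260 mod p = 2^4 * (2^256 mod p)
     * = 2^4 * 0x1000003D1 = 0x1000003D10 = R. */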

    secp256k1_u128_mul(&d, a0, b[3]);
    secp256k1_u128_accum_mul(&d, a1, b[2]);
    secp256k1_u128_accum_mul(&d, a2, b[1]);
    secp256k1_u128_accum_mul(&d, a3, b[0]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 0] = [p3 0 0 0] */
    secp256k1_u128_mul(&c, a4, b[4]);
    VERIFY_BITS_128(&c, 112);
    /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    VERIFY_BITS_128(&d, 115);
    VERIFY_BITS_128(&c, 48);
    /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t3, 52);
    VERIFY_BITS_128(&d, 63);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
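    /* c still holds the high bits of p8, shifted down by 64 rather than a
     * limb-aligned 52; the spare 12 bits are why the remaining fold below
     * uses R << 12 (the [(c<<12)] in the shorthand above). */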

    secp256k1_u128_accum_mul(&d, a0, b[4]);
    secp256k1_u128_accum_mul(&d, a1, b[3]);
    secp256k1_u128_accum_mul(&d, a2, b[2]);
    secp256k1_u128_accum_mul(&d, a3, b[1]);
    secp256k1_u128_accum_mul(&d, a4, b[0]);
    VERIFY_BITS_128(&d, 115);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    VERIFY_BITS_128(&d, 116);
    /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t4, 52);
    VERIFY_BITS_128(&d, 64);
    /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    tx = (t4 >> 48); t4 &= (M >> 4);
    VERIFY_BITS(tx, 4);
    VERIFY_BITS(t4, 48);
    /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
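    /* t4 held bits 208..259 of the result. Only bits 208..255 stay in limb
     * 4; the top 4 bits (tx) sit at weight 2^256 and must wrap around
     * modulo p. They are merged into u0 below. */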

    secp256k1_u128_mul(&c, a0, b[0]);
    VERIFY_BITS_128(&c, 112);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&d, a1, b[4]);
    secp256k1_u128_accum_mul(&d, a2, b[3]);
    secp256k1_u128_accum_mul(&d, a3, b[2]);
    secp256k1_u128_accum_mul(&d, a4, b[1]);
    VERIFY_BITS_128(&d, 114);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(u0, 52);
    VERIFY_BITS_128(&d, 62);
    /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = (u0 << 4) | tx;
    VERIFY_BITS(u0, 56);
    /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
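    /* (u0 << 4) | tx is exactly the part of the product at weight 2^256 and
     * above (u0 came from limb 5, weight 2^260 = 2^4 * 2^256). Multiplying
     * it by R >> 4 = 0x1000003D1 = 2^256 mod p folds it onto limb 0, and
     * the 4-bit pre-shift keeps u0 * (R >> 4) well within 128 bits. */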
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    VERIFY_BITS_128(&c, 113);
    /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[0], 52);
    VERIFY_BITS_128(&c, 61);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
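    /* From here c and d advance in lockstep: c accumulates the low-half
     * terms (p1, then p2) while d accumulates the high-half terms (p6, then
     * p7), and each round folds d's low 52 bits into c scaled by R. */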

    secp256k1_u128_accum_mul(&c, a0, b[1]);
    secp256k1_u128_accum_mul(&c, a1, b[0]);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&d, a2, b[4]);
    secp256k1_u128_accum_mul(&d, a3, b[3]);
    secp256k1_u128_accum_mul(&d, a4, b[2]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 62);
    /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[1], 52);
    VERIFY_BITS_128(&c, 63);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

    secp256k1_u128_accum_mul(&c, a0, b[2]);
    secp256k1_u128_accum_mul(&c, a1, b[1]);
    secp256k1_u128_accum_mul(&c, a2, b[0]);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&d, a3, b[4]);
    secp256k1_u128_accum_mul(&d, a4, b[3]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 50);
    /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[2], 52);
    VERIFY_BITS_128(&c, 63);
    /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    VERIFY_BITS_128(&c, 100);
    /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[3], 52);
    VERIFY_BITS_128(&c, 48);
    /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[4] = secp256k1_u128_to_u64(&c) + t4;
    VERIFY_BITS(r[4], 49);
    /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
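    /* r[0..3] fit in 52 bits and r[4] in 49: a well-formed 5x52 element,
     * congruent to a*b mod p but not necessarily fully normalized. */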
}

SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
    secp256k1_uint128 c, d;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    uint64_t t3, t4, tx, u0;
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;

    VERIFY_BITS(a[0], 56);
    VERIFY_BITS(a[1], 56);
    VERIFY_BITS(a[2], 56);
    VERIFY_BITS(a[3], 56);
    VERIFY_BITS(a[4], 52);

    /**  [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
     *  px is a shorthand for sum(a[i]*a[x-i], i=0..x).
     *  Note that [x 0 0 0 0 0] = [x*R].
     */
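    /* Squaring halves the multiplication count: cross terms appear in equal
     * pairs a[i]*a[j] = a[j]*a[i], so each pair is computed once with one
     * operand doubled, e.g. p3 = (2*a0)*a3 + (2*a1)*a2. */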

    secp256k1_u128_mul(&d, a0*2, a3);
    secp256k1_u128_accum_mul(&d, a1*2, a2);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 0] = [p3 0 0 0] */
    secp256k1_u128_mul(&c, a4, a4);
    VERIFY_BITS_128(&c, 112);
    /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    VERIFY_BITS_128(&d, 115);
    VERIFY_BITS_128(&c, 48);
    /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t3, 52);
    VERIFY_BITS_128(&d, 63);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

    a4 *= 2;
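    /* a4 now carries a factor of 2, so every later cross term involving a4
     * (a0*a4, a1*a4, a2*a4, a3*a4) is implicitly doubled. */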
    secp256k1_u128_accum_mul(&d, a0, a4);
    secp256k1_u128_accum_mul(&d, a1*2, a3);
    secp256k1_u128_accum_mul(&d, a2, a2);
    VERIFY_BITS_128(&d, 115);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    VERIFY_BITS_128(&d, 116);
    /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t4, 52);
    VERIFY_BITS_128(&d, 64);
    /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    tx = (t4 >> 48); t4 &= (M >> 4);
    VERIFY_BITS(tx, 4);
    VERIFY_BITS(t4, 48);
    /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

    secp256k1_u128_mul(&c, a0, a0);
    VERIFY_BITS_128(&c, 112);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&d, a1, a4);
    secp256k1_u128_accum_mul(&d, a2*2, a3);
    VERIFY_BITS_128(&d, 114);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(u0, 52);
    VERIFY_BITS_128(&d, 62);
    /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = (u0 << 4) | tx;
    VERIFY_BITS(u0, 56);
    /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    VERIFY_BITS_128(&c, 113);
    /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[0], 52);
    VERIFY_BITS_128(&c, 61);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

    a0 *= 2;
    secp256k1_u128_accum_mul(&c, a0, a1);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&d, a2, a4);
    secp256k1_u128_accum_mul(&d, a3, a3);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 62);
    /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[1], 52);
    VERIFY_BITS_128(&c, 63);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

    secp256k1_u128_accum_mul(&c, a0, a2);
    secp256k1_u128_accum_mul(&c, a1, a1);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&d, a3, a4);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 50);
    /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[2], 52);
    VERIFY_BITS_128(&c, 63);
    /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    VERIFY_BITS_128(&c, 100);
    /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[3], 52);
    VERIFY_BITS_128(&c, 48);
    /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[4] = secp256k1_u128_to_u64(&c) + t4;
    VERIFY_BITS(r[4], 49);
    /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
}

#endif /* SECP256K1_FIELD_INNER5X52_IMPL_H */
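
Every line above has an execution count of 0: neither secp256k1_fe_mul_inner nor secp256k1_fe_sqr_inner ran in this test build, plausibly because the build exercised a different field backend than the int128 one. The sketch below shows a minimal, hypothetical call that would drive both functions through this file. It assumes it is compiled inside the library, where secp256k1_uint128, the u128 helpers, and VERIFY_CHECK are visible; the function name exercise_5x52_int128 is made up for illustration.

#include <stdio.h>
#include <inttypes.h>

static void exercise_5x52_int128(void) {
    /* Inputs must satisfy the preconditions checked at function entry:
     * limbs 0..3 below 2^56, limb 4 below 2^52, and distinct pointers. */
    uint64_t one[5] = {1, 0, 0, 0, 0};            /* the field element 1 */
    uint64_t k[5] = {0x1000003D1ULL, 0, 0, 0, 0}; /* 2^256 mod p */
    uint64_t r[5], s[5];

    secp256k1_fe_mul_inner(r, one, k); /* 1 * k mod p: expect r == k */
    secp256k1_fe_sqr_inner(s, one);    /* 1^2 mod p: expect s == one */
    printf("r[0]=%" PRIx64 " s[0]=%" PRIx64 "\n", r[0], s[0]);
}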