Rich Salz | 2039c42 | 2016-05-17 14:51:34 -0400 | [diff] [blame] | 1 | /* |
Matt Caswell | 8020d79 | 2021-03-11 13:27:36 +0000 | [diff] [blame] | 2 | * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 3 | * |
Richard Levitte | 2a7b6f3 | 2018-12-06 13:54:02 +0100 | [diff] [blame] | 4 | * Licensed under the Apache License 2.0 (the "License"). You may not use |
Rich Salz | 2039c42 | 2016-05-17 14:51:34 -0400 | [diff] [blame] | 5 | * this file except in compliance with the License. You can obtain a copy |
| 6 | * in the file LICENSE in the source distribution or at |
| 7 | * https://www.openssl.org/source/license.html |
Bodo Möller | 46a6437 | 2005-05-16 01:43:31 +0000 | [diff] [blame] | 8 | */ |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 9 | |
Pauli | c5f8713 | 2020-02-12 15:03:51 +1000 | [diff] [blame] | 10 | /* |
| 11 | * RSA low level APIs are deprecated for public use, but still ok for |
| 12 | * internal use. |
| 13 | */ |
| 14 | #include "internal/deprecated.h" |
| 15 | |
Richard Levitte | b39fc56 | 2015-05-14 16:56:48 +0200 | [diff] [blame] | 16 | #include "internal/cryptlib.h" |
Dr. Matthias St. Pierre | 25f2138 | 2019-09-28 00:45:33 +0200 | [diff] [blame] | 17 | #include "crypto/bn.h" |
Dr. Matthias St. Pierre | 706457b | 2019-09-28 00:45:40 +0200 | [diff] [blame] | 18 | #include "rsa_local.h" |
| 19 | #include "internal/constant_time.h" |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 20 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 21 | static int rsa_ossl_public_encrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 22 | unsigned char *to, RSA *rsa, int padding); |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 23 | static int rsa_ossl_private_encrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 24 | unsigned char *to, RSA *rsa, int padding); |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 25 | static int rsa_ossl_public_decrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 26 | unsigned char *to, RSA *rsa, int padding); |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 27 | static int rsa_ossl_private_decrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 28 | unsigned char *to, RSA *rsa, int padding); |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 29 | static int rsa_ossl_mod_exp(BIGNUM *r0, const BIGNUM *i, RSA *rsa, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 30 | BN_CTX *ctx); |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 31 | static int rsa_ossl_init(RSA *rsa); |
| 32 | static int rsa_ossl_finish(RSA *rsa); |
/*
 * The default RSA_METHOD: plain software PKCS#1 implementation.
 * Field order must match the RSA_METHOD structure layout exactly.
 */
static RSA_METHOD rsa_pkcs1_ossl_meth = {
    "OpenSSL PKCS#1 RSA",       /* name */
    rsa_ossl_public_encrypt,
    rsa_ossl_public_decrypt,    /* signature verification */
    rsa_ossl_private_encrypt,   /* signing */
    rsa_ossl_private_decrypt,
    rsa_ossl_mod_exp,           /* private-key CRT exponentiation */
    BN_mod_exp_mont,            /* XXX probably we should not use Montgomery
                                 * if e == 3 */
    rsa_ossl_init,
    rsa_ossl_finish,
    RSA_FLAG_FIPS_METHOD,       /* flags */
    NULL,                       /* app_data */
    0,                          /* rsa_sign */
    0,                          /* rsa_verify */
    NULL,                       /* rsa_keygen */
    NULL                        /* rsa_multi_prime_keygen */
};
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 51 | |
Rich Salz | 076fc55 | 2017-04-07 12:07:42 -0400 | [diff] [blame] | 52 | static const RSA_METHOD *default_RSA_meth = &rsa_pkcs1_ossl_meth; |
| 53 | |
/*
 * Replace the default RSA method used for subsequently created keys.
 * NOTE(review): no locking here — presumably intended to be called
 * before threads are started; confirm against callers.
 */
void RSA_set_default_method(const RSA_METHOD *meth)
{
    default_RSA_meth = meth;
}
| 58 | |
/* Return the current process-wide default RSA method. */
const RSA_METHOD *RSA_get_default_method(void)
{
    return default_RSA_meth;
}
| 63 | |
Rich Salz | b0700d2 | 2015-10-27 15:11:48 -0400 | [diff] [blame] | 64 | const RSA_METHOD *RSA_PKCS1_OpenSSL(void) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 65 | { |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 66 | return &rsa_pkcs1_ossl_meth; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 67 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 68 | |
Rich Salz | 076fc55 | 2017-04-07 12:07:42 -0400 | [diff] [blame] | 69 | const RSA_METHOD *RSA_null_method(void) |
| 70 | { |
| 71 | return NULL; |
| 72 | } |
| 73 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 74 | static int rsa_ossl_public_encrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 75 | unsigned char *to, RSA *rsa, int padding) |
| 76 | { |
| 77 | BIGNUM *f, *ret; |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 78 | int i, num = 0, r = -1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 79 | unsigned char *buf = NULL; |
| 80 | BN_CTX *ctx = NULL; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 81 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 82 | if (BN_num_bits(rsa->n) > OPENSSL_RSA_MAX_MODULUS_BITS) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 83 | ERR_raise(ERR_LIB_RSA, RSA_R_MODULUS_TOO_LARGE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 84 | return -1; |
| 85 | } |
Bodo Möller | 5e3225c | 2006-09-28 13:45:34 +0000 | [diff] [blame] | 86 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 87 | if (BN_ucmp(rsa->n, rsa->e) <= 0) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 88 | ERR_raise(ERR_LIB_RSA, RSA_R_BAD_E_VALUE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 89 | return -1; |
| 90 | } |
Bodo Möller | 5e3225c | 2006-09-28 13:45:34 +0000 | [diff] [blame] | 91 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 92 | /* for large moduli, enforce exponent limit */ |
| 93 | if (BN_num_bits(rsa->n) > OPENSSL_RSA_SMALL_MODULUS_BITS) { |
| 94 | if (BN_num_bits(rsa->e) > OPENSSL_RSA_MAX_PUBEXP_BITS) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 95 | ERR_raise(ERR_LIB_RSA, RSA_R_BAD_E_VALUE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 96 | return -1; |
| 97 | } |
| 98 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 99 | |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 100 | if ((ctx = BN_CTX_new_ex(rsa->libctx)) == NULL) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 101 | goto err; |
| 102 | BN_CTX_start(ctx); |
| 103 | f = BN_CTX_get(ctx); |
| 104 | ret = BN_CTX_get(ctx); |
| 105 | num = BN_num_bytes(rsa->n); |
| 106 | buf = OPENSSL_malloc(num); |
Paul Yang | edea42c | 2017-06-22 18:52:29 +0800 | [diff] [blame] | 107 | if (ret == NULL || buf == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 108 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 109 | goto err; |
| 110 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 111 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 112 | switch (padding) { |
| 113 | case RSA_PKCS1_PADDING: |
Pauli | 23b2fc0 | 2020-09-30 14:20:14 +1000 | [diff] [blame] | 114 | i = ossl_rsa_padding_add_PKCS1_type_2_ex(rsa->libctx, buf, num, |
| 115 | from, flen); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 116 | break; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 117 | case RSA_PKCS1_OAEP_PADDING: |
Pauli | 23b2fc0 | 2020-09-30 14:20:14 +1000 | [diff] [blame] | 118 | i = ossl_rsa_padding_add_PKCS1_OAEP_mgf1_ex(rsa->libctx, buf, num, |
| 119 | from, flen, NULL, 0, |
| 120 | NULL, NULL); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 121 | break; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 122 | case RSA_NO_PADDING: |
| 123 | i = RSA_padding_add_none(buf, num, from, flen); |
| 124 | break; |
| 125 | default: |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 126 | ERR_raise(ERR_LIB_RSA, RSA_R_UNKNOWN_PADDING_TYPE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 127 | goto err; |
| 128 | } |
| 129 | if (i <= 0) |
| 130 | goto err; |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 131 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 132 | if (BN_bin2bn(buf, num, f) == NULL) |
| 133 | goto err; |
Geoff Thorpe | 79221bc | 2003-02-14 23:21:19 +0000 | [diff] [blame] | 134 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 135 | if (BN_ucmp(f, rsa->n) >= 0) { |
| 136 | /* usually the padding functions would catch this */ |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 137 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_MODULUS); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 138 | goto err; |
| 139 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 140 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 141 | if (rsa->flags & RSA_FLAG_CACHE_PUBLIC) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 142 | if (!BN_MONT_CTX_set_locked(&rsa->_method_mod_n, rsa->lock, |
| 143 | rsa->n, ctx)) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 144 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 145 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 146 | if (!rsa->meth->bn_mod_exp(ret, f, rsa->e, rsa->n, ctx, |
| 147 | rsa->_method_mod_n)) |
| 148 | goto err; |
| 149 | |
| 150 | /* |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 151 | * BN_bn2binpad puts in leading 0 bytes if the number is less than |
| 152 | * the length of the modulus. |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 153 | */ |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 154 | r = BN_bn2binpad(ret, to, num); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 155 | err: |
Shane Lontis | ce1415e | 2019-03-19 09:58:09 +1000 | [diff] [blame] | 156 | BN_CTX_end(ctx); |
Rich Salz | 23a1d5e | 2015-04-30 21:37:06 -0400 | [diff] [blame] | 157 | BN_CTX_free(ctx); |
Rich Salz | 4b45c6e | 2015-04-30 17:57:32 -0400 | [diff] [blame] | 158 | OPENSSL_clear_free(buf, num); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 159 | return r; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 160 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 161 | |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 162 | static BN_BLINDING *rsa_get_blinding(RSA *rsa, int *local, BN_CTX *ctx) |
Nils Larsch | 800e400 | 2005-04-26 22:31:48 +0000 | [diff] [blame] | 163 | { |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 164 | BN_BLINDING *ret; |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 165 | |
Rich Salz | cd3f8c1 | 2021-02-18 15:31:56 -0500 | [diff] [blame] | 166 | if (!CRYPTO_THREAD_write_lock(rsa->lock)) |
| 167 | return NULL; |
Bodo Möller | c554155 | 2003-03-20 17:31:30 +0000 | [diff] [blame] | 168 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 169 | if (rsa->blinding == NULL) { |
Alessandro Ghedini | d188a53 | 2016-03-04 15:43:46 +0000 | [diff] [blame] | 170 | rsa->blinding = RSA_setup_blinding(rsa, ctx); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 171 | } |
Nils Larsch | 800e400 | 2005-04-26 22:31:48 +0000 | [diff] [blame] | 172 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 173 | ret = rsa->blinding; |
| 174 | if (ret == NULL) |
| 175 | goto err; |
Nils Larsch | 800e400 | 2005-04-26 22:31:48 +0000 | [diff] [blame] | 176 | |
Alessandro Ghedini | 0b1a07c | 2016-03-08 22:37:01 +0000 | [diff] [blame] | 177 | if (BN_BLINDING_is_current_thread(ret)) { |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 178 | /* rsa->blinding is ours! */ |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 179 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 180 | *local = 1; |
| 181 | } else { |
| 182 | /* resort to rsa->mt_blinding instead */ |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 183 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 184 | /* |
| 185 | * instructs rsa_blinding_convert(), rsa_blinding_invert() that the |
| 186 | * BN_BLINDING is shared, meaning that accesses require locks, and |
| 187 | * that the blinding factor must be stored outside the BN_BLINDING |
| 188 | */ |
| 189 | *local = 0; |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 190 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 191 | if (rsa->mt_blinding == NULL) { |
Alessandro Ghedini | d188a53 | 2016-03-04 15:43:46 +0000 | [diff] [blame] | 192 | rsa->mt_blinding = RSA_setup_blinding(rsa, ctx); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 193 | } |
| 194 | ret = rsa->mt_blinding; |
| 195 | } |
Bodo Möller | 5679bcc | 2003-04-02 09:50:22 +0000 | [diff] [blame] | 196 | |
Bodo Möller | 675f605 | 2006-06-14 08:55:23 +0000 | [diff] [blame] | 197 | err: |
Alessandro Ghedini | d188a53 | 2016-03-04 15:43:46 +0000 | [diff] [blame] | 198 | CRYPTO_THREAD_unlock(rsa->lock); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 199 | return ret; |
Nils Larsch | 800e400 | 2005-04-26 22:31:48 +0000 | [diff] [blame] | 200 | } |
| 201 | |
Bodo Möller | e5641d7 | 2011-10-19 14:59:27 +0000 | [diff] [blame] | 202 | static int rsa_blinding_convert(BN_BLINDING *b, BIGNUM *f, BIGNUM *unblind, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 203 | BN_CTX *ctx) |
| 204 | { |
Paul Yang | 90862ab | 2017-08-23 01:36:49 +0800 | [diff] [blame] | 205 | if (unblind == NULL) { |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 206 | /* |
| 207 | * Local blinding: store the unblinding factor in BN_BLINDING. |
| 208 | */ |
| 209 | return BN_BLINDING_convert_ex(f, NULL, b, ctx); |
Paul Yang | 90862ab | 2017-08-23 01:36:49 +0800 | [diff] [blame] | 210 | } else { |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 211 | /* |
| 212 | * Shared blinding: store the unblinding factor outside BN_BLINDING. |
| 213 | */ |
| 214 | int ret; |
Alessandro Ghedini | 0b1a07c | 2016-03-08 22:37:01 +0000 | [diff] [blame] | 215 | |
Jiasheng Jiang | aefbcde | 2022-02-05 18:00:51 +0800 | [diff] [blame] | 216 | if (!BN_BLINDING_lock(b)) |
| 217 | return 0; |
| 218 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 219 | ret = BN_BLINDING_convert_ex(f, unblind, b, ctx); |
Alessandro Ghedini | 0b1a07c | 2016-03-08 22:37:01 +0000 | [diff] [blame] | 220 | BN_BLINDING_unlock(b); |
| 221 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 222 | return ret; |
| 223 | } |
| 224 | } |
Bodo Möller | e5641d7 | 2011-10-19 14:59:27 +0000 | [diff] [blame] | 225 | |
/*
 * Remove the blinding from |f| in place.  Returns 1 on success, 0 on error.
 */
static int rsa_blinding_invert(BN_BLINDING *b, BIGNUM *f, BIGNUM *unblind,
                               BN_CTX *ctx)
{
    /*
     * For local blinding, unblind is set to NULL, and BN_BLINDING_invert_ex
     * will use the unblinding factor stored in BN_BLINDING. If BN_BLINDING
     * is shared between threads, unblind must be non-null:
     * BN_BLINDING_invert_ex will then use the local unblinding factor, and
     * will only read the modulus from BN_BLINDING. In both cases it's safe
     * to access the blinding without a lock.
     */
    return BN_BLINDING_invert_ex(f, unblind, b, ctx);
}
Bodo Möller | 5679bcc | 2003-04-02 09:50:22 +0000 | [diff] [blame] | 239 | |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 240 | /* signing */ |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 241 | static int rsa_ossl_private_encrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 242 | unsigned char *to, RSA *rsa, int padding) |
| 243 | { |
| 244 | BIGNUM *f, *ret, *res; |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 245 | int i, num = 0, r = -1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 246 | unsigned char *buf = NULL; |
| 247 | BN_CTX *ctx = NULL; |
| 248 | int local_blinding = 0; |
| 249 | /* |
| 250 | * Used only if the blinding structure is shared. A non-NULL unblind |
| 251 | * instructs rsa_blinding_convert() and rsa_blinding_invert() to store |
| 252 | * the unblinding factor outside the blinding structure. |
| 253 | */ |
| 254 | BIGNUM *unblind = NULL; |
| 255 | BN_BLINDING *blinding = NULL; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 256 | |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 257 | if ((ctx = BN_CTX_new_ex(rsa->libctx)) == NULL) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 258 | goto err; |
| 259 | BN_CTX_start(ctx); |
| 260 | f = BN_CTX_get(ctx); |
| 261 | ret = BN_CTX_get(ctx); |
| 262 | num = BN_num_bytes(rsa->n); |
| 263 | buf = OPENSSL_malloc(num); |
Paul Yang | edea42c | 2017-06-22 18:52:29 +0800 | [diff] [blame] | 264 | if (ret == NULL || buf == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 265 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 266 | goto err; |
| 267 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 268 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 269 | switch (padding) { |
| 270 | case RSA_PKCS1_PADDING: |
| 271 | i = RSA_padding_add_PKCS1_type_1(buf, num, from, flen); |
| 272 | break; |
| 273 | case RSA_X931_PADDING: |
| 274 | i = RSA_padding_add_X931(buf, num, from, flen); |
| 275 | break; |
| 276 | case RSA_NO_PADDING: |
| 277 | i = RSA_padding_add_none(buf, num, from, flen); |
| 278 | break; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 279 | default: |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 280 | ERR_raise(ERR_LIB_RSA, RSA_R_UNKNOWN_PADDING_TYPE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 281 | goto err; |
| 282 | } |
| 283 | if (i <= 0) |
| 284 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 285 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 286 | if (BN_bin2bn(buf, num, f) == NULL) |
| 287 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 288 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 289 | if (BN_ucmp(f, rsa->n) >= 0) { |
| 290 | /* usually the padding functions would catch this */ |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 291 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_MODULUS); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 292 | goto err; |
| 293 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 294 | |
Andy Polyakov | 2cc3f68 | 2018-11-07 22:07:22 +0100 | [diff] [blame] | 295 | if (rsa->flags & RSA_FLAG_CACHE_PUBLIC) |
| 296 | if (!BN_MONT_CTX_set_locked(&rsa->_method_mod_n, rsa->lock, |
| 297 | rsa->n, ctx)) |
| 298 | goto err; |
| 299 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 300 | if (!(rsa->flags & RSA_FLAG_NO_BLINDING)) { |
| 301 | blinding = rsa_get_blinding(rsa, &local_blinding, ctx); |
| 302 | if (blinding == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 303 | ERR_raise(ERR_LIB_RSA, ERR_R_INTERNAL_ERROR); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 304 | goto err; |
| 305 | } |
| 306 | } |
Bodo Möller | 46a6437 | 2005-05-16 01:43:31 +0000 | [diff] [blame] | 307 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 308 | if (blinding != NULL) { |
| 309 | if (!local_blinding && ((unblind = BN_CTX_get(ctx)) == NULL)) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 310 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 311 | goto err; |
| 312 | } |
| 313 | if (!rsa_blinding_convert(blinding, f, unblind, ctx)) |
| 314 | goto err; |
| 315 | } |
Bodo Möller | 46a6437 | 2005-05-16 01:43:31 +0000 | [diff] [blame] | 316 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 317 | if ((rsa->flags & RSA_FLAG_EXT_PKEY) || |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 318 | (rsa->version == RSA_ASN1_VERSION_MULTI) || |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 319 | ((rsa->p != NULL) && |
| 320 | (rsa->q != NULL) && |
| 321 | (rsa->dmp1 != NULL) && (rsa->dmq1 != NULL) && (rsa->iqmp != NULL))) { |
| 322 | if (!rsa->meth->rsa_mod_exp(ret, f, rsa, ctx)) |
| 323 | goto err; |
| 324 | } else { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 325 | BIGNUM *d = BN_new(); |
| 326 | if (d == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 327 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 328 | goto err; |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 329 | } |
David von Oheimb | 7408f67 | 2019-07-26 11:03:12 +0200 | [diff] [blame] | 330 | if (rsa->d == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 331 | ERR_raise(ERR_LIB_RSA, RSA_R_MISSING_PRIVATE_KEY); |
David von Oheimb | 7408f67 | 2019-07-26 11:03:12 +0200 | [diff] [blame] | 332 | BN_free(d); |
| 333 | goto err; |
| 334 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 335 | BN_with_flags(d, rsa->d, BN_FLG_CONSTTIME); |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 336 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 337 | if (!rsa->meth->bn_mod_exp(ret, f, d, rsa->n, ctx, |
| 338 | rsa->_method_mod_n)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 339 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 340 | goto err; |
| 341 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 342 | /* We MUST free d before any further use of rsa->d */ |
| 343 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 344 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 345 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 346 | if (blinding) |
| 347 | if (!rsa_blinding_invert(blinding, ret, unblind, ctx)) |
| 348 | goto err; |
| 349 | |
| 350 | if (padding == RSA_X931_PADDING) { |
Pauli | 3d3cbce | 2018-07-31 13:11:00 +1000 | [diff] [blame] | 351 | if (!BN_sub(f, rsa->n, ret)) |
| 352 | goto err; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 353 | if (BN_cmp(ret, f) > 0) |
| 354 | res = f; |
| 355 | else |
| 356 | res = ret; |
Paul Yang | 90862ab | 2017-08-23 01:36:49 +0800 | [diff] [blame] | 357 | } else { |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 358 | res = ret; |
Paul Yang | 90862ab | 2017-08-23 01:36:49 +0800 | [diff] [blame] | 359 | } |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 360 | |
| 361 | /* |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 362 | * BN_bn2binpad puts in leading 0 bytes if the number is less than |
| 363 | * the length of the modulus. |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 364 | */ |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 365 | r = BN_bn2binpad(res, to, num); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 366 | err: |
Shane Lontis | ce1415e | 2019-03-19 09:58:09 +1000 | [diff] [blame] | 367 | BN_CTX_end(ctx); |
Rich Salz | 23a1d5e | 2015-04-30 21:37:06 -0400 | [diff] [blame] | 368 | BN_CTX_free(ctx); |
Rich Salz | 4b45c6e | 2015-04-30 17:57:32 -0400 | [diff] [blame] | 369 | OPENSSL_clear_free(buf, num); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 370 | return r; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 371 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 372 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 373 | static int rsa_ossl_private_decrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 374 | unsigned char *to, RSA *rsa, int padding) |
| 375 | { |
| 376 | BIGNUM *f, *ret; |
| 377 | int j, num = 0, r = -1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 378 | unsigned char *buf = NULL; |
| 379 | BN_CTX *ctx = NULL; |
| 380 | int local_blinding = 0; |
| 381 | /* |
| 382 | * Used only if the blinding structure is shared. A non-NULL unblind |
| 383 | * instructs rsa_blinding_convert() and rsa_blinding_invert() to store |
| 384 | * the unblinding factor outside the blinding structure. |
| 385 | */ |
| 386 | BIGNUM *unblind = NULL; |
| 387 | BN_BLINDING *blinding = NULL; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 388 | |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 389 | if ((ctx = BN_CTX_new_ex(rsa->libctx)) == NULL) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 390 | goto err; |
| 391 | BN_CTX_start(ctx); |
| 392 | f = BN_CTX_get(ctx); |
| 393 | ret = BN_CTX_get(ctx); |
| 394 | num = BN_num_bytes(rsa->n); |
| 395 | buf = OPENSSL_malloc(num); |
Paul Yang | edea42c | 2017-06-22 18:52:29 +0800 | [diff] [blame] | 396 | if (ret == NULL || buf == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 397 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 398 | goto err; |
| 399 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 400 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 401 | /* |
| 402 | * This check was for equality but PGP does evil things and chops off the |
| 403 | * top '0' bytes |
| 404 | */ |
| 405 | if (flen > num) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 406 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_GREATER_THAN_MOD_LEN); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 407 | goto err; |
| 408 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 409 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 410 | /* make data into a big number */ |
| 411 | if (BN_bin2bn(from, (int)flen, f) == NULL) |
| 412 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 413 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 414 | if (BN_ucmp(f, rsa->n) >= 0) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 415 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_MODULUS); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 416 | goto err; |
| 417 | } |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 418 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 419 | if (!(rsa->flags & RSA_FLAG_NO_BLINDING)) { |
| 420 | blinding = rsa_get_blinding(rsa, &local_blinding, ctx); |
| 421 | if (blinding == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 422 | ERR_raise(ERR_LIB_RSA, ERR_R_INTERNAL_ERROR); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 423 | goto err; |
| 424 | } |
| 425 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 426 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 427 | if (blinding != NULL) { |
| 428 | if (!local_blinding && ((unblind = BN_CTX_get(ctx)) == NULL)) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 429 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 430 | goto err; |
| 431 | } |
| 432 | if (!rsa_blinding_convert(blinding, f, unblind, ctx)) |
| 433 | goto err; |
| 434 | } |
Bodo Möller | 46a6437 | 2005-05-16 01:43:31 +0000 | [diff] [blame] | 435 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 436 | /* do the decrypt */ |
| 437 | if ((rsa->flags & RSA_FLAG_EXT_PKEY) || |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 438 | (rsa->version == RSA_ASN1_VERSION_MULTI) || |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 439 | ((rsa->p != NULL) && |
| 440 | (rsa->q != NULL) && |
| 441 | (rsa->dmp1 != NULL) && (rsa->dmq1 != NULL) && (rsa->iqmp != NULL))) { |
| 442 | if (!rsa->meth->rsa_mod_exp(ret, f, rsa, ctx)) |
| 443 | goto err; |
| 444 | } else { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 445 | BIGNUM *d = BN_new(); |
| 446 | if (d == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 447 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 448 | goto err; |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 449 | } |
David von Oheimb | 7408f67 | 2019-07-26 11:03:12 +0200 | [diff] [blame] | 450 | if (rsa->d == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 451 | ERR_raise(ERR_LIB_RSA, RSA_R_MISSING_PRIVATE_KEY); |
David von Oheimb | 7408f67 | 2019-07-26 11:03:12 +0200 | [diff] [blame] | 452 | BN_free(d); |
| 453 | goto err; |
| 454 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 455 | BN_with_flags(d, rsa->d, BN_FLG_CONSTTIME); |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 456 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 457 | if (rsa->flags & RSA_FLAG_CACHE_PUBLIC) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 458 | if (!BN_MONT_CTX_set_locked(&rsa->_method_mod_n, rsa->lock, |
| 459 | rsa->n, ctx)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 460 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 461 | goto err; |
| 462 | } |
| 463 | if (!rsa->meth->bn_mod_exp(ret, f, d, rsa->n, ctx, |
| 464 | rsa->_method_mod_n)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 465 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 466 | goto err; |
| 467 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 468 | /* We MUST free d before any further use of rsa->d */ |
| 469 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 470 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 471 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 472 | if (blinding) |
| 473 | if (!rsa_blinding_invert(blinding, ret, unblind, ctx)) |
| 474 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 475 | |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 476 | j = BN_bn2binpad(ret, buf, num); |
Pauli | 4a3dd62 | 2019-09-09 10:14:32 +1000 | [diff] [blame] | 477 | if (j < 0) |
| 478 | goto err; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 479 | |
| 480 | switch (padding) { |
| 481 | case RSA_PKCS1_PADDING: |
| 482 | r = RSA_padding_check_PKCS1_type_2(to, num, buf, j, num); |
| 483 | break; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 484 | case RSA_PKCS1_OAEP_PADDING: |
| 485 | r = RSA_padding_check_PKCS1_OAEP(to, num, buf, j, num, NULL, 0); |
| 486 | break; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 487 | case RSA_NO_PADDING: |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 488 | memcpy(to, buf, (r = j)); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 489 | break; |
| 490 | default: |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 491 | ERR_raise(ERR_LIB_RSA, RSA_R_UNKNOWN_PADDING_TYPE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 492 | goto err; |
| 493 | } |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 494 | #ifndef FIPS_MODULE |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 495 | /* |
| 496 | * This trick doesn't work in the FIPS provider because libcrypto manages |
| 497 | * the error stack. Instead we opt not to put an error on the stack at all |
| 498 | * in case of padding failure in the FIPS provider. |
| 499 | */ |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 500 | ERR_raise(ERR_LIB_RSA, RSA_R_PADDING_CHECK_FAILED); |
Bernd Edlinger | 94dc53a | 2019-03-20 20:01:12 +0100 | [diff] [blame] | 501 | err_clear_last_constant_time(1 & ~constant_time_msb(r)); |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 502 | #endif |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 503 | |
| 504 | err: |
Shane Lontis | ce1415e | 2019-03-19 09:58:09 +1000 | [diff] [blame] | 505 | BN_CTX_end(ctx); |
Rich Salz | 23a1d5e | 2015-04-30 21:37:06 -0400 | [diff] [blame] | 506 | BN_CTX_free(ctx); |
Rich Salz | 4b45c6e | 2015-04-30 17:57:32 -0400 | [diff] [blame] | 507 | OPENSSL_clear_free(buf, num); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 508 | return r; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 509 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 510 | |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 511 | /* signature verification */ |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 512 | static int rsa_ossl_public_decrypt(int flen, const unsigned char *from, |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 513 | unsigned char *to, RSA *rsa, int padding) |
| 514 | { |
| 515 | BIGNUM *f, *ret; |
| 516 | int i, num = 0, r = -1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 517 | unsigned char *buf = NULL; |
| 518 | BN_CTX *ctx = NULL; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 519 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 520 | if (BN_num_bits(rsa->n) > OPENSSL_RSA_MAX_MODULUS_BITS) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 521 | ERR_raise(ERR_LIB_RSA, RSA_R_MODULUS_TOO_LARGE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 522 | return -1; |
| 523 | } |
Bodo Möller | 5e3225c | 2006-09-28 13:45:34 +0000 | [diff] [blame] | 524 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 525 | if (BN_ucmp(rsa->n, rsa->e) <= 0) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 526 | ERR_raise(ERR_LIB_RSA, RSA_R_BAD_E_VALUE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 527 | return -1; |
| 528 | } |
Bodo Möller | 5e3225c | 2006-09-28 13:45:34 +0000 | [diff] [blame] | 529 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 530 | /* for large moduli, enforce exponent limit */ |
| 531 | if (BN_num_bits(rsa->n) > OPENSSL_RSA_SMALL_MODULUS_BITS) { |
| 532 | if (BN_num_bits(rsa->e) > OPENSSL_RSA_MAX_PUBEXP_BITS) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 533 | ERR_raise(ERR_LIB_RSA, RSA_R_BAD_E_VALUE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 534 | return -1; |
| 535 | } |
| 536 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 537 | |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 538 | if ((ctx = BN_CTX_new_ex(rsa->libctx)) == NULL) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 539 | goto err; |
| 540 | BN_CTX_start(ctx); |
| 541 | f = BN_CTX_get(ctx); |
| 542 | ret = BN_CTX_get(ctx); |
| 543 | num = BN_num_bytes(rsa->n); |
| 544 | buf = OPENSSL_malloc(num); |
Paul Yang | edea42c | 2017-06-22 18:52:29 +0800 | [diff] [blame] | 545 | if (ret == NULL || buf == NULL) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 546 | ERR_raise(ERR_LIB_RSA, ERR_R_MALLOC_FAILURE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 547 | goto err; |
| 548 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 549 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 550 | /* |
| 551 | * This check was for equality but PGP does evil things and chops off the |
| 552 | * top '0' bytes |
| 553 | */ |
| 554 | if (flen > num) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 555 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_GREATER_THAN_MOD_LEN); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 556 | goto err; |
| 557 | } |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 558 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 559 | if (BN_bin2bn(from, flen, f) == NULL) |
| 560 | goto err; |
Bodo Möller | 24cff6c | 2001-07-25 17:02:58 +0000 | [diff] [blame] | 561 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 562 | if (BN_ucmp(f, rsa->n) >= 0) { |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 563 | ERR_raise(ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_MODULUS); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 564 | goto err; |
| 565 | } |
Geoff Thorpe | 79221bc | 2003-02-14 23:21:19 +0000 | [diff] [blame] | 566 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 567 | if (rsa->flags & RSA_FLAG_CACHE_PUBLIC) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 568 | if (!BN_MONT_CTX_set_locked(&rsa->_method_mod_n, rsa->lock, |
| 569 | rsa->n, ctx)) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 570 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 571 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 572 | if (!rsa->meth->bn_mod_exp(ret, f, rsa->e, rsa->n, ctx, |
| 573 | rsa->_method_mod_n)) |
| 574 | goto err; |
Dr. Stephen Henson | 499fca2 | 2005-05-28 20:44:02 +0000 | [diff] [blame] | 575 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 576 | if ((padding == RSA_X931_PADDING) && ((bn_get_words(ret)[0] & 0xf) != 12)) |
| 577 | if (!BN_sub(ret, rsa->n, ret)) |
| 578 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 579 | |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 580 | i = BN_bn2binpad(ret, buf, num); |
Pauli | 4a3dd62 | 2019-09-09 10:14:32 +1000 | [diff] [blame] | 581 | if (i < 0) |
| 582 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 583 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 584 | switch (padding) { |
| 585 | case RSA_PKCS1_PADDING: |
| 586 | r = RSA_padding_check_PKCS1_type_1(to, num, buf, i, num); |
| 587 | break; |
| 588 | case RSA_X931_PADDING: |
| 589 | r = RSA_padding_check_X931(to, num, buf, i, num); |
| 590 | break; |
| 591 | case RSA_NO_PADDING: |
Andy Polyakov | 582ad5d | 2018-02-04 15:24:54 +0100 | [diff] [blame] | 592 | memcpy(to, buf, (r = i)); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 593 | break; |
| 594 | default: |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 595 | ERR_raise(ERR_LIB_RSA, RSA_R_UNKNOWN_PADDING_TYPE); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 596 | goto err; |
| 597 | } |
| 598 | if (r < 0) |
Richard Levitte | 9311d0c | 2020-11-04 12:23:19 +0100 | [diff] [blame] | 599 | ERR_raise(ERR_LIB_RSA, RSA_R_PADDING_CHECK_FAILED); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 600 | |
| 601 | err: |
Shane Lontis | ce1415e | 2019-03-19 09:58:09 +1000 | [diff] [blame] | 602 | BN_CTX_end(ctx); |
Rich Salz | 23a1d5e | 2015-04-30 21:37:06 -0400 | [diff] [blame] | 603 | BN_CTX_free(ctx); |
Rich Salz | 4b45c6e | 2015-04-30 17:57:32 -0400 | [diff] [blame] | 604 | OPENSSL_clear_free(buf, num); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 605 | return r; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 606 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 607 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 608 | static int rsa_ossl_mod_exp(BIGNUM *r0, const BIGNUM *I, RSA *rsa, BN_CTX *ctx) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 609 | { |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 610 | BIGNUM *r1, *m1, *vrfy; |
| 611 | int ret = 0, smooth = 0; |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 612 | #ifndef FIPS_MODULE |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 613 | BIGNUM *r2, *m[RSA_MAX_PRIME_NUM - 2]; |
| 614 | int i, ex_primes = 0; |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 615 | RSA_PRIME_INFO *pinfo; |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 616 | #endif |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 617 | |
Pascal Cuoq | c804d23 | 2015-05-05 11:20:39 +0200 | [diff] [blame] | 618 | BN_CTX_start(ctx); |
| 619 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 620 | r1 = BN_CTX_get(ctx); |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 621 | #ifndef FIPS_MODULE |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 622 | r2 = BN_CTX_get(ctx); |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 623 | #endif |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 624 | m1 = BN_CTX_get(ctx); |
| 625 | vrfy = BN_CTX_get(ctx); |
Bernd Edlinger | 5625567 | 2017-06-13 22:08:03 +0200 | [diff] [blame] | 626 | if (vrfy == NULL) |
| 627 | goto err; |
Matt Caswell | 18125f7 | 2014-10-28 23:00:29 +0000 | [diff] [blame] | 628 | |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 629 | #ifndef FIPS_MODULE |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 630 | if (rsa->version == RSA_ASN1_VERSION_MULTI |
Bernd Edlinger | a147158 | 2017-12-07 13:03:15 +0100 | [diff] [blame] | 631 | && ((ex_primes = sk_RSA_PRIME_INFO_num(rsa->prime_infos)) <= 0 |
| 632 | || ex_primes > RSA_MAX_PRIME_NUM - 2)) |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 633 | goto err; |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 634 | #endif |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 635 | |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 636 | if (rsa->flags & RSA_FLAG_CACHE_PRIVATE) { |
| 637 | BIGNUM *factor = BN_new(); |
| 638 | |
| 639 | if (factor == NULL) |
| 640 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 641 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 642 | /* |
FdaSilvaYY | 0d4fb84 | 2016-02-05 15:23:54 -0500 | [diff] [blame] | 643 | * Make sure BN_mod_inverse in Montgomery initialization uses the |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 644 | * BN_FLG_CONSTTIME flag |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 645 | */ |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 646 | if (!(BN_with_flags(factor, rsa->p, BN_FLG_CONSTTIME), |
| 647 | BN_MONT_CTX_set_locked(&rsa->_method_mod_p, rsa->lock, |
| 648 | factor, ctx)) |
| 649 | || !(BN_with_flags(factor, rsa->q, BN_FLG_CONSTTIME), |
| 650 | BN_MONT_CTX_set_locked(&rsa->_method_mod_q, rsa->lock, |
| 651 | factor, ctx))) { |
| 652 | BN_free(factor); |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 653 | goto err; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 654 | } |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 655 | #ifndef FIPS_MODULE |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 656 | for (i = 0; i < ex_primes; i++) { |
| 657 | pinfo = sk_RSA_PRIME_INFO_value(rsa->prime_infos, i); |
| 658 | BN_with_flags(factor, pinfo->r, BN_FLG_CONSTTIME); |
| 659 | if (!BN_MONT_CTX_set_locked(&pinfo->m, rsa->lock, factor, ctx)) { |
| 660 | BN_free(factor); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 661 | goto err; |
| 662 | } |
| 663 | } |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 664 | #endif |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 665 | /* |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 666 | * We MUST free |factor| before any further use of the prime factors |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 667 | */ |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 668 | BN_free(factor); |
| 669 | |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 670 | smooth = (rsa->meth->bn_mod_exp == BN_mod_exp_mont) |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 671 | #ifndef FIPS_MODULE |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 672 | && (ex_primes == 0) |
| 673 | #endif |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 674 | && (BN_num_bits(rsa->q) == BN_num_bits(rsa->p)); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 675 | } |
Bodo Möller | 7c9882e | 2008-02-27 06:01:28 +0000 | [diff] [blame] | 676 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 677 | if (rsa->flags & RSA_FLAG_CACHE_PUBLIC) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 678 | if (!BN_MONT_CTX_set_locked(&rsa->_method_mod_n, rsa->lock, |
| 679 | rsa->n, ctx)) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 680 | goto err; |
Bodo Möller | 7c9882e | 2008-02-27 06:01:28 +0000 | [diff] [blame] | 681 | |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 682 | if (smooth) { |
| 683 | /* |
| 684 | * Conversion from Montgomery domain, a.k.a. Montgomery reduction, |
| 685 | * accepts values in [0-m*2^w) range. w is m's bit width rounded up |
| 686 | * to limb width. So that at the very least if |I| is fully reduced, |
| 687 | * i.e. less than p*q, we can count on from-to round to perform |
| 688 | * below modulo operations on |I|. Unlike BN_mod it's constant time. |
| 689 | */ |
| 690 | if (/* m1 = I moq q */ |
| 691 | !bn_from_mont_fixed_top(m1, I, rsa->_method_mod_q, ctx) |
| 692 | || !bn_to_mont_fixed_top(m1, m1, rsa->_method_mod_q, ctx) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 693 | /* r1 = I mod p */ |
| 694 | || !bn_from_mont_fixed_top(r1, I, rsa->_method_mod_p, ctx) |
| 695 | || !bn_to_mont_fixed_top(r1, r1, rsa->_method_mod_p, ctx) |
Andrey Matyukov | c781eb1 | 2020-12-08 22:53:39 +0300 | [diff] [blame] | 696 | /* |
| 697 | * Use parallel exponentiations optimization if possible, |
| 698 | * otherwise fallback to two sequential exponentiations: |
| 699 | * m1 = m1^dmq1 mod q |
| 700 | * r1 = r1^dmp1 mod p |
| 701 | */ |
| 702 | || !BN_mod_exp_mont_consttime_x2(m1, m1, rsa->dmq1, rsa->q, |
| 703 | rsa->_method_mod_q, |
| 704 | r1, r1, rsa->dmp1, rsa->p, |
| 705 | rsa->_method_mod_p, |
| 706 | ctx) |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 707 | /* r1 = (r1 - m1) mod p */ |
| 708 | /* |
| 709 | * bn_mod_sub_fixed_top is not regular modular subtraction, |
| 710 | * it can tolerate subtrahend to be larger than modulus, but |
| 711 | * not bit-wise wider. This makes up for uncommon q>p case, |
| 712 | * when |m1| can be larger than |rsa->p|. |
| 713 | */ |
| 714 | || !bn_mod_sub_fixed_top(r1, r1, m1, rsa->p) |
| 715 | |
Andy Polyakov | d1c008f | 2018-09-05 14:33:21 +0200 | [diff] [blame] | 716 | /* r1 = r1 * iqmp mod p */ |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 717 | || !bn_to_mont_fixed_top(r1, r1, rsa->_method_mod_p, ctx) |
| 718 | || !bn_mul_mont_fixed_top(r1, r1, rsa->iqmp, rsa->_method_mod_p, |
| 719 | ctx) |
Andy Polyakov | d1c008f | 2018-09-05 14:33:21 +0200 | [diff] [blame] | 720 | /* r0 = r1 * q + m1 */ |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 721 | || !bn_mul_fixed_top(r0, r1, rsa->q, ctx) |
| 722 | || !bn_mod_add_fixed_top(r0, r0, m1, rsa->n)) |
| 723 | goto err; |
| 724 | |
| 725 | goto tail; |
| 726 | } |
| 727 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 728 | /* compute I mod q */ |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 729 | { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 730 | BIGNUM *c = BN_new(); |
| 731 | if (c == NULL) |
| 732 | goto err; |
| 733 | BN_with_flags(c, I, BN_FLG_CONSTTIME); |
| 734 | |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 735 | if (!BN_mod(r1, c, rsa->q, ctx)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 736 | BN_free(c); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 737 | goto err; |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 738 | } |
| 739 | |
| 740 | { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 741 | BIGNUM *dmq1 = BN_new(); |
| 742 | if (dmq1 == NULL) { |
| 743 | BN_free(c); |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 744 | goto err; |
| 745 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 746 | BN_with_flags(dmq1, rsa->dmq1, BN_FLG_CONSTTIME); |
| 747 | |
| 748 | /* compute r1^dmq1 mod q */ |
| 749 | if (!rsa->meth->bn_mod_exp(m1, r1, dmq1, rsa->q, ctx, |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 750 | rsa->_method_mod_q)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 751 | BN_free(c); |
| 752 | BN_free(dmq1); |
| 753 | goto err; |
| 754 | } |
| 755 | /* We MUST free dmq1 before any further use of rsa->dmq1 */ |
| 756 | BN_free(dmq1); |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 757 | } |
| 758 | |
| 759 | /* compute I mod p */ |
| 760 | if (!BN_mod(r1, c, rsa->p, ctx)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 761 | BN_free(c); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 762 | goto err; |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 763 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 764 | /* We MUST free c before any further use of I */ |
| 765 | BN_free(c); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 766 | } |
Bodo Möller | 126fe08 | 2000-12-19 12:31:41 +0000 | [diff] [blame] | 767 | |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 768 | { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 769 | BIGNUM *dmp1 = BN_new(); |
| 770 | if (dmp1 == NULL) |
| 771 | goto err; |
| 772 | BN_with_flags(dmp1, rsa->dmp1, BN_FLG_CONSTTIME); |
| 773 | |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 774 | /* compute r1^dmp1 mod p */ |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 775 | if (!rsa->meth->bn_mod_exp(r0, r1, dmp1, rsa->p, ctx, |
| 776 | rsa->_method_mod_p)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 777 | BN_free(dmp1); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 778 | goto err; |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 779 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 780 | /* We MUST free dmp1 before any further use of rsa->dmp1 */ |
| 781 | BN_free(dmp1); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 782 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 783 | |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 784 | #ifndef FIPS_MODULE |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 785 | if (ex_primes > 0) { |
| 786 | BIGNUM *di = BN_new(), *cc = BN_new(); |
| 787 | |
| 788 | if (cc == NULL || di == NULL) { |
| 789 | BN_free(cc); |
| 790 | BN_free(di); |
| 791 | goto err; |
| 792 | } |
| 793 | |
| 794 | for (i = 0; i < ex_primes; i++) { |
| 795 | /* prepare m_i */ |
| 796 | if ((m[i] = BN_CTX_get(ctx)) == NULL) { |
| 797 | BN_free(cc); |
| 798 | BN_free(di); |
| 799 | goto err; |
| 800 | } |
| 801 | |
| 802 | pinfo = sk_RSA_PRIME_INFO_value(rsa->prime_infos, i); |
| 803 | |
| 804 | /* prepare c and d_i */ |
| 805 | BN_with_flags(cc, I, BN_FLG_CONSTTIME); |
| 806 | BN_with_flags(di, pinfo->d, BN_FLG_CONSTTIME); |
| 807 | |
| 808 | if (!BN_mod(r1, cc, pinfo->r, ctx)) { |
| 809 | BN_free(cc); |
| 810 | BN_free(di); |
| 811 | goto err; |
| 812 | } |
| 813 | /* compute r1 ^ d_i mod r_i */ |
| 814 | if (!rsa->meth->bn_mod_exp(m[i], r1, di, pinfo->r, ctx, pinfo->m)) { |
| 815 | BN_free(cc); |
| 816 | BN_free(di); |
| 817 | goto err; |
| 818 | } |
| 819 | } |
| 820 | |
| 821 | BN_free(cc); |
| 822 | BN_free(di); |
| 823 | } |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 824 | #endif |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 825 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 826 | if (!BN_sub(r0, r0, m1)) |
| 827 | goto err; |
| 828 | /* |
| 829 | * This will help stop the size of r0 increasing, which does affect the |
| 830 | * multiply if it optimised for a power of 2 size |
| 831 | */ |
| 832 | if (BN_is_negative(r0)) |
| 833 | if (!BN_add(r0, r0, rsa->p)) |
| 834 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 835 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 836 | if (!BN_mul(r1, r0, rsa->iqmp, ctx)) |
| 837 | goto err; |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 838 | |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 839 | { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 840 | BIGNUM *pr1 = BN_new(); |
| 841 | if (pr1 == NULL) |
| 842 | goto err; |
| 843 | BN_with_flags(pr1, r1, BN_FLG_CONSTTIME); |
| 844 | |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 845 | if (!BN_mod(r0, pr1, rsa->p, ctx)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 846 | BN_free(pr1); |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 847 | goto err; |
| 848 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 849 | /* We MUST free pr1 before any further use of r1 */ |
| 850 | BN_free(pr1); |
Matt Caswell | fd7d252 | 2015-11-24 11:09:00 +0000 | [diff] [blame] | 851 | } |
Bodo Möller | bd31fb2 | 2007-03-28 00:15:28 +0000 | [diff] [blame] | 852 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 853 | /* |
| 854 | * If p < q it is occasionally possible for the correction of adding 'p' |
| 855 | * if r0 is negative above to leave the result still negative. This can |
| 856 | * break the private key operations: the following second correction |
| 857 | * should *always* correct this rare occurrence. This will *never* happen |
| 858 | * with OpenSSL generated keys because they ensure p > q [steve] |
| 859 | */ |
| 860 | if (BN_is_negative(r0)) |
| 861 | if (!BN_add(r0, r0, rsa->p)) |
| 862 | goto err; |
| 863 | if (!BN_mul(r1, r0, rsa->q, ctx)) |
| 864 | goto err; |
| 865 | if (!BN_add(r0, r1, m1)) |
| 866 | goto err; |
Bodo Möller | bd31fb2 | 2007-03-28 00:15:28 +0000 | [diff] [blame] | 867 | |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 868 | #ifndef FIPS_MODULE |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 869 | /* add m_i to m in multi-prime case */ |
| 870 | if (ex_primes > 0) { |
| 871 | BIGNUM *pr2 = BN_new(); |
| 872 | |
| 873 | if (pr2 == NULL) |
| 874 | goto err; |
| 875 | |
| 876 | for (i = 0; i < ex_primes; i++) { |
| 877 | pinfo = sk_RSA_PRIME_INFO_value(rsa->prime_infos, i); |
| 878 | if (!BN_sub(r1, m[i], r0)) { |
| 879 | BN_free(pr2); |
| 880 | goto err; |
| 881 | } |
| 882 | |
| 883 | if (!BN_mul(r2, r1, pinfo->t, ctx)) { |
| 884 | BN_free(pr2); |
| 885 | goto err; |
| 886 | } |
| 887 | |
| 888 | BN_with_flags(pr2, r2, BN_FLG_CONSTTIME); |
| 889 | |
| 890 | if (!BN_mod(r1, pr2, pinfo->r, ctx)) { |
| 891 | BN_free(pr2); |
| 892 | goto err; |
| 893 | } |
| 894 | |
| 895 | if (BN_is_negative(r1)) |
| 896 | if (!BN_add(r1, r1, pinfo->r)) { |
| 897 | BN_free(pr2); |
| 898 | goto err; |
| 899 | } |
| 900 | if (!BN_mul(r1, r1, pinfo->pp, ctx)) { |
| 901 | BN_free(pr2); |
| 902 | goto err; |
| 903 | } |
| 904 | if (!BN_add(r0, r0, r1)) { |
| 905 | BN_free(pr2); |
| 906 | goto err; |
| 907 | } |
| 908 | } |
| 909 | BN_free(pr2); |
| 910 | } |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 911 | #endif |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 912 | |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 913 | tail: |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 914 | if (rsa->e && rsa->n) { |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 915 | if (rsa->meth->bn_mod_exp == BN_mod_exp_mont) { |
| 916 | if (!BN_mod_exp_mont(vrfy, r0, rsa->e, rsa->n, ctx, |
| 917 | rsa->_method_mod_n)) |
| 918 | goto err; |
| 919 | } else { |
| 920 | bn_correct_top(r0); |
| 921 | if (!rsa->meth->bn_mod_exp(vrfy, r0, rsa->e, rsa->n, ctx, |
| 922 | rsa->_method_mod_n)) |
| 923 | goto err; |
| 924 | } |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 925 | /* |
| 926 | * If 'I' was greater than (or equal to) rsa->n, the operation will |
| 927 | * be equivalent to using 'I mod n'. However, the result of the |
| 928 | * verify will *always* be less than 'n' so we don't check for |
| 929 | * absolute equality, just congruency. |
| 930 | */ |
| 931 | if (!BN_sub(vrfy, vrfy, I)) |
| 932 | goto err; |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 933 | if (BN_is_zero(vrfy)) { |
| 934 | bn_correct_top(r0); |
| 935 | ret = 1; |
| 936 | goto err; /* not actually error */ |
| 937 | } |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 938 | if (!BN_mod(vrfy, vrfy, rsa->n, ctx)) |
| 939 | goto err; |
| 940 | if (BN_is_negative(vrfy)) |
| 941 | if (!BN_add(vrfy, vrfy, rsa->n)) |
| 942 | goto err; |
| 943 | if (!BN_is_zero(vrfy)) { |
| 944 | /* |
| 945 | * 'I' and 'vrfy' aren't congruent mod n. Don't leak |
| 946 | * miscalculated CRT output, just do a raw (slower) mod_exp and |
| 947 | * return that instead. |
| 948 | */ |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 949 | |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 950 | BIGNUM *d = BN_new(); |
| 951 | if (d == NULL) |
| 952 | goto err; |
| 953 | BN_with_flags(d, rsa->d, BN_FLG_CONSTTIME); |
Bodo Möller | 46a6437 | 2005-05-16 01:43:31 +0000 | [diff] [blame] | 954 | |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 955 | if (!rsa->meth->bn_mod_exp(r0, I, d, rsa->n, ctx, |
| 956 | rsa->_method_mod_n)) { |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 957 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 958 | goto err; |
| 959 | } |
Matt Caswell | 5584f65 | 2016-05-26 10:55:11 +0100 | [diff] [blame] | 960 | /* We MUST free d before any further use of rsa->d */ |
| 961 | BN_free(d); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 962 | } |
| 963 | } |
Andy Polyakov | 41bfd5e | 2018-08-10 19:46:03 +0200 | [diff] [blame] | 964 | /* |
| 965 | * It's unfortunate that we have to bn_correct_top(r0). What hopefully |
| 966 | * saves the day is that correction is highly unlike, and private key |
| 967 | * operations are customarily performed on blinded message. Which means |
| 968 | * that attacker won't observe correlation with chosen plaintext. |
| 969 | * Secondly, remaining code would still handle it in same computational |
| 970 | * time and even conceal memory access pattern around corrected top. |
| 971 | */ |
| 972 | bn_correct_top(r0); |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 973 | ret = 1; |
| 974 | err: |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 975 | BN_CTX_end(ctx); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 976 | return ret; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 977 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 978 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 979 | static int rsa_ossl_init(RSA *rsa) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 980 | { |
| 981 | rsa->flags |= RSA_FLAG_CACHE_PUBLIC | RSA_FLAG_CACHE_PRIVATE; |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 982 | return 1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 983 | } |
Ralf S. Engelschall | 58964a4 | 1998-12-21 10:56:39 +0000 | [diff] [blame] | 984 | |
Rich Salz | bf16055 | 2015-11-01 19:55:56 -0500 | [diff] [blame] | 985 | static int rsa_ossl_finish(RSA *rsa) |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 986 | { |
Richard Levitte | f844f9e | 2020-04-13 22:34:56 +0200 | [diff] [blame] | 987 | #ifndef FIPS_MODULE |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 988 | int i; |
| 989 | RSA_PRIME_INFO *pinfo; |
| 990 | |
Paul Yang | 665d899 | 2017-08-02 02:19:43 +0800 | [diff] [blame] | 991 | for (i = 0; i < sk_RSA_PRIME_INFO_num(rsa->prime_infos); i++) { |
| 992 | pinfo = sk_RSA_PRIME_INFO_value(rsa->prime_infos, i); |
| 993 | BN_MONT_CTX_free(pinfo->m); |
| 994 | } |
Matt Caswell | afb638f | 2020-01-17 14:47:18 +0000 | [diff] [blame] | 995 | #endif |
| 996 | |
| 997 | BN_MONT_CTX_free(rsa->_method_mod_n); |
| 998 | BN_MONT_CTX_free(rsa->_method_mod_p); |
| 999 | BN_MONT_CTX_free(rsa->_method_mod_q); |
Paul Yang | 8686c47 | 2017-08-23 01:25:23 +0800 | [diff] [blame] | 1000 | return 1; |
Matt Caswell | 0f113f3 | 2015-01-22 03:40:55 +0000 | [diff] [blame] | 1001 | } |