/*
 * Copyright 1995-2016 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") is developed by
 * Nokia Corporation and is licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE.
 */

#include <stdio.h>
#include "ssl_locl.h"
#include <openssl/evp.h>
#include <openssl/md5.h>

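/*
 * Generate the SSLv3 key block.  Per the SSLv3 KDF (RFC 6101), the key
 * material is produced one MD5-sized chunk at a time:
 *
 *   block[i] = MD5(master_secret ||
 *                  SHA1(salt || master_secret ||
 *                       server_random || client_random))
 *
 * where salt is 'A', 'BB', 'CCC', ... for successive chunks.
 */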
static int ssl3_generate_key_block(SSL *s, unsigned char *km, int num)
{
    EVP_MD_CTX *m5;
    EVP_MD_CTX *s1;
    unsigned char buf[16], smd[SHA_DIGEST_LENGTH];
    unsigned char c = 'A';
    unsigned int i, j, k;
    int ret = 0;

#ifdef CHARSET_EBCDIC
    c = os_toascii[c];          /* 'A' in ASCII */
#endif
    k = 0;
    m5 = EVP_MD_CTX_new();
    s1 = EVP_MD_CTX_new();
    if (m5 == NULL || s1 == NULL) {
        SSLerr(SSL_F_SSL3_GENERATE_KEY_BLOCK, ERR_R_MALLOC_FAILURE);
        goto err;
    }
    EVP_MD_CTX_set_flags(m5, EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
    for (i = 0; (int)i < num; i += MD5_DIGEST_LENGTH) {
        k++;
        if (k > sizeof(buf)) {
            /* bug: 'buf' is too small for this ciphersuite */
            SSLerr(SSL_F_SSL3_GENERATE_KEY_BLOCK, ERR_R_INTERNAL_ERROR);
            goto err;
        }

        for (j = 0; j < k; j++)
            buf[j] = c;
        c++;
        if (!EVP_DigestInit_ex(s1, EVP_sha1(), NULL)
            || !EVP_DigestUpdate(s1, buf, k)
            || !EVP_DigestUpdate(s1, s->session->master_key,
                                 s->session->master_key_length)
            || !EVP_DigestUpdate(s1, s->s3->server_random, SSL3_RANDOM_SIZE)
            || !EVP_DigestUpdate(s1, s->s3->client_random, SSL3_RANDOM_SIZE)
            || !EVP_DigestFinal_ex(s1, smd, NULL)
            || !EVP_DigestInit_ex(m5, EVP_md5(), NULL)
            || !EVP_DigestUpdate(m5, s->session->master_key,
                                 s->session->master_key_length)
            || !EVP_DigestUpdate(m5, smd, SHA_DIGEST_LENGTH))
            goto err;
        if ((int)(i + MD5_DIGEST_LENGTH) > num) {
            if (!EVP_DigestFinal_ex(m5, smd, NULL))
                goto err;
            memcpy(km, smd, (num - i));
        } else {
            if (!EVP_DigestFinal_ex(m5, km, NULL))
                goto err;
        }

        km += MD5_DIGEST_LENGTH;
    }
    OPENSSL_cleanse(smd, sizeof(smd));
    ret = 1;
 err:
    EVP_MD_CTX_free(m5);
    EVP_MD_CTX_free(s1);
    return ret;
}

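/*
 * Install the pending read or write cipher state selected by 'which'.
 * The key block produced by ssl3_generate_key_block() is laid out as
 *
 *   client_write_MAC_secret | server_write_MAC_secret |
 *   client_write_key        | server_write_key        |
 *   client_write_IV         | server_write_IV
 *
 * so the offsets computed below depend only on which of the four
 * client/server read/write states is being keyed.
 */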
int ssl3_change_cipher_state(SSL *s, int which)
{
    unsigned char *p, *mac_secret;
    unsigned char exp_key[EVP_MAX_KEY_LENGTH];
    unsigned char exp_iv[EVP_MAX_IV_LENGTH];
    unsigned char *ms, *key, *iv;
    EVP_CIPHER_CTX *dd;
    const EVP_CIPHER *c;
#ifndef OPENSSL_NO_COMP
    COMP_METHOD *comp;
#endif
    const EVP_MD *m;
    int mdi;
    size_t n, i, j, k, cl;
    int reuse_dd = 0;

    c = s->s3->tmp.new_sym_enc;
    m = s->s3->tmp.new_hash;
    /* m == NULL will lead to a crash later */
    OPENSSL_assert(m);
#ifndef OPENSSL_NO_COMP
    if (s->s3->tmp.new_compression == NULL)
        comp = NULL;
    else
        comp = s->s3->tmp.new_compression->method;
#endif

    if (which & SSL3_CC_READ) {
        if (s->enc_read_ctx != NULL)
            reuse_dd = 1;
        else if ((s->enc_read_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        else
            /*
             * make sure it's initialised in case we exit later with an error
             */
            EVP_CIPHER_CTX_reset(s->enc_read_ctx);
        dd = s->enc_read_ctx;

        if (ssl_replace_hash(&s->read_hash, m) == NULL) {
            SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
            goto err2;
        }
#ifndef OPENSSL_NO_COMP
        /* COMPRESS */
        COMP_CTX_free(s->expand);
        s->expand = NULL;
        if (comp != NULL) {
            s->expand = COMP_CTX_new(comp);
            if (s->expand == NULL) {
                SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE,
                       SSL_R_COMPRESSION_LIBRARY_ERROR);
                goto err2;
            }
        }
#endif
        RECORD_LAYER_reset_read_sequence(&s->rlayer);
        mac_secret = &(s->s3->read_mac_secret[0]);
    } else {
        if (s->enc_write_ctx != NULL)
            reuse_dd = 1;
        else if ((s->enc_write_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        else
            /*
             * make sure it's initialised in case we exit later with an error
             */
            EVP_CIPHER_CTX_reset(s->enc_write_ctx);
        dd = s->enc_write_ctx;
        if (ssl_replace_hash(&s->write_hash, m) == NULL) {
            SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
            goto err2;
        }
#ifndef OPENSSL_NO_COMP
        /* COMPRESS */
        COMP_CTX_free(s->compress);
        s->compress = NULL;
        if (comp != NULL) {
            s->compress = COMP_CTX_new(comp);
            if (s->compress == NULL) {
                SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE,
                       SSL_R_COMPRESSION_LIBRARY_ERROR);
                goto err2;
            }
        }
#endif
        RECORD_LAYER_reset_write_sequence(&s->rlayer);
        mac_secret = &(s->s3->write_mac_secret[0]);
    }

    if (reuse_dd)
        EVP_CIPHER_CTX_reset(dd);

    p = s->s3->tmp.key_block;
    mdi = EVP_MD_size(m);
    if (mdi < 0)
        goto err2;
    i = mdi;
    cl = EVP_CIPHER_key_length(c);
    j = cl;
    k = EVP_CIPHER_iv_length(c);
    if ((which == SSL3_CHANGE_CIPHER_CLIENT_WRITE) ||
        (which == SSL3_CHANGE_CIPHER_SERVER_READ)) {
        ms = &(p[0]);
        n = i + i;
        key = &(p[n]);
        n += j + j;
        iv = &(p[n]);
        n += k + k;
    } else {
        n = i;
        ms = &(p[n]);
        n += i + j;
        key = &(p[n]);
        n += j + k;
        iv = &(p[n]);
        n += k;
    }

    if (n > s->s3->tmp.key_block_length) {
        SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
        goto err2;
    }

    memcpy(mac_secret, ms, i);

    if (!EVP_CipherInit_ex(dd, c, NULL, key, iv, (which & SSL3_CC_WRITE)))
        goto err2;

#ifdef OPENSSL_SSL_TRACE_CRYPTO
    if (s->msg_callback) {
        int wh = which & SSL3_CC_WRITE ?
            TLS1_RT_CRYPTO_WRITE : TLS1_RT_CRYPTO_READ;
        s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_MAC,
                        mac_secret, EVP_MD_size(m), s, s->msg_callback_arg);
        if (c->key_len)
            s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_KEY,
                            key, c->key_len, s, s->msg_callback_arg);
        if (k) {
            s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_IV,
                            iv, k, s, s->msg_callback_arg);
        }
    }
#endif

    OPENSSL_cleanse(exp_key, sizeof(exp_key));
    OPENSSL_cleanse(exp_iv, sizeof(exp_iv));
    return (1);
 err:
    SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_MALLOC_FAILURE);
 err2:
    OPENSSL_cleanse(exp_key, sizeof(exp_key));
    OPENSSL_cleanse(exp_iv, sizeof(exp_iv));
    return (0);
}

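/*
 * Size and derive the key block for the pending cipher suite: two MAC
 * secrets, two keys and two IVs (one of each per direction), hence
 * 2 * (digest size + key length + IV length) bytes in total.
 */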
int ssl3_setup_key_block(SSL *s)
{
    unsigned char *p;
    const EVP_CIPHER *c;
    const EVP_MD *hash;
    int num;
    int ret = 0;
    SSL_COMP *comp;

    if (s->s3->tmp.key_block_length != 0)
        return (1);

    if (!ssl_cipher_get_evp(s->session, &c, &hash, NULL, NULL, &comp, 0)) {
        SSLerr(SSL_F_SSL3_SETUP_KEY_BLOCK, SSL_R_CIPHER_OR_HASH_UNAVAILABLE);
        return (0);
    }

    s->s3->tmp.new_sym_enc = c;
    s->s3->tmp.new_hash = hash;
#ifdef OPENSSL_NO_COMP
    s->s3->tmp.new_compression = NULL;
#else
    s->s3->tmp.new_compression = comp;
#endif

    num = EVP_MD_size(hash);
    if (num < 0)
        return 0;

    num = EVP_CIPHER_key_length(c) + num + EVP_CIPHER_iv_length(c);
    num *= 2;

    ssl3_cleanup_key_block(s);

    if ((p = OPENSSL_malloc(num)) == NULL)
        goto err;

    s->s3->tmp.key_block_length = num;
    s->s3->tmp.key_block = p;

    ret = ssl3_generate_key_block(s, p, num);

    if (!(s->options & SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS)) {
        /*
         * enable vulnerability countermeasure for CBC ciphers with known-IV
         * problem (http://www.openssl.org/~bodo/tls-cbc.txt)
         */
        s->s3->need_empty_fragments = 1;

        if (s->session->cipher != NULL) {
            if (s->session->cipher->algorithm_enc == SSL_eNULL)
                s->s3->need_empty_fragments = 0;

#ifndef OPENSSL_NO_RC4
            if (s->session->cipher->algorithm_enc == SSL_RC4)
                s->s3->need_empty_fragments = 0;
#endif
        }
    }

    return ret;

 err:
    SSLerr(SSL_F_SSL3_SETUP_KEY_BLOCK, ERR_R_MALLOC_FAILURE);
    return (0);
}

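/*
 * Securely erase and release the key block once it is no longer needed.
 */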
void ssl3_cleanup_key_block(SSL *s)
{
    OPENSSL_clear_free(s->s3->tmp.key_block, s->s3->tmp.key_block_length);
    s->s3->tmp.key_block = NULL;
    s->s3->tmp.key_block_length = 0;
}

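/*
 * Begin accumulating handshake messages for the Finished computation.
 * Until the digest for the session is known, the raw messages are kept
 * in a memory BIO; ssl3_digest_cached_records() later replays them into
 * the actual handshake digest.
 */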
int ssl3_init_finished_mac(SSL *s)
{
    BIO *buf = BIO_new(BIO_s_mem());

    if (buf == NULL) {
        SSLerr(SSL_F_SSL3_INIT_FINISHED_MAC, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    ssl3_free_digest_list(s);
    s->s3->handshake_buffer = buf;
    (void)BIO_set_close(s->s3->handshake_buffer, BIO_CLOSE);
    return 1;
}

/*
 * Free digest list. Also frees handshake buffer since they are always freed
 * together.
 */

void ssl3_free_digest_list(SSL *s)
{
    BIO_free(s->s3->handshake_buffer);
    s->s3->handshake_buffer = NULL;
    EVP_MD_CTX_free(s->s3->handshake_dgst);
    s->s3->handshake_dgst = NULL;
}

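/*
 * Add a handshake fragment to the transcript: buffer it if the digest
 * has not been selected yet, otherwise hash it directly.
 */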
int ssl3_finish_mac(SSL *s, const unsigned char *buf, size_t len)
{
    if (s->s3->handshake_dgst == NULL) {
        int ret;
        /* Note: this writes to a memory BIO so a failure is a fatal error */
        if (len > INT_MAX)
            return 0;
        ret = BIO_write(s->s3->handshake_buffer, (void *)buf, (int)len);
        return ret > 0 && ret == (int)len;
    } else {
        return EVP_DigestUpdate(s->s3->handshake_dgst, buf, len);
    }
}

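/*
 * Switch from buffering the handshake transcript to hashing it.  The
 * cached records are replayed into a freshly initialised digest context.
 * Pass keep == 1 to retain the buffer as well, e.g. while a later
 * message may still need the full transcript.
 */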
int ssl3_digest_cached_records(SSL *s, int keep)
{
    const EVP_MD *md;
    long hdatalen;
    void *hdata;

    if (s->s3->handshake_dgst == NULL) {
        hdatalen = BIO_get_mem_data(s->s3->handshake_buffer, &hdata);
        if (hdatalen <= 0) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS,
                   SSL_R_BAD_HANDSHAKE_LENGTH);
            return 0;
        }

        s->s3->handshake_dgst = EVP_MD_CTX_new();
        if (s->s3->handshake_dgst == NULL) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS, ERR_R_MALLOC_FAILURE);
            return 0;
        }

        md = ssl_handshake_md(s);
        if (md == NULL || !EVP_DigestInit_ex(s->s3->handshake_dgst, md, NULL)
            || !EVP_DigestUpdate(s->s3->handshake_dgst, hdata, hdatalen)) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS, ERR_R_INTERNAL_ERROR);
            return 0;
        }
    }
    if (keep == 0) {
        BIO_free(s->s3->handshake_buffer);
        s->s3->handshake_buffer = NULL;
    }

    return 1;
}

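/*
 * Compute the SSLv3 Finished/CertificateVerify MAC from a copy of the
 * MD5+SHA1 handshake hash.  The optional sender label is hashed first;
 * the SSLv3-specific padding with the master secret is applied by the
 * EVP_CTRL_SSL3_MASTER_SECRET control.  Returns the MAC size, or 0 on
 * error.
 */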
size_t ssl3_final_finish_mac(SSL *s, const char *sender, size_t len,
                             unsigned char *p)
{
    int ret;
    EVP_MD_CTX *ctx = NULL;

    if (!ssl3_digest_cached_records(s, 0))
        return 0;

    if (EVP_MD_CTX_type(s->s3->handshake_dgst) != NID_md5_sha1) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, SSL_R_NO_REQUIRED_DIGEST);
        return 0;
    }

    ctx = EVP_MD_CTX_new();
    if (ctx == NULL) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    if (!EVP_MD_CTX_copy_ex(ctx, s->s3->handshake_dgst)) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_INTERNAL_ERROR);
        /* free the copied context on failure rather than leaking it */
        EVP_MD_CTX_free(ctx);
        return 0;
    }

    ret = EVP_MD_CTX_size(ctx);
    if (ret < 0) {
        /* a reset alone would leak the context; free it outright */
        EVP_MD_CTX_free(ctx);
        return 0;
    }

    if ((sender != NULL && EVP_DigestUpdate(ctx, sender, len) <= 0)
        || EVP_MD_CTX_ctrl(ctx, EVP_CTRL_SSL3_MASTER_SECRET,
                           (int)s->session->master_key_length,
                           s->session->master_key) <= 0
        || EVP_DigestFinal_ex(ctx, p, NULL) <= 0) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_INTERNAL_ERROR);
        ret = 0;
    }

    EVP_MD_CTX_free(ctx);

    return ret;
}

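/*
 * Derive the 48-byte SSLv3 master secret from the premaster secret 'p'
 * (RFC 6101):
 *
 *   master = MD5(pre || SHA1('A'   || pre || client_random || server_random)) ||
 *            MD5(pre || SHA1('BB'  || pre || client_random || server_random)) ||
 *            MD5(pre || SHA1('CCC' || pre || client_random || server_random))
 */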
int ssl3_generate_master_secret(SSL *s, unsigned char *out, unsigned char *p,
                                size_t len, size_t *secret_size)
{
    static const unsigned char *salt[3] = {
#ifndef CHARSET_EBCDIC
        (const unsigned char *)"A",
        (const unsigned char *)"BB",
        (const unsigned char *)"CCC",
#else
        (const unsigned char *)"\x41",
        (const unsigned char *)"\x42\x42",
        (const unsigned char *)"\x43\x43\x43",
#endif
    };
    unsigned char buf[EVP_MAX_MD_SIZE];
    EVP_MD_CTX *ctx = EVP_MD_CTX_new();
    int i, ret = 1;
    unsigned int n;
#ifdef OPENSSL_SSL_TRACE_CRYPTO
    unsigned char *tmpout = out;
#endif
    size_t ret_secret_size = 0;

    if (ctx == NULL) {
        SSLerr(SSL_F_SSL3_GENERATE_MASTER_SECRET, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    for (i = 0; i < 3; i++) {
        if (EVP_DigestInit_ex(ctx, s->ctx->sha1, NULL) <= 0
            || EVP_DigestUpdate(ctx, salt[i],
                                strlen((const char *)salt[i])) <= 0
            || EVP_DigestUpdate(ctx, p, len) <= 0
            || EVP_DigestUpdate(ctx, &(s->s3->client_random[0]),
                                SSL3_RANDOM_SIZE) <= 0
            || EVP_DigestUpdate(ctx, &(s->s3->server_random[0]),
                                SSL3_RANDOM_SIZE) <= 0
               /* TODO(size_t) : convert me */
            || EVP_DigestFinal_ex(ctx, buf, &n) <= 0
            || EVP_DigestInit_ex(ctx, s->ctx->md5, NULL) <= 0
            || EVP_DigestUpdate(ctx, p, len) <= 0
            || EVP_DigestUpdate(ctx, buf, n) <= 0
            || EVP_DigestFinal_ex(ctx, out, &n) <= 0) {
            SSLerr(SSL_F_SSL3_GENERATE_MASTER_SECRET, ERR_R_INTERNAL_ERROR);
            ret = 0;
            break;
        }
        out += n;
        ret_secret_size += n;
    }
    EVP_MD_CTX_free(ctx);

#ifdef OPENSSL_SSL_TRACE_CRYPTO
    if (ret && s->msg_callback) {
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_PREMASTER,
                        p, len, s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_CLIENT_RANDOM,
                        s->s3->client_random, SSL3_RANDOM_SIZE,
                        s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_SERVER_RANDOM,
                        s->s3->server_random, SSL3_RANDOM_SIZE,
                        s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_MASTER,
                        tmpout, SSL3_MASTER_SECRET_SIZE,
                        s, s->msg_callback_arg);
    }
#endif
    OPENSSL_cleanse(buf, sizeof(buf));
    if (ret)
        *secret_size = ret_secret_size;
    return ret;
}

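/*
 * Map an SSL_AD_* alert value to the code sent on the wire under SSLv3.
 * Alerts introduced by later TLS versions that SSLv3 cannot express
 * collapse to handshake_failure; -1 means the alert is not sent at all.
 */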
int ssl3_alert_code(int code)
{
    switch (code) {
    case SSL_AD_CLOSE_NOTIFY:
        return (SSL3_AD_CLOSE_NOTIFY);
    case SSL_AD_UNEXPECTED_MESSAGE:
        return (SSL3_AD_UNEXPECTED_MESSAGE);
    case SSL_AD_BAD_RECORD_MAC:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_DECRYPTION_FAILED:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_RECORD_OVERFLOW:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_DECOMPRESSION_FAILURE:
        return (SSL3_AD_DECOMPRESSION_FAILURE);
    case SSL_AD_HANDSHAKE_FAILURE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_NO_CERTIFICATE:
        return (SSL3_AD_NO_CERTIFICATE);
    case SSL_AD_BAD_CERTIFICATE:
        return (SSL3_AD_BAD_CERTIFICATE);
    case SSL_AD_UNSUPPORTED_CERTIFICATE:
        return (SSL3_AD_UNSUPPORTED_CERTIFICATE);
    case SSL_AD_CERTIFICATE_REVOKED:
        return (SSL3_AD_CERTIFICATE_REVOKED);
    case SSL_AD_CERTIFICATE_EXPIRED:
        return (SSL3_AD_CERTIFICATE_EXPIRED);
    case SSL_AD_CERTIFICATE_UNKNOWN:
        return (SSL3_AD_CERTIFICATE_UNKNOWN);
    case SSL_AD_ILLEGAL_PARAMETER:
        return (SSL3_AD_ILLEGAL_PARAMETER);
    case SSL_AD_UNKNOWN_CA:
        return (SSL3_AD_BAD_CERTIFICATE);
    case SSL_AD_ACCESS_DENIED:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_DECODE_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_DECRYPT_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_EXPORT_RESTRICTION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_PROTOCOL_VERSION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_INSUFFICIENT_SECURITY:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_INTERNAL_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_USER_CANCELLED:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_NO_RENEGOTIATION:
        return (-1);            /* Don't send it :-) */
    case SSL_AD_UNSUPPORTED_EXTENSION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_CERTIFICATE_UNOBTAINABLE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_UNRECOGNIZED_NAME:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_BAD_CERTIFICATE_STATUS_RESPONSE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_BAD_CERTIFICATE_HASH_VALUE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_UNKNOWN_PSK_IDENTITY:
        return (TLS1_AD_UNKNOWN_PSK_IDENTITY);
    case SSL_AD_INAPPROPRIATE_FALLBACK:
        return (TLS1_AD_INAPPROPRIATE_FALLBACK);
    case SSL_AD_NO_APPLICATION_PROTOCOL:
        return (TLS1_AD_NO_APPLICATION_PROTOCOL);
    case SSL_AD_CERTIFICATE_REQUIRED:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    default:
        return (-1);
    }
}