/*
 * Copyright 1995-2016 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/* ====================================================================
 * Copyright 2005 Nokia. All rights reserved.
 *
 * The portions of the attached software ("Contribution") are developed by
 * Nokia Corporation and are licensed pursuant to the OpenSSL open source
 * license.
 *
 * The Contribution, originally written by Mika Kousa and Pasi Eronen of
 * Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
 * support (see RFC 4279) to OpenSSL.
 *
 * No patent licenses or other rights except those expressly stated in
 * the OpenSSL open source license shall be deemed granted or received
 * expressly, by implication, estoppel, or otherwise.
 *
 * No assurances are provided by Nokia that the Contribution does not
 * infringe the patent or other intellectual property rights of any third
 * party or that the license provides you with all the necessary rights
 * to make use of the Contribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
 * ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
 * SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
 * OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
 * OTHERWISE.
 */

#include <stdio.h>
#include <limits.h>             /* INT_MAX, used in ssl3_finish_mac() */
#include "ssl_locl.h"
#include <openssl/evp.h>
#include <openssl/md5.h>

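/*
 * Expand the master secret into the SSLv3 key block (RFC 6101).  Each
 * MD5_DIGEST_LENGTH-sized chunk of output is
 *
 *     MD5(master_secret || SHA1(salt || master_secret ||
 *                               server_random || client_random))
 *
 * with salt = 'A', 'BB', 'CCC', ... on successive iterations.  The
 * 16-byte 'buf' holding the salt therefore caps the output at
 * 16 * MD5_DIGEST_LENGTH = 256 bytes, which is checked below.
 * Returns 1 on success, 0 on failure.
 */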
static int ssl3_generate_key_block(SSL *s, unsigned char *km, int num)
{
    EVP_MD_CTX *m5;
    EVP_MD_CTX *s1;
    unsigned char buf[16], smd[SHA_DIGEST_LENGTH];
    unsigned char c = 'A';
    unsigned int i, j, k;
    int ret = 0;

#ifdef CHARSET_EBCDIC
    c = os_toascii[c];          /* 'A' in ASCII */
#endif
    k = 0;
    m5 = EVP_MD_CTX_new();
    s1 = EVP_MD_CTX_new();
    if (m5 == NULL || s1 == NULL) {
        SSLerr(SSL_F_SSL3_GENERATE_KEY_BLOCK, ERR_R_MALLOC_FAILURE);
        goto err;
    }
    EVP_MD_CTX_set_flags(m5, EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
    for (i = 0; (int)i < num; i += MD5_DIGEST_LENGTH) {
        k++;
        if (k > sizeof(buf)) {
            /* bug: 'buf' is too small for this ciphersuite */
            SSLerr(SSL_F_SSL3_GENERATE_KEY_BLOCK, ERR_R_INTERNAL_ERROR);
            goto err;
        }

        for (j = 0; j < k; j++)
            buf[j] = c;
        c++;
        if (!EVP_DigestInit_ex(s1, EVP_sha1(), NULL)
            || !EVP_DigestUpdate(s1, buf, k)
            || !EVP_DigestUpdate(s1, s->session->master_key,
                                 s->session->master_key_length)
            || !EVP_DigestUpdate(s1, s->s3->server_random, SSL3_RANDOM_SIZE)
            || !EVP_DigestUpdate(s1, s->s3->client_random, SSL3_RANDOM_SIZE)
            || !EVP_DigestFinal_ex(s1, smd, NULL)
            || !EVP_DigestInit_ex(m5, EVP_md5(), NULL)
            || !EVP_DigestUpdate(m5, s->session->master_key,
                                 s->session->master_key_length)
            || !EVP_DigestUpdate(m5, smd, SHA_DIGEST_LENGTH))
            goto err;
        if ((int)(i + MD5_DIGEST_LENGTH) > num) {
            if (!EVP_DigestFinal_ex(m5, smd, NULL))
                goto err;
            memcpy(km, smd, (num - i));
        } else {
            if (!EVP_DigestFinal_ex(m5, km, NULL))
                goto err;
        }

        km += MD5_DIGEST_LENGTH;
    }
    OPENSSL_cleanse(smd, sizeof(smd));
    ret = 1;
 err:
    EVP_MD_CTX_free(m5);
    EVP_MD_CTX_free(s1);
    return ret;
}

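/*
 * Install the pending read or write cipher state selected by 'which'.
 * The key block generated above is laid out as
 *
 *     client_write_MAC_secret | server_write_MAC_secret |
 *     client_write_key        | server_write_key        |
 *     client_write_IV         | server_write_IV
 *
 * so the offset arithmetic below just skips whole MAC/key/IV slots
 * depending on which end and direction is being keyed.  Returns 1 on
 * success, 0 on failure.
 */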
int ssl3_change_cipher_state(SSL *s, int which)
{
    unsigned char *p, *mac_secret;
    unsigned char exp_key[EVP_MAX_KEY_LENGTH];
    unsigned char exp_iv[EVP_MAX_IV_LENGTH];
    unsigned char *ms, *key, *iv;
    EVP_CIPHER_CTX *dd;
    const EVP_CIPHER *c;
#ifndef OPENSSL_NO_COMP
    COMP_METHOD *comp;
#endif
    const EVP_MD *m;
    int mdi;
    size_t n, i, j, k, cl;
    int reuse_dd = 0;

    c = s->s3->tmp.new_sym_enc;
    m = s->s3->tmp.new_hash;
    /* m == NULL will lead to a crash later */
    OPENSSL_assert(m);
#ifndef OPENSSL_NO_COMP
    if (s->s3->tmp.new_compression == NULL)
        comp = NULL;
    else
        comp = s->s3->tmp.new_compression->method;
#endif

    if (which & SSL3_CC_READ) {
        if (s->enc_read_ctx != NULL)
            reuse_dd = 1;
        else if ((s->enc_read_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        else
            /*
             * make sure it's initialised in case we exit later with an error
             */
            EVP_CIPHER_CTX_reset(s->enc_read_ctx);
        dd = s->enc_read_ctx;

        if (ssl_replace_hash(&s->read_hash, m) == NULL) {
            SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
            goto err2;
        }
#ifndef OPENSSL_NO_COMP
        /* COMPRESS */
        COMP_CTX_free(s->expand);
        s->expand = NULL;
        if (comp != NULL) {
            s->expand = COMP_CTX_new(comp);
            if (s->expand == NULL) {
                SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE,
                       SSL_R_COMPRESSION_LIBRARY_ERROR);
                goto err2;
            }
        }
#endif
        RECORD_LAYER_reset_read_sequence(&s->rlayer);
        mac_secret = &(s->s3->read_mac_secret[0]);
    } else {
        if (s->enc_write_ctx != NULL)
            reuse_dd = 1;
        else if ((s->enc_write_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        else
            /*
             * make sure it's initialised in case we exit later with an error
             */
            EVP_CIPHER_CTX_reset(s->enc_write_ctx);
        dd = s->enc_write_ctx;
        if (ssl_replace_hash(&s->write_hash, m) == NULL) {
            SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
            goto err2;
        }
#ifndef OPENSSL_NO_COMP
        /* COMPRESS */
        COMP_CTX_free(s->compress);
        s->compress = NULL;
        if (comp != NULL) {
            s->compress = COMP_CTX_new(comp);
            if (s->compress == NULL) {
                SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE,
                       SSL_R_COMPRESSION_LIBRARY_ERROR);
                goto err2;
            }
        }
#endif
        RECORD_LAYER_reset_write_sequence(&s->rlayer);
        mac_secret = &(s->s3->write_mac_secret[0]);
    }

    if (reuse_dd)
        EVP_CIPHER_CTX_reset(dd);

    p = s->s3->tmp.key_block;
    mdi = EVP_MD_size(m);
    if (mdi < 0)
        goto err2;
    i = mdi;
    cl = EVP_CIPHER_key_length(c);
    j = cl;
    k = EVP_CIPHER_iv_length(c);
    if ((which == SSL3_CHANGE_CIPHER_CLIENT_WRITE) ||
        (which == SSL3_CHANGE_CIPHER_SERVER_READ)) {
        ms = &(p[0]);
        n = i + i;
        key = &(p[n]);
        n += j + j;
        iv = &(p[n]);
        n += k + k;
    } else {
        n = i;
        ms = &(p[n]);
        n += i + j;
        key = &(p[n]);
        n += j + k;
        iv = &(p[n]);
        n += k;
    }

    if (n > s->s3->tmp.key_block_length) {
        SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_INTERNAL_ERROR);
        goto err2;
    }

    memcpy(mac_secret, ms, i);

    if (!EVP_CipherInit_ex(dd, c, NULL, key, iv, (which & SSL3_CC_WRITE)))
        goto err2;

#ifdef OPENSSL_SSL_TRACE_CRYPTO
    if (s->msg_callback) {

        int wh = which & SSL3_CC_WRITE ?
            TLS1_RT_CRYPTO_WRITE : TLS1_RT_CRYPTO_READ;
        s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_MAC,
                        mac_secret, EVP_MD_size(m), s, s->msg_callback_arg);
        if (c->key_len)
            s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_KEY,
                            key, c->key_len, s, s->msg_callback_arg);
        if (k) {
            s->msg_callback(2, s->version, wh | TLS1_RT_CRYPTO_IV,
                            iv, k, s, s->msg_callback_arg);
        }
    }
#endif

    OPENSSL_cleanse(exp_key, sizeof(exp_key));
    OPENSSL_cleanse(exp_iv, sizeof(exp_iv));
    return (1);
 err:
    SSLerr(SSL_F_SSL3_CHANGE_CIPHER_STATE, ERR_R_MALLOC_FAILURE);
 err2:
    OPENSSL_cleanse(exp_key, sizeof(exp_key));
    OPENSSL_cleanse(exp_iv, sizeof(exp_iv));
    return (0);
}

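/*
 * Set up s->s3->tmp.key_block for the pending cipher spec: two MAC
 * secrets, two cipher keys and two IVs, one of each per direction.
 * Also decides whether empty fragments are needed as a countermeasure
 * against the known-IV weakness of CBC ciphersuites.  Returns 1 on
 * success, 0 on failure.
 */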
int ssl3_setup_key_block(SSL *s)
{
    unsigned char *p;
    const EVP_CIPHER *c;
    const EVP_MD *hash;
    int num;
    int ret = 0;
    SSL_COMP *comp;

    if (s->s3->tmp.key_block_length != 0)
        return (1);

    if (!ssl_cipher_get_evp(s->session, &c, &hash, NULL, NULL, &comp, 0)) {
        SSLerr(SSL_F_SSL3_SETUP_KEY_BLOCK, SSL_R_CIPHER_OR_HASH_UNAVAILABLE);
        return (0);
    }

    s->s3->tmp.new_sym_enc = c;
    s->s3->tmp.new_hash = hash;
#ifdef OPENSSL_NO_COMP
    s->s3->tmp.new_compression = NULL;
#else
    s->s3->tmp.new_compression = comp;
#endif

    num = EVP_MD_size(hash);
    if (num < 0)
        return 0;

    num = EVP_CIPHER_key_length(c) + num + EVP_CIPHER_iv_length(c);
    num *= 2;

    ssl3_cleanup_key_block(s);

    if ((p = OPENSSL_malloc(num)) == NULL)
        goto err;

    s->s3->tmp.key_block_length = num;
    s->s3->tmp.key_block = p;

    ret = ssl3_generate_key_block(s, p, num);

    if (!(s->options & SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS)) {
        /*
         * enable vulnerability countermeasure for CBC ciphers with known-IV
         * problem (http://www.openssl.org/~bodo/tls-cbc.txt)
         */
        s->s3->need_empty_fragments = 1;

        if (s->session->cipher != NULL) {
            if (s->session->cipher->algorithm_enc == SSL_eNULL)
                s->s3->need_empty_fragments = 0;

#ifndef OPENSSL_NO_RC4
            if (s->session->cipher->algorithm_enc == SSL_RC4)
                s->s3->need_empty_fragments = 0;
#endif
        }
    }

    return ret;

 err:
    SSLerr(SSL_F_SSL3_SETUP_KEY_BLOCK, ERR_R_MALLOC_FAILURE);
    return (0);
}

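/* Securely wipe and release the key block, if any. */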
void ssl3_cleanup_key_block(SSL *s)
{
    OPENSSL_clear_free(s->s3->tmp.key_block, s->s3->tmp.key_block_length);
    s->s3->tmp.key_block = NULL;
    s->s3->tmp.key_block_length = 0;
}

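/*
 * (Re)start collection of the handshake transcript.  Messages are
 * buffered verbatim in a memory BIO until the digest for the negotiated
 * cipher suite is known; ssl3_digest_cached_records() later replays the
 * buffer into an EVP_MD_CTX.  Returns 1 on success, 0 on failure.
 */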
int ssl3_init_finished_mac(SSL *s)
{
    BIO *buf = BIO_new(BIO_s_mem());

    if (buf == NULL) {
        SSLerr(SSL_F_SSL3_INIT_FINISHED_MAC, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    ssl3_free_digest_list(s);
    s->s3->handshake_buffer = buf;
    (void)BIO_set_close(s->s3->handshake_buffer, BIO_CLOSE);
    return 1;
}

/*
 * Free the digest list.  Also frees the handshake buffer, since the two
 * are always freed together.
 */

void ssl3_free_digest_list(SSL *s)
{
    BIO_free(s->s3->handshake_buffer);
    s->s3->handshake_buffer = NULL;
    EVP_MD_CTX_free(s->s3->handshake_dgst);
    s->s3->handshake_dgst = NULL;
}

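/*
 * Feed handshake data into the transcript: append to the memory BIO
 * while we are still buffering, hash directly once the digest exists.
 */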
int ssl3_finish_mac(SSL *s, const unsigned char *buf, size_t len)
{
    if (s->s3->handshake_dgst == NULL) {
        int ret;
        /* Note: this writes to a memory BIO so a failure is a fatal error */
        if (len > INT_MAX)
            return 0;
        ret = BIO_write(s->s3->handshake_buffer, (void *)buf, (int)len);
        return ret > 0 && ret == (int)len;
    } else {
        return EVP_DigestUpdate(s->s3->handshake_dgst, buf, len);
    }
}

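/*
 * Switch the transcript from buffering to hashing: create the handshake
 * digest and feed it everything cached so far.  The cache is then freed
 * unless 'keep' is non-zero, for callers that still need the raw
 * transcript data.  Returns 1 on success, 0 on failure.
 */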
int ssl3_digest_cached_records(SSL *s, int keep)
{
    const EVP_MD *md;
    long hdatalen;
    void *hdata;

    if (s->s3->handshake_dgst == NULL) {
        hdatalen = BIO_get_mem_data(s->s3->handshake_buffer, &hdata);
        if (hdatalen <= 0) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS,
                   SSL_R_BAD_HANDSHAKE_LENGTH);
            return 0;
        }

        s->s3->handshake_dgst = EVP_MD_CTX_new();
        if (s->s3->handshake_dgst == NULL) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS, ERR_R_MALLOC_FAILURE);
            return 0;
        }

        md = ssl_handshake_md(s);
        if (md == NULL || !EVP_DigestInit_ex(s->s3->handshake_dgst, md, NULL)
            || !EVP_DigestUpdate(s->s3->handshake_dgst, hdata, hdatalen)) {
            SSLerr(SSL_F_SSL3_DIGEST_CACHED_RECORDS, ERR_R_INTERNAL_ERROR);
            return 0;
        }
    }
    if (keep == 0) {
        BIO_free(s->s3->handshake_buffer);
        s->s3->handshake_buffer = NULL;
    }

    return 1;
}

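/*
 * Compute the SSLv3 Finished/CertificateVerify MAC into 'p' and return
 * its length (0 on error).  The transcript digest must be md5_sha1; the
 * SSLv3-specific pad1/pad2 construction is expected to happen inside
 * that digest implementation once the master secret has been supplied
 * via EVP_CTRL_SSL3_MASTER_SECRET.  A NULL 'sender' skips the sender
 * label (the CertificateVerify case).
 */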
size_t ssl3_final_finish_mac(SSL *s, const char *sender, size_t len,
                             unsigned char *p)
{
    int ret;
    EVP_MD_CTX *ctx = NULL;

    if (!ssl3_digest_cached_records(s, 0))
        return 0;

    if (EVP_MD_CTX_type(s->s3->handshake_dgst) != NID_md5_sha1) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, SSL_R_NO_REQUIRED_DIGEST);
        return 0;
    }

    ctx = EVP_MD_CTX_new();
    if (ctx == NULL) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    if (!EVP_MD_CTX_copy_ex(ctx, s->s3->handshake_dgst)) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_INTERNAL_ERROR);
        EVP_MD_CTX_free(ctx);
        return 0;
    }

    ret = EVP_MD_CTX_size(ctx);
    if (ret < 0) {
        EVP_MD_CTX_free(ctx);
        return 0;
    }

    if ((sender != NULL && EVP_DigestUpdate(ctx, sender, len) <= 0)
        || EVP_MD_CTX_ctrl(ctx, EVP_CTRL_SSL3_MASTER_SECRET,
                           (int)s->session->master_key_length,
                           s->session->master_key) <= 0
        || EVP_DigestFinal_ex(ctx, p, NULL) <= 0) {
        SSLerr(SSL_F_SSL3_FINAL_FINISH_MAC, ERR_R_INTERNAL_ERROR);
        ret = 0;
    }

    EVP_MD_CTX_free(ctx);

    return ret;
}

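/*
 * Derive the SSLv3 master secret from the premaster secret 'p':
 *
 *     MD5(pre || SHA1('A'   || pre || client_random || server_random)) ||
 *     MD5(pre || SHA1('BB'  || pre || client_random || server_random)) ||
 *     MD5(pre || SHA1('CCC' || pre || client_random || server_random))
 *
 * yielding 3 * MD5_DIGEST_LENGTH = 48 bytes.  On success the length is
 * stored in *secret_size and 1 is returned; on failure 0 is returned.
 */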
int ssl3_generate_master_secret(SSL *s, unsigned char *out, unsigned char *p,
                                size_t len, size_t *secret_size)
{
    static const unsigned char *salt[3] = {
#ifndef CHARSET_EBCDIC
        (const unsigned char *)"A",
        (const unsigned char *)"BB",
        (const unsigned char *)"CCC",
#else
        (const unsigned char *)"\x41",
        (const unsigned char *)"\x42\x42",
        (const unsigned char *)"\x43\x43\x43",
#endif
    };
    unsigned char buf[EVP_MAX_MD_SIZE];
    EVP_MD_CTX *ctx = EVP_MD_CTX_new();
    int i, ret = 1;
    unsigned int n;
#ifdef OPENSSL_SSL_TRACE_CRYPTO
    unsigned char *tmpout = out;
#endif
    size_t ret_secret_size = 0;

    if (ctx == NULL) {
        SSLerr(SSL_F_SSL3_GENERATE_MASTER_SECRET, ERR_R_MALLOC_FAILURE);
        return 0;
    }
    for (i = 0; i < 3; i++) {
        if (EVP_DigestInit_ex(ctx, s->ctx->sha1, NULL) <= 0
            || EVP_DigestUpdate(ctx, salt[i],
                                strlen((const char *)salt[i])) <= 0
            || EVP_DigestUpdate(ctx, p, len) <= 0
            || EVP_DigestUpdate(ctx, &(s->s3->client_random[0]),
                                SSL3_RANDOM_SIZE) <= 0
            || EVP_DigestUpdate(ctx, &(s->s3->server_random[0]),
                                SSL3_RANDOM_SIZE) <= 0
            /* TODO(size_t) : convert me */
            || EVP_DigestFinal_ex(ctx, buf, &n) <= 0
            || EVP_DigestInit_ex(ctx, s->ctx->md5, NULL) <= 0
            || EVP_DigestUpdate(ctx, p, len) <= 0
            || EVP_DigestUpdate(ctx, buf, n) <= 0
            || EVP_DigestFinal_ex(ctx, out, &n) <= 0) {
            SSLerr(SSL_F_SSL3_GENERATE_MASTER_SECRET, ERR_R_INTERNAL_ERROR);
            ret = 0;
            break;
        }
        out += n;
        ret_secret_size += n;
    }
    EVP_MD_CTX_free(ctx);

#ifdef OPENSSL_SSL_TRACE_CRYPTO
    if (ret && s->msg_callback) {
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_PREMASTER,
                        p, len, s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_CLIENT_RANDOM,
                        s->s3->client_random, SSL3_RANDOM_SIZE,
                        s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_SERVER_RANDOM,
                        s->s3->server_random, SSL3_RANDOM_SIZE,
                        s, s->msg_callback_arg);
        s->msg_callback(2, s->version, TLS1_RT_CRYPTO_MASTER,
                        tmpout, SSL3_MASTER_SECRET_SIZE,
                        s, s->msg_callback_arg);
    }
#endif
    OPENSSL_cleanse(buf, sizeof(buf));
    if (ret)
        *secret_size = ret_secret_size;
    return ret;
}

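/*
 * Map a generic SSL_AD_* alert code to the nearest equivalent that SSLv3
 * can express.  Alerts without an SSLv3 counterpart degrade to
 * handshake_failure; -1 means the alert must not be sent at all.
 */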
int ssl3_alert_code(int code)
{
    switch (code) {
    case SSL_AD_CLOSE_NOTIFY:
        return (SSL3_AD_CLOSE_NOTIFY);
    case SSL_AD_UNEXPECTED_MESSAGE:
        return (SSL3_AD_UNEXPECTED_MESSAGE);
    case SSL_AD_BAD_RECORD_MAC:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_DECRYPTION_FAILED:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_RECORD_OVERFLOW:
        return (SSL3_AD_BAD_RECORD_MAC);
    case SSL_AD_DECOMPRESSION_FAILURE:
        return (SSL3_AD_DECOMPRESSION_FAILURE);
    case SSL_AD_HANDSHAKE_FAILURE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_NO_CERTIFICATE:
        return (SSL3_AD_NO_CERTIFICATE);
    case SSL_AD_BAD_CERTIFICATE:
        return (SSL3_AD_BAD_CERTIFICATE);
    case SSL_AD_UNSUPPORTED_CERTIFICATE:
        return (SSL3_AD_UNSUPPORTED_CERTIFICATE);
    case SSL_AD_CERTIFICATE_REVOKED:
        return (SSL3_AD_CERTIFICATE_REVOKED);
    case SSL_AD_CERTIFICATE_EXPIRED:
        return (SSL3_AD_CERTIFICATE_EXPIRED);
    case SSL_AD_CERTIFICATE_UNKNOWN:
        return (SSL3_AD_CERTIFICATE_UNKNOWN);
    case SSL_AD_ILLEGAL_PARAMETER:
        return (SSL3_AD_ILLEGAL_PARAMETER);
    case SSL_AD_UNKNOWN_CA:
        return (SSL3_AD_BAD_CERTIFICATE);
    case SSL_AD_ACCESS_DENIED:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_DECODE_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_DECRYPT_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_EXPORT_RESTRICTION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_PROTOCOL_VERSION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_INSUFFICIENT_SECURITY:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_INTERNAL_ERROR:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_USER_CANCELLED:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_NO_RENEGOTIATION:
        return (-1);            /* Don't send it :-) */
    case SSL_AD_UNSUPPORTED_EXTENSION:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_CERTIFICATE_UNOBTAINABLE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_UNRECOGNIZED_NAME:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_BAD_CERTIFICATE_STATUS_RESPONSE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_BAD_CERTIFICATE_HASH_VALUE:
        return (SSL3_AD_HANDSHAKE_FAILURE);
    case SSL_AD_UNKNOWN_PSK_IDENTITY:
        return (TLS1_AD_UNKNOWN_PSK_IDENTITY);
    case SSL_AD_INAPPROPRIATE_FALLBACK:
        return (TLS1_AD_INAPPROPRIATE_FALLBACK);
    case SSL_AD_NO_APPLICATION_PROTOCOL:
        return (TLS1_AD_NO_APPLICATION_PROTOCOL);
    case SSL_AD_CERTIFICATE_REQUIRED:
        return SSL_AD_HANDSHAKE_FAILURE;
    default:
        return (-1);
    }
}