/* ssl/ssl_ciph.c */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to. The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code. The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed. i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */

#include <stdio.h>
#include <openssl/objects.h>
#include <openssl/comp.h>
#include "ssl_locl.h"

#define SSL_ENC_DES_IDX		0
#define SSL_ENC_3DES_IDX	1
#define SSL_ENC_RC4_IDX		2
#define SSL_ENC_RC2_IDX		3
#define SSL_ENC_IDEA_IDX	4
#define SSL_ENC_eFZA_IDX	5
#define SSL_ENC_NULL_IDX	6
#define SSL_ENC_AES128_IDX	7
#define SSL_ENC_AES256_IDX	8
#define SSL_ENC_NUM_IDX		9

static const EVP_CIPHER *ssl_cipher_methods[SSL_ENC_NUM_IDX]={
	NULL,NULL,NULL,NULL,NULL,NULL,
	};

static STACK_OF(SSL_COMP) *ssl_comp_methods=NULL;

#define SSL_MD_MD5_IDX	0
#define SSL_MD_SHA1_IDX	1
#define SSL_MD_NUM_IDX	2
static const EVP_MD *ssl_digest_methods[SSL_MD_NUM_IDX]={
	NULL,NULL,
	};

#define CIPHER_ADD	1
#define CIPHER_KILL	2
#define CIPHER_DEL	3
#define CIPHER_ORD	4
#define CIPHER_SPECIAL	5

typedef struct cipher_order_st
	{
	SSL_CIPHER *cipher;
	int active;
	int dead;
	struct cipher_order_st *next,*prev;
	} CIPHER_ORDER;

static const SSL_CIPHER cipher_aliases[]={
	/* Don't include eNULL unless specifically enabled */
	{0,SSL_TXT_ALL, 0,SSL_ALL & ~SSL_eNULL, SSL_ALL ,0,0,0,SSL_ALL,SSL_ALL}, /* must be first */
	{0,SSL_TXT_kKRB5,0,SSL_kKRB5,0,0,0,0,SSL_MKEY_MASK,0},	/* VRS Kerberos5 */
	{0,SSL_TXT_kRSA,0,SSL_kRSA, 0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_kDHr,0,SSL_kDHr, 0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_kDHd,0,SSL_kDHd, 0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_kEDH,0,SSL_kEDH, 0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_kFZA,0,SSL_kFZA, 0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_DH,  0,SSL_DH,   0,0,0,0,SSL_MKEY_MASK,0},
	{0,SSL_TXT_EDH, 0,SSL_EDH,  0,0,0,0,SSL_MKEY_MASK|SSL_AUTH_MASK,0},

	{0,SSL_TXT_aKRB5,0,SSL_aKRB5,0,0,0,0,SSL_AUTH_MASK,0},	/* VRS Kerberos5 */
	{0,SSL_TXT_aRSA,0,SSL_aRSA, 0,0,0,0,SSL_AUTH_MASK,0},
	{0,SSL_TXT_aDSS,0,SSL_aDSS, 0,0,0,0,SSL_AUTH_MASK,0},
	{0,SSL_TXT_aFZA,0,SSL_aFZA, 0,0,0,0,SSL_AUTH_MASK,0},
	{0,SSL_TXT_aNULL,0,SSL_aNULL,0,0,0,0,SSL_AUTH_MASK,0},
	{0,SSL_TXT_aDH, 0,SSL_aDH,  0,0,0,0,SSL_AUTH_MASK,0},
	{0,SSL_TXT_DSS, 0,SSL_DSS,  0,0,0,0,SSL_AUTH_MASK,0},

	{0,SSL_TXT_DES, 0,SSL_DES,  0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_3DES,0,SSL_3DES, 0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_RC4, 0,SSL_RC4,  0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_RC2, 0,SSL_RC2,  0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_IDEA,0,SSL_IDEA, 0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_eNULL,0,SSL_eNULL,0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_eFZA,0,SSL_eFZA, 0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_AES, 0,SSL_AES,  0,0,0,0,SSL_ENC_MASK,0},

	{0,SSL_TXT_MD5, 0,SSL_MD5,  0,0,0,0,SSL_MAC_MASK,0},
	{0,SSL_TXT_SHA1,0,SSL_SHA1, 0,0,0,0,SSL_MAC_MASK,0},
	{0,SSL_TXT_SHA, 0,SSL_SHA,  0,0,0,0,SSL_MAC_MASK,0},

	{0,SSL_TXT_NULL,0,SSL_NULL, 0,0,0,0,SSL_ENC_MASK,0},
	{0,SSL_TXT_KRB5,0,SSL_KRB5, 0,0,0,0,SSL_AUTH_MASK|SSL_MKEY_MASK,0},
	{0,SSL_TXT_RSA, 0,SSL_RSA,  0,0,0,0,SSL_AUTH_MASK|SSL_MKEY_MASK,0},
	{0,SSL_TXT_ADH, 0,SSL_ADH,  0,0,0,0,SSL_AUTH_MASK|SSL_MKEY_MASK,0},
	{0,SSL_TXT_FZA, 0,SSL_FZA,  0,0,0,0,SSL_AUTH_MASK|SSL_MKEY_MASK|SSL_ENC_MASK,0},

	{0,SSL_TXT_SSLV2, 0,SSL_SSLV2, 0,0,0,0,SSL_SSL_MASK,0},
	{0,SSL_TXT_SSLV3, 0,SSL_SSLV3, 0,0,0,0,SSL_SSL_MASK,0},
	{0,SSL_TXT_TLSV1, 0,SSL_TLSV1, 0,0,0,0,SSL_SSL_MASK,0},

	{0,SSL_TXT_EXP   ,0, 0,SSL_EXPORT, 0,0,0,0,SSL_EXP_MASK},
	{0,SSL_TXT_EXPORT,0, 0,SSL_EXPORT, 0,0,0,0,SSL_EXP_MASK},
	{0,SSL_TXT_EXP40, 0, 0, SSL_EXP40, 0,0,0,0,SSL_STRONG_MASK},
	{0,SSL_TXT_EXP56, 0, 0, SSL_EXP56, 0,0,0,0,SSL_STRONG_MASK},
	{0,SSL_TXT_LOW,   0, 0,   SSL_LOW, 0,0,0,0,SSL_STRONG_MASK},
	{0,SSL_TXT_MEDIUM,0, 0,SSL_MEDIUM, 0,0,0,0,SSL_STRONG_MASK},
	{0,SSL_TXT_HIGH,  0, 0,  SSL_HIGH, 0,0,0,0,SSL_STRONG_MASK},
	};

static int init_ciphers=1;

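/* load_ciphers():
 * Look up the EVP_CIPHER and EVP_MD implementations used by the cipher
 * suites and cache the pointers in ssl_cipher_methods[] and
 * ssl_digest_methods[].  Entries stay NULL when the corresponding
 * algorithm is not available in this build.
 */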
static void load_ciphers(void)
	{
	init_ciphers=0;
	ssl_cipher_methods[SSL_ENC_DES_IDX]=
		EVP_get_cipherbyname(SN_des_cbc);
	ssl_cipher_methods[SSL_ENC_3DES_IDX]=
		EVP_get_cipherbyname(SN_des_ede3_cbc);
	ssl_cipher_methods[SSL_ENC_RC4_IDX]=
		EVP_get_cipherbyname(SN_rc4);
	ssl_cipher_methods[SSL_ENC_RC2_IDX]=
		EVP_get_cipherbyname(SN_rc2_cbc);
	ssl_cipher_methods[SSL_ENC_IDEA_IDX]=
		EVP_get_cipherbyname(SN_idea_cbc);
	ssl_cipher_methods[SSL_ENC_AES128_IDX]=
		EVP_get_cipherbyname(SN_aes_128_cbc);
	ssl_cipher_methods[SSL_ENC_AES256_IDX]=
		EVP_get_cipherbyname(SN_aes_256_cbc);

	ssl_digest_methods[SSL_MD_MD5_IDX]=
		EVP_get_digestbyname(SN_md5);
	ssl_digest_methods[SSL_MD_SHA1_IDX]=
		EVP_get_digestbyname(SN_sha1);
	}

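/* ssl_cipher_get_evp():
 * Translate the cipher suite selected for the SSL_SESSION 's' into the EVP
 * cipher, EVP digest and (if 'comp' is non-NULL) SSL_COMP compression method
 * to use for the record layer.  Returns 1 if both a cipher and a digest were
 * found, 0 otherwise.
 */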
int ssl_cipher_get_evp(SSL_SESSION *s, const EVP_CIPHER **enc,
	     const EVP_MD **md, SSL_COMP **comp)
	{
	int i;
	SSL_CIPHER *c;

	c=s->cipher;
	if (c == NULL) return(0);
	if (comp != NULL)
		{
		SSL_COMP ctmp;

		if (s->compress_meth == 0)
			*comp=NULL;
		else if (ssl_comp_methods == NULL)
			{
			/* bad */
			*comp=NULL;
			}
		else
			{

			ctmp.id=s->compress_meth;
			i=sk_SSL_COMP_find(ssl_comp_methods,&ctmp);
			if (i >= 0)
				*comp=sk_SSL_COMP_value(ssl_comp_methods,i);
			else
				*comp=NULL;
			}
		}

	if ((enc == NULL) || (md == NULL)) return(0);

	switch (c->algorithms & SSL_ENC_MASK)
		{
	case SSL_DES:
		i=SSL_ENC_DES_IDX;
		break;
	case SSL_3DES:
		i=SSL_ENC_3DES_IDX;
		break;
	case SSL_RC4:
		i=SSL_ENC_RC4_IDX;
		break;
	case SSL_RC2:
		i=SSL_ENC_RC2_IDX;
		break;
	case SSL_IDEA:
		i=SSL_ENC_IDEA_IDX;
		break;
	case SSL_eNULL:
		i=SSL_ENC_NULL_IDX;
		break;
	case SSL_AES:
		switch(c->alg_bits)
			{
		case 128: i=SSL_ENC_AES128_IDX; break;
		case 256: i=SSL_ENC_AES256_IDX; break;
		default: i=-1; break;
			}
		break;
	default:
		i= -1;
		break;
		}

	if ((i < 0) || (i > SSL_ENC_NUM_IDX))
		*enc=NULL;
	else
		{
		if (i == SSL_ENC_NULL_IDX)
			*enc=EVP_enc_null();
		else
			*enc=ssl_cipher_methods[i];
		}

	switch (c->algorithms & SSL_MAC_MASK)
		{
	case SSL_MD5:
		i=SSL_MD_MD5_IDX;
		break;
	case SSL_SHA1:
		i=SSL_MD_SHA1_IDX;
		break;
	default:
		i= -1;
		break;
		}
	if ((i < 0) || (i > SSL_MD_NUM_IDX))
		*md=NULL;
	else
		*md=ssl_digest_methods[i];

	if ((*enc != NULL) && (*md != NULL))
		return(1);
	else
		return(0);
	}

#define ITEM_SEP(a) \
	(((a) == ':') || ((a) == ' ') || ((a) == ';') || ((a) == ','))

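/* ll_append_tail():
 * Unlink 'curr' from its current position in the doubly linked cipher list
 * and re-attach it after '*tail', updating *head and *tail as needed.
 */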
static void ll_append_tail(CIPHER_ORDER **head, CIPHER_ORDER *curr,
	     CIPHER_ORDER **tail)
	{
	if (curr == *tail) return;
	if (curr == *head)
		*head=curr->next;
	if (curr->prev != NULL)
		curr->prev->next=curr->next;
	if (curr->next != NULL) /* should always be true */
		curr->next->prev=curr->prev;
	(*tail)->next=curr;
	curr->prev= *tail;
	curr->next=NULL;
	*tail=curr;
	}

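/* ssl_cipher_get_disabled():
 * Build a bit mask of the key-exchange, authentication, encryption and
 * digest algorithms that are unavailable in this build, either because they
 * were compiled out (NO_RSA, NO_DSA, NO_DH, NO_KRB5, ...) or because the
 * corresponding EVP implementation could not be found by load_ciphers().
 */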
static unsigned long ssl_cipher_get_disabled(void)
	{
	unsigned long mask;

	mask = SSL_kFZA;
#ifdef NO_RSA
	mask |= SSL_aRSA|SSL_kRSA;
#endif
#ifdef NO_DSA
	mask |= SSL_aDSS;
#endif
#ifdef NO_DH
	mask |= SSL_kDHr|SSL_kDHd|SSL_kEDH|SSL_aDH;
#endif
#ifdef NO_KRB5
	mask |= SSL_kKRB5|SSL_aKRB5;
#endif

#ifdef SSL_FORBID_ENULL
	mask |= SSL_eNULL;
#endif

	mask |= (ssl_cipher_methods[SSL_ENC_DES_IDX ] == NULL) ? SSL_DES :0;
	mask |= (ssl_cipher_methods[SSL_ENC_3DES_IDX] == NULL) ? SSL_3DES:0;
	mask |= (ssl_cipher_methods[SSL_ENC_RC4_IDX ] == NULL) ? SSL_RC4 :0;
	mask |= (ssl_cipher_methods[SSL_ENC_RC2_IDX ] == NULL) ? SSL_RC2 :0;
	mask |= (ssl_cipher_methods[SSL_ENC_IDEA_IDX] == NULL) ? SSL_IDEA:0;
	mask |= (ssl_cipher_methods[SSL_ENC_eFZA_IDX] == NULL) ? SSL_eFZA:0;
	mask |= (ssl_cipher_methods[SSL_ENC_AES128_IDX] == NULL) ? SSL_AES:0;

	mask |= (ssl_digest_methods[SSL_MD_MD5_IDX ] == NULL) ? SSL_MD5 :0;
	mask |= (ssl_digest_methods[SSL_MD_SHA1_IDX] == NULL) ? SSL_SHA1:0;

	return(mask);
	}

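/* ssl_cipher_collect_ciphers():
 * Copy every valid, compiled-in cipher suite of 'ssl_method' that is not hit
 * by the 'mask' of disabled algorithms into 'list', and chain the entries
 * into a doubly linked list between *head_p and *tail_p.
 */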
static void ssl_cipher_collect_ciphers(const SSL_METHOD *ssl_method,
		int num_of_ciphers, unsigned long mask, CIPHER_ORDER *list,
		CIPHER_ORDER **head_p, CIPHER_ORDER **tail_p)
	{
	int i, list_num;
	SSL_CIPHER *c;

	/*
	 * We have num_of_ciphers descriptions compiled in, depending on the
	 * method selected (SSLv2 and/or SSLv3, TLSv1 etc.).
	 * These will later be sorted into a linked list with at most
	 * num_of_ciphers entries.
	 */

	/* Get the initial list of ciphers */
	list_num = 0;	/* actual count of ciphers */
	for (i = 0; i < num_of_ciphers; i++)
		{
		c = ssl_method->get_cipher(i);
		/* drop those that use any algorithm which is not available */
		if ((c != NULL) && c->valid && !(c->algorithms & mask))
			{
			list[list_num].cipher = c;
			list[list_num].next = NULL;
			list[list_num].prev = NULL;
			list[list_num].active = 0;
			list_num++;
#ifdef KSSL_DEBUG
			printf("\t%d: %s %lx %lx\n",i,c->name,c->id,c->algorithms);
#endif	/* KSSL_DEBUG */
			/*
			if (!sk_push(ca_list,(char *)c)) goto err;
			*/
			}
		}

	/*
	 * Prepare linked list from list entries
	 */
	for (i = 1; i < list_num - 1; i++)
		{
		list[i].prev = &(list[i-1]);
		list[i].next = &(list[i+1]);
		}
	if (list_num > 0)
		{
		(*head_p) = &(list[0]);
		(*head_p)->prev = NULL;
		(*head_p)->next = &(list[1]);
		(*tail_p) = &(list[list_num - 1]);
		(*tail_p)->prev = &(list[list_num - 2]);
		(*tail_p)->next = NULL;
		}
	}

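/* ssl_cipher_collect_aliases():
 * Build the NULL-terminated array 'ca_list' of entries that the rule-string
 * parser can refer to by name: first the real ciphers already collected in
 * the linked list, then the applicable entries of cipher_aliases[].
 */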
static void ssl_cipher_collect_aliases(SSL_CIPHER **ca_list,
		int num_of_group_aliases, unsigned long mask,
		CIPHER_ORDER *head)
	{
	CIPHER_ORDER *ciph_curr;
	SSL_CIPHER **ca_curr;
	int i;

	/*
	 * First, add the real ciphers as already collected
	 */
	ciph_curr = head;
	ca_curr = ca_list;
	while (ciph_curr != NULL)
		{
		*ca_curr = ciph_curr->cipher;
		ca_curr++;
		ciph_curr = ciph_curr->next;
		}

	/*
	 * Now we add the available ones from the cipher_aliases[] table.
	 * They represent either an algorithm that must be fully supported
	 * (it must not match any bit in mask) or a cipher strength value
	 * (which is added in any case because algorithms=0).
	 */
	for (i = 0; i < num_of_group_aliases; i++)
		{
		if ((i == 0) ||		/* always fetch "ALL" */
		    !(cipher_aliases[i].algorithms & mask))
			{
			*ca_curr = (SSL_CIPHER *)(cipher_aliases + i);
			ca_curr++;
			}
		}

	*ca_curr = NULL;	/* end of list */
	}

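/* ssl_cipher_apply_rule():
 * Apply one rule (CIPHER_ADD, CIPHER_ORD, CIPHER_DEL or CIPHER_KILL) to every
 * cipher in the linked list that matches either the given algorithm/strength
 * bit masks or, when strength_bits != -1, that exact strength value.
 */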
static void ssl_cipher_apply_rule(unsigned long algorithms, unsigned long mask,
		unsigned long algo_strength, unsigned long mask_strength,
		int rule, int strength_bits, CIPHER_ORDER *list,
		CIPHER_ORDER **head_p, CIPHER_ORDER **tail_p)
	{
	CIPHER_ORDER *head, *tail, *curr, *curr2, *tail2;
	SSL_CIPHER *cp;
	unsigned long ma, ma_s;

#ifdef CIPHER_DEBUG
	printf("Applying rule %d with %08lx %08lx %08lx %08lx (%d)\n",
		rule, algorithms, mask, algo_strength, mask_strength,
		strength_bits);
#endif

	curr = head = *head_p;
	curr2 = head;
	tail2 = tail = *tail_p;
	for (;;)
		{
		if ((curr == NULL) || (curr == tail2)) break;
		curr = curr2;
		curr2 = curr->next;

		cp = curr->cipher;

		/*
		 * The selection criterion is either the number of
		 * strength_bits or the algorithm used.
		 */
		if (strength_bits == -1)
			{
			ma = mask & cp->algorithms;
			ma_s = mask_strength & cp->algo_strength;

#ifdef CIPHER_DEBUG
			printf("\nName: %s:\nAlgo = %08lx Algo_strength = %08lx\nMask = %08lx Mask_strength %08lx\n", cp->name, cp->algorithms, cp->algo_strength, mask, mask_strength);
			printf("ma = %08lx ma_s %08lx, ma&algo=%08lx, ma_s&algos=%08lx\n", ma, ma_s, ma&algorithms, ma_s&algo_strength);
#endif
			/*
			 * Select: if the cipher matches none of the mask
			 * bits, or if not all of the matched bits are
			 * covered by the rule, the selection does not apply.
			 */
			if (((ma == 0) && (ma_s == 0)) ||
			    ((ma & algorithms) != ma) ||
			    ((ma_s & algo_strength) != ma_s))
				continue;	/* does not apply */
			}
		else if (strength_bits != cp->strength_bits)
			continue;	/* does not apply */

#ifdef CIPHER_DEBUG
		printf("Action = %d\n", rule);
#endif

		/* add the cipher if it has not been added yet. */
		if (rule == CIPHER_ADD)
			{
			if (!curr->active)
				{
				ll_append_tail(&head, curr, &tail);
				curr->active = 1;
				}
			}
		/* Move the added cipher to this location */
		else if (rule == CIPHER_ORD)
			{
			if (curr->active)
				{
				ll_append_tail(&head, curr, &tail);
				}
			}
		else if (rule == CIPHER_DEL)
			curr->active = 0;
		else if (rule == CIPHER_KILL)
			{
			if (head == curr)
				head = curr->next;
			else
				curr->prev->next = curr->next;
			if (tail == curr)
				tail = curr->prev;
			curr->active = 0;
			if (curr->next != NULL)
				curr->next->prev = curr->prev;
			if (curr->prev != NULL)
				curr->prev->next = curr->next;
			curr->next = NULL;
			curr->prev = NULL;
			}
		}

	*head_p = head;
	*tail_p = tail;
	}

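/* ssl_cipher_strength_sort():
 * Re-order the active ciphers by descending strength_bits while keeping the
 * existing relative order within each strength class (implemented as
 * repeated '+' moves to the end of the list).  Returns 1 on success,
 * 0 on a memory allocation failure.
 */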
static int ssl_cipher_strength_sort(CIPHER_ORDER *list, CIPHER_ORDER **head_p,
				     CIPHER_ORDER **tail_p)
	{
	int max_strength_bits, i, *number_uses;
	CIPHER_ORDER *curr;

	/*
	 * This routine sorts the ciphers with descending strength. The sorting
	 * must keep the pre-sorted sequence, so we apply the normal sorting
	 * routine as '+' movement to the end of the list.
	 */
	max_strength_bits = 0;
	curr = *head_p;
	while (curr != NULL)
		{
		if (curr->active &&
		    (curr->cipher->strength_bits > max_strength_bits))
			max_strength_bits = curr->cipher->strength_bits;
		curr = curr->next;
		}

	number_uses = OPENSSL_malloc((max_strength_bits + 1) * sizeof(int));
	if (!number_uses)
		{
		SSLerr(SSL_F_SSL_CIPHER_STRENGTH_SORT,ERR_R_MALLOC_FAILURE);
		return(0);
		}
	memset(number_uses, 0, (max_strength_bits + 1) * sizeof(int));

	/*
	 * Now find the strength_bits values actually used
	 */
	curr = *head_p;
	while (curr != NULL)
		{
		if (curr->active)
			number_uses[curr->cipher->strength_bits]++;
		curr = curr->next;
		}
	/*
	 * Go through the list of used strength_bits values in descending
	 * order.
	 */
	for (i = max_strength_bits; i >= 0; i--)
		if (number_uses[i] > 0)
			ssl_cipher_apply_rule(0, 0, 0, 0, CIPHER_ORD, i,
					list, head_p, tail_p);

	OPENSSL_free(number_uses);
	return(1);
	}

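/* ssl_cipher_process_rulestr():
 * Parse a cipher rule string and apply each rule to the linked list.
 * Each item names a cipher or alias from ca_list and may be prefixed by
 * '-' (delete, may be re-added later), '+' (move matching active ciphers to
 * the end of the list), '!' (kill, permanently remove) or '@' (special
 * command, currently only "@STRENGTH").  Names joined with '+' must all
 * match ("RSA+RC4"); items are separated by ':', ';', ',' or ' '.
 * Returns 0 if an invalid command was encountered, 1 otherwise.
 */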
static int ssl_cipher_process_rulestr(const char *rule_str,
		CIPHER_ORDER *list, CIPHER_ORDER **head_p,
		CIPHER_ORDER **tail_p, SSL_CIPHER **ca_list)
	{
	unsigned long algorithms, mask, algo_strength, mask_strength;
	const char *l, *start, *buf;
	int j, multi, found, rule, retval, ok, buflen;
	char ch;

	retval = 1;
	l = rule_str;
	for (;;)
		{
		ch = *l;

		if (ch == '\0')
			break;		/* done */
		if (ch == '-')
			{ rule = CIPHER_DEL; l++; }
		else if (ch == '+')
			{ rule = CIPHER_ORD; l++; }
		else if (ch == '!')
			{ rule = CIPHER_KILL; l++; }
		else if (ch == '@')
			{ rule = CIPHER_SPECIAL; l++; }
		else
			{ rule = CIPHER_ADD; }

		if (ITEM_SEP(ch))
			{
			l++;
			continue;
			}

		algorithms = mask = algo_strength = mask_strength = 0;

		start=l;
		for (;;)
			{
			ch = *l;
			buf = l;
			buflen = 0;
#ifndef CHARSET_EBCDIC
			while (	((ch >= 'A') && (ch <= 'Z')) ||
				((ch >= '0') && (ch <= '9')) ||
				((ch >= 'a') && (ch <= 'z')) ||
				(ch == '-'))
#else
			while (	isalnum(ch) || (ch == '-'))
#endif
				{
				ch = *(++l);
				buflen++;
				}

			if (buflen == 0)
				{
				/*
				 * We hit something we cannot deal with: it is
				 * neither a command nor a separator nor
				 * alphanumeric, so we call this an error.
				 */
				SSLerr(SSL_F_SSL_CIPHER_PROCESS_RULESTR,
				       SSL_R_INVALID_COMMAND);
				retval = found = 0;
				l++;
				break;
				}

			if (rule == CIPHER_SPECIAL)
				{
				found = 0; /* unused -- avoid compiler warning */
				break;	/* special treatment */
				}

			/* check for multi-part specification */
			if (ch == '+')
				{
				multi=1;
				l++;
				}
			else
				multi=0;

			/*
			 * Now search for the cipher alias in the ca_list. Be careful
			 * with the strncmp, because the "buflen" limitation
			 * will make the rule "ADH:SOME" and the cipher
			 * "ADH-MY-CIPHER" look like a match for buflen=3.
			 * So additionally check whether the cipher name found
			 * has the correct length. We can save a strlen() call:
			 * just checking for the '\0' at the right place is
			 * sufficient, we have to strncmp() anyway.
			 */
			j = found = 0;
			while (ca_list[j])
				{
				if ((ca_list[j]->name[buflen] == '\0') &&
				    !strncmp(buf, ca_list[j]->name, buflen))
					{
					found = 1;
					break;
					}
				else
					j++;
				}
			if (!found)
				break;	/* ignore this entry */

			algorithms |= ca_list[j]->algorithms;
			mask |= ca_list[j]->mask;
			algo_strength |= ca_list[j]->algo_strength;
			mask_strength |= ca_list[j]->mask_strength;

			if (!multi) break;
			}

		/*
		 * Ok, we have the rule, now apply it
		 */
		if (rule == CIPHER_SPECIAL)
			{	/* special command */
			ok = 0;
			if ((buflen == 8) &&
				!strncmp(buf, "STRENGTH", 8))
				ok = ssl_cipher_strength_sort(list,
					head_p, tail_p);
			else
				SSLerr(SSL_F_SSL_CIPHER_PROCESS_RULESTR,
					SSL_R_INVALID_COMMAND);
			if (ok == 0)
				retval = 0;
			/*
			 * We do not support any "multi" options
			 * together with "@", so throw away the
			 * rest of the command, if any left, until
			 * end or ':' is found.
			 */
			while ((*l != '\0') && ITEM_SEP(*l))
				l++;
			}
		else if (found)
			{
			ssl_cipher_apply_rule(algorithms, mask,
				algo_strength, mask_strength, rule, -1,
				list, head_p, tail_p);
			}
		else
			{
			while ((*l != '\0') && ITEM_SEP(*l))
				l++;
			}
		if (*l == '\0') break; /* done */
		}

	return(retval);
	}

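/* ssl_create_cipher_list():
 * Build the cipher list from a rule string such as "DEFAULT:!EXP:@STRENGTH".
 * The available ciphers of 'ssl_method' are collected, the rules are applied
 * in order, and the surviving ciphers are returned as a
 * STACK_OF(SSL_CIPHER).  *cipher_list is updated to point at the result and
 * *cipher_list_by_id receives a duplicate whose comparison function is set
 * for lookups by cipher id.  Returns NULL on error.
 */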
STACK_OF(SSL_CIPHER) *ssl_create_cipher_list(const SSL_METHOD *ssl_method,
		STACK_OF(SSL_CIPHER) **cipher_list,
		STACK_OF(SSL_CIPHER) **cipher_list_by_id,
		const char *rule_str)
	{
	int ok, num_of_ciphers, num_of_alias_max, num_of_group_aliases;
	unsigned long disabled_mask;
	STACK_OF(SSL_CIPHER) *cipherstack;
	const char *rule_p;
	CIPHER_ORDER *list = NULL, *head = NULL, *tail = NULL, *curr;
	SSL_CIPHER **ca_list = NULL;

	/*
	 * Return with error if nothing to do.
	 */
	if (rule_str == NULL) return(NULL);

	if (init_ciphers) load_ciphers();

	/*
	 * To reduce the work to do we only want to process the compiled
	 * in algorithms, so we first get the mask of disabled ciphers.
	 */
	disabled_mask = ssl_cipher_get_disabled();

	/*
	 * Now we have to collect the available ciphers from the compiled
	 * in ciphers. We cannot get more than the number compiled in, so
	 * it is used for allocation.
	 */
	num_of_ciphers = ssl_method->num_ciphers();
#ifdef KSSL_DEBUG
	printf("ssl_create_cipher_list() for %d ciphers\n", num_of_ciphers);
#endif    /* KSSL_DEBUG */
	list = (CIPHER_ORDER *)OPENSSL_malloc(sizeof(CIPHER_ORDER) * num_of_ciphers);
	if (list == NULL)
		{
		SSLerr(SSL_F_SSL_CREATE_CIPHER_LIST,ERR_R_MALLOC_FAILURE);
		return(NULL);	/* Failure */
		}

	ssl_cipher_collect_ciphers(ssl_method, num_of_ciphers, disabled_mask,
				   list, &head, &tail);

	/*
	 * We also need cipher aliases for selecting based on the rule_str.
	 * There might be two types of entries in the rule_str: 1) names
	 * of ciphers themselves 2) aliases for groups of ciphers.
	 * For 1) we need the available ciphers and for 2) the cipher
	 * groups of cipher_aliases added together in one list (otherwise
	 * we would be happy with just the cipher_aliases table).
	 */
	num_of_group_aliases = sizeof(cipher_aliases) / sizeof(SSL_CIPHER);
	num_of_alias_max = num_of_ciphers + num_of_group_aliases + 1;
	ca_list =
		(SSL_CIPHER **)OPENSSL_malloc(sizeof(SSL_CIPHER *) * num_of_alias_max);
	if (ca_list == NULL)
		{
		OPENSSL_free(list);
		SSLerr(SSL_F_SSL_CREATE_CIPHER_LIST,ERR_R_MALLOC_FAILURE);
		return(NULL);	/* Failure */
		}
	ssl_cipher_collect_aliases(ca_list, num_of_group_aliases, disabled_mask,
				   head);

	/*
	 * If the rule_string begins with DEFAULT, apply the default rule
	 * before using the (possibly available) additional rules.
	 */
	ok = 1;
	rule_p = rule_str;
	if (strncmp(rule_str,"DEFAULT",7) == 0)
		{
		ok = ssl_cipher_process_rulestr(SSL_DEFAULT_CIPHER_LIST,
			list, &head, &tail, ca_list);
		rule_p += 7;
		if (*rule_p == ':')
			rule_p++;
		}

	if (ok && (strlen(rule_p) > 0))
		ok = ssl_cipher_process_rulestr(rule_p, list, &head, &tail,
						ca_list);

	OPENSSL_free(ca_list);	/* Not needed anymore */

	if (!ok)
		{	/* Rule processing failure */
		OPENSSL_free(list);
		return(NULL);
		}
	/*
	 * Allocate new "cipherstack" for the result, return with error
	 * if we cannot get one.
	 */
	if ((cipherstack = sk_SSL_CIPHER_new_null()) == NULL)
		{
		OPENSSL_free(list);
		return(NULL);
		}

	/*
	 * The cipher selection for the list is done. The ciphers are added
	 * in order of precedence to the resulting STACK_OF(SSL_CIPHER).
	 */
	for (curr = head; curr != NULL; curr = curr->next)
		{
		if (curr->active)
			{
			sk_SSL_CIPHER_push(cipherstack, curr->cipher);
#ifdef CIPHER_DEBUG
			printf("<%s>\n",curr->cipher->name);
#endif
			}
		}
	OPENSSL_free(list);	/* Not needed any longer */

	/*
	 * The following passage is a little bit odd. If pointer variables
	 * were supplied to hold STACK_OF(SSL_CIPHER) return information,
	 * the old memory pointed to is free()ed. Then, however, the
	 * cipher_list entry will be assigned just a copy of the returned
	 * cipher stack. For cipher_list_by_id a copy of the cipher stack
	 * will be created. See next comment...
	 */
	if (cipher_list != NULL)
		{
		if (*cipher_list != NULL)
			sk_SSL_CIPHER_free(*cipher_list);
		*cipher_list = cipherstack;
		}

	if (cipher_list_by_id != NULL)
		{
		if (*cipher_list_by_id != NULL)
			sk_SSL_CIPHER_free(*cipher_list_by_id);
		*cipher_list_by_id = sk_SSL_CIPHER_dup(cipherstack);
		}

	/*
	 * Now it is getting really strange. If something failed during
	 * the previous pointer assignment or if one of the pointers was
	 * not requested, the error condition is met. That might be
	 * discussable. The strange thing is however that in this case
	 * the memory "cipherstack" pointed to is "free()ed" and hence the
	 * pointer cipher_list becomes wild. The memory reserved for
	 * cipher_list_by_id however is not "free()ed" and stays intact.
	 */
	if (	(cipher_list_by_id == NULL) ||
		(*cipher_list_by_id == NULL) ||
		(cipher_list == NULL) ||
		(*cipher_list == NULL))
		{
		sk_SSL_CIPHER_free(cipherstack);
		return(NULL);
		}

	sk_SSL_CIPHER_set_cmp_func(*cipher_list_by_id,ssl_cipher_ptr_id_cmp);

	return(cipherstack);
	}

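/* SSL_CIPHER_description():
 * Write a one-line, human-readable description of 'cipher' (name, protocol
 * version, key exchange, authentication, encryption, MAC and export status)
 * into 'buf'.  If buf is NULL, a 128-byte buffer is allocated with
 * OPENSSL_malloc(); otherwise 'len' must be at least 128.
 */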
char *SSL_CIPHER_description(SSL_CIPHER *cipher, char *buf, int len)
	{
	int is_export,pkl,kl;
	char *ver,*exp;
	char *kx,*au,*enc,*mac;
	unsigned long alg,alg2,alg_s;
#ifdef KSSL_DEBUG
	static char *format="%-23s %s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s%s AL=%lx\n";
#else
	static char *format="%-23s %s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s%s\n";
#endif /* KSSL_DEBUG */

	alg=cipher->algorithms;
	alg_s=cipher->algo_strength;
	alg2=cipher->algorithm2;

	is_export=SSL_C_IS_EXPORT(cipher);
	pkl=SSL_C_EXPORT_PKEYLENGTH(cipher);
	kl=SSL_C_EXPORT_KEYLENGTH(cipher);
	exp=is_export?" export":"";

	if (alg & SSL_SSLV2)
		ver="SSLv2";
	else if (alg & SSL_SSLV3)
		ver="SSLv3";
	else
		ver="unknown";

	switch (alg&SSL_MKEY_MASK)
		{
	case SSL_kRSA:
		kx=is_export?(pkl == 512 ? "RSA(512)" : "RSA(1024)"):"RSA";
		break;
	case SSL_kDHr:
		kx="DH/RSA";
		break;
	case SSL_kDHd:
		kx="DH/DSS";
		break;
	case SSL_kKRB5:		/* VRS */
	case SSL_KRB5:		/* VRS */
		kx="KRB5";
		break;
	case SSL_kFZA:
		kx="Fortezza";
		break;
	case SSL_kEDH:
		kx=is_export?(pkl == 512 ? "DH(512)" : "DH(1024)"):"DH";
		break;
	default:
		kx="unknown";
		}

	switch (alg&SSL_AUTH_MASK)
		{
	case SSL_aRSA:
		au="RSA";
		break;
	case SSL_aDSS:
		au="DSS";
		break;
	case SSL_aDH:
		au="DH";
		break;
	case SSL_aKRB5:		/* VRS */
	case SSL_KRB5:		/* VRS */
		au="KRB5";
		break;
	case SSL_aFZA:
	case SSL_aNULL:
		au="None";
		break;
	default:
		au="unknown";
		break;
		}

	switch (alg&SSL_ENC_MASK)
		{
	case SSL_DES:
		enc=(is_export && kl == 5)?"DES(40)":"DES(56)";
		break;
	case SSL_3DES:
		enc="3DES(168)";
		break;
	case SSL_RC4:
		enc=is_export?(kl == 5 ? "RC4(40)" : "RC4(56)")
			:((alg2&SSL2_CF_8_BYTE_ENC)?"RC4(64)":"RC4(128)");
		break;
	case SSL_RC2:
		enc=is_export?(kl == 5 ? "RC2(40)" : "RC2(56)"):"RC2(128)";
		break;
	case SSL_IDEA:
		enc="IDEA(128)";
		break;
	case SSL_eFZA:
		enc="Fortezza";
		break;
	case SSL_eNULL:
		enc="None";
		break;
	case SSL_AES:
		switch(cipher->strength_bits)
			{
		case 128: enc="AES(128)"; break;
		case 192: enc="AES(192)"; break;
		case 256: enc="AES(256)"; break;
		default: enc="AES(???)"; break;
			}
		break;
	default:
		enc="unknown";
		break;
		}

	switch (alg&SSL_MAC_MASK)
		{
	case SSL_MD5:
		mac="MD5";
		break;
	case SSL_SHA1:
		mac="SHA1";
		break;
	default:
		mac="unknown";
		break;
		}

	if (buf == NULL)
		{
		len=128;
		buf=OPENSSL_malloc(len);
		if (buf == NULL) return("OPENSSL_malloc Error");
		}
	else if (len < 128)
		return("Buffer too small");

#ifdef KSSL_DEBUG
	BIO_snprintf(buf,len,format,cipher->name,ver,kx,au,enc,mac,exp,alg);
#else
	BIO_snprintf(buf,len,format,cipher->name,ver,kx,au,enc,mac,exp);
#endif /* KSSL_DEBUG */
	return(buf);
	}

char *SSL_CIPHER_get_version(SSL_CIPHER *c)
	{
	int i;

	if (c == NULL) return("(NONE)");
	i=(int)(c->id>>24L);
	if (i == 3)
		return("TLSv1/SSLv3");
	else if (i == 2)
		return("SSLv2");
	else
		return("unknown");
	}

/* return the actual cipher being used */
const char *SSL_CIPHER_get_name(SSL_CIPHER *c)
	{
	if (c != NULL)
		return(c->name);
	return("(NONE)");
	}

/* number of bits for symmetric cipher */
int SSL_CIPHER_get_bits(SSL_CIPHER *c, int *alg_bits)
	{
	int ret=0;

	if (c != NULL)
		{
		if (alg_bits != NULL) *alg_bits = c->alg_bits;
		ret = c->strength_bits;
		}
	return(ret);
	}

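/* ssl3_comp_find():
 * Return the SSL_COMP entry with identifier 'n' from the stack 'sk',
 * or NULL if no such compression method is registered.
 */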
SSL_COMP *ssl3_comp_find(STACK_OF(SSL_COMP) *sk, int n)
	{
	SSL_COMP *ctmp;
	int i,nn;

	if ((n == 0) || (sk == NULL)) return(NULL);
	nn=sk_SSL_COMP_num(sk);
	for (i=0; i<nn; i++)
		{
		ctmp=sk_SSL_COMP_value(sk,i);
		if (ctmp->id == n)
			return(ctmp);
		}
	return(NULL);
	}

static int sk_comp_cmp(const SSL_COMP * const *a,
			const SSL_COMP * const *b)
	{
	return((*a)->id-(*b)->id);
	}

STACK_OF(SSL_COMP) *SSL_COMP_get_compression_methods(void)
	{
	return(ssl_comp_methods);
	}

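/* SSL_COMP_add_compression_method():
 * Register the compression method 'cm' under identifier 'id' in the global
 * ssl_comp_methods stack.  Returns 1 on success (or if cm is NULL or has an
 * undefined NID) and 0 on a memory allocation failure.
 */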
int SSL_COMP_add_compression_method(int id, COMP_METHOD *cm)
	{
	SSL_COMP *comp;
	STACK_OF(SSL_COMP) *sk;

	if (cm == NULL || cm->type == NID_undef)
		return 1;

	MemCheck_off();
	comp=(SSL_COMP *)OPENSSL_malloc(sizeof(SSL_COMP));
	comp->id=id;
	comp->method=cm;
	if (ssl_comp_methods == NULL)
		sk=ssl_comp_methods=sk_SSL_COMP_new(sk_comp_cmp);
	else
		sk=ssl_comp_methods;
	if ((sk == NULL) || !sk_SSL_COMP_push(sk,comp))
		{
		MemCheck_on();
		SSLerr(SSL_F_SSL_COMP_ADD_COMPRESSION_METHOD,ERR_R_MALLOC_FAILURE);
		return(0);
		}
	else
		{
		MemCheck_on();
		return(1);
		}
Mark J. Cox413c4f41999-02-16 09:22:21 +00001131 }