/*
 * Copyright 2006-2016 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/cryptlib.h"

#include <openssl/aes.h>
#include "aes_locl.h"

#define N_WORDS (AES_BLOCK_SIZE / sizeof(unsigned long))
typedef struct {
    unsigned long data[N_WORDS];
} aes_block_t;

/* XXX: probably some better way to do this */
#if defined(__i386__) || defined(__x86_64__)
# define UNALIGNED_MEMOPS_ARE_FAST 1
#else
# define UNALIGNED_MEMOPS_ARE_FAST 0
#endif

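/*
 * load_block()/store_block() move one whole block: as a single aes_block_t
 * assignment where word-wide (possibly unaligned) accesses are cheap, and
 * via memcpy() otherwise, so that misaligned buffers remain safe.
 */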
#if UNALIGNED_MEMOPS_ARE_FAST
# define load_block(d, s) (d) = *(const aes_block_t *)(s)
# define store_block(d, s) *(aes_block_t *)(d) = (s)
#else
# define load_block(d, s) memcpy((d).data, (s), AES_BLOCK_SIZE)
# define store_block(d, s) memcpy((d), (s).data, AES_BLOCK_SIZE)
#endif

/* N.B. The IV for this mode is _twice_ the block size */

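/*
 * Illustrative usage sketch (not part of the library; the key, IV and
 * buffer values are placeholders).  The IV buffer is two blocks wide and is
 * updated in place, so a follow-up call continues the chain:
 *
 *     unsigned char key_bytes[16] = { 0 };          // hypothetical 128-bit key
 *     unsigned char iv[2 * AES_BLOCK_SIZE] = { 0 }; // twice the block size
 *     unsigned char buf[32] = { 0 };                // multiple of AES_BLOCK_SIZE
 *     AES_KEY ks;
 *
 *     AES_set_encrypt_key(key_bytes, 128, &ks);
 *     AES_ige_encrypt(buf, buf, sizeof(buf), &ks, iv, AES_ENCRYPT);
 *
 * Decryption uses AES_set_decrypt_key() and AES_DECRYPT with the same
 * two-block IV that started the encryption.
 */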
void AES_ige_encrypt(const unsigned char *in, unsigned char *out,
                     size_t length, const AES_KEY *key,
                     unsigned char *ivec, const int enc)
{
    size_t n;
    size_t len = length;

    OPENSSL_assert(in && out && key && ivec);
    OPENSSL_assert((AES_ENCRYPT == enc) || (AES_DECRYPT == enc));
    OPENSSL_assert((length % AES_BLOCK_SIZE) == 0);

    len = length / AES_BLOCK_SIZE;

    if (AES_ENCRYPT == enc) {
        if (in != out &&
            (UNALIGNED_MEMOPS_ARE_FAST
             || ((size_t)in | (size_t)out | (size_t)ivec) % sizeof(long) ==
             0)) {
            aes_block_t *ivp = (aes_block_t *) ivec;
            aes_block_t *iv2p = (aes_block_t *) (ivec + AES_BLOCK_SIZE);

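            /*
             * IGE recurrence: c_i = E_K(p_i ^ c_{i-1}) ^ p_{i-1}; ivp tracks
             * the previous ciphertext block and iv2p the previous plaintext
             * block, seeded from the two halves of ivec.
             */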
            while (len) {
                aes_block_t *inp = (aes_block_t *) in;
                aes_block_t *outp = (aes_block_t *) out;

                for (n = 0; n < N_WORDS; ++n)
                    outp->data[n] = inp->data[n] ^ ivp->data[n];
                AES_encrypt((unsigned char *)outp->data,
                            (unsigned char *)outp->data, key);
                for (n = 0; n < N_WORDS; ++n)
                    outp->data[n] ^= iv2p->data[n];
                ivp = outp;
                iv2p = inp;
                --len;
                in += AES_BLOCK_SIZE;
                out += AES_BLOCK_SIZE;
            }
            memcpy(ivec, ivp->data, AES_BLOCK_SIZE);
            memcpy(ivec + AES_BLOCK_SIZE, iv2p->data, AES_BLOCK_SIZE);
        } else {
            aes_block_t tmp, tmp2;
            aes_block_t iv;
            aes_block_t iv2;

            load_block(iv, ivec);
            load_block(iv2, ivec + AES_BLOCK_SIZE);

            while (len) {
                load_block(tmp, in);
                for (n = 0; n < N_WORDS; ++n)
                    tmp2.data[n] = tmp.data[n] ^ iv.data[n];
                AES_encrypt((unsigned char *)tmp2.data,
                            (unsigned char *)tmp2.data, key);
                for (n = 0; n < N_WORDS; ++n)
                    tmp2.data[n] ^= iv2.data[n];
                store_block(out, tmp2);
                iv = tmp2;
                iv2 = tmp;
                --len;
                in += AES_BLOCK_SIZE;
                out += AES_BLOCK_SIZE;
            }
            memcpy(ivec, iv.data, AES_BLOCK_SIZE);
            memcpy(ivec + AES_BLOCK_SIZE, iv2.data, AES_BLOCK_SIZE);
        }
    } else {
        if (in != out &&
            (UNALIGNED_MEMOPS_ARE_FAST
             || ((size_t)in | (size_t)out | (size_t)ivec) % sizeof(long) ==
             0)) {
            aes_block_t *ivp = (aes_block_t *) ivec;
            aes_block_t *iv2p = (aes_block_t *) (ivec + AES_BLOCK_SIZE);

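            /*
             * Inverse recurrence: p_i = D_K(c_i ^ p_{i-1}) ^ c_{i-1}; here
             * iv2p tracks the previous plaintext block and ivp the previous
             * ciphertext block.
             */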
            while (len) {
                aes_block_t tmp;
                aes_block_t *inp = (aes_block_t *) in;
                aes_block_t *outp = (aes_block_t *) out;

                for (n = 0; n < N_WORDS; ++n)
                    tmp.data[n] = inp->data[n] ^ iv2p->data[n];
                AES_decrypt((unsigned char *)tmp.data,
                            (unsigned char *)outp->data, key);
                for (n = 0; n < N_WORDS; ++n)
                    outp->data[n] ^= ivp->data[n];
                ivp = inp;
                iv2p = outp;
                --len;
                in += AES_BLOCK_SIZE;
                out += AES_BLOCK_SIZE;
            }
            memcpy(ivec, ivp->data, AES_BLOCK_SIZE);
            memcpy(ivec + AES_BLOCK_SIZE, iv2p->data, AES_BLOCK_SIZE);
        } else {
            aes_block_t tmp, tmp2;
            aes_block_t iv;
            aes_block_t iv2;

            load_block(iv, ivec);
            load_block(iv2, ivec + AES_BLOCK_SIZE);

            while (len) {
                load_block(tmp, in);
                tmp2 = tmp;
                for (n = 0; n < N_WORDS; ++n)
                    tmp.data[n] ^= iv2.data[n];
                AES_decrypt((unsigned char *)tmp.data,
                            (unsigned char *)tmp.data, key);
                for (n = 0; n < N_WORDS; ++n)
                    tmp.data[n] ^= iv.data[n];
                store_block(out, tmp);
                iv = tmp2;
                iv2 = tmp;
                --len;
                in += AES_BLOCK_SIZE;
                out += AES_BLOCK_SIZE;
            }
            memcpy(ivec, iv.data, AES_BLOCK_SIZE);
            memcpy(ivec + AES_BLOCK_SIZE, iv2.data, AES_BLOCK_SIZE);
        }
    }
}

/*
 * Note that it's effectively impossible to do bi-IGE in anything other
 * than a single pass, so no provision is made for chaining.
 */

/* N.B. The IV for this mode is _four times_ the block size */

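/*
 * Illustrative usage sketch (not part of the library; the key, IV and data
 * values are placeholders).  The IV buffer is four blocks wide and, since
 * bi-directional IGE cannot be chained, it is passed as const and left
 * unchanged:
 *
 *     unsigned char k1[16] = { 0 }, k2[16] = { 0 }; // hypothetical key material
 *     unsigned char iv[4 * AES_BLOCK_SIZE] = { 0 }; // four times the block size
 *     unsigned char pt[48] = { 0 }, ct[48];         // multiple of AES_BLOCK_SIZE
 *     AES_KEY ks1, ks2;
 *
 *     AES_set_encrypt_key(k1, 128, &ks1);
 *     AES_set_encrypt_key(k2, 128, &ks2);
 *     AES_bi_ige_encrypt(pt, ct, sizeof(pt), &ks1, &ks2, iv, AES_ENCRYPT);
 */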
void AES_bi_ige_encrypt(const unsigned char *in, unsigned char *out,
                        size_t length, const AES_KEY *key,
                        const AES_KEY *key2, const unsigned char *ivec,
                        const int enc)
{
    size_t n;
    size_t len = length;
    unsigned char tmp[AES_BLOCK_SIZE];
    unsigned char tmp2[AES_BLOCK_SIZE];
    unsigned char tmp3[AES_BLOCK_SIZE];
    unsigned char prev[AES_BLOCK_SIZE];
    const unsigned char *iv;
    const unsigned char *iv2;

    OPENSSL_assert(in && out && key && ivec);
    OPENSSL_assert((AES_ENCRYPT == enc) || (AES_DECRYPT == enc));
    OPENSSL_assert((length % AES_BLOCK_SIZE) == 0);

    if (AES_ENCRYPT == enc) {
        /*
         * XXX: Do a separate case for when in != out (strictly should check
         * for overlap, too)
         */

        /* First the forward pass */
        iv = ivec;
        iv2 = ivec + AES_BLOCK_SIZE;
        while (len >= AES_BLOCK_SIZE) {
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] = in[n] ^ iv[n];
            AES_encrypt(out, out, key);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] ^= iv2[n];
            iv = out;
            memcpy(prev, in, AES_BLOCK_SIZE);
            iv2 = prev;
            len -= AES_BLOCK_SIZE;
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }

        /* And now backwards */
        iv = ivec + AES_BLOCK_SIZE * 2;
        iv2 = ivec + AES_BLOCK_SIZE * 3;
        len = length;
        while (len >= AES_BLOCK_SIZE) {
            out -= AES_BLOCK_SIZE;
            /*
             * XXX: reduce copies by alternating between buffers
             */
            memcpy(tmp, out, AES_BLOCK_SIZE);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] ^= iv[n];
            /*
             * hexdump(stdout, "out ^ iv", out, AES_BLOCK_SIZE);
             */
            AES_encrypt(out, out, key);
            /*
             * hexdump(stdout,"enc", out, AES_BLOCK_SIZE);
             */
            /*
             * hexdump(stdout,"iv2", iv2, AES_BLOCK_SIZE);
             */
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] ^= iv2[n];
            /*
             * hexdump(stdout,"out", out, AES_BLOCK_SIZE);
             */
            iv = out;
            memcpy(prev, tmp, AES_BLOCK_SIZE);
            iv2 = prev;
            len -= AES_BLOCK_SIZE;
        }
    } else {
        /* First backwards */
        iv = ivec + AES_BLOCK_SIZE * 2;
        iv2 = ivec + AES_BLOCK_SIZE * 3;
        in += length;
        out += length;
        while (len >= AES_BLOCK_SIZE) {
            in -= AES_BLOCK_SIZE;
            out -= AES_BLOCK_SIZE;
            memcpy(tmp, in, AES_BLOCK_SIZE);
            memcpy(tmp2, in, AES_BLOCK_SIZE);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                tmp[n] ^= iv2[n];
            AES_decrypt(tmp, out, key);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] ^= iv[n];
            memcpy(tmp3, tmp2, AES_BLOCK_SIZE);
            iv = tmp3;
            iv2 = out;
            len -= AES_BLOCK_SIZE;
        }

        /* And now forwards */
        iv = ivec;
        iv2 = ivec + AES_BLOCK_SIZE;
        len = length;
        while (len >= AES_BLOCK_SIZE) {
            memcpy(tmp, out, AES_BLOCK_SIZE);
            memcpy(tmp2, out, AES_BLOCK_SIZE);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                tmp[n] ^= iv2[n];
            AES_decrypt(tmp, out, key);
            for (n = 0; n < AES_BLOCK_SIZE; ++n)
                out[n] ^= iv[n];
            memcpy(tmp3, tmp2, AES_BLOCK_SIZE);
            iv = tmp3;
            iv2 = out;
            len -= AES_BLOCK_SIZE;
            in += AES_BLOCK_SIZE;
            out += AES_BLOCK_SIZE;
        }
    }
}