/*
 * WPA Supplicant / wrapper functions for libgcrypt
 * Copyright (c) 2004-2017, Jouni Malinen <j@w1.fi>
 *
 * This software may be distributed under the terms of the BSD license.
 * See README for more details.
 */

#include "includes.h"
#include <gcrypt.h>

#include "common.h"
#include "md5.h"
#include "sha1.h"
#include "sha256.h"
#include "sha384.h"
#include "sha512.h"
#include "crypto.h"

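/*
 * Shared helper for the message digest wrappers in this file: runs the
 * libgcrypt digest 'algo' over a scatter/gather vector of num_elem buffers
 * and copies the resulting hash to mac.
 */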
static int gnutls_digest_vector(int algo, size_t num_elem,
				const u8 *addr[], const size_t *len, u8 *mac)
{
	gcry_md_hd_t hd;
	unsigned char *p;
	size_t i;

	if (TEST_FAIL())
		return -1;

	if (gcry_md_open(&hd, algo, 0) != GPG_ERR_NO_ERROR)
		return -1;
	for (i = 0; i < num_elem; i++)
		gcry_md_write(hd, addr[i], len[i]);
	p = gcry_md_read(hd, algo);
	if (p)
		memcpy(mac, p, gcry_md_get_algo_dlen(algo));
	gcry_md_close(hd);
	return 0;
}


int md4_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_MD4, num_elem, addr, len, mac);
}


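/*
 * DES-ECB encryption of a single 8-octet block. The caller provides a
 * 56-bit key (7 octets); the loop below expands it to the 8-octet form used
 * by libgcrypt by shifting in the key bits and setting the least
 * significant (parity) bit of each octet to 1.
 */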
int des_encrypt(const u8 *clear, const u8 *key, u8 *cypher)
{
	gcry_cipher_hd_t hd;
	u8 pkey[8], next, tmp;
	int i;

	/* Add parity bits to the key */
	next = 0;
	for (i = 0; i < 7; i++) {
		tmp = key[i];
		pkey[i] = (tmp >> i) | next | 1;
		next = tmp << (7 - i);
	}
	pkey[i] = next | 1;

	gcry_cipher_open(&hd, GCRY_CIPHER_DES, GCRY_CIPHER_MODE_ECB, 0);
	gcry_err_code(gcry_cipher_setkey(hd, pkey, 8));
	gcry_cipher_encrypt(hd, cypher, 8, clear, 8);
	gcry_cipher_close(hd);
	return 0;
}


int md5_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_MD5, num_elem, addr, len, mac);
}


int sha1_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_SHA1, num_elem, addr, len, mac);
}


int sha256_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_SHA256, num_elem, addr, len, mac);
}


int sha384_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_SHA384, num_elem, addr, len, mac);
}


int sha512_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_digest_vector(GCRY_MD_SHA512, num_elem, addr, len, mac);
}


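/*
 * Shared helper for the HMAC wrappers: opens a libgcrypt digest handle in
 * HMAC mode, keys it, hashes the scatter/gather vector, and copies the
 * resulting MAC to the caller's buffer.
 */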
static int gnutls_hmac_vector(int algo, const u8 *key, size_t key_len,
			      size_t num_elem, const u8 *addr[],
			      const size_t *len, u8 *mac)
{
	gcry_md_hd_t hd;
	unsigned char *p;
	size_t i;

	if (TEST_FAIL())
		return -1;

	if (gcry_md_open(&hd, algo, GCRY_MD_FLAG_HMAC) != GPG_ERR_NO_ERROR)
		return -1;
	if (gcry_md_setkey(hd, key, key_len) != GPG_ERR_NO_ERROR) {
		gcry_md_close(hd);
		return -1;
	}
	for (i = 0; i < num_elem; i++)
		gcry_md_write(hd, addr[i], len[i]);
	p = gcry_md_read(hd, algo);
	if (p)
		memcpy(mac, p, gcry_md_get_algo_dlen(algo));
	gcry_md_close(hd);
	return 0;
}


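/*
 * The HMAC entry points below are thin per-algorithm wrappers around
 * gnutls_hmac_vector(); the single-buffer variants simply forward to the
 * corresponding vector version.
 */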
int hmac_md5_vector(const u8 *key, size_t key_len, size_t num_elem,
		    const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_hmac_vector(GCRY_MD_MD5, key, key_len, num_elem, addr,
				  len, mac);
}


int hmac_md5(const u8 *key, size_t key_len, const u8 *data, size_t data_len,
	     u8 *mac)
{
	return hmac_md5_vector(key, key_len, 1, &data, &data_len, mac);
}


int hmac_sha1_vector(const u8 *key, size_t key_len, size_t num_elem,
		     const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_hmac_vector(GCRY_MD_SHA1, key, key_len, num_elem, addr,
				  len, mac);
}


int hmac_sha1(const u8 *key, size_t key_len, const u8 *data, size_t data_len,
	      u8 *mac)
{
	return hmac_sha1_vector(key, key_len, 1, &data, &data_len, mac);
}


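/*
 * The SHA-2 based HMAC wrappers are compiled in only when the corresponding
 * CONFIG_SHA256/CONFIG_SHA384/CONFIG_SHA512 build option is enabled.
 */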
#ifdef CONFIG_SHA256

int hmac_sha256_vector(const u8 *key, size_t key_len, size_t num_elem,
		       const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_hmac_vector(GCRY_MD_SHA256, key, key_len, num_elem, addr,
				  len, mac);
}


int hmac_sha256(const u8 *key, size_t key_len, const u8 *data,
		size_t data_len, u8 *mac)
{
	return hmac_sha256_vector(key, key_len, 1, &data, &data_len, mac);
}

#endif /* CONFIG_SHA256 */


#ifdef CONFIG_SHA384

int hmac_sha384_vector(const u8 *key, size_t key_len, size_t num_elem,
		       const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_hmac_vector(GCRY_MD_SHA384, key, key_len, num_elem, addr,
				  len, mac);
}


int hmac_sha384(const u8 *key, size_t key_len, const u8 *data,
		size_t data_len, u8 *mac)
{
	return hmac_sha384_vector(key, key_len, 1, &data, &data_len, mac);
}

#endif /* CONFIG_SHA384 */


#ifdef CONFIG_SHA512

int hmac_sha512_vector(const u8 *key, size_t key_len, size_t num_elem,
		       const u8 *addr[], const size_t *len, u8 *mac)
{
	return gnutls_hmac_vector(GCRY_MD_SHA512, key, key_len, num_elem, addr,
				  len, mac);
}


int hmac_sha512(const u8 *key, size_t key_len, const u8 *data,
		size_t data_len, u8 *mac)
{
	return hmac_sha512_vector(key, key_len, 1, &data, &data_len, mac);
}

#endif /* CONFIG_SHA512 */


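/*
 * aes_encrypt_init()/aes_encrypt()/aes_encrypt_deinit() (and the matching
 * aes_decrypt_* functions below) implement single 16-octet block AES
 * operations on top of a libgcrypt ECB-mode cipher handle; the handle
 * returned from the init function is used directly as the opaque context
 * pointer.
 */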
void * aes_encrypt_init(const u8 *key, size_t len)
{
	gcry_cipher_hd_t hd;

	if (TEST_FAIL())
		return NULL;

	if (gcry_cipher_open(&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_ECB, 0) !=
	    GPG_ERR_NO_ERROR) {
		printf("cipher open failed\n");
		return NULL;
	}
	if (gcry_cipher_setkey(hd, key, len) != GPG_ERR_NO_ERROR) {
		printf("setkey failed\n");
		gcry_cipher_close(hd);
		return NULL;
	}

	return hd;
}


int aes_encrypt(void *ctx, const u8 *plain, u8 *crypt)
{
	gcry_cipher_hd_t hd = ctx;
	gcry_cipher_encrypt(hd, crypt, 16, plain, 16);
	return 0;
}


void aes_encrypt_deinit(void *ctx)
{
	gcry_cipher_hd_t hd = ctx;
	gcry_cipher_close(hd);
}


void * aes_decrypt_init(const u8 *key, size_t len)
{
	gcry_cipher_hd_t hd;

	if (TEST_FAIL())
		return NULL;

	if (gcry_cipher_open(&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_ECB, 0) !=
	    GPG_ERR_NO_ERROR)
		return NULL;
	if (gcry_cipher_setkey(hd, key, len) != GPG_ERR_NO_ERROR) {
		gcry_cipher_close(hd);
		return NULL;
	}

	return hd;
}


int aes_decrypt(void *ctx, const u8 *crypt, u8 *plain)
{
	gcry_cipher_hd_t hd = ctx;
	gcry_cipher_decrypt(hd, plain, 16, crypt, 16);
	return 0;
}


void aes_decrypt_deinit(void *ctx)
{
	gcry_cipher_hd_t hd = ctx;
	gcry_cipher_close(hd);
}


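/*
 * Generate a Diffie-Hellman private/public key pair: the private value is
 * prime_len octets of random data (clamped below the prime by zeroing the
 * most significant octet if needed) and the public value is
 * generator^privkey mod prime, left-padded with zeros to prime_len octets.
 */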
int crypto_dh_init(u8 generator, const u8 *prime, size_t prime_len, u8 *privkey,
		   u8 *pubkey)
{
	size_t pubkey_len, pad;

	if (os_get_random(privkey, prime_len) < 0)
		return -1;
	if (os_memcmp(privkey, prime, prime_len) > 0) {
		/* Make sure private value is smaller than prime */
		privkey[0] = 0;
	}

	pubkey_len = prime_len;
	if (crypto_mod_exp(&generator, 1, privkey, prime_len, prime, prime_len,
			   pubkey, &pubkey_len) < 0)
		return -1;
	if (pubkey_len < prime_len) {
		pad = prime_len - pubkey_len;
		os_memmove(pubkey + pad, pubkey, pubkey_len);
		os_memset(pubkey, 0, pad);
	}

	return 0;
}


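/*
 * Derive the shared Diffie-Hellman secret pubkey^privkey mod prime. Before
 * the exponentiation, the peer's public value is validated: it must be in
 * the range (1, prime) and, when the group order is supplied, it must also
 * satisfy pubkey^order == 1 mod prime (subgroup membership check).
 */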
int crypto_dh_derive_secret(u8 generator, const u8 *prime, size_t prime_len,
			    const u8 *order, size_t order_len,
			    const u8 *privkey, size_t privkey_len,
			    const u8 *pubkey, size_t pubkey_len,
			    u8 *secret, size_t *len)
{
	gcry_mpi_t pub = NULL;
	int res = -1;

	if (pubkey_len > prime_len ||
	    (pubkey_len == prime_len &&
	     os_memcmp(pubkey, prime, prime_len) >= 0))
		return -1;

	if (gcry_mpi_scan(&pub, GCRYMPI_FMT_USG, pubkey, pubkey_len, NULL) !=
	    GPG_ERR_NO_ERROR ||
	    gcry_mpi_cmp_ui(pub, 1) <= 0)
		goto fail;

	if (order) {
		gcry_mpi_t p = NULL, q = NULL, tmp;
		int failed;

		/* verify: pubkey^q == 1 mod p */
		tmp = gcry_mpi_new(prime_len * 8);
		failed = !tmp ||
			gcry_mpi_scan(&p, GCRYMPI_FMT_USG, prime, prime_len,
				      NULL) != GPG_ERR_NO_ERROR ||
			gcry_mpi_scan(&q, GCRYMPI_FMT_USG, order, order_len,
				      NULL) != GPG_ERR_NO_ERROR;
		if (!failed) {
			gcry_mpi_powm(tmp, pub, q, p);
			failed = gcry_mpi_cmp_ui(tmp, 1) != 0;
		}
		gcry_mpi_release(p);
		gcry_mpi_release(q);
		gcry_mpi_release(tmp);
		if (failed)
			goto fail;
	}

	res = crypto_mod_exp(pubkey, pubkey_len, privkey, privkey_len,
			     prime, prime_len, secret, len);
fail:
	gcry_mpi_release(pub);
	return res;
}


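/*
 * Modular exponentiation helper: result = base^power mod modulus. All
 * operands are unsigned big-endian octet strings; on input *result_len is
 * the size of the result buffer and on success it is updated to the number
 * of octets actually written.
 */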
int crypto_mod_exp(const u8 *base, size_t base_len,
		   const u8 *power, size_t power_len,
		   const u8 *modulus, size_t modulus_len,
		   u8 *result, size_t *result_len)
{
	gcry_mpi_t bn_base = NULL, bn_exp = NULL, bn_modulus = NULL,
		bn_result = NULL;
	int ret = -1;

	if (gcry_mpi_scan(&bn_base, GCRYMPI_FMT_USG, base, base_len, NULL) !=
	    GPG_ERR_NO_ERROR ||
	    gcry_mpi_scan(&bn_exp, GCRYMPI_FMT_USG, power, power_len, NULL) !=
	    GPG_ERR_NO_ERROR ||
	    gcry_mpi_scan(&bn_modulus, GCRYMPI_FMT_USG, modulus, modulus_len,
			  NULL) != GPG_ERR_NO_ERROR)
		goto error;
	bn_result = gcry_mpi_new(modulus_len * 8);

	gcry_mpi_powm(bn_result, bn_base, bn_exp, bn_modulus);

	if (gcry_mpi_print(GCRYMPI_FMT_USG, result, *result_len, result_len,
			   bn_result) != GPG_ERR_NO_ERROR)
		goto error;

	ret = 0;

error:
	gcry_mpi_release(bn_base);
	gcry_mpi_release(bn_exp);
	gcry_mpi_release(bn_modulus);
	gcry_mpi_release(bn_result);
	return ret;
}


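/*
 * Generic cipher context: separate libgcrypt handles are kept for the
 * encryption and decryption directions so that each side maintains its own
 * chaining (or stream) state.
 */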
struct crypto_cipher {
	gcry_cipher_hd_t enc;
	gcry_cipher_hd_t dec;
};


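/*
 * Map the generic crypto_cipher_alg values to libgcrypt algorithms and set
 * up paired encrypt/decrypt handles with the given key and IV. A rough
 * usage sketch (illustrative only; the buffer and key sizes below are
 * assumptions made for this example, not requirements of the API beyond
 * block-size alignment for block ciphers):
 *
 *	struct crypto_cipher *c;
 *	u8 iv[16], key[16], plain[32], crypt[32];
 *
 *	c = crypto_cipher_init(CRYPTO_CIPHER_ALG_AES, iv, key, sizeof(key));
 *	if (c) {
 *		crypto_cipher_encrypt(c, plain, crypt, sizeof(plain));
 *		crypto_cipher_deinit(c);
 *	}
 */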
struct crypto_cipher * crypto_cipher_init(enum crypto_cipher_alg alg,
					  const u8 *iv, const u8 *key,
					  size_t key_len)
{
	struct crypto_cipher *ctx;
	gcry_error_t res;
	enum gcry_cipher_algos a;
	int ivlen;

	ctx = os_zalloc(sizeof(*ctx));
	if (ctx == NULL)
		return NULL;

	switch (alg) {
	case CRYPTO_CIPHER_ALG_RC4:
		a = GCRY_CIPHER_ARCFOUR;
		res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_STREAM,
				       0);
		gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_STREAM, 0);
		break;
	case CRYPTO_CIPHER_ALG_AES:
		if (key_len == 24)
			a = GCRY_CIPHER_AES192;
		else if (key_len == 32)
			a = GCRY_CIPHER_AES256;
		else
			a = GCRY_CIPHER_AES;
		res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
		gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
		break;
	case CRYPTO_CIPHER_ALG_3DES:
		a = GCRY_CIPHER_3DES;
		res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
		gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
		break;
	case CRYPTO_CIPHER_ALG_DES:
		a = GCRY_CIPHER_DES;
		res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
		gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
		break;
	case CRYPTO_CIPHER_ALG_RC2:
		if (key_len == 5)
			a = GCRY_CIPHER_RFC2268_40;
		else
			a = GCRY_CIPHER_RFC2268_128;
		res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
		gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
		break;
	default:
		os_free(ctx);
		return NULL;
	}

	if (res != GPG_ERR_NO_ERROR) {
		os_free(ctx);
		return NULL;
	}

	if (gcry_cipher_setkey(ctx->enc, key, key_len) != GPG_ERR_NO_ERROR ||
	    gcry_cipher_setkey(ctx->dec, key, key_len) != GPG_ERR_NO_ERROR) {
		gcry_cipher_close(ctx->enc);
		gcry_cipher_close(ctx->dec);
		os_free(ctx);
		return NULL;
	}

	ivlen = gcry_cipher_get_algo_blklen(a);
	if (gcry_cipher_setiv(ctx->enc, iv, ivlen) != GPG_ERR_NO_ERROR ||
	    gcry_cipher_setiv(ctx->dec, iv, ivlen) != GPG_ERR_NO_ERROR) {
		gcry_cipher_close(ctx->enc);
		gcry_cipher_close(ctx->dec);
		os_free(ctx);
		return NULL;
	}

	return ctx;
}


int crypto_cipher_encrypt(struct crypto_cipher *ctx, const u8 *plain,
			  u8 *crypt, size_t len)
{
	if (gcry_cipher_encrypt(ctx->enc, crypt, len, plain, len) !=
	    GPG_ERR_NO_ERROR)
		return -1;
	return 0;
}


int crypto_cipher_decrypt(struct crypto_cipher *ctx, const u8 *crypt,
			  u8 *plain, size_t len)
{
	if (gcry_cipher_decrypt(ctx->dec, plain, len, crypt, len) !=
	    GPG_ERR_NO_ERROR)
		return -1;
	return 0;
}


void crypto_cipher_deinit(struct crypto_cipher *ctx)
{
	gcry_cipher_close(ctx->enc);
	gcry_cipher_close(ctx->dec);
	os_free(ctx);
}


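/*
 * This wrapper allocates no global state, so there is nothing to release
 * when the crypto module is unloaded.
 */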
void crypto_unload(void)
{
}