/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <linux/fips.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

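/*
 * Transform contexts: the raw key material, the CPACF function code chosen
 * at setkey time (0 when the machine has no instruction support for the
 * requested key size) and a software fallback transform that is used
 * whenever the function code is 0.
 */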
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	int key_len;
	unsigned long fc;
	union {
		struct crypto_skcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	int key_len;
	unsigned long fc;
	struct crypto_skcipher *fallback;
};

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_cip(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc | CPACF_DECRYPT,
		 &sctx->key, out, in, AES_BLOCK_SIZE);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	300,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
						      CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}

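/*
 * Forward a blkcipher request to the software fallback skcipher allocated
 * in fallback_init_blk(), reusing the caller's request flags and IV.
 */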
static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

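/*
 * ECB bulk path: walk the scatterlists in virtually mapped chunks and hand
 * each run of complete AES blocks to the KM instruction; any partial tail
 * is left to the block cipher walk helpers.
 */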
static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(sctx->fc | modifier, sctx->key,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, 0, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
						   CRYPTO_ALG_ASYNC |
						   CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(sctx->fallback.blk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ecb */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMC_AES_128 :
	     (key_len == 24) ? CPACF_KMC_AES_192 :
	     (key_len == 32) ? CPACF_KMC_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

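/*
 * CBC bulk path: the KMC instruction takes a parameter block built here
 * from the chaining value (IV) followed by the key. The updated chaining
 * value is copied back to walk->iv once all complete blocks are done.
 */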
static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_kmc(sctx->fc | modifier, &param,
			  walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, 0, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	400,	/* combo: aes + cbc */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
						     CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);

	skcipher_request_zero(req);
	return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned long fc;
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

	/* In fips mode only 128 bit or 256 bit keys are valid */
	if (fips_enabled && key_len != 32 && key_len != 64) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* Pick the correct function code based on the key length */
	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
	     (key_len == 64) ? CPACF_KM_XTS_256 : 0;

	/* Check if the function code is available */
	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!xts_ctx->fc)
		return xts_fallback_setkey(tfm, in_key, key_len);

	/* Split the XTS key into the two subkeys */
	key_len = key_len / 2;
	xts_ctx->key_len = key_len;
	memcpy(xts_ctx->key, in_key, key_len);
	memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
	return 0;
}

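/*
 * XTS bulk path: PCC derives the initial tweak from the second subkey and
 * the IV, then KM processes the data with the first subkey and that tweak.
 * The offset (0x10 for a 16 byte key) right-aligns the key within the
 * 32 byte key field so that the parameter block handed to PCC and KM
 * starts directly at the key material.
 */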
static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int offset, nbytes, n;
	int ret;
	struct {
		u8 key[32];
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	offset = xts_ctx->key_len & 0x10;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, 0, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(xts_ctx->fallback);
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	400,	/* combo: aes + xts */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
	     (key_len == 24) ? CPACF_KMCTR_AES_192 :
	     (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

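/*
 * Fill the shared ctrblk page with consecutive counter values, starting
 * from *iv, so that a single KMCTR call can process many blocks at once.
 * Returns the number of bytes covered by the prepared counter blocks.
 */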
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

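/*
 * CTR bulk path: if the ctrblk page can be locked, process up to PAGE_SIZE
 * of data per KMCTR call using a pre-built run of counter blocks; otherwise
 * fall back to one block per call with walk->iv as the counter. A trailing
 * partial block is encrypted into a stack buffer and only nbytes of it are
 * copied to the destination.
 */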
static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int n, nbytes;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    walk->dst.virt.addr, walk->src.virt.addr,
			    n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    buf, walk->src.virt.addr,
			    AES_BLOCK_SIZE, walk->iv);
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, 0, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ctr */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_aes_set_key,
			.encrypt		=	ctr_aes_encrypt,
			.decrypt		=	ctr_aes_decrypt,
		}
	}
};

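/*
 * Book-keeping of successfully registered algorithms so that
 * aes_s390_fini() can unregister them in reverse order and release the
 * ctrblk page.
 */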
static struct crypto_alg *aes_s390_algs_ptr[5];
static int aes_s390_algs_num;

static int aes_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
	return ret;
}

static void aes_s390_fini(void)
{
	while (aes_s390_algs_num--)
		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

static int __init aes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
		ret = aes_s390_register_alg(&aes_alg);
		if (ret)
			goto out_err;
		ret = aes_s390_register_alg(&ecb_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
		ret = aes_s390_register_alg(&cbc_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
		ret = aes_s390_register_alg(&xts_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = aes_s390_register_alg(&ctr_aes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	aes_s390_fini();
	return ret;
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");