/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior (sebastian@breakpoint.cc) SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4
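
/*
 * keylen_flag is a bitmask of the AES_KEYLEN_* values above; it is filled
 * in at module init time according to which key lengths the hardware
 * reports as available (see aes_s390_init()).  Key sizes without hardware
 * support are routed through the software fallback transforms.  ctrblk is
 * a shared page of counter blocks that is only allocated when the KMCTR
 * functions are usable.
 */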
static u8 *ctrblk;
static char keylen_flag;

struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 xts_param[16];
	struct pcc_param pcc;
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};
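
/*
 * Note on the XTS key layout: the first half of the supplied key (the
 * data-encryption key) is stored in s390_xts_ctx.key and passed to the KM
 * instruction, while the second half (the tweak key) is stored in pcc.key
 * and consumed via crypt_s390_pcc() to precompute the initial tweak.  Both
 * buffers are laid out so that the 128-bit variant occupies their upper
 * 16 bytes and the 256-bit variant the full 32 bytes, which is what the
 * "(key_len >> 1) & 0x10" offset calculation in xts_aes_crypt() relies on.
 */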

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number if the key size is not valid at all.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
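
/*
 * No AES round keys are derived in software for the hardware path: the raw
 * key is simply copied into the context and handed to the KM/KMC functions
 * as-is.  Valid key lengths that the hardware cannot handle are delegated
 * to the fallback cipher instead.
 */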
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
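
/*
 * The blkcipher fallback helpers temporarily swap desc->tfm to the software
 * fallback transform, run the request there and then restore the original
 * tfm, so callers never see whether a request was processed in hardware or
 * in software.
 */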
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
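
/*
 * For CBC the parameter block passed to KMC holds the chaining value: it is
 * seeded from walk->iv before the first chunk and copied back once the walk
 * has finished, so the caller sees the updated IV.
 */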
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		ret = xts_fallback_setkey(tfm, in_key, key_len);
		if (ret)
			return ret;
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	void *param;

	if (!nbytes)
		goto out;

	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
	param = xts_ctx->pcc.key + offset;
	ret = crypt_s390_pcc(func, param);
	BUG_ON(ret < 0);

	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
	param = xts_ctx->key + offset;
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON(ret < 0 || ret != n);

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_aes_alg.cra_list),
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aes_set_key,
			.encrypt	= xts_aes_encrypt,
			.decrypt	= xts_aes_decrypt,
		}
	}
};

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
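
/*
 * ctr_aes_crypt() expands the IV into the shared ctrblk page as a series of
 * consecutive counter values, so a single KMCTR call can process up to
 * PAGE_SIZE bytes per iteration.  A final partial block is encrypted into a
 * stack buffer and only the remaining nbytes are copied to the destination.
 */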
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[AES_BLOCK_SIZE];
	u8 *out, *in;

	if (!walk->nbytes)
		return ret;

	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			/* only use complete blocks, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
						   nbytes & ~(AES_BLOCK_SIZE - 1);
			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
			}
			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
			BUG_ON(ret < 0 || ret != n);
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrblk, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrblk);
		BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE);
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_aes_set_key,
			.encrypt	= ctr_aes_encrypt,
			.decrypt	= ctr_aes_decrypt,
		}
	}
};
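
/*
 * The base "aes", "ecb(aes)" and "cbc(aes)" algorithms are registered
 * whenever at least one AES key length is available in hardware;
 * "xts(aes)" and "ctr(aes)" are only registered when
 * crypt_s390_func_available() reports the required KM-XTS and KMCTR
 * functions.  Registration failures unwind in reverse order through the
 * goto chain below.
 */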
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	crypto_unregister_alg(&ctr_aes_alg);
	free_page((unsigned long) ctrblk);
	crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");