/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior (sebastian@breakpoint.cc) SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
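
/*
 * The three masks cache the facility query results; they are filled once
 * in aes_s390_init() via cpacf_query(), so the setkey paths only need a
 * cheap cpacf_test_func() to decide between the CPACF instructions and
 * the software fallback.
 */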
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	int key_len;
	unsigned long fc;
	union {
		struct crypto_skcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	int key_len;
	unsigned long fc;
	struct crypto_skcipher *fallback;
};
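
/*
 * Both context structures carry the raw key material for the CPACF
 * instructions plus a software fallback tfm. The fallback is used
 * whenever the machine does not provide the AES function code for the
 * requested key length (fc == 0), so all key lengths stay usable.
 */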
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
					       CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				   CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
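
/*
 * The flag shuffling in setkey_fallback_cip() mirrors what the crypto
 * core does for a native setkey: request flags from the wrapping tfm are
 * forwarded to the fallback tfm, and on failure the fallback's result
 * flags are copied back so the caller sees its verdict.
 */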
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_cip(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
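
/*
 * For the single-block cipher ops below, one KM (cipher message)
 * invocation processes exactly AES_BLOCK_SIZE bytes; KM operates on
 * multiples of the cipher block size.
 */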
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc | CPACF_DECRYPT,
		 &sctx->key, out, in, AES_BLOCK_SIZE);
}
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_ASYNC |
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	300,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};
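
/*
 * Consumers never call into this driver directly; they allocate "aes"
 * through the crypto API and the priority field above makes the s390
 * implementation win over aes-generic. A minimal (hypothetical) user
 * would look like:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, AES_MIN_KEY_SIZE);
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *		crypto_free_cipher(tfm);
 *	}
 */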
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
						      CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}
static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(sctx->fc | modifier, sctx->key,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, 0, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
						   CRYPTO_ALG_ASYNC |
						   CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(sctx->fallback.blk);
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ecb */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMC_AES_128 :
	     (key_len == 24) ? CPACF_KMC_AES_192 :
	     (key_len == 32) ? CPACF_KMC_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_kmc(sctx->fc | modifier, &param,
			  walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}
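
/*
 * KMC keeps the chaining value in its parameter block: the IV is copied
 * in front of the key before the loop, updated by the instruction as the
 * blocks are processed, and written back to walk->iv afterwards so that
 * split requests chain correctly.
 */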
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, 0, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	400,	/* combo: aes + cbc */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
						     CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}
static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned long fc;
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
	     (key_len == 64) ? CPACF_KM_XTS_256 : 0;

	/* Check if the function code is available */
	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!xts_ctx->fc)
		return xts_fallback_setkey(tfm, in_key, key_len);

	/* Split the XTS key into the two subkeys */
	key_len = key_len / 2;
	xts_ctx->key_len = key_len;
	memcpy(xts_ctx->key, in_key, key_len);
	memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
	return 0;
}
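
/*
 * xts_aes_crypt() below first uses the PCC (perform cryptographic
 * computation) instruction to derive the initial tweak from the IV and
 * the second subkey, then passes that tweak together with the first
 * subkey to KM. The "key_len & 0x10" offset skips the unused first half
 * of the 32-byte key field for XTS-128, so param.key + offset always
 * points at the start of the correctly sized parameter block.
 */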
static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int offset, nbytes, n;
	int ret;
	struct {
		u8 key[32];
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	offset = xts_ctx->key_len & 0x10;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	return ret;
}
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, 0, &walk);
}
static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}
static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(xts_ctx->fallback);
}
static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	400,	/* combo: aes + xts */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
	     (key_len == 24) ? CPACF_KMCTR_AES_192 :
	     (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}
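
/*
 * __ctrblk_init() expands the counter into the shared, page-sized
 * ctrblk buffer so a single KMCTR invocation can process up to
 * PAGE_SIZE bytes. The buffer is protected by ctrblk_lock; since
 * ctr_aes_crypt() only trylocks it, concurrent requests fall back to
 * single-block steps using walk->iv instead of blocking.
 */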
static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int n, nbytes;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    walk->dst.virt.addr, walk->src.virt.addr,
			    n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    buf, walk->src.virt.addr,
			    AES_BLOCK_SIZE, walk->iv);
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, 0, &walk);
}
static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ctr */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_aes_set_key,
			.encrypt		=	ctr_aes_encrypt,
			.decrypt		=	ctr_aes_decrypt,
		}
	}
};
static struct crypto_alg *aes_s390_algs_ptr[5];
static int aes_s390_algs_num;
static int aes_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
	return ret;
}
static void aes_s390_fini(void)
{
	while (aes_s390_algs_num--)
		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}
static int __init aes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
		ret = aes_s390_register_alg(&aes_alg);
		if (ret)
			goto out_err;
		ret = aes_s390_register_alg(&ecb_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
		ret = aes_s390_register_alg(&cbc_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
		ret = aes_s390_register_alg(&xts_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = aes_s390_register_alg(&ctr_aes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	aes_s390_fini();
	return ret;
}
, aes_s390_init
);
843 module_exit(aes_s390_fini
);
845 MODULE_ALIAS_CRYPTO("aes-all");
847 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
848 MODULE_LICENSE("GPL");