/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
/* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
#define CAAM_MAX_IV_LENGTH		16
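
/*
 * Worked example: the worst-case consumer of ctx->key below is
 * authenc(hmac(sha512),rfc3686(ctr(aes))) - a split key of
 * SHA512_DIGEST_SIZE * 2 = 128 bytes, plus AES_MAX_KEY_SIZE = 32 bytes,
 * plus CTR_RFC3686_NONCE_SIZE = 4 bytes, i.e. 164 bytes in total.
 */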
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)
/* length of descriptors text */
#define DESC_AEAD_BASE			(4 * CAAM_CMD_SZ)
#define DESC_AEAD_ENC_LEN		(DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
#define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 9 * CAAM_CMD_SZ)

/* Note: Nonce is counted in enckeylen */
#define DESC_AEAD_CTR_RFC3686_LEN	(4 * CAAM_CMD_SZ)

#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)

#define DESC_GCM_BASE			(3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN		(DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
#define DESC_GCM_DEC_LEN		(DESC_GCM_BASE + 12 * CAAM_CMD_SZ)

#define DESC_RFC4106_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4106_ENC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
#define DESC_RFC4106_DEC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)

#define DESC_RFC4543_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4543_ENC_LEN		(DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
#define DESC_RFC4543_DEC_LEN		(DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)

#define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_ENC_LEN		(DESC_ABLKCIPHER_BASE + \
					 20 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_DEC_LEN		(DESC_ABLKCIPHER_BASE + \
					 15 * CAAM_CMD_SZ)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
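
/*
 * CAAM_DESC_BYTES_MAX is the 64-word descriptor buffer expressed in bytes
 * (64 * CAAM_CMD_SZ = 256). Reserving DESC_JOB_IO_LEN keeps room for the
 * job descriptor's header, shared descriptor pointer and SEQ IN/OUT PTR
 * commands, so a shared descriptor of up to DESC_MAX_USED_LEN words can
 * always be paired with its job descriptor without overflowing the buffer.
 */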
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	int alg_op;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
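
/*
 * The snippet built above branches at run time on the SHRD condition: on
 * a first (non-shared) run the plain decrypt operation executes and the
 * unconditional jump skips the DK variant; on a shared run the first jump
 * lands directly on the OP_ALG_AAI_DK operation, which tells the AES unit
 * that the class 1 key register already holds an expanded decryption key
 * schedule left behind by the previous job.
 */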
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}
/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
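
/*
 * Copying SEQINLEN into both variable sequence-length registers (the MATH
 * ADD with REG0, which these descriptors never write and rely on reading
 * as zero, acts as a move) makes the VLF FIFO commands above transfer
 * exactly req->nbytes in and out: plain ciphers are length-preserving, so
 * input and output lengths are identical.
 */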
/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	unsigned int enckeylen;
	unsigned int split_key_len;
	unsigned int split_key_pad_len;
	unsigned int authsize;
};
static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
			    int keys_fit_inline, bool is_rfc3686)
{
	u32 *nonce;
	unsigned int enckeylen = ctx->enckeylen;

	/*
	 * RFC3686 specific:
	 *	| ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (keys_fit_inline) {
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key_as_imm(desc, (void *)ctx->key +
				  ctx->split_key_pad_len, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	} else {
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
			   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
			       enckeylen);
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
}
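
/*
 * Layout of ctx->key consumed above:
 *
 *   +---------------------+-------------------+----------------------+
 *   | MDHA split key      | encryption key    | RFC3686 nonce        |
 *   | (split_key_pad_len) | (enckeylen)       | (4 bytes, CTR only)  |
 *   +---------------------+-------------------+----------------------+
 *
 * The class 2 (MDHA) key is taken from offset 0 and the class 1 key from
 * offset split_key_pad_len, either inlined into the descriptor or
 * referenced through ctx->key_dma, depending on keys_fit_inline.
 */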
static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
				  int keys_fit_inline, bool is_rfc3686)
{
	u32 *key_jump_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	set_jump_tgt_here(desc, key_jump_cmd);
}
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
	u32 *desc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	/* aead_decrypt shared descriptor */
	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
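
/*
 * A note on the read_move_cmd/write_move_cmd pairs used above: on SEC
 * revisions that lack the MOVE_LEN opcode, a variable-length move cannot
 * take its length from a MATH register directly, so the descriptor
 * patches itself at run time - it copies part of its own buffer through a
 * MATH register so the computed length can, in effect, be merged into the
 * MOVE command before that command executes. The set_move_tgt_here()
 * calls fix up the source/destination offsets once the final position of
 * the patched command is known, and the decrypt descriptor additionally
 * needs the NOP jump so that at least four commands separate the patching
 * write from the patched location.
 */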
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline;
	u32 geniv, moveiv;
	u32 ctx1_iv_off = 0;
	u32 *desc;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->enckeylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (alg->caam.geniv)
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (alg->caam.geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* ivsize + cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
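
/*
 * The jump targets above give each GCM descriptor three data-dependent
 * paths, selected purely by descriptor-level MATH results:
 * assoclen + cryptlen == 0 skips straight to the ICV write/check,
 * assoclen == 0 skips only the AAD read, and cryptlen == 0 bypasses the
 * payload FIFO commands and reads the AAD as the final (LAST1) data.
 * GHASH over an empty string is well defined, so the ICV commands run in
 * every case.
 */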
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
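
/*
 * In both RFC4106 descriptors above, REG3 holds req->assoclen (written by
 * the job descriptor, see init_aead_job() below), and for rfc4106 that
 * count includes the 8-byte IV trailing the actual AAD. Hence the MATH
 * SUB reads assoclen - 8 bytes as AAD, and the 8-byte SKIP FIFO LOAD
 * drops the IV from the input sequence so it is never hashed as
 * associated data.
 */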
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *read_move_cmd, *write_move_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
			      u32 authkeylen)
{
	return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
			     ctx->split_key_pad_len, key_in, authkeylen,
			     ctx->alg_op);
}
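
/*
 * The "split key" generated here is the concatenated pair of inner/outer
 * HMAC precomputes (the hash states of key ^ ipad and key ^ opad), which
 * is why aead_setkey() below sizes it as twice the digest length and pads
 * it to a 16-byte boundary. It comes back from the CAAM encrypted, so
 * every KEY command that loads it carries the KEY_ENC flag.
 */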
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
				      OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);

	if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->split_key_len, ctx->split_key_pad_len);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->split_key_pad_len + keys.enckeylen, 1);
#endif

	ctx->enckeylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
				 keys.enckeylen, DMA_TO_DEVICE);
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
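
/*
 * Key-setting flow for authenc: split the crypto API blob into
 * {authkey, enckey}, run a one-shot job through the job ring
 * (gen_split_aead_key() above) that leaves the MDHA split key at the
 * start of ctx->key, append the raw encryption key right after the padded
 * split key, and only then DMA-map the combined blob and rebuild the
 * shared descriptors that reference it.
 */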
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;
	u32 *key_jump_cmd;
	u32 *desc;
	u8 *nonce;
	u32 geniv;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX |
		    (crt->ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, crt->ivsize,
			 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return ret;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd, *desc;
	__be64 sector_size = cpu_to_be64(512);

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	return 0;
}
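
/*
 * Both XTS descriptors above hard-code a 512-byte sector size and treat
 * the upper 8 bytes of the 16-byte IV as the sector index: the SEQ LOAD
 * drops those bytes at CONTEXT1 offset 0x20, right before the sector size
 * at offset 0x28, while the lower 8 IV bytes are discarded with a SKIP
 * FIFO LOAD.
 */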
/*
 * aead_edesc - s/w-extended aead descriptor
 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int assoc_nents;
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	print_hex_dump(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}

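/*
 * Sequence pointer lengths produced by init_aead_job() (from the
 * append_seq_*_ptr() calls above):
 *
 *	input:   assoclen + cryptlen
 *	output:  assoclen + cryptlen + authsize	(encrypt)
 *		 assoclen + cryptlen - authsize	(decrypt)
 *
 * i.e. on decrypt the ICV is consumed from the input and checked by
 * hardware rather than being copied to the output.
 */
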
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->enckeylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

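/*
 * The 12-byte immediate FIFO load above covers both flavours: for
 * generic GCM the immediate data is the full 12-byte IV; for the
 * RFC4106/4543 variants it is the 4-byte salt (kept at the end of the
 * key material) followed by the 8-byte per-packet IV. FIFOLD_TYPE_LAST1
 * is or'ed in only for the zero-length case, so the CCB knows that no
 * further class 1 data will follow.
 */
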
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

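/*
 * Resulting CONTEXT1 layout for the counter modes handled above
 * (byte offsets into the class 1 context register):
 *
 *	plain AES-CTR:	ctx1[16..31] = IV		(ivoffset = 16)
 *	RFC3686:	ctx1[16..19] = nonce
 *			ctx1[20..27] = IV		(ivoffset = 20)
 *			ctx1[28..31] = block counter
 */
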
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (!edesc->src_nents && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}

/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (!edesc->src_nents) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0;
	struct aead_edesc *edesc;
	int sgc;
	bool all_contig = true;
	int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
		dst_nents = sg_count(req->dst,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : (-authsize)));
	} else {
		src_nents = sg_count(req->src,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : 0));
	}

	/* Check if data are contiguous. */
	all_contig = !src_nents;
	if (!all_contig) {
		src_nents = src_nents ? : 1;
		sec4_sg_len = src_nents;
	}

	sec4_sg_len += dst_nents;

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = all_contig;

	sec4_sg_index = 0;
	if (!all_contig) {
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += src_nents;
	}
	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

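/*
 * sg_to_sec4_sg_last() converts a kernel scatterlist into the
 * hardware's sec4 link-table format and sets the final-entry flag on
 * the last element; source and destination tables are simply laid out
 * back to back in the same buffer, which is why the code above carries
 * sec4_sg_index across the two conversions.
 */
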
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

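/*
 * For reference, a minimal sketch (illustrative only, not part of the
 * driver) of how gcm_encrypt() above is reached through the generic
 * crypto API. The function and callback names are hypothetical; the
 * scatterlist is assumed to have room for the 16-byte tag after "len"
 * bytes of plaintext, and the error code delivered to the callback is
 * ignored for brevity.
 */
#if 0
static void example_gcm_done(struct crypto_async_request *areq, int err)
{
	if (err == -EINPROGRESS)	/* backlog notification, not done yet */
		return;
	complete(areq->data);
}

static int example_gcm_encrypt(struct scatterlist *sg, unsigned int len,
			       const u8 *key, u8 *iv)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct crypto_aead *tfm;
	struct aead_request *req;
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0); /* may bind "gcm-aes-caam" */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, 16);
	if (!ret)
		ret = crypto_aead_setauthsize(tfm, 16);
	if (ret)
		goto out;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  example_gcm_done, &done);
	aead_request_set_crypt(req, sg, sg, len, iv);	/* in-place */
	aead_request_set_ad(req, 0);			/* no AAD */

	ret = crypto_aead_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&done);
		ret = 0;
	}
	aead_request_free(req);
out:
	crypto_free_aead(tfm);
	return ret;
}
#endif
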
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       req->assoclen + req->cryptlen, 1);
#endif

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (req->dst != req->src)
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
		iv_contig = true;
	else
		src_nents = src_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + 1, 0);
		sec4_sg_index += 1 + src_nents;
	}

	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}

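/*
 * The contiguity test above relies on the IV having just been DMA
 * mapped: if the single source segment starts exactly at
 * iv_dma + ivsize, the hardware can consume "IV || plaintext" as one
 * flat buffer, and the link table (and its extra IV entry) can be
 * skipped entirely.
 */
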
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

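/*
 * Encrypt and decrypt differ only in which shared descriptor is
 * referenced (sh_desc_enc vs. sh_desc_dec) and in the completion
 * callback; the edesc construction and submission path is shared.
 */
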
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (unlikely(req->dst != req->src))
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
		iv_contig = true;
	else
		dst_nents = dst_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (src_nents) {
		sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
		sec4_sg_index += src_nents;
	}

	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
				   iv_dma, ivsize, 0);
		sec4_sg_index += 1;
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}

static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

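/*
 * In the givencrypt case the IV is an output rather than an input:
 * init_ablkcipher_giv_job() points the sequence-out at greq->giv (or
 * at its link-table entry), so the IV produced by the givencrypt
 * shared descriptor lands in the request's giv buffer, followed by the
 * ciphertext.
 */
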
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
};

static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};

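/*
 * CRYPTO_ALG_TYPE_GIVCIPHER entries above provide a built-in IV
 * generator (a givencrypt hook plus .geniv = "<built-in>"), while
 * plain ABLKCIPHER entries leave IV generation to the caller or to a
 * generic geniv template stacked on top.
 */
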
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};

struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	/* copy descriptor header template value */
	ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
	ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;

	return 0;
}
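/*
 * Editor's sketch (assumed example, not driver code): the values composed in
 * caam_init_common() are complete OPERATION command headers, so descriptor
 * construction can append them directly. The helper name and the ENCRYPT
 * direction below are illustrative assumptions.
 */
static inline void __maybe_unused example_append_cipher_op(u32 *desc,
							   struct caam_ctx *ctx)
{
	/* class 1 (cipher) operation; algorithm/AAI bits from the template */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);
}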
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		 container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
	if (ctx->sh_desc_enc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
	if (ctx->sh_desc_dec_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
				 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
	if (ctx->sh_desc_givenc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
				 desc_bytes(ctx->sh_desc_givenc),
				 DMA_TO_DEVICE);
	if (ctx->key_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->key_dma))
		dma_unmap_single(ctx->jrdev, ctx->key_dma,
				 ctx->enckeylen + ctx->split_key_pad_len,
				 DMA_TO_DEVICE);

	caam_jr_free(ctx->jrdev);
}
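/*
 * Editor's sketch (hypothetical helper, not in the driver): the four
 * conditional unmaps above repeat one pattern - unmap only if an address
 * was actually obtained and the mapping did not fail. Factored out, the
 * pattern would look roughly like this:
 */
static void __maybe_unused example_unmap_if_mapped(struct device *jrdev,
						   dma_addr_t dma, size_t len)
{
	if (dma && !dma_mapping_error(jrdev, dma))
		dma_unmap_single(jrdev, dma, len, DMA_TO_DEVICE);
}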
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
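/*
 * Editor's note: the _safe iterator above is mandatory because each pass
 * unlinks and frees the current node; a plain list_for_each_entry() would
 * dereference freed memory when advancing. Minimal sketch of the pattern
 * (the function name is illustrative only):
 */
static void __maybe_unused example_drain_alg_list(struct list_head *head)
{
	struct caam_crypto_alg *t_alg, *n;

	list_for_each_entry_safe(t_alg, n, head, entry) {
		list_del(&t_alg->entry);	/* safe: 'n' was saved first */
		kfree(t_alg);
	}
}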
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;

	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;
	t_alg->caam.alg_op = template->alg_op;

	return t_alg;
}
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
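	/*
	 * Editor's worked example (values assumed for illustration): each
	 * CHA instantiation field is a small bitfield within cha_inst.
	 * With a mask of 0xf0 and a shift of 4, a readback of 0x21 would
	 * give (0x21 & 0xf0) >> 4 == 2, i.e. two instances of that block;
	 * a zero field makes the matching "if (!xxx_inst)" tests below
	 * skip every template that needs it.
	 */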
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
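/*
 * Editor's sketch (illustrative only, not part of the driver): a minimal
 * synchronous round trip through one of the AEADs this module registers,
 * written against the generic kernel crypto API. Key/IV/buffer contents
 * are placeholders, and DECLARE_CRYPTO_WAIT()/crypto_wait_req() are an
 * assumption (they ship with kernels newer than this code); on older
 * kernels a manual completion would be needed instead.
 */
static int __maybe_unused example_gcm_encrypt_once(void)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	static u8 key[16];		/* all-zero AES-128 key (demo only) */
	static u8 iv[12];		/* GCM nonce */
	static u8 buf[16 + 16];		/* 16B plaintext + room for the tag */
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, sizeof(key));
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, sizeof(buf));
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, &sg, &sg, 16, iv);	/* encrypt 16 bytes */
	aead_request_set_ad(req, 0);			/* no associated data */

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}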