summaryrefslogtreecommitdiff
path: root/drivers/crypto/caam
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/crypto/caam')
-rw-r--r--drivers/crypto/caam/caamalg.c81
-rw-r--r--drivers/crypto/caam/caamalg_qi.c58
-rw-r--r--drivers/crypto/caam/ctrl.c5
-rw-r--r--drivers/crypto/caam/desc.h27
-rw-r--r--drivers/crypto/caam/jr.c2
5 files changed, 112 insertions, 61 deletions
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index 17ea75f..c68f3eb 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -75,7 +75,7 @@
#define DESC_AEAD_NULL_DEC_LEN (DESC_AEAD_NULL_BASE + 17 * CAAM_CMD_SZ)
#define DESC_TLS_BASE (4 * CAAM_CMD_SZ)
-#define DESC_TLS10_ENC_LEN (DESC_TLS_BASE + 23 * CAAM_CMD_SZ)
+#define DESC_TLS10_ENC_LEN (DESC_TLS_BASE + 29 * CAAM_CMD_SZ)
#define DESC_GCM_BASE (3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN (DESC_GCM_BASE + 23 * CAAM_CMD_SZ)
@@ -634,7 +634,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
struct device *jrdev = ctx->jrdev;
bool keys_fit_inline = false;
u32 *key_jump_cmd, *zero_payload_jump_cmd, *skip_zero_jump_cmd;
- u32 genpad, clrw, jumpback, stidx;
+ u32 genpad, jumpback, stidx;
u32 *desc;
unsigned int blocksize = crypto_aead_blocksize(aead);
/* Associated data length is always = 13 for TLS */
@@ -659,9 +659,8 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
/* skip key loading if they are loaded due to sharing */
- key_jump_cmd = append_jump(desc, JUMP_CLASS_BOTH | JUMP_JSL |
- JUMP_TEST_ALL | JUMP_COND_SHRD |
- JUMP_COND_SELF);
+ key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+ JUMP_COND_SHRD);
if (keys_fit_inline) {
append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
ctx->split_key_len, CLASS_2 |
@@ -685,11 +684,11 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
/* payloadlen = input data length - (assoclen + ivlen) */
- append_math_sub_imm_u32(desc, VARSEQINLEN, SEQINLEN, IMM, assoclen +
+ append_math_sub_imm_u32(desc, REG0, SEQINLEN, IMM, assoclen +
tfm->ivsize);
/* math1 = payloadlen + icvlen */
- append_math_add_imm_u32(desc, REG1, VARSEQINLEN, IMM, ctx->authsize);
+ append_math_add_imm_u32(desc, REG1, REG0, IMM, ctx->authsize);
/* padlen = block_size - math1 % block_size */
append_math_and_imm_u32(desc, REG3, REG1, IMM, blocksize - 1);
@@ -698,11 +697,26 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
/* cryptlen = payloadlen + icvlen + padlen */
append_math_add(desc, VARSEQOUTLEN, REG1, REG2, 4);
+ /*
+ * update immediate data with the padding length value
+ * for the LOAD in the class 1 data size register.
+ */
+ append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
+ (45 * 4 << MOVE_OFFSET_SHIFT) | 7);
+ append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
+ (45 * 4 << MOVE_OFFSET_SHIFT) | 8);
+
+ /* overwrite PL field for the padding info FIFO entry */
+ append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
+ (47 * 4 << MOVE_OFFSET_SHIFT) | 7);
+ append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
+ (47 * 4 << MOVE_OFFSET_SHIFT) | 8);
+
/* store encrypted payload, icv and padding */
append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
/* if payload length is zero, jump to zero-payload commands */
- append_math_add(desc, NONE, ZERO, VARSEQINLEN, 4);
+ append_math_add(desc, VARSEQINLEN, ZERO, REG0, 4);
zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
JUMP_COND_MATH_Z);
@@ -731,12 +745,15 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_move(desc, MOVE_SRC_CLASS2CTX | MOVE_DEST_CLASS1INFIFO |
ctx->authsize);
+ /* update class 1 data size register with padding length */
+ append_load_imm_u32(desc, 0, LDST_CLASS_1_CCB |
+ LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
+
/* generate padding and send it to encryption */
genpad = NFIFOENTRY_DEST_CLASS1 | NFIFOENTRY_LC1 | NFIFOENTRY_FC1 |
NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_PTYPE_N;
append_load_imm_u32(desc, genpad, LDST_CLASS_IND_CCB |
- LDST_SRCDST_WORD_INFO_FIFO_SZM | LDST_IMM |
- (2 & LDST_LEN_MASK));
+ LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
desc_bytes(desc),
@@ -761,9 +778,8 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
/* skip key loading if they are loaded due to sharing */
- key_jump_cmd = append_jump(desc, JUMP_CLASS_BOTH | JUMP_JSL |
- JUMP_TEST_ALL | JUMP_COND_SHRD |
- JUMP_COND_SELF);
+ key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+ JUMP_COND_SHRD);
append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
KEY_DEST_MDHA_SPLIT | KEY_ENC);
append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
@@ -803,11 +819,9 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO | MOVE_DEST_MATH0 |
blocksize);
- /* clear cha1 specific registers */
- clrw = CLRW_CLR_C1MODE | CLRW_CLR_C1DATAS | CLRW_CLR_C1CTX |
- CLRW_RESET_CLS1_CHA;
- append_load_imm_u32(desc, clrw, LDST_CLASS_IND_CCB |
- LDST_SRCDST_WORD_CLRW | LDST_IMM);
+ /* reset AES CHA */
+ append_load_imm_u32(desc, CCTRL_RESET_CHA_AESA, LDST_CLASS_IND_CCB |
+ LDST_SRCDST_WORD_CHACTRL | LDST_IMM);
/* rewind input sequence */
append_seq_in_ptr_intlen(desc, 0, 65535, SQIN_RTO);
@@ -820,7 +834,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_seq_fifo_load(desc, 8, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG);
/* load Type, Version and Len fields in math0 */
append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_DECO |
- LDST_SRCDST_WORD_DECO_MATH0 | 5);
+ LDST_SRCDST_WORD_DECO_MATH0 | (3 << LDST_OFFSET_SHIFT) | 5);
/* load iv in context1 */
append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_1_CCB |
@@ -838,7 +852,6 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, 4);
/* update Len field */
- append_math_rshift_imm_u64(desc, REG0, REG0, IMM, 24);
append_math_sub(desc, REG0, REG0, REG2, 8);
/* store decrypted payload, icv and padding */
@@ -879,7 +892,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
/* move seqoutptr fields into math registers */
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF | MOVE_DEST_MATH0 |
- (55 * 4 << MOVE_OFFSET_SHIFT) | 20);
+ (54 * 4 << MOVE_OFFSET_SHIFT) | 20);
/* seqinptr will point to seqoutptr */
append_math_and_imm_u32(desc, REG0, REG0, IMM,
~(CMD_SEQ_IN_PTR ^ CMD_SEQ_OUT_PTR));
@@ -888,9 +901,10 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_load_imm_u32(desc, jumpback, LDST_CLASS_DECO | LDST_IMM |
LDST_SRCDST_WORD_DECO_MATH2 |
(4 << LDST_OFFSET_SHIFT));
+ append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 1);
/* move updated seqinptr fields to JD */
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 | MOVE_DEST_DESCBUF |
- (55 * 4 << MOVE_OFFSET_SHIFT) | 24);
+ (54 * 4 << MOVE_OFFSET_SHIFT) | 24);
/* read updated seqinptr */
append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 6);
@@ -930,7 +944,7 @@ static int gcm_set_sh_desc(struct crypto_aead *aead)
struct aead_tfm *tfm = &aead->base.crt_aead;
struct caam_ctx *ctx = crypto_aead_ctx(aead);
struct device *jrdev = ctx->jrdev;
- bool keys_fit_inline = false;
+ bool key_fits_inline = false;
u32 *key_jump_cmd, *zero_payload_jump_cmd,
*zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
u32 *desc;
@@ -945,16 +959,16 @@ static int gcm_set_sh_desc(struct crypto_aead *aead)
*/
if (DESC_GCM_ENC_LEN + DESC_JOB_IO_LEN +
ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
- keys_fit_inline = true;
+ key_fits_inline = true;
desc = ctx->sh_desc_enc;
init_sh_desc(desc, HDR_SHARE_SERIAL);
- /* skip key loading if they are loaded due to sharing */
+ /* skip key loading if it is loaded due to sharing */
key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
- JUMP_COND_SHRD | JUMP_COND_SELF);
- if (keys_fit_inline)
+ JUMP_COND_SHRD);
+ if (key_fits_inline)
append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
else
@@ -1052,17 +1066,16 @@ static int gcm_set_sh_desc(struct crypto_aead *aead)
*/
if (DESC_GCM_DEC_LEN + DESC_JOB_IO_LEN +
ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
- keys_fit_inline = true;
+ key_fits_inline = true;
desc = ctx->sh_desc_dec;
init_sh_desc(desc, HDR_SHARE_SERIAL);
- /* skip key loading if they are loaded due to sharing */
- key_jump_cmd = append_jump(desc, JUMP_JSL |
- JUMP_TEST_ALL | JUMP_COND_SHRD |
- JUMP_COND_SELF);
- if (keys_fit_inline)
+ /* skip key loading if it is loaded due to sharing */
+ key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+ JUMP_COND_SHRD);
+ if (key_fits_inline)
append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
else
@@ -3278,7 +3291,7 @@ static struct caam_alg_template driver_algs[] = {
.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
- .min_era = 4,
+ .min_era = 2,
},
/* Galois Counter Mode */
{
diff --git a/drivers/crypto/caam/caamalg_qi.c b/drivers/crypto/caam/caamalg_qi.c
index 7eb95b9..7e85816 100644
--- a/drivers/crypto/caam/caamalg_qi.c
+++ b/drivers/crypto/caam/caamalg_qi.c
@@ -386,7 +386,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
struct caam_ctx *ctx = crypto_aead_ctx(aead);
bool keys_fit_inline = false;
u32 *key_jump_cmd, *zero_payload_jump_cmd, *skip_zero_jump_cmd;
- u32 genpad, clrw, jumpback, stidx;
+ u32 genpad, jumpback, stidx;
u32 *desc;
unsigned int blocksize = crypto_aead_blocksize(aead);
/* Associated data length is always = 13 for TLS */
@@ -411,9 +411,8 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
/* skip key loading if they are loaded due to sharing */
- key_jump_cmd = append_jump(desc, JUMP_CLASS_BOTH | JUMP_JSL |
- JUMP_TEST_ALL | JUMP_COND_SHRD |
- JUMP_COND_SELF);
+ key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+ JUMP_COND_SHRD);
if (keys_fit_inline) {
append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
ctx->split_key_len, CLASS_2 |
@@ -437,11 +436,11 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
/* payloadlen = input data length - (assoclen + ivlen) */
- append_math_sub_imm_u32(desc, VARSEQINLEN, SEQINLEN, IMM, assoclen +
+ append_math_sub_imm_u32(desc, REG0, SEQINLEN, IMM, assoclen +
tfm->ivsize);
/* math1 = payloadlen + icvlen */
- append_math_add_imm_u32(desc, REG1, VARSEQINLEN, IMM, ctx->authsize);
+ append_math_add_imm_u32(desc, REG1, REG0, IMM, ctx->authsize);
/* padlen = block_size - math1 % block_size */
append_math_and_imm_u32(desc, REG3, REG1, IMM, blocksize - 1);
@@ -450,11 +449,26 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
/* cryptlen = payloadlen + icvlen + padlen */
append_math_add(desc, VARSEQOUTLEN, REG1, REG2, 4);
+ /*
+ * update immediate data with the padding length value
+ * for the LOAD in the class 1 data size register.
+ */
+ append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
+ (45 * 4 << MOVE_OFFSET_SHIFT) | 7);
+ append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
+ (45 * 4 << MOVE_OFFSET_SHIFT) | 8);
+
+ /* overwrite PL field for the padding info FIFO entry */
+ append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
+ (47 * 4 << MOVE_OFFSET_SHIFT) | 7);
+ append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
+ (47 * 4 << MOVE_OFFSET_SHIFT) | 8);
+
/* store encrypted payload, icv and padding */
append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
/* if payload length is zero, jump to zero-payload commands */
- append_math_add(desc, NONE, ZERO, VARSEQINLEN, 4);
+ append_math_add(desc, VARSEQINLEN, ZERO, REG0, 4);
zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
JUMP_COND_MATH_Z);
@@ -483,12 +497,15 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_move(desc, MOVE_SRC_CLASS2CTX | MOVE_DEST_CLASS1INFIFO |
ctx->authsize);
+ /* update class 1 data size register with padding length */
+ append_load_imm_u32(desc, 0, LDST_CLASS_1_CCB |
+ LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
+
/* generate padding and send it to encryption */
genpad = NFIFOENTRY_DEST_CLASS1 | NFIFOENTRY_LC1 | NFIFOENTRY_FC1 |
NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_PTYPE_N;
append_load_imm_u32(desc, genpad, LDST_CLASS_IND_CCB |
- LDST_SRCDST_WORD_INFO_FIFO_SZM | LDST_IMM |
- (2 & LDST_LEN_MASK));
+ LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
#ifdef DEBUG
print_hex_dump(KERN_ERR, "tls enc shdesc@"__stringify(__LINE__)": ",
@@ -506,9 +523,8 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
/* skip key loading if they are loaded due to sharing */
- key_jump_cmd = append_jump(desc, JUMP_CLASS_BOTH | JUMP_JSL |
- JUMP_TEST_ALL | JUMP_COND_SHRD |
- JUMP_COND_SELF);
+ key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+ JUMP_COND_SHRD);
append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
KEY_DEST_MDHA_SPLIT | KEY_ENC);
append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
@@ -548,11 +564,9 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO | MOVE_DEST_MATH0 |
blocksize);
- /* clear cha1 specific registers */
- clrw = CLRW_CLR_C1MODE | CLRW_CLR_C1DATAS | CLRW_CLR_C1CTX |
- CLRW_RESET_CLS1_CHA;
- append_load_imm_u32(desc, clrw, LDST_CLASS_IND_CCB |
- LDST_SRCDST_WORD_CLRW | LDST_IMM);
+ /* reset AES CHA */
+ append_load_imm_u32(desc, CCTRL_RESET_CHA_AESA, LDST_CLASS_IND_CCB |
+ LDST_SRCDST_WORD_CHACTRL | LDST_IMM);
/* rewind input sequence */
append_seq_in_ptr_intlen(desc, 0, 65535, SQIN_RTO);
@@ -565,7 +579,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_seq_fifo_load(desc, 8, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG);
/* load Type, Version and Len fields in math0 */
append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_DECO |
- LDST_SRCDST_WORD_DECO_MATH0 | 5);
+ LDST_SRCDST_WORD_DECO_MATH0 | (3 << LDST_OFFSET_SHIFT) | 5);
/* load iv in context1 */
append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_1_CCB |
@@ -583,7 +597,6 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, 4);
/* update Len field */
- append_math_rshift_imm_u64(desc, REG0, REG0, IMM, 24);
append_math_sub(desc, REG0, REG0, REG2, 8);
/* store decrypted payload, icv and padding */
@@ -624,7 +637,7 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
/* move seqoutptr fields into math registers */
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF | MOVE_DEST_MATH0 |
- (55 * 4 << MOVE_OFFSET_SHIFT) | 20);
+ (54 * 4 << MOVE_OFFSET_SHIFT) | 20);
/* seqinptr will point to seqoutptr */
append_math_and_imm_u32(desc, REG0, REG0, IMM,
~(CMD_SEQ_IN_PTR ^ CMD_SEQ_OUT_PTR));
@@ -633,9 +646,10 @@ static int tls_set_sh_desc(struct crypto_aead *aead)
append_load_imm_u32(desc, jumpback, LDST_CLASS_DECO | LDST_IMM |
LDST_SRCDST_WORD_DECO_MATH2 |
(4 << LDST_OFFSET_SHIFT));
+ append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 1);
/* move updated seqinptr fields to JD */
append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 | MOVE_DEST_DESCBUF |
- (55 * 4 << MOVE_OFFSET_SHIFT) | 24);
+ (54 * 4 << MOVE_OFFSET_SHIFT) | 24);
/* read updated seqinptr */
append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 6);
@@ -2042,7 +2056,7 @@ static struct caam_alg_template driver_algs[] = {
.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
- .min_era = 4,
+ .min_era = 2,
}
};
diff --git a/drivers/crypto/caam/ctrl.c b/drivers/crypto/caam/ctrl.c
index 1099914..17805eb 100644
--- a/drivers/crypto/caam/ctrl.c
+++ b/drivers/crypto/caam/ctrl.c
@@ -5,9 +5,6 @@
* Copyright 2008-2012 Freescale Semiconductor, Inc.
*/
-#include <linux/of_address.h>
-#include <linux/of_irq.h>
-
#include "compat.h"
#include "regs.h"
#include "intern.h"
@@ -477,7 +474,7 @@ static int caam_probe(struct platform_device *pdev)
topregs = (struct caam_full __iomem *)ctrl;
/* Get the IRQ of the controller (for security violations only) */
- ctrlpriv->secvio_irq = irq_of_parse_and_map(nprop, 0);
+ ctrlpriv->secvio_irq = of_irq_to_resource(nprop, 0, NULL);
/*
* Enable DECO watchdogs and, if this is a PHYS_ADDR_T_64BIT kernel,
diff --git a/drivers/crypto/caam/desc.h b/drivers/crypto/caam/desc.h
index 4743d8e..ffb9e99 100644
--- a/drivers/crypto/caam/desc.h
+++ b/drivers/crypto/caam/desc.h
@@ -302,6 +302,33 @@ struct sec4_sg_entry {
#define CLRW_RESET_OFIFO 0x40000000u /* era 3 */
#define CLRW_RESET_IFIFO_DFIFO 0x80000000u /* era 3 */
+/* CHA Control Register bits */
+#define CCTRL_RESET_CHA_ALL 0x1
+#define CCTRL_RESET_CHA_AESA 0x2
+#define CCTRL_RESET_CHA_DESA 0x4
+#define CCTRL_RESET_CHA_AFHA 0x8
+#define CCTRL_RESET_CHA_KFHA 0x10
+#define CCTRL_RESET_CHA_SF8A 0x20
+#define CCTRL_RESET_CHA_PKHA 0x40
+#define CCTRL_RESET_CHA_MDHA 0x80
+#define CCTRL_RESET_CHA_CRCA 0x100
+#define CCTRL_RESET_CHA_RNG 0x200
+#define CCTRL_RESET_CHA_SF9A 0x400
+#define CCTRL_RESET_CHA_ZUCE 0x800
+#define CCTRL_RESET_CHA_ZUCA 0x1000
+#define CCTRL_UNLOAD_PK_A0 0x10000
+#define CCTRL_UNLOAD_PK_A1 0x20000
+#define CCTRL_UNLOAD_PK_A2 0x40000
+#define CCTRL_UNLOAD_PK_A3 0x80000
+#define CCTRL_UNLOAD_PK_B0 0x100000
+#define CCTRL_UNLOAD_PK_B1 0x200000
+#define CCTRL_UNLOAD_PK_B2 0x400000
+#define CCTRL_UNLOAD_PK_B3 0x800000
+#define CCTRL_UNLOAD_PK_N 0x1000000
+#define CCTRL_UNLOAD_PK_A 0x4000000
+#define CCTRL_UNLOAD_PK_B 0x8000000
+#define CCTRL_UNLOAD_SBOX 0x10000000
+
/*
* FIFO_LOAD/FIFO_STORE/SEQ_FIFO_LOAD/SEQ_FIFO_STORE
* Command Constructs
diff --git a/drivers/crypto/caam/jr.c b/drivers/crypto/caam/jr.c
index 4ae0a64..aa1dd5c 100644
--- a/drivers/crypto/caam/jr.c
+++ b/drivers/crypto/caam/jr.c
@@ -637,7 +637,7 @@ static int caam_jr_probe(struct platform_device *pdev)
dma_set_mask(jrdev, DMA_BIT_MASK(32));
/* Identify the interrupt */
- jrpriv->irq = irq_of_parse_and_map(nprop, 0);
+ jrpriv->irq = of_irq_to_resource(nprop, 0, NULL);
/* Now do the platform independent part */
error = caam_jr_init(jrdev); /* now turn on hardware */