author | Joakim Bech <joakim.xx.bech@stericsson.com> | 2011-02-24 09:55:24 +0100 |
---|---|---|
committer | said m bagheri <ebgheri@steludxu2848.(none)> | 2011-06-17 13:41:54 +0200 |
commit | ee890c81d0126109a8771dce22486419e9f623e5 (patch) | |
tree | 6825a2ecf28f74a3c3d0740a54a910ca808fceb0 /drivers | |
parent | 753bc170e86059ab84895fcbc7f89ae331890c31 (diff) |
cryp: Updates according to corrected design spec
- Change names of context registers so they correspond to the names in
the DS.
- Follow the design specification strictly, including the additional
missing steps that we received from the IP developers.
- Remove unused functions from u8500.
- Call the atomic versions of the power enable/disable functions to get
rid of "sleeping while atomic" BUG prints.
- Replace the mutex with a spinlock in the crypto context to get rid of
"sleeping while atomic" BUG prints.
- Replace the completion in interrupt mode with polling on the remaining
data length, to get rid of a "sleeping in invalid context" BUG print
(see the sketch after this list).
- Correct an optimization bug which occurred when building without debug
information (the compiler optimized it incorrectly).
- Update the irq code; fix the interrupt mask handling.
- Correct a bug regarding the key size when doing context save.
- BUG! DES hangs when encrypting data whose length is not a multiple of
16 using DMA. The reason for this is that the CRYP IP only supports a
burst size of 4 words. For this reason, DMA for DES has been replaced
by CPU mode.
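Below is a minimal, hypothetical sketch (not part of the patch itself) of the
two atomic-context fixes described in the list above: guarding the shared
power state with a spinlock instead of a mutex, and polling on the remaining
data length instead of sleeping on a completion. The names my_dev,
my_power_on and my_wait_for_outdata are invented for illustration only.

```c
/*
 * Hypothetical sketch of the atomic-context fixes; names are illustrative.
 * spin_lock_init() is assumed to have been called at probe time.
 */
#include <linux/spinlock.h>
#include <asm/processor.h>	/* cpu_relax() */

struct my_dev {
	spinlock_t power_state_spinlock;	/* may be taken in atomic context */
	bool power_state;
	int outlen;				/* decremented by the IRQ handler */
};

static void my_power_on(struct my_dev *dev)
{
	/*
	 * A mutex here can sleep and would trigger the "sleeping while
	 * atomic" BUG when called from a non-sleepable context; a plain
	 * spinlock never sleeps.
	 */
	spin_lock(&dev->power_state_spinlock);
	if (!dev->power_state)
		dev->power_state = true;	/* e.g. enable clock/regulator here */
	spin_unlock(&dev->power_state_spinlock);
}

static void my_wait_for_outdata(struct my_dev *dev)
{
	/*
	 * Busy-wait instead of wait_for_completion(): the interrupt handler
	 * counts outlen down to zero, and cpu_relax() acts as a compiler
	 * barrier so outlen is re-read on every iteration.
	 */
	while (dev->outlen > 0)
		cpu_relax();
}
```

The trade-off is that polling burns CPU cycles while waiting, but unlike
wait_for_completion() it never sleeps, so it is safe in atomic context.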
ST-Ericsson ID: 283399, 340779
ST-Ericsson Linux next: Not tested, ER 320876
ST-Ericsson FOSS-OUT ID: Trivial
Change-Id: I23dbc123dd2fb7e47f5713025ed71423efbb5b18
Signed-off-by: Joakim Bech <joakim.xx.bech@stericsson.com>
Reviewed-on: http://gerrit.lud.stericsson.com/gerrit/23297
Reviewed-by: QATEST
Reviewed-by: Berne HEBARK <berne.hebark@stericsson.com>
Diffstat (limited to 'drivers')
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp.c | 392 |
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp.h | 54 |
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp_core.c | 322 |
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp_irq.c | 4 |
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp_irqp.h | 2 |
-rw-r--r-- | drivers/crypto/ux500/cryp/cryp_p.h | 64 |
6 files changed, 337 insertions, 501 deletions
diff --git a/drivers/crypto/ux500/cryp/cryp.c b/drivers/crypto/ux500/cryp/cryp.c index 94928f7efce..ed9eeccf1d3 100644 --- a/drivers/crypto/ux500/cryp/cryp.c +++ b/drivers/crypto/ux500/cryp/cryp.c @@ -53,40 +53,6 @@ int cryp_check(struct cryp_device_data *device_data) } /** - * cryp_reset - This routine loads the cryp register with the default values - * @device_data: Pointer to the device data struct for base address. - */ -void cryp_reset(struct cryp_device_data *device_data) -{ - writel(CRYP_DMACR_DEFAULT, &device_data->base->dmacr); - writel(CRYP_IMSC_DEFAULT, &device_data->base->imsc); - - writel(CRYP_KEY_DEFAULT, &device_data->base->key_1_l); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_1_r); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_2_l); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_2_r); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_3_l); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_3_r); - writel(CRYP_INIT_VECT_DEFAULT, &device_data->base->init_vect_0_l); - writel(CRYP_INIT_VECT_DEFAULT, &device_data->base->init_vect_0_r); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_4_l); - writel(CRYP_KEY_DEFAULT, &device_data->base->key_4_r); - writel(CRYP_INIT_VECT_DEFAULT, &device_data->base->init_vect_1_l); - writel(CRYP_INIT_VECT_DEFAULT, &device_data->base->init_vect_1_r); - - /* Last step since the protection mode bits need to be modified. */ - writel(CRYP_CR_DEFAULT | CRYP_CR_FFLUSH, &device_data->base->cr); - - /* - * CRYP_INFIFO_READY_MASK is the expected value on the status register - * when starting a new calculation, which means Input FIFO is not full - * and input FIFO is empty. - */ - while (readl(&device_data->base->status) != CRYP_INFIFO_READY_MASK) - cpu_relax(); -} - -/** * cryp_activity - This routine enables/disable the cryptography function. * @device_data: Pointer to the device data struct for base address. * @cryp_activity: Enable/Disable functionality @@ -96,49 +62,8 @@ void cryp_activity(struct cryp_device_data *device_data, { CRYP_PUT_BITS(&device_data->base->cr, cryp_crypen, - CRYP_CRYPEN_POS, - CRYP_CRYPEN_MASK); -} - -/** - * cryp_start - starts the computation - * @device_data: Pointer to the device data struct for base address. - * @cryp_start: Enable/Disable functionality - */ -void cryp_start(struct cryp_device_data *device_data) -{ - CRYP_PUT_BITS(&device_data->base->cr, - CRYP_START_ENABLE, - CRYP_START_POS, - CRYP_START_MASK); -} - -/** - * cryp_init_signal - This routine submit the initialization values. - * @device_data: Pointer to the device data struct for base address. - * @cryp_init_bit: Enable/Disable init signal - */ -void cryp_init_signal(struct cryp_device_data *device_data, - enum cryp_init cryp_init_bit) -{ - CRYP_PUT_BITS(&device_data->base->cr, - cryp_init_bit, - CRYP_INIT_POS, - CRYP_INIT_MASK); -} - -/** - * cryp_key_preparation - This routine prepares key for decryption. - * @device_data: Pointer to the device data struct for base address. 
- * @cryp_prepkey: Enable/Disable - */ -void cryp_key_preparation(struct cryp_device_data *device_data, - enum cryp_key_prep cryp_prepkey) -{ - CRYP_PUT_BITS(&device_data->base->cr, - cryp_prepkey, - CRYP_KSE_POS, - CRYP_KSE_MASK); + CRYP_CR_CRYPEN_POS, + CRYP_CR_CRYPEN_MASK); } /** @@ -147,43 +72,23 @@ void cryp_key_preparation(struct cryp_device_data *device_data, */ void cryp_flush_inoutfifo(struct cryp_device_data *device_data) { - CRYP_SET_BITS(&device_data->base->cr, CRYP_FIFO_FLUSH_MASK); -} - -/** - * cryp_set_dir - - * @device_data: Pointer to the device data struct for base address. - * @dir: Crypto direction, encrypt/decrypt - */ -void cryp_set_dir(struct cryp_device_data *device_data, int dir) -{ - CRYP_PUT_BITS(&device_data->base->cr, - dir, - CRYP_ENC_DEC_POS, - CRYP_ENC_DEC_MASK); - - CRYP_PUT_BITS(&device_data->base->cr, - CRYP_DATA_TYPE_8BIT_SWAP, - CRYP_DATA_TYPE_POS, - CRYP_DATA_TYPE_MASK); -} + /* + * We always need to disble the hardware before trying to flush the + * FIFO. This is something that isn't written in the design + * specification, but we have been informed by the hardware designers + * that this must be done. + */ + cryp_activity(device_data, CRYP_CRYPEN_DISABLE); + cryp_wait_until_done(device_data); -/** - * cryp_cen_flush - - * @device_data: Pointer to the device data struct for base address. - */ -void cryp_cen_flush(struct cryp_device_data *device_data) -{ - CRYP_PUT_BITS(&device_data->base->cr, - CRYP_STATE_DISABLE, - CRYP_KEY_ACCESS_POS, - CRYP_KEY_ACCESS_MASK); - CRYP_SET_BITS(&device_data->base->cr, - CRYP_FIFO_FLUSH_MASK); - CRYP_PUT_BITS(&device_data->base->cr, - CRYP_CRYPEN_ENABLE, - CRYP_CRYPEN_POS, - CRYP_CRYPEN_MASK); + CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK); + /* + * CRYP_SR_INFIFO_READY_MASK is the expected value on the status + * register when starting a new calculation, which means Input FIFO is + * not full and input FIFO is empty. + */ + while (readl(&device_data->base->sr) != CRYP_SR_INFIFO_READY_MASK) + cpu_relax(); } /** @@ -194,96 +99,68 @@ void cryp_cen_flush(struct cryp_device_data *device_data) int cryp_set_configuration(struct cryp_device_data *device_data, struct cryp_config *p_cryp_config) { - if (NULL == device_data) - return -EINVAL; - if (NULL == p_cryp_config) + if (NULL == device_data || NULL == p_cryp_config) return -EINVAL; - /* Since more than one bit is written macro put_bits is used*/ CRYP_PUT_BITS(&device_data->base->cr, - p_cryp_config->key_access, - CRYP_KEY_ACCESS_POS, - CRYP_KEY_ACCESS_MASK); - CRYP_PUT_BITS(&device_data->base->cr, - p_cryp_config->key_size, - CRYP_KEY_SIZE_POS, - CRYP_KEY_SIZE_MASK); - CRYP_PUT_BITS(&device_data->base->cr, - p_cryp_config->data_type, - CRYP_DATA_TYPE_POS, - CRYP_DATA_TYPE_MASK); - - /* Prepare key for decryption */ - if ((CRYP_ALGORITHM_DECRYPT == p_cryp_config->encrypt_or_decrypt) && - ((CRYP_ALGO_AES_ECB == p_cryp_config->algo_mode) || - (CRYP_ALGO_AES_CBC == p_cryp_config->algo_mode))) { + p_cryp_config->keysize, + CRYP_CR_KEYSIZE_POS, + CRYP_CR_KEYSIZE_MASK); + + /* Prepare key for decryption in AES_ECB and AES_CBC mode. */ + if ((CRYP_ALGORITHM_DECRYPT == p_cryp_config->algodir) && + ((CRYP_ALGO_AES_ECB == p_cryp_config->algomode) || + (CRYP_ALGO_AES_CBC == p_cryp_config->algomode))) { + /* + * This seems a bit odd, but it is indeed needed to set this to + * encrypt even though it is a decryption that we are doing. It + * also mentioned in the design spec that you need to do this. 
+ * After the keyprepartion for decrypting is done you should set + * algodir back to decryption, which is done outside this if + * statement. + */ + CRYP_PUT_BITS(&device_data->base->cr, + CRYP_ALGORITHM_ENCRYPT, + CRYP_CR_ALGODIR_POS, + CRYP_CR_ALGODIR_MASK); + + /* + * According to design specification we should set mode ECB + * during key preparation even though we might be running CBC + * when enter this function. + */ CRYP_PUT_BITS(&device_data->base->cr, CRYP_ALGO_AES_ECB, - CRYP_ALGOMODE_POS, - CRYP_ALGOMODE_MASK); + CRYP_CR_ALGOMODE_POS, + CRYP_CR_ALGOMODE_MASK); + CRYP_PUT_BITS(&device_data->base->cr, CRYP_CRYPEN_ENABLE, - CRYP_CRYPEN_POS, - CRYP_CRYPEN_MASK); + CRYP_CR_CRYPEN_POS, + CRYP_CR_CRYPEN_MASK); + + /* + * Writing to KSE_ENABLED will drop CRYPEN when key preparation + * is done. Therefore we need to set CRYPEN again outside this + * if statement when running decryption. + */ CRYP_PUT_BITS(&device_data->base->cr, KSE_ENABLED, - CRYP_KSE_POS, - CRYP_KSE_MASK); + CRYP_CR_KSE_POS, + CRYP_CR_KSE_MASK); cryp_wait_until_done(device_data); - - CRYP_PUT_BITS(&device_data->base->cr, - CRYP_CRYPEN_DISABLE, - CRYP_CRYPEN_POS, - CRYP_CRYPEN_MASK); } CRYP_PUT_BITS(&device_data->base->cr, - CRYP_CRYPEN_ENABLE, - CRYP_CRYPEN_POS, - CRYP_CRYPEN_MASK); - CRYP_PUT_BITS(&device_data->base->cr, - p_cryp_config->algo_mode, - CRYP_ALGOMODE_POS, - CRYP_ALGOMODE_MASK); - CRYP_PUT_BITS(&device_data->base->cr, - p_cryp_config->encrypt_or_decrypt, - CRYP_ENC_DEC_POS, - CRYP_ENC_DEC_MASK); - - return 0; -} - -/** - * cryp_get_configuration - gets the parameter of the control register of IP - * @device_data: Pointer to the device data struct for base address. - * @p_cryp_config: Gets the configuration parameter from cryp ip. - */ -int cryp_get_configuration(struct cryp_device_data *device_data, - struct cryp_config *p_cryp_config) -{ - if (NULL == p_cryp_config) - return -EINVAL; + p_cryp_config->algomode, + CRYP_CR_ALGOMODE_POS, + CRYP_CR_ALGOMODE_MASK); - p_cryp_config->key_access = - ((readl(&device_data->base->cr) & CRYP_KEY_ACCESS_MASK) ? - CRYP_STATE_ENABLE : - CRYP_STATE_DISABLE); - p_cryp_config->key_size = - ((readl(&device_data->base->cr) & CRYP_KEY_SIZE_MASK) >> - CRYP_KEY_SIZE_POS); - - p_cryp_config->encrypt_or_decrypt = - ((readl(&device_data->base->cr) & CRYP_ENC_DEC_MASK) ? - CRYP_ALGORITHM_DECRYPT : - CRYP_ALGORITHM_ENCRYPT); - - p_cryp_config->data_type = - ((readl(&device_data->base->cr) & CRYP_DATA_TYPE_MASK) >> - CRYP_DATA_TYPE_POS); - p_cryp_config->algo_mode = - ((readl(&device_data->base->cr) & CRYP_ALGOMODE_MASK) >> - CRYP_ALGOMODE_POS); + CRYP_PUT_BITS(&device_data->base->cr, + p_cryp_config->algodir, + CRYP_CR_ALGODIR_POS, + CRYP_CR_ALGODIR_MASK); return 0; } @@ -302,11 +179,11 @@ int cryp_configure_protection(struct cryp_device_data *device_data, CRYP_WRITE_BIT(&device_data->base->cr, (u32) p_protect_config->secure_access, - CRYP_SECURE_MASK); + CRYP_CR_SECURE_MASK); CRYP_PUT_BITS(&device_data->base->cr, p_protect_config->privilege_access, - CRYP_PRLG_POS, - CRYP_PRLG_MASK); + CRYP_CR_PRLG_POS, + CRYP_CR_PRLG_MASK); return 0; } @@ -317,20 +194,9 @@ int cryp_configure_protection(struct cryp_device_data *device_data, */ int cryp_is_logic_busy(struct cryp_device_data *device_data) { - return CRYP_TEST_BITS(&device_data->base->status, - CRYP_BUSY_STATUS_MASK); -} - -/** - * cryp_get_status - This routine returns the complete status of the cryp logic - * @device_data: Pointer to the device data struct for base address. 
- */ -/* -int cryp_get_status(struct cryp_device_data *device_data) -{ - return (int) readl(device_data->base->status); + return CRYP_TEST_BITS(&device_data->base->sr, + CRYP_SR_BUSY_MASK); } -*/ /** * cryp_configure_for_dma - configures the CRYP IP for DMA operation @@ -426,17 +292,6 @@ int cryp_configure_init_vector(struct cryp_device_data *device_data, } /** - * cryp_prep_ctx_mgmt - Prepares for handling the context of the block - * @device_data: Pointer to the device data struct for base address. - */ -static void cryp_prep_ctx_mgmt(struct cryp_device_data *device_data) -{ - cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH); - cryp_activity(device_data, CRYP_CRYPEN_DISABLE); - cryp_wait_until_done(device_data); -} - -/** * cryp_save_device_context - Store hardware registers and * other device context parameter * @device_data: Pointer to the device data struct for base address. @@ -447,15 +302,21 @@ void cryp_save_device_context(struct cryp_device_data *device_data, { struct cryp_register *src_reg = device_data->base; - cryp_prep_ctx_mgmt(device_data); + /* + * Always start by disable the hardware and wait for it to finish the + * ongoing calculations before trying to reprogram it. + */ + cryp_activity(device_data, CRYP_CRYPEN_DISABLE); + cryp_wait_until_done(device_data); + cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH); - ctx->din = readl(&src_reg->din); + if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0) + ctx->din = readl(&src_reg->din); - ctx->dout = readl(&src_reg->dout); + ctx->cr = readl(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK; - ctx->cr = readl(&src_reg->cr); - ctx->dmacr = readl(&src_reg->dmacr); - ctx->imsc = readl(&src_reg->imsc); + CRYP_PUT_BITS(&src_reg->cr, 1, CRYP_CR_KEYRDEN_POS, + CRYP_CR_KEYRDEN_MASK); ctx->key_1_l = readl(&src_reg->key_1_l); ctx->key_1_r = readl(&src_reg->key_1_r); @@ -466,10 +327,21 @@ void cryp_save_device_context(struct cryp_device_data *device_data, ctx->key_4_l = readl(&src_reg->key_4_l); ctx->key_4_r = readl(&src_reg->key_4_r); - ctx->init_vect_0_l = readl(&src_reg->init_vect_0_l); - ctx->init_vect_0_r = readl(&src_reg->init_vect_0_r); - ctx->init_vect_1_l = readl(&src_reg->init_vect_1_l); - ctx->init_vect_1_r = readl(&src_reg->init_vect_1_r); + CRYP_PUT_BITS(&src_reg->cr, 0, CRYP_CR_KEYRDEN_POS, + CRYP_CR_KEYRDEN_MASK); + + /* Save IV for CBC mode for both AES and DES. */ + if (CRYP_TEST_BITS(&src_reg->cr, CRYP_CR_ALGOMODE_POS) == + CRYP_ALGO_TDES_CBC || + CRYP_TEST_BITS(&src_reg->cr, CRYP_CR_ALGOMODE_POS) == + CRYP_ALGO_DES_CBC || + CRYP_TEST_BITS(&src_reg->cr, CRYP_CR_ALGOMODE_POS) == + CRYP_ALGO_AES_CBC) { + ctx->init_vect_0_l = readl(&src_reg->init_vect_0_l); + ctx->init_vect_0_r = readl(&src_reg->init_vect_0_r); + ctx->init_vect_1_l = readl(&src_reg->init_vect_1_l); + ctx->init_vect_1_r = readl(&src_reg->init_vect_1_r); + } } /** @@ -482,26 +354,44 @@ void cryp_restore_device_context(struct cryp_device_data *device_data, struct cryp_device_context *ctx) { struct cryp_register *reg = device_data->base; + struct cryp_config *config = + (struct cryp_config *)device_data->current_ctx; + - cryp_prep_ctx_mgmt(device_data); + /* + * Fall through for all items in switch statement. DES is captured in + * the default. 
+ */ + switch (config->keysize) { + case CRYP_KEY_SIZE_256: + writel(ctx->key_4_l, ®->key_4_l); + writel(ctx->key_4_r, ®->key_4_r); + + case CRYP_KEY_SIZE_192: + writel(ctx->key_3_l, ®->key_3_l); + writel(ctx->key_3_r, ®->key_3_r); + + case CRYP_KEY_SIZE_128: + writel(ctx->key_2_l, ®->key_2_l); + writel(ctx->key_2_r, ®->key_2_r); + + default: + writel(ctx->key_1_l, ®->key_1_l); + writel(ctx->key_1_r, ®->key_1_r); + } + + /* Restore IV for CBC mode for AES and DES. */ + if (config->algomode == CRYP_ALGO_TDES_CBC || + config->algomode == CRYP_ALGO_DES_CBC || + config->algomode == CRYP_ALGO_AES_CBC) { + writel(ctx->init_vect_0_l, ®->init_vect_0_l); + writel(ctx->init_vect_0_r, ®->init_vect_0_r); + writel(ctx->init_vect_1_l, ®->init_vect_1_l); + writel(ctx->init_vect_1_r, ®->init_vect_1_r); + } - writel(ctx->din, ®->din); - writel(ctx->dout, ®->dout); writel(ctx->cr, ®->cr); - writel(ctx->dmacr, ®->dmacr); - writel(ctx->imsc, ®->imsc); - writel(ctx->key_1_l, ®->key_1_l); - writel(ctx->key_1_r, ®->key_1_r); - writel(ctx->key_2_l, ®->key_2_l); - writel(ctx->key_2_r, ®->key_2_r); - writel(ctx->key_3_l, ®->key_3_l); - writel(ctx->key_3_r, ®->key_3_r); - writel(ctx->key_4_l, ®->key_4_l); - writel(ctx->key_4_r, ®->key_4_r); - writel(ctx->init_vect_0_l, ®->init_vect_0_l); - writel(ctx->init_vect_0_r, ®->init_vect_0_r); - writel(ctx->init_vect_1_l, ®->init_vect_1_l); - writel(ctx->init_vect_1_r, ®->init_vect_1_r); + cryp_activity(device_data, CRYP_CRYPEN_ENABLE); } /** @@ -520,24 +410,6 @@ int cryp_write_indata(struct cryp_device_data *device_data, u32 write_data) } /** - * cryp_read_indata - This routine reads the 32 bit data from the data input - * register into the specified location. - * @device_data: Pointer to the device data struct for base address. - * @p_read_data: Read the data from the input FIFO. - */ -int cryp_read_indata(struct cryp_device_data *device_data, u32 *p_read_data) -{ - if (NULL == device_data) - return -EINVAL; - if (NULL == p_read_data) - return -EINVAL; - - *p_read_data = readl(&device_data->base->din); - - return 0; -} - -/** * cryp_read_outdata - This routine reads the data from the data output * register of the CRYP logic * @device_data: Pointer to the device data struct for base address. 
diff --git a/drivers/crypto/ux500/cryp/cryp.h b/drivers/crypto/ux500/cryp/cryp.h index 2d98923071c..ee7aee3dcb1 100644 --- a/drivers/crypto/ux500/cryp/cryp.h +++ b/drivers/crypto/ux500/cryp/cryp.h @@ -16,9 +16,6 @@ #include <linux/klist.h> #include <linux/mutex.h> -/* Module Defines */ -#define CRYP_MODULE_NAME "CRYP HCL Module" - #define DEV_DBG_NAME "crypX crypX:" /* CRYP enable/disable */ @@ -47,11 +44,11 @@ enum cryp_state { /* Key preparation bit enable */ enum cryp_key_prep { - KSE_DISABLED, - KSE_ENABLED + KSE_DISABLED = 0, + KSE_ENABLED = 1 }; -/* Key size for AES*/ +/* Key size for AES */ #define CRYP_KEY_SIZE_128 (0) #define CRYP_KEY_SIZE_192 (1) #define CRYP_KEY_SIZE_256 (2) @@ -89,20 +86,20 @@ enum cryp_mode { /** * struct cryp_config - - * @key_access: Cryp state enable/disable - * @key_size: Key size for AES - * @data_type: Data type Swap - * @algo_mode: AES modes - * @encrypt_or_decrypt: Cryp Encryption or Decryption + * @keyrden: Cryp state enable/disable + * @keysize: Key size for AES + * @datatype: Data type Swap + * @algomode: AES modes + * @algodir: Cryp Encryption or Decryption * * CRYP configuration structure to be passed to set configuration */ struct cryp_config { - enum cryp_state key_access; - int key_size; - int data_type; - enum cryp_algo_mode algo_mode; - enum cryp_algorithm_dir encrypt_or_decrypt; + enum cryp_state keyrden; + int keysize; + int datatype; + enum cryp_algo_mode algomode; + enum cryp_algorithm_dir algodir; }; /** @@ -232,7 +229,6 @@ struct cryp_dma { * struct cryp_device_data - structure for a cryp device. * @base: Pointer to the hardware base address. * @dev: Pointer to the devices dev structure. - * @cryp_irq_complete: Pointer to an interrupt completion structure. * @clk: Pointer to the device's clock control. * @pwr_regulator: Pointer to the device's power control. * @power_status: Current status of the power. @@ -241,22 +237,21 @@ struct cryp_dma { * @list_node: For inclusion into a klist. * @dma: The dma structure holding channel configuration. * @power_state: TRUE = power state on, FALSE = power state off. - * @power_state_mutex: Mutex for power_state. + * @power_state_spinlock: Spinlock for power_state. * @restore_dev_ctx: TRUE = saved ctx, FALSE = no saved ctx. 
*/ struct cryp_device_data { struct cryp_register __iomem *base; struct device *dev; - struct completion cryp_irq_complete; struct clk *clk; - struct regulator *pwr_regulator; + struct ux500_regulator *pwr_regulator; int power_status; struct spinlock ctx_lock; struct cryp_ctx *current_ctx; struct klist_node list_node; struct cryp_dma dma; bool power_state; - struct mutex power_state_mutex; + struct spinlock power_state_spinlock; bool restore_dev_ctx; }; @@ -266,31 +261,14 @@ void cryp_wait_until_done(struct cryp_device_data *device_data); int cryp_check(struct cryp_device_data *device_data); -void cryp_reset(struct cryp_device_data *device_data); - void cryp_activity(struct cryp_device_data *device_data, enum cryp_crypen cryp_crypen); -void cryp_start(struct cryp_device_data *device_data); - -void cryp_init_signal(struct cryp_device_data *device_data, - enum cryp_init cryp_init); - -void cryp_key_preparation(struct cryp_device_data *device_data, - enum cryp_key_prep cryp_key_prep); - void cryp_flush_inoutfifo(struct cryp_device_data *device_data); -void cryp_cen_flush(struct cryp_device_data *device_data); - -void cryp_set_dir(struct cryp_device_data *device_data, int dir); - int cryp_set_configuration(struct cryp_device_data *device_data, struct cryp_config *p_cryp_config); -int cryp_get_configuration(struct cryp_device_data *device_data, - struct cryp_config *p_cryp_config); - void cryp_configure_for_dma(struct cryp_device_data *device_data, enum cryp_dma_req_type dma_req); diff --git a/drivers/crypto/ux500/cryp/cryp_core.c b/drivers/crypto/ux500/cryp/cryp_core.c index 197bb416067..f67577c386e 100644 --- a/drivers/crypto/ux500/cryp/cryp_core.c +++ b/drivers/crypto/ux500/cryp/cryp_core.c @@ -20,7 +20,7 @@ #include <linux/klist.h> #include <linux/module.h> #include <linux/platform_device.h> -#include <linux/regulator/consumer.h> +#include <mach/regulator.h> #include <linux/semaphore.h> #include <crypto/aes.h> @@ -43,15 +43,13 @@ static int cryp_mode; -static DEFINE_KLIST(cryp_device_list, NULL, NULL); - static struct stedma40_chan_cfg *mem_to_engine; static struct stedma40_chan_cfg *engine_to_mem; /** * struct cryp_driver_data - data specific to the driver. * - * @cryp_device_list: A list of registered devices to choose from. + * @device_list: A list of registered devices to choose from. * @device_allocation: A semaphore initialized with number of devices. */ struct cryp_driver_data { @@ -104,34 +102,6 @@ static inline u32 uint8p_to_uint32_be(u8 *in) } /** - * uint8p_to_uint32_le - 4*uint8 to uint32 little endian - * @in: Data to convert. 
- */ -static inline u32 uint8p_to_uint32_le(u8 *in) -{ - return (u32)in[3]<<24 | - ((u32)in[2]<<16) | - ((u32)in[1]<<8) | - ((u32)in[0]); -} - -static inline void uint32_to_uint8p_be(u32 in, u8 *out) -{ - out[0] = (u8)(in>>24); - out[1] = (u8)(in>>16); - out[2] = (u8)(in>>8); - out[3] = (u8) in; -} - -static inline void uint32_to_uint8p_le(u32 in, u8 *out) -{ - out[3] = (u8)(in>>24); - out[2] = (u8)(in>>16); - out[1] = (u8)(in>>8); - out[0] = (u8) in; -} - -/** * swap_bits_in_byte - mirror the bits in a byte * @b: the byte to be mirrored * @@ -206,13 +176,6 @@ static inline void swap_words_in_key_and_bits_in_byte(const u8 *in, } } -static inline void swap_4bits_in_bytes(const u8 *in, u8 *out, u32 len) -{ - unsigned int i; - for (i = 0; i < len; i++) - out[i] = swap_bits_in_byte(in[i]); -} - static irqreturn_t cryp_interrupt_handler(int irq, void *param) { struct cryp_ctx *ctx; @@ -224,6 +187,7 @@ static irqreturn_t cryp_interrupt_handler(int irq, void *param) return IRQ_HANDLED; } + /* The device is coming from the one found in hw_crypt_noxts. */ device_data = (struct cryp_device_data *)param; ctx = device_data->current_ctx; @@ -233,6 +197,10 @@ static irqreturn_t cryp_interrupt_handler(int irq, void *param) return IRQ_HANDLED; } + dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen, + cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ? + "out" : "in"); + if (cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO)) { if (ctx->outlen / ctx->blocksize > 0) { @@ -246,7 +214,6 @@ static irqreturn_t cryp_interrupt_handler(int irq, void *param) if (ctx->outlen == 0) { cryp_disable_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO); - complete(&ctx->device->cryp_irq_complete); } } } else if (cryp_pending_irq_src(device_data, @@ -263,8 +230,12 @@ static irqreturn_t cryp_interrupt_handler(int irq, void *param) cryp_disable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO); - if (ctx->config.algo_mode == CRYP_ALGO_AES_XTS) { - cryp_start(device_data); + if (ctx->config.algomode == CRYP_ALGO_AES_XTS) { + CRYP_PUT_BITS(&device_data->base->cr, + CRYP_START_ENABLE, + CRYP_CR_START_POS, + CRYP_CR_START_MASK); + cryp_wait_until_done(device_data); } } @@ -360,7 +331,7 @@ static int cfg_keys(struct cryp_ctx *ctx) dev_dbg(ctx->device->dev, "[%s]", __func__); - if (mode_is_aes(ctx->config.algo_mode)) { + if (mode_is_aes(ctx->config.algomode)) { swap_words_in_key_and_bits_in_byte((u8 *)ctx->key, (u8 *)swapped_key, ctx->keylen); @@ -387,11 +358,29 @@ static int cfg_keys(struct cryp_ctx *ctx) static int cryp_setup_context(struct cryp_ctx *ctx, struct cryp_device_data *device_data) { + cryp_flush_inoutfifo(device_data); + + CRYP_PUT_BITS(&device_data->base->cr, + ctx->config.datatype, + CRYP_CR_DATATYPE_POS, + CRYP_CR_DATATYPE_MASK); + + switch (cryp_mode) { + case CRYP_MODE_INTERRUPT: + writel(CRYP_IMSC_DEFAULT, &device_data->base->imsc); + break; + + case CRYP_MODE_DMA: + writel(CRYP_DMACR_DEFAULT, &device_data->base->dmacr); + break; + + default: + break; + } + if (ctx->updated) cryp_restore_device_context(device_data, &ctx->dev_ctx); else { - cryp_activity(device_data, CRYP_CRYPEN_DISABLE); - if (cfg_keys(ctx) != 0) { dev_err(ctx->device->dev, "[%s]: cfg_keys failed!", __func__); @@ -399,9 +388,9 @@ static int cryp_setup_context(struct cryp_ctx *ctx, } if ((ctx->iv) && - (CRYP_ALGO_AES_ECB != ctx->config.algo_mode) && - (CRYP_ALGO_DES_ECB != ctx->config.algo_mode) && - (CRYP_ALGO_TDES_ECB != ctx->config.algo_mode)) { + (CRYP_ALGO_AES_ECB != ctx->config.algomode) && + 
(CRYP_ALGO_DES_ECB != ctx->config.algomode) && + (CRYP_ALGO_TDES_ECB != ctx->config.algomode)) { if (cfg_ivs(device_data, ctx) != 0) return -EPERM; } @@ -409,10 +398,11 @@ static int cryp_setup_context(struct cryp_ctx *ctx, cryp_set_configuration(device_data, &ctx->config); } + cryp_activity(device_data, CRYP_CRYPEN_ENABLE); + return 0; } - static int cryp_get_device_data(struct cryp_ctx *ctx, struct cryp_device_data **device_data) { @@ -505,6 +495,12 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx, dev_dbg(ctx->device->dev, "[%s]: ", __func__); + if (unlikely(!IS_ALIGNED((u32)sg, 4))) { + dev_err(ctx->device->dev, "[%s]: Data in sg list isn't " + "aligned! Addr: 0x%08x", __func__, (u32)sg); + return -EFAULT; + } + switch (direction) { case DMA_TO_DEVICE: channel = ctx->device->dma.chan_mem2cryp; @@ -534,7 +530,6 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx, case DMA_FROM_DEVICE: channel = ctx->device->dma.chan_cryp2mem; ctx->device->dma.sg_dst = sg; - ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev, ctx->device->dma.sg_dst, ctx->device->dma.nents_dst, @@ -618,7 +613,7 @@ static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len) } static int cryp_polling_mode(struct cryp_ctx *ctx, - struct cryp_device_data *device_data) + struct cryp_device_data *device_data) { int i; int ret = 0; @@ -650,16 +645,15 @@ out: return ret; } -static int cryp_disable_power( - struct device *dev, - struct cryp_device_data *device_data, - bool save_device_context) +static int cryp_disable_power(struct device *dev, + struct cryp_device_data *device_data, + bool save_device_context) { int ret = 0; dev_dbg(dev, "[%s]", __func__); - mutex_lock(&device_data->power_state_mutex); + spin_lock(&device_data->power_state_spinlock); if (!device_data->power_state) goto out; @@ -672,7 +666,7 @@ static int cryp_disable_power( spin_unlock(&device_data->ctx_lock); clk_disable(device_data->clk); - ret = regulator_disable(device_data->pwr_regulator); + ret = ux500_regulator_atomic_disable(device_data->pwr_regulator); if (ret) dev_err(dev, "[%s]: " "regulator_disable() failed!", @@ -681,7 +675,7 @@ static int cryp_disable_power( device_data->power_state = false; out: - mutex_unlock(&device_data->power_state_mutex); + spin_unlock(&device_data->power_state_spinlock); return ret; } @@ -695,9 +689,9 @@ static int cryp_enable_power( dev_dbg(dev, "[%s]", __func__); - mutex_lock(&device_data->power_state_mutex); + spin_lock(&device_data->power_state_spinlock); if (!device_data->power_state) { - ret = regulator_enable(device_data->pwr_regulator); + ret = ux500_regulator_atomic_enable(device_data->pwr_regulator); if (ret) { dev_err(dev, "[%s]: regulator_enable() failed!", __func__); @@ -708,7 +702,8 @@ static int cryp_enable_power( if (ret) { dev_err(dev, "[%s]: clk_enable() failed!", __func__); - regulator_disable(device_data->pwr_regulator); + ux500_regulator_atomic_disable( + device_data->pwr_regulator); goto out; } device_data->power_state = true; @@ -724,13 +719,13 @@ static int cryp_enable_power( spin_unlock(&device_data->ctx_lock); } out: - mutex_unlock(&device_data->power_state_mutex); + spin_unlock(&device_data->power_state_spinlock); return ret; } static int hw_crypt_noxts(struct cryp_ctx *ctx, - struct cryp_device_data *device_data) + struct cryp_device_data *device_data) { int ret; @@ -742,26 +737,30 @@ static int hw_crypt_noxts(struct cryp_ctx *ctx, pr_debug(DEV_DBG_NAME " [%s]", __func__); ctx->outlen = ctx->datalen; - ctx->config.key_access = CRYP_STATE_ENABLE; - 
ctx->config.data_type = CRYP_DATA_TYPE_8BIT_SWAP; + ctx->config.keyrden = CRYP_STATE_ENABLE; + ctx->config.datatype = CRYP_DATA_TYPE_8BIT_SWAP; - cryp_reset(device_data); + if (unlikely(!IS_ALIGNED((u32)indata, 4))) { + pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: " + "0x%08x", __func__, (u32)indata); + return -EINVAL; + } ret = cryp_setup_context(ctx, device_data); if (ret) goto out; - cryp_flush_inoutfifo(device_data); - if (cryp_mode == CRYP_MODE_INTERRUPT) { - INIT_COMPLETION(ctx->device->cryp_irq_complete); - - cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO); - cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO); + cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO | + CRYP_IRQ_SRC_OUTPUT_FIFO); - cryp_activity(device_data, CRYP_CRYPEN_ENABLE); - - wait_for_completion(&ctx->device->cryp_irq_complete); + /* + * ctx->outlen is decremented in the cryp_interrupt_handler + * function. We had to add cpu_relax() (barrier) to make sure + * that gcc didn't optimze away this variable. + */ + while (ctx->outlen > 0) + cpu_relax(); } else if (cryp_mode == CRYP_MODE_POLLING || cryp_mode == CRYP_MODE_DMA) { /* @@ -821,8 +820,8 @@ static int ablk_dma_crypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.key_access = CRYP_STATE_ENABLE; - ctx->config.data_type = CRYP_DATA_TYPE_8BIT_SWAP; + ctx->config.keyrden = CRYP_STATE_ENABLE; + ctx->config.datatype = CRYP_DATA_TYPE_8BIT_SWAP; ctx->datalen = areq->nbytes; ctx->outlen = areq->nbytes; @@ -837,8 +836,6 @@ static int ablk_dma_crypt(struct ablkcipher_request *areq) goto out; } - cryp_reset(device_data); - ret = cryp_setup_context(ctx, device_data); if (ret) goto out_power; @@ -884,14 +881,16 @@ out: static int ablk_crypt(struct ablkcipher_request *areq) { + struct ablkcipher_walk walk; struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); - struct ablkcipher_walk walk; + struct cryp_device_data *device_data; unsigned long src_paddr; unsigned long dst_paddr; int ret; int nbytes; - struct cryp_device_data *device_data; + + pr_debug(DEV_DBG_NAME " [%s]", __func__); ret = cryp_get_device_data(ctx, &device_data); if (ret) @@ -955,7 +954,7 @@ out: } static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher, - const u8 *key, unsigned int keylen) + const u8 *key, unsigned int keylen) { struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); u32 *flags = &cipher->base.crt_flags; @@ -964,15 +963,15 @@ static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher, switch (keylen) { case AES_KEYSIZE_128: - ctx->config.key_size = CRYP_KEY_SIZE_128; + ctx->config.keysize = CRYP_KEY_SIZE_128; break; case AES_KEYSIZE_192: - ctx->config.key_size = CRYP_KEY_SIZE_192; + ctx->config.keysize = CRYP_KEY_SIZE_192; break; case AES_KEYSIZE_256: - ctx->config.key_size = CRYP_KEY_SIZE_256; + ctx->config.keysize = CRYP_KEY_SIZE_256; break; default: @@ -997,6 +996,12 @@ static int aes_setkey(struct crypto_tfm *tfm, const u8 *key, pr_debug(DEV_DBG_NAME " [%s]", __func__); + if (unlikely(!IS_ALIGNED((u32)key, 4))) { + dev_err(ctx->device->dev, "[%s]: key isn't aligned! 
Addr: " + "0x%08x", __func__, (u32)key); + return -EFAULT; + } + /* For CTR mode */ if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && @@ -1008,11 +1013,11 @@ static int aes_setkey(struct crypto_tfm *tfm, const u8 *key, } if (keylen == AES_KEYSIZE_128) - ctx->config.key_size = CRYP_KEY_SIZE_128; + ctx->config.keysize = CRYP_KEY_SIZE_128; else if (keylen == AES_KEYSIZE_192) - ctx->config.key_size = CRYP_KEY_SIZE_192; + ctx->config.keysize = CRYP_KEY_SIZE_192; else if (keylen == AES_KEYSIZE_256) - ctx->config.key_size = CRYP_KEY_SIZE_256; + ctx->config.keysize = CRYP_KEY_SIZE_256; memcpy(ctx->key, key, keylen); ctx->keylen = keylen; @@ -1022,7 +1027,7 @@ static int aes_setkey(struct crypto_tfm *tfm, const u8 *key, } static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher, - const u8 *key, unsigned int keylen) + const u8 *key, unsigned int keylen) { struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); u32 *flags = &cipher->base.crt_flags; @@ -1085,7 +1090,7 @@ static int des_setkey(struct crypto_tfm *tfm, const u8 *key, } static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher, - const u8 *key, unsigned int keylen) + const u8 *key, unsigned int keylen) { struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); u32 *flags = &cipher->base.crt_flags; @@ -1218,8 +1223,8 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_AES_ECB; ctx->indata = in; ctx->outdata = out; @@ -1238,8 +1243,8 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_AES_ECB; ctx->indata = in; ctx->outdata = out; @@ -1258,8 +1263,8 @@ static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_DES_ECB; ctx->indata = in; ctx->outdata = out; @@ -1278,8 +1283,8 @@ static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_DES_ECB; ctx->indata = in; ctx->outdata = out; @@ -1298,8 +1303,8 @@ static void des3_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_ECB; ctx->indata = in; ctx->outdata = out; @@ -1318,8 +1323,8 @@ static void des3_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ctx->blocksize = crypto_tfm_alg_blocksize(tfm); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_ECB; ctx->indata = in; ctx->outdata = out; @@ 
-1330,7 +1335,6 @@ static void des3_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) __func__); } - static int aes_ecb_encrypt(struct ablkcipher_request *areq) { struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); @@ -1338,8 +1342,8 @@ static int aes_ecb_encrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_AES_ECB; ctx->blocksize = AES_BLOCK_SIZE; if (cryp_mode == CRYP_MODE_DMA) @@ -1356,8 +1360,8 @@ static int aes_ecb_decrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_AES_ECB; ctx->blocksize = AES_BLOCK_SIZE; if (cryp_mode == CRYP_MODE_DMA) @@ -1375,8 +1379,8 @@ static int aes_cbc_encrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_AES_CBC; ctx->blocksize = AES_BLOCK_SIZE; /* Only DMA for ablkcipher, since givcipher not yet supported */ @@ -1396,8 +1400,8 @@ static int aes_cbc_decrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_AES_CBC; ctx->blocksize = AES_BLOCK_SIZE; /* Only DMA for ablkcipher, since givcipher not yet supported */ @@ -1417,8 +1421,8 @@ static int aes_ctr_encrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_CTR; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_AES_CTR; ctx->blocksize = AES_BLOCK_SIZE; /* Only DMA for ablkcipher, since givcipher not yet supported */ @@ -1438,8 +1442,8 @@ static int aes_ctr_decrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_AES_CTR; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_AES_CTR; ctx->blocksize = AES_BLOCK_SIZE; /* Only DMA for ablkcipher, since givcipher not yet supported */ @@ -1458,11 +1462,11 @@ static int des_ecb_encrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_DES_ECB; ctx->blocksize = DES_BLOCK_SIZE; - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. 
*/ @@ -1476,11 +1480,11 @@ static int des_ecb_decrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_DES_ECB; ctx->blocksize = DES_BLOCK_SIZE; - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1491,20 +1495,14 @@ static int des_cbc_encrypt(struct ablkcipher_request *areq) { struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); - u32 *flags = &cipher->base.crt_flags; pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_DES_CBC; ctx->blocksize = DES_BLOCK_SIZE; - /* Only DMA for ablkcipher, since givcipher not yet supported */ - if ((cryp_mode == CRYP_MODE_DMA) && - (*flags & CRYPTO_ALG_TYPE_ABLKCIPHER)) - return ablk_dma_crypt(areq); - - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1515,20 +1513,14 @@ static int des_cbc_decrypt(struct ablkcipher_request *areq) { struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); - u32 *flags = &cipher->base.crt_flags; pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_DES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_DES_CBC; ctx->blocksize = DES_BLOCK_SIZE; - /* Only DMA for ablkcipher, since givcipher not yet supported */ - if ((cryp_mode == CRYP_MODE_DMA) && - (*flags & CRYPTO_ALG_TYPE_ABLKCIPHER)) - return ablk_dma_crypt(areq); - - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1542,11 +1534,11 @@ static int des3_ecb_encrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_ECB; ctx->blocksize = DES3_EDE_BLOCK_SIZE; - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1560,11 +1552,11 @@ static int des3_ecb_decrypt(struct ablkcipher_request *areq) pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_ECB; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_ECB; ctx->blocksize = DES3_EDE_BLOCK_SIZE; - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. 
*/ @@ -1575,20 +1567,14 @@ static int des3_cbc_encrypt(struct ablkcipher_request *areq) { struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); - u32 *flags = &cipher->base.crt_flags; pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_CBC; ctx->blocksize = DES3_EDE_BLOCK_SIZE; - /* Only DMA for ablkcipher, since givcipher not yet supported */ - if ((cryp_mode == CRYP_MODE_DMA) && - (*flags & CRYPTO_ALG_TYPE_ABLKCIPHER)) - return ablk_dma_crypt(areq); - - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1599,20 +1585,14 @@ static int des3_cbc_decrypt(struct ablkcipher_request *areq) { struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher); - u32 *flags = &cipher->base.crt_flags; pr_debug(DEV_DBG_NAME " [%s]", __func__); - ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT; - ctx->config.algo_mode = CRYP_ALGO_TDES_CBC; + ctx->config.algodir = CRYP_ALGORITHM_DECRYPT; + ctx->config.algomode = CRYP_ALGO_TDES_CBC; ctx->blocksize = DES3_EDE_BLOCK_SIZE; - /* Only DMA for ablkcipher, since givcipher not yet supported */ - if ((cryp_mode == CRYP_MODE_DMA) && - (*flags & CRYPTO_ALG_TYPE_ABLKCIPHER)) - return ablk_dma_crypt(areq); - - /** + /* * Run the non DMA version also for DMA, since DMA is currently not * working for DES. */ @@ -1890,7 +1870,7 @@ static struct crypto_alg *u8500_cryp_algs[] = { &des_ecb_alg, &des_cbc_alg, &des3_ecb_alg, - &des3_cbc_alg + &des3_cbc_alg, }; /** @@ -1946,7 +1926,7 @@ static int u8500_cryp_probe(struct platform_device *pdev) struct device *dev = &pdev->dev; dev_dbg(dev, "[%s]", __func__); - device_data = kzalloc(sizeof(struct cryp_device_data), GFP_KERNEL); + device_data = kzalloc(sizeof(struct cryp_device_data), GFP_ATOMIC); if (!device_data) { dev_err(dev, "[%s]: kzalloc() failed!", __func__); ret = -ENOMEM; @@ -1986,10 +1966,10 @@ static int u8500_cryp_probe(struct platform_device *pdev) } spin_lock_init(&device_data->ctx_lock); - mutex_init(&device_data->power_state_mutex); + spin_lock_init(&device_data->power_state_spinlock); /* Enable power for CRYP hardware block */ - device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape"); + device_data->pwr_regulator = ux500_regulator_get(&pdev->dev); if (IS_ERR(device_data->pwr_regulator)) { dev_err(dev, "[%s]: could not get cryp regulator", __func__); ret = PTR_ERR(device_data->pwr_regulator); @@ -2044,8 +2024,6 @@ static int u8500_cryp_probe(struct platform_device *pdev) goto out_power; } - init_completion(&device_data->cryp_irq_complete); - if (cryp_mode == CRYP_MODE_DMA) cryp_dma_setup_channel(device_data, dev); @@ -2076,7 +2054,7 @@ out_clk: clk_put(device_data->clk); out_regulator: - regulator_put(device_data->pwr_regulator); + ux500_regulator_put(device_data->pwr_regulator); out_unmap: iounmap(device_data->base); @@ -2143,7 +2121,7 @@ static int u8500_cryp_remove(struct platform_device *pdev) __func__); clk_put(device_data->clk); - regulator_put(device_data->pwr_regulator); + ux500_regulator_put(device_data->pwr_regulator); iounmap(device_data->base); @@ -2327,5 +2305,7 @@ module_exit(u8500_cryp_mod_fini); module_param(cryp_mode, int, 0); MODULE_DESCRIPTION("Driver for ST-Ericsson U8500 CRYP crypto engine."); +MODULE_ALIAS("aes-all"); 
+MODULE_ALIAS("des-all"); MODULE_LICENSE("GPL"); diff --git a/drivers/crypto/ux500/cryp/cryp_irq.c b/drivers/crypto/ux500/cryp/cryp_irq.c index eacff226aa8..8814acc05d7 100644 --- a/drivers/crypto/ux500/cryp/cryp_irq.c +++ b/drivers/crypto/ux500/cryp/cryp_irq.c @@ -24,7 +24,7 @@ void cryp_enable_irq_src(struct cryp_device_data *device_data, u32 irq_src) dev_dbg(device_data->dev, "[%s]", __func__); i = readl(&device_data->base->imsc); - set_bit(irq_src, (void *)&i); + i = i | irq_src; writel(i, &device_data->base->imsc); } @@ -35,7 +35,7 @@ void cryp_disable_irq_src(struct cryp_device_data *device_data, u32 irq_src) dev_dbg(device_data->dev, "[%s]", __func__); i = readl(&device_data->base->imsc); - clear_bit(irq_src, (void *)&i); + i = i & ~irq_src; writel(i, &device_data->base->imsc); } diff --git a/drivers/crypto/ux500/cryp/cryp_irqp.h b/drivers/crypto/ux500/cryp/cryp_irqp.h index 5b60f887d02..8b339cc34bf 100644 --- a/drivers/crypto/ux500/cryp/cryp_irqp.h +++ b/drivers/crypto/ux500/cryp/cryp_irqp.h @@ -81,7 +81,7 @@ */ struct cryp_register { u32 cr; /* Configuration register */ - u32 status; /* Status register */ + u32 sr; /* Status register */ u32 din; /* Data input register */ u32 din_size; /* Data input size register */ u32 dout; /* Data output register */ diff --git a/drivers/crypto/ux500/cryp/cryp_p.h b/drivers/crypto/ux500/cryp/cryp_p.h index 966de4633cc..adc95457499 100644 --- a/drivers/crypto/ux500/cryp/cryp_p.h +++ b/drivers/crypto/ux500/cryp/cryp_p.h @@ -51,7 +51,6 @@ */ #define MAX_DEVICE_SUPPORT 2 #define CRYP_CR_DEFAULT 0x0002 -#define CRYP_CR_FFLUSH BIT(14) #define CRYP_DMACR_DEFAULT 0x0 #define CRYP_IMSC_DEFAULT 0x0 #define CRYP_DIN_DEFAULT 0x0 @@ -62,40 +61,47 @@ /** * CRYP Control register specific mask */ -#define CRYP_SECURE_MASK BIT(0) -#define CRYP_PRLG_MASK BIT(1) -#define CRYP_ENC_DEC_MASK BIT(2) +#define CRYP_CR_SECURE_MASK BIT(0) +#define CRYP_CR_PRLG_MASK BIT(1) +#define CRYP_CR_ALGODIR_MASK BIT(2) +#define CRYP_CR_ALGOMODE_MASK (BIT(5) | BIT(4) | BIT(3)) +#define CRYP_CR_DATATYPE_MASK (BIT(7) | BIT(6)) +#define CRYP_CR_KEYSIZE_MASK (BIT(9) | BIT(8)) +#define CRYP_CR_KEYRDEN_MASK BIT(10) +#define CRYP_CR_KSE_MASK BIT(11) +#define CRYP_CR_START_MASK BIT(12) +#define CRYP_CR_INIT_MASK BIT(13) +#define CRYP_CR_FFLUSH_MASK BIT(14) +#define CRYP_CR_CRYPEN_MASK BIT(15) +#define CRYP_CR_CONTEXT_SAVE_MASK (CRYP_CR_SECURE_MASK |\ + CRYP_CR_PRLG_MASK |\ + CRYP_CR_ALGODIR_MASK |\ + CRYP_CR_ALGOMODE_MASK |\ + CRYP_CR_DATATYPE_MASK |\ + CRYP_CR_KEYSIZE_MASK |\ + CRYP_CR_KEYRDEN_MASK |\ + CRYP_CR_DATATYPE_MASK) + + +#define CRYP_SR_INFIFO_READY_MASK (BIT(0) | BIT(1)) +#define CRYP_SR_IFEM_MASK BIT(0) #define CRYP_SR_BUSY_MASK BIT(4) -#define CRYP_KEY_ACCESS_MASK BIT(10) -#define CRYP_KSE_MASK BIT(11) -#define CRYP_START_MASK BIT(12) -#define CRYP_INIT_MASK BIT(13) -#define CRYP_FIFO_FLUSH_MASK BIT(14) -#define CRYP_CRYPEN_MASK BIT(15) -#define CRYP_INFIFO_READY_MASK (BIT(0) | BIT(1)) -#define CRYP_ALGOMODE_MASK (BIT(5) | BIT(4) | BIT(3)) -#define CRYP_DATA_TYPE_MASK (BIT(7) | BIT(6)) -#define CRYP_KEY_SIZE_MASK (BIT(9) | BIT(8)) /** * Bit position used while setting bits in register */ -#define CRYP_PRLG_POS 1 -#define CRYP_ENC_DEC_POS 2 -#define CRYP_ALGOMODE_POS 3 -#define CRYP_SR_BUSY_POS 4 -#define CRYP_DATA_TYPE_POS 6 -#define CRYP_KEY_SIZE_POS 8 -#define CRYP_KEY_ACCESS_POS 10 -#define CRYP_KSE_POS 11 -#define CRYP_START_POS 12 -#define CRYP_INIT_POS 13 -#define CRYP_CRYPEN_POS 15 +#define CRYP_CR_PRLG_POS 1 +#define CRYP_CR_ALGODIR_POS 2 +#define CRYP_CR_ALGOMODE_POS 3 
+#define CRYP_CR_DATATYPE_POS 6 +#define CRYP_CR_KEYSIZE_POS 8 +#define CRYP_CR_KEYRDEN_POS 10 +#define CRYP_CR_KSE_POS 11 +#define CRYP_CR_START_POS 12 +#define CRYP_CR_INIT_POS 13 +#define CRYP_CR_CRYPEN_POS 15 -/** - * CRYP Status register - */ -#define CRYP_BUSY_STATUS_MASK BIT(4) +#define CRYP_SR_BUSY_POS 4 /** * CRYP PCRs------PC_NAND control register |