// SPDX-License-Identifier: GPL-2.0-only
/**
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>

#include "cryp_p.h"
#include "cryp.h"

#define CRYP_MAX_KEY_SIZE	32
#define BYTES_PER_WORD		4

static int cryp_mode;
static atomic_t session_id;

static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;

/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
	struct klist device_list;
	struct semaphore device_allocation;
};

/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key: Key buffer of up to CRYP_MAX_KEY_SIZE bytes.
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Id of the session this context last used on the hardware.
 */
struct cryp_ctx {
	struct cryp_config config;
	u8 key[CRYP_MAX_KEY_SIZE];
	u32 keylen;
	u8 *iv;
	const u8 *indata;
	u8 *outdata;
	u32 datalen;
	u32 outlen;
	u32 blocksize;
	u8 updated;
	struct cryp_device_context dev_ctx;
	struct cryp_device_data *device;
	u32 session_id;
};

static struct cryp_driver_data driver_data;

/**
 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
	u32 *data = (u32 *)in;

	return cpu_to_be32p(data);
}

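/*
 * Note: uint8p_to_uint32_be() above turns the byte sequence
 * 0x01 0x02 0x03 0x04 into the 32-bit value 0x01020304, i.e. the input
 * bytes are read as one big-endian word regardless of CPU endianness.
 */
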
/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 *  nibble 2 (n2) includes bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bit 6 and 7, 4 positions to the left.
 *  2. Move bit 3 and 5, 2 positions to the left.
 *  3. Move bit 1-4, 1 position to the left.
 *
 *  Nibble 2 (n2):
 *  1. Move bit 0 and 1, 4 positions to the right.
 *  2. Move bit 2 and 4, 2 positions to the right.
 *  3. Move bit 3-6, 1 position to the right.
 *
 *  Combine the two nibbles to a complete and swapped byte.
 */
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK	0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK	0x28 /* (After right shift 4) Bits 3 and 5,
				right shift 2 */
#define R_SHIFT_1_MASK	0x1e /* (After right shift 2) Bits 1-4,
				right shift 1 */
#define L_SHIFT_4_MASK	0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK	0x14 /* (After left shift 4) Bits 2 and 4,
				left shift 2 */
#define L_SHIFT_1_MASK	0x78 /* (After left shift 2) Bits 3-6,
				left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1 & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2 & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}

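/*
 * A quick worked example of the mirroring above: swap_bits_in_byte(0xa1)
 * turns 1010 0001 into 1000 0101 (0x85), i.e. input bit N ends up at
 * output bit (7 - N).
 */
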
/*
 * Write the key words out in reverse order, with every byte bit-mirrored.
 * For a 16-byte AES key this means out[0..3] hold the mirrored in[12..15]
 * and out[12..15] hold the mirrored in[0..3]; this is the layout cfg_keys()
 * below loads into the key registers for the AES modes.
 */
static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
						      u8 *out, u32 len)
{
	unsigned int i = 0;
	int j;
	int index = 0;

	j = len - BYTES_PER_WORD;
	while (j >= 0) {
		for (i = 0; i < BYTES_PER_WORD; i++) {
			index = len - j - BYTES_PER_WORD + i;
			out[j + i] =
			    swap_bits_in_byte(in[index]);
		}
		j -= BYTES_PER_WORD;
	}
}

static void add_session_id(struct cryp_ctx *ctx)
{
	/*
	 * We never want 0 to be a valid value, since this is the default value
	 * for the software context.
	 */
	if (unlikely(atomic_inc_and_test(&session_id)))
		atomic_inc(&session_id);

	ctx->session_id = atomic_read(&session_id);
}

static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
	struct cryp_ctx *ctx;
	int count;
	struct cryp_device_data *device_data;

	if (param == NULL) {
		BUG_ON(!param);
		return IRQ_HANDLED;
	}

	/* The device is coming from the one found in hw_crypt_noxts. */
	device_data = (struct cryp_device_data *)param;

	ctx = device_data->current_ctx;

	if (ctx == NULL) {
		BUG_ON(!ctx);
		return IRQ_HANDLED;
	}

	dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
		cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
		"out" : "in");

	if (cryp_pending_irq_src(device_data,
				 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
		if (ctx->outlen / ctx->blocksize > 0) {
			count = ctx->blocksize / 4;

			readsl(&device_data->base->dout, ctx->outdata, count);
			ctx->outdata += count;
			ctx->outlen -= count;

			if (ctx->outlen == 0) {
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_OUTPUT_FIFO);
			}
		}
	} else if (cryp_pending_irq_src(device_data,
					CRYP_IRQ_SRC_INPUT_FIFO)) {
		if (ctx->datalen / ctx->blocksize > 0) {
			count = ctx->blocksize / 4;

			writesl(&device_data->base->din, ctx->indata, count);

			ctx->indata += count;
			ctx->datalen -= count;

			if (ctx->datalen == 0)
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_INPUT_FIFO);

			if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
				CRYP_PUT_BITS(&device_data->base->cr,
					      CRYP_START_ENABLE,
					      CRYP_CR_START_POS,
					      CRYP_CR_START_MASK);

				cryp_wait_until_done(device_data);
			}
		}
	}

	return IRQ_HANDLED;
}

static int mode_is_aes(enum cryp_algo_mode mode)
{
	return CRYP_ALGO_AES_ECB == mode ||
	       CRYP_ALGO_AES_CBC == mode ||
	       CRYP_ALGO_AES_CTR == mode ||
	       CRYP_ALGO_AES_XTS == mode;
}

static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
		  enum cryp_init_vector_index index)
{
	struct cryp_init_vector_value vector_value;

	dev_dbg(device_data->dev, "[%s]", __func__);

	vector_value.init_value_left = left;
	vector_value.init_value_right = right;

	return cryp_configure_init_vector(device_data,
					  index,
					  vector_value);
}

static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
	int i;
	int status = 0;
	int num_of_regs = ctx->blocksize / 8;
	u32 iv[AES_BLOCK_SIZE / 4];

	dev_dbg(device_data->dev, "[%s]", __func__);

	/*
	 * Since we loop on num_of_regs we need to have a check in case
	 * someone provides an incorrect blocksize which would force calling
	 * cfg_iv with i greater than 2 which is an error.
	 */
	if (num_of_regs > 2) {
		dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
			__func__, ctx->blocksize);
		return -EINVAL;
	}

	for (i = 0; i < ctx->blocksize / 4; i++)
		iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

	for (i = 0; i < num_of_regs; i++) {
		status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
				(enum cryp_init_vector_index) i);
		if (status != 0)
			return status;
	}
	return status;
}

static int set_key(struct cryp_device_data *device_data,
		   u32 left_key,
		   u32 right_key,
		   enum cryp_key_reg_index index)
{
	struct cryp_key_value key_value;
	int cryp_error;

	dev_dbg(device_data->dev, "[%s]", __func__);

	key_value.key_value_left = left_key;
	key_value.key_value_right = right_key;

	cryp_error = cryp_configure_key_values(device_data,
					       index,
					       key_value);
	if (cryp_error != 0)
		dev_err(device_data->dev, "[%s]: "
			"cryp_configure_key_values() failed!", __func__);

	return cryp_error;
}

static int cfg_keys(struct cryp_ctx *ctx)
{
	int i;
	int num_of_regs = ctx->keylen / 8;
	u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
	int cryp_error = 0;

	dev_dbg(ctx->device->dev, "[%s]", __func__);

	if (mode_is_aes(ctx->config.algomode)) {
		swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
						   (u8 *)swapped_key,
						   ctx->keylen);
	} else {
		for (i = 0; i < ctx->keylen / 4; i++)
			swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
	}

	for (i = 0; i < num_of_regs; i++) {
		cryp_error = set_key(ctx->device,
				     *(((u32 *)swapped_key)+i*2),
				     *(((u32 *)swapped_key)+i*2+1),
				     (enum cryp_key_reg_index) i);

		if (cryp_error != 0) {
			dev_err(ctx->device->dev, "[%s]: set_key() failed!",
				__func__);
			return cryp_error;
		}
	}
	return cryp_error;
}

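/*
 * A short overview of cryp_setup_context() below: a context that has never
 * run (updated == 0) gets a full setup of keys, IVs (for the non-ECB modes)
 * and configuration, plus a fresh session id. A context that has run before
 * but whose session id no longer matches the global counter has lost the
 * hardware to another session, so its saved device context is restored.
 * Otherwise the previously saved control register is simply reused.
 */
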
static int cryp_setup_context(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	u32 control_register = CRYP_CR_DEFAULT;

	switch (cryp_mode) {
	case CRYP_MODE_INTERRUPT:
		writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
		break;

	case CRYP_MODE_DMA:
		writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
		break;

	default:
		break;
	}

	if (ctx->updated == 0) {
		cryp_flush_inoutfifo(device_data);
		if (cfg_keys(ctx) != 0) {
			dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
				__func__);
			return -EINVAL;
		}

		if (ctx->iv &&
		    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
			if (cfg_ivs(device_data, ctx) != 0)
				return -EPERM;
		}

		cryp_set_configuration(device_data, &ctx->config,
				       &control_register);
		add_session_id(ctx);
	} else if (ctx->updated == 1 &&
		   ctx->session_id != atomic_read(&session_id)) {
		cryp_flush_inoutfifo(device_data);
		cryp_restore_device_context(device_data, &ctx->dev_ctx);

		add_session_id(ctx);
		control_register = ctx->dev_ctx.cr;
	} else
		control_register = ctx->dev_ctx.cr;

	writel(control_register |
	       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
	       &device_data->base->cr);

	return 0;
}

static int cryp_get_device_data(struct cryp_ctx *ctx,
				struct cryp_device_data **device_data)
{
	int ret;
	struct klist_iter device_iterator;
	struct klist_node *device_node;
	struct cryp_device_data *local_device_data = NULL;
	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	/* Wait until a device is available */
	ret = down_interruptible(&driver_data.device_allocation);
	if (ret)
		return ret;	/* Interrupted */

	/* Select a device */
	klist_iter_init(&driver_data.device_list, &device_iterator);

	device_node = klist_next(&device_iterator);
	while (device_node) {
		local_device_data = container_of(device_node,
					struct cryp_device_data, list_node);
		spin_lock(&local_device_data->ctx_lock);
		/* current_ctx allocates a device, NULL = unallocated */
		if (local_device_data->current_ctx) {
			device_node = klist_next(&device_iterator);
		} else {
			local_device_data->current_ctx = ctx;
			ctx->device = local_device_data;
			spin_unlock(&local_device_data->ctx_lock);
			break;
		}
		spin_unlock(&local_device_data->ctx_lock);
	}
	klist_iter_exit(&device_iterator);

	if (!device_node) {
		/*
		 * No free device found.
		 * Since we allocated a device with down_interruptible, this
		 * should not be able to happen.
		 * Number of available devices, which are contained in
		 * device_allocation, is therefore decremented by not doing
		 * an up(device_allocation).
		 */
		return -EBUSY;
	}

	*device_data = local_device_data;

	return 0;
}

static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
				   struct device *dev)
{
	struct dma_slave_config mem2cryp = {
		.direction = DMA_MEM_TO_DEV,
		.dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
		.dst_maxburst = 4,
	};
	struct dma_slave_config cryp2mem = {
		.direction = DMA_DEV_TO_MEM,
		.src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
		.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
		.src_maxburst = 4,
	};

	dma_cap_zero(device_data->dma.mask);
	dma_cap_set(DMA_SLAVE, device_data->dma.mask);

	device_data->dma.cfg_mem2cryp = mem_to_engine;
	device_data->dma.chan_mem2cryp =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_mem2cryp);

	device_data->dma.cfg_cryp2mem = engine_to_mem;
	device_data->dma.chan_cryp2mem =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_cryp2mem);

	dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
	dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);

	init_completion(&device_data->dma.cryp_dma_complete);
}

static void cryp_dma_out_callback(void *data)
{
	struct cryp_ctx *ctx = (struct cryp_ctx *) data;
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	complete(&ctx->device->dma.cryp_dma_complete);
}

static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
				 struct scatterlist *sg,
				 int len,
				 enum dma_data_direction direction)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *channel = NULL;
	dma_cookie_t cookie;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
			"aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
		return -EFAULT;
	}

	switch (direction) {
	case DMA_TO_DEVICE:
		channel = ctx->device->dma.chan_mem2cryp;
		ctx->device->dma.sg_src = sg;
		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
						ctx->device->dma.sg_src,
						ctx->device->dma.nents_src,
						direction);

		if (!ctx->device->dma.sg_src_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (TO_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(TO_DEVICE)", __func__);

		desc = dmaengine_prep_slave_sg(channel,
				ctx->device->dma.sg_src,
				ctx->device->dma.sg_src_len,
				DMA_MEM_TO_DEV, DMA_CTRL_ACK);
		break;

	case DMA_FROM_DEVICE:
		channel = ctx->device->dma.chan_cryp2mem;
		ctx->device->dma.sg_dst = sg;
		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
						ctx->device->dma.sg_dst,
						ctx->device->dma.nents_dst,
						direction);

		if (!ctx->device->dma.sg_dst_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (FROM_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(FROM_DEVICE)", __func__);

		desc = dmaengine_prep_slave_sg(channel,
				ctx->device->dma.sg_dst,
				ctx->device->dma.sg_dst_len,
				DMA_DEV_TO_MEM,
				DMA_CTRL_ACK |
				DMA_PREP_INTERRUPT);

		desc->callback = cryp_dma_out_callback;
		desc->callback_param = ctx;
		break;

	default:
		dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
			__func__);
		return -EFAULT;
	}

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie)) {
		dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
			__func__);
		return cookie;
	}

	dma_async_issue_pending(channel);

	return 0;
}

static void cryp_dma_done(struct cryp_ctx *ctx)
{
	struct dma_chan *chan;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	chan = ctx->device->dma.chan_mem2cryp;
	dmaengine_terminate_all(chan);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

	chan = ctx->device->dma.chan_cryp2mem;
	dmaengine_terminate_all(chan);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}

static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
			  int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

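/*
 * Polling mode below simply feeds one block at a time into the input FIFO,
 * waits for the hardware, and reads the result back from the output FIFO
 * until ctx->datalen bytes have been processed.
 */
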
static void cryp_polling_mode(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	int len = ctx->blocksize / BYTES_PER_WORD;
	int remaining_length = ctx->datalen;
	u32 *indata = (u32 *)ctx->indata;
	u32 *outdata = (u32 *)ctx->outdata;

	while (remaining_length > 0) {
		writesl(&device_data->base->din, indata, len);
		indata += len;
		remaining_length -= (len * BYTES_PER_WORD);
		cryp_wait_until_done(device_data);

		readsl(&device_data->base->dout, outdata, len);
		outdata += len;
		cryp_wait_until_done(device_data);
	}
}

static int cryp_disable_power(struct device *dev,
			      struct cryp_device_data *device_data,
			      bool save_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state)
		goto out;

	spin_lock(&device_data->ctx_lock);
	if (save_device_context && device_data->current_ctx) {
		cryp_save_device_context(device_data,
				&device_data->current_ctx->dev_ctx,
				cryp_mode);
		device_data->restore_dev_ctx = true;
	}
	spin_unlock(&device_data->ctx_lock);

	clk_disable(device_data->clk);
	ret = regulator_disable(device_data->pwr_regulator);
	if (ret)
		dev_err(dev, "[%s]: "
			"regulator_disable() failed!",
			__func__);

	device_data->power_state = false;

out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}

static int cryp_enable_power(
		struct device *dev,
		struct cryp_device_data *device_data,
		bool restore_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state) {
		ret = regulator_enable(device_data->pwr_regulator);
		if (ret) {
			dev_err(dev, "[%s]: regulator_enable() failed!",
				__func__);
			goto out;
		}

		ret = clk_enable(device_data->clk);
		if (ret) {
			dev_err(dev, "[%s]: clk_enable() failed!",
				__func__);
			regulator_disable(device_data->pwr_regulator);
			goto out;
		}
		device_data->power_state = true;
	}

	if (device_data->restore_dev_ctx) {
		spin_lock(&device_data->ctx_lock);
		if (restore_device_context && device_data->current_ctx) {
			device_data->restore_dev_ctx = false;
			cryp_restore_device_context(device_data,
					&device_data->current_ctx->dev_ctx);
		}
		spin_unlock(&device_data->ctx_lock);
	}
out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}

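/*
 * hw_crypt_noxts() below pushes ctx->datalen bytes through the engine using
 * either the interrupt-driven or the polling FIFO path; even in DMA mode the
 * non-AES algorithms end up here and are handled by polling. The hardware
 * context is saved afterwards so it can be restored if another session uses
 * the device in between.
 */
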
static int hw_crypt_noxts(struct cryp_ctx *ctx,
			  struct cryp_device_data *device_data)
{
	int ret = 0;

	const u8 *indata = ctx->indata;
	u8 *outdata = ctx->outdata;
	u32 datalen = ctx->datalen;
	u32 outlen = datalen;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->outlen = ctx->datalen;

	if (unlikely(!IS_ALIGNED((unsigned long)indata, 4))) {
		pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
			 "0x%08lx", __func__, (unsigned long)indata);
		return -EINVAL;
	}

	ret = cryp_setup_context(ctx, device_data);

	if (ret)
		goto out;

	if (cryp_mode == CRYP_MODE_INTERRUPT) {
		cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
				    CRYP_IRQ_SRC_OUTPUT_FIFO);

		/*
		 * ctx->outlen is decremented in the cryp_interrupt_handler
		 * function. We had to add cpu_relax() (barrier) to make sure
		 * that gcc didn't optimize away this variable.
		 */
		while (ctx->outlen > 0)
			cpu_relax();
	} else if (cryp_mode == CRYP_MODE_POLLING ||
		   cryp_mode == CRYP_MODE_DMA) {
		/*
		 * The reason for having DMA in this if case is that if we are
		 * running cryp_mode = 2, then we use separate DMA routines for
		 * handling cipher/plaintext > blocksize, except when
		 * running the normal CRYPTO_ALG_TYPE_CIPHER, then we still use
		 * the polling mode. Overhead of doing DMA setup eats up the
		 * benefits using it.
		 */
		cryp_polling_mode(ctx, device_data);
	} else {
		dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
			__func__);
		ret = -EPERM;
		goto out;
	}

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	ctx->indata = indata;
	ctx->outdata = outdata;
	ctx->datalen = datalen;
	ctx->outlen = outlen;

	return ret;
}

static int get_nents(struct scatterlist *sg, int nbytes)
{
	int nents = 0;

	while (nbytes > 0) {
		nbytes -= sg->length;
		sg = sg_next(sg);
		nents++;
	}

	return nents;
}

static int ablk_dma_crypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct cryp_device_data *device_data;

	int bytes_written = 0;
	int bytes_read = 0;
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->datalen = areq->cryptlen;
	ctx->outlen = areq->cryptlen;

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		return ret;

	ret = cryp_setup_context(ctx, device_data);
	if (ret)
		goto out;

	/* We have the device now, so store the nents in the dma struct. */
	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

	/* Enable DMA in- and output. */
	cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

	bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
	bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
	cryp_dma_done(ctx);

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	if (unlikely(bytes_written != bytes_read))
		return -EPERM;

	return 0;
}

static int ablk_crypt(struct skcipher_request *areq)
{
	struct skcipher_walk walk;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct cryp_device_data *device_data;
	unsigned long src_paddr;
	unsigned long dst_paddr;
	int ret;
	int nbytes;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		goto out;

	ret = skcipher_walk_async(&walk, areq);

	if (ret) {
		pr_err(DEV_DBG_NAME "[%s]: skcipher_walk_async() failed!",
			__func__);
		goto out;
	}

	while ((nbytes = walk.nbytes) > 0) {
		ctx->iv = walk.iv;
		src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);
		ctx->indata = phys_to_virt(src_paddr);

		dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);
		ctx->outdata = phys_to_virt(dst_paddr);

		ctx->datalen = nbytes - (nbytes % ctx->blocksize);

		ret = hw_crypt_noxts(ctx, device_data);
		if (ret)
			goto out;

		nbytes -= ctx->datalen;
		ret = skcipher_walk_done(&walk, nbytes);
		if (ret)
			goto out;
	}

out:
	/* Release the device */
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	return ret;
}

static int aes_skcipher_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	switch (keylen) {
	case AES_KEYSIZE_128:
		ctx->config.keysize = CRYP_KEY_SIZE_128;
		break;

	case AES_KEYSIZE_192:
		ctx->config.keysize = CRYP_KEY_SIZE_192;
		break;

	case AES_KEYSIZE_256:
		ctx->config.keysize = CRYP_KEY_SIZE_256;
		break;

	default:
		pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;

	return 0;
}

static int des_skcipher_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}

static int des3_skcipher_setkey(struct crypto_skcipher *cipher,
				const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}

static int cryp_blk_encrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}

struct cryp_algo_template {
	enum cryp_algo_mode algomode;
	struct skcipher_alg skcipher;
};

static int cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct cryp_algo_template *cryp_alg = container_of(alg,
			struct cryp_algo_template,
			skcipher);

	ctx->config.algomode = cryp_alg->algomode;
	ctx->blocksize = crypto_skcipher_blocksize(tfm);

	return 0;
}

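/*
 * Each entry below pairs a hardware algomode with the skcipher_alg that is
 * registered with the crypto API. The alignmask of 3 reflects that the
 * driver moves data through the FIFOs as 32-bit words.
 */
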
static struct cryp_algo_template cryp_algs[] = {
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(aes)",
			.base.cra_driver_name = "ecb-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(aes)",
			.base.cra_driver_name = "cbc-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
			.ivsize = AES_BLOCK_SIZE,
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CTR,
		.skcipher = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "ctr-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = 1,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(des)",
			.base.cra_driver_name = "ecb-des-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(des3_ede)",
			.base.cra_driver_name = "ecb-des3_ede-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = des3_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_DES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(des)",
			.base.cra_driver_name = "cbc-des-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(des3_ede)",
			.base.cra_driver_name = "cbc-des3_ede-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = des3_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.init = cryp_init_tfm,
		}
	}
};

/**
 * cryp_algs_register_all - register all CRYP skciphers with the crypto API
 */
static int cryp_algs_register_all(void)
{
	int ret;
	int i;
	int count;

	pr_debug("[%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
		ret = crypto_register_skcipher(&cryp_algs[i].skcipher);
		if (ret) {
			count = i;
			pr_err("[%s] alg registration failed",
				cryp_algs[i].skcipher.base.cra_driver_name);
			goto unreg;
		}
	}
	return 0;
unreg:
	for (i = 0; i < count; i++)
		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
	return ret;
}

/**
 * cryp_algs_unregister_all - unregister all CRYP skciphers
 */
static void cryp_algs_unregister_all(void)
{
	int i;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
}

static int ux500_cryp_probe(struct platform_device *pdev)
{
	int ret;
	struct resource *res;
	struct resource *res_irq;
	struct cryp_device_data *device_data;
	struct cryp_protection_config prot = {
		.privilege_access = CRYP_STATE_ENABLE
	};
	struct device *dev = &pdev->dev;

	dev_dbg(dev, "[%s]", __func__);
	device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_ATOMIC);
	if (!device_data) {
		ret = -ENOMEM;
		goto out;
	}

	device_data->dev = dev;
	device_data->current_ctx = NULL;

	/* Grab the DMA configuration from platform data. */
	mem_to_engine = &((struct cryp_platform_data *)
			 dev->platform_data)->mem_to_engine;
	engine_to_mem = &((struct cryp_platform_data *)
			 dev->platform_data)->engine_to_mem;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "[%s]: platform_get_resource() failed",
			__func__);
		ret = -ENODEV;
		goto out;
	}

	device_data->phybase = res->start;
	device_data->base = devm_ioremap_resource(dev, res);
	if (IS_ERR(device_data->base)) {
		dev_err(dev, "[%s]: ioremap failed!", __func__);
		ret = PTR_ERR(device_data->base);
		goto out;
	}

	spin_lock_init(&device_data->ctx_lock);
	spin_lock_init(&device_data->power_state_spinlock);

	/* Enable power for CRYP hardware block */
	device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
	if (IS_ERR(device_data->pwr_regulator)) {
		dev_err(dev, "[%s]: could not get cryp regulator", __func__);
		ret = PTR_ERR(device_data->pwr_regulator);
		device_data->pwr_regulator = NULL;
		goto out;
	}

	/* Enable the clk for CRYP hardware block */
	device_data->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(device_data->clk)) {
		dev_err(dev, "[%s]: clk_get() failed!", __func__);
		ret = PTR_ERR(device_data->clk);
		goto out_regulator;
	}

	ret = clk_prepare(device_data->clk);
	if (ret) {
		dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
		goto out_regulator;
	}

	/* Enable device power (and clock) */
	ret = cryp_enable_power(device_data->dev, device_data, false);
	if (ret) {
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
		goto out_clk_unprepare;
	}

	if (cryp_check(device_data)) {
		dev_err(dev, "[%s]: cryp_check() failed!", __func__);
		ret = -EINVAL;
		goto out_power;
	}

	if (cryp_configure_protection(device_data, &prot)) {
		dev_err(dev, "[%s]: cryp_configure_protection() failed!",
			__func__);
		ret = -EINVAL;
		goto out_power;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq) {
		dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
			__func__);
		ret = -ENODEV;
		goto out_power;
	}

	ret = devm_request_irq(&pdev->dev, res_irq->start,
			       cryp_interrupt_handler, 0, "cryp1", device_data);
	if (ret) {
		dev_err(dev, "[%s]: Unable to request IRQ", __func__);
		goto out_power;
	}

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_dma_setup_channel(device_data, dev);

	platform_set_drvdata(pdev, device_data);

	/* Put the new device into the device list... */
	klist_add_tail(&device_data->list_node, &driver_data.device_list);

	/* ... and signal that a new device is available. */
	up(&driver_data.device_allocation);

	atomic_set(&session_id, 1);

	ret = cryp_algs_register_all();
	if (ret) {
		dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
			__func__);
		goto out_power;
	}

	dev_info(dev, "successfully registered\n");

	return 0;

out_power:
	cryp_disable_power(device_data->dev, device_data, false);

out_clk_unprepare:
	clk_unprepare(device_data->clk);

out_regulator:
	regulator_put(device_data->pwr_regulator);

out:
	return ret;
}

static int ux500_cryp_remove(struct platform_device *pdev)
{
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);
	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return -ENOMEM;
	}

	/* Try to decrease the number of available devices. */
	if (down_trylock(&driver_data.device_allocation))
		return -EBUSY;

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (device_data->current_ctx) {
		/* The device is busy */
		spin_unlock(&device_data->ctx_lock);
		/* Return the device to the pool. */
		up(&driver_data.device_allocation);
		return -EBUSY;
	}

	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);

	clk_unprepare(device_data->clk);
	regulator_put(device_data->pwr_regulator);

	return 0;
}

static void ux500_cryp_shutdown(struct platform_device *pdev)
{
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return;
	}

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (!device_data->current_ctx) {
		if (down_trylock(&driver_data.device_allocation))
			dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
				"Shutting down anyway...", __func__);
		/*
		 * (Allocate the device)
		 * Need to set this to a non-null (dummy) value,
		 * to avoid usage during context switching.
		 */
		device_data->current_ctx++;
	}
	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);
}

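/*
 * Note on the suspend/resume helpers below: an idle device is marked by
 * bumping current_ctx from NULL to a dummy non-NULL value (the "++temp_ctx"
 * comparison recreates that same dummy pointer), so that resume can tell
 * whether it should hand the device back to the pool or re-enable power and
 * restore a live context.
 */
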
#ifdef CONFIG_PM_SLEEP
static int ux500_cryp_suspend(struct device *dev)
{
	int ret;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	/* Handle state? */
	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
	else
		disable_irq(res_irq->start);

	spin_lock(&device_data->ctx_lock);
	if (!device_data->current_ctx)
		device_data->current_ctx++;
	spin_unlock(&device_data->ctx_lock);

	if (device_data->current_ctx == ++temp_ctx) {
		if (down_interruptible(&driver_data.device_allocation))
			dev_dbg(dev, "[%s]: down_interruptible() failed",
				__func__);
		ret = cryp_disable_power(dev, device_data, false);

	} else
		ret = cryp_disable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_disable_power()", __func__);

	return ret;
}

static int ux500_cryp_resume(struct device *dev)
{
	int ret = 0;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	spin_lock(&device_data->ctx_lock);
	if (device_data->current_ctx == ++temp_ctx)
		device_data->current_ctx = NULL;
	spin_unlock(&device_data->ctx_lock);

	if (!device_data->current_ctx)
		up(&driver_data.device_allocation);
	else
		ret = cryp_enable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
	else {
		res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
		if (res_irq)
			enable_irq(res_irq->start);
	}

	return ret;
}
#endif

static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static const struct of_device_id ux500_cryp_match[] = {
	{ .compatible = "stericsson,ux500-cryp" },
	{ },
};
MODULE_DEVICE_TABLE(of, ux500_cryp_match);

static struct platform_driver cryp_driver = {
	.probe = ux500_cryp_probe,
	.remove = ux500_cryp_remove,
	.shutdown = ux500_cryp_shutdown,
	.driver = {
		.name = "cryp1",
		.of_match_table = ux500_cryp_match,
		.pm = &ux500_cryp_pm,
	}
};

static int __init ux500_cryp_mod_init(void)
{
	pr_debug("[%s] is called!", __func__);
	klist_init(&driver_data.device_list, NULL, NULL);
	/* Initialize the semaphore to 0 devices (locked state) */
	sema_init(&driver_data.device_allocation, 0);
	return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
	pr_debug("[%s] is called!", __func__);
	platform_driver_unregister(&cryp_driver);
}

module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);

module_param(cryp_mode, int, 0);

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
MODULE_ALIAS_CRYPTO("aes-all");
MODULE_ALIAS_CRYPTO("des-all");

MODULE_LICENSE("GPL");