/*
 * cryp.c - core register-level routines for the ST-Ericsson ux500 (DB8500)
 * CRYP hardware crypto accelerator, as shipped in Linux kernel 3.7.1.
 */
11 #include <linux/errno.h>
12 #include <linux/kernel.h>
13 #include <linux/types.h>
14 
15 #include <mach/hardware.h>
16 
17 #include "cryp_p.h"
18 #include "cryp.h"
19 
/**
 * cryp_wait_until_done - spin until the CRYP logic reports idle.
 * @device_data: pointer to the device data struct for base address
 *
 * Busy-waits (with cpu_relax()) while the hardware is still processing.
 */
void cryp_wait_until_done(struct cryp_device_data *device_data)
{
	for (;;) {
		if (!cryp_is_logic_busy(device_data))
			break;
		cpu_relax();
	}
}
28 
33 int cryp_check(struct cryp_device_data *device_data)
34 {
35  int peripheralid2 = 0;
36 
37  if (NULL == device_data)
38  return -EINVAL;
39 
40  peripheralid2 = readl_relaxed(&device_data->base->periphId2);
41 
42  if (peripheralid2 != CRYP_PERIPHERAL_ID2_DB8500)
43  return -EPERM;
44 
45  /* Check Peripheral and Pcell Id Register for CRYP */
46  if ((CRYP_PERIPHERAL_ID0 ==
47  readl_relaxed(&device_data->base->periphId0))
48  && (CRYP_PERIPHERAL_ID1 ==
49  readl_relaxed(&device_data->base->periphId1))
50  && (CRYP_PERIPHERAL_ID3 ==
51  readl_relaxed(&device_data->base->periphId3))
52  && (CRYP_PCELL_ID0 ==
53  readl_relaxed(&device_data->base->pcellId0))
54  && (CRYP_PCELL_ID1 ==
55  readl_relaxed(&device_data->base->pcellId1))
56  && (CRYP_PCELL_ID2 ==
57  readl_relaxed(&device_data->base->pcellId2))
58  && (CRYP_PCELL_ID3 ==
59  readl_relaxed(&device_data->base->pcellId3))) {
60  return 0;
61  }
62 
63  return -EPERM;
64 }
65 
71 void cryp_activity(struct cryp_device_data *device_data,
73 {
74  CRYP_PUT_BITS(&device_data->base->cr,
75  cryp_crypen,
78 }
79 
84 void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
85 {
86  /*
87  * We always need to disble the hardware before trying to flush the
88  * FIFO. This is something that isn't written in the design
89  * specification, but we have been informed by the hardware designers
90  * that this must be done.
91  */
92  cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
93  cryp_wait_until_done(device_data);
94 
95  CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);
96  /*
97  * CRYP_SR_INFIFO_READY_MASK is the expected value on the status
98  * register when starting a new calculation, which means Input FIFO is
99  * not full and input FIFO is empty.
100  */
101  while (readl_relaxed(&device_data->base->sr) !=
103  cpu_relax();
104 }
105 
112 int cryp_set_configuration(struct cryp_device_data *device_data,
113  struct cryp_config *cryp_config,
114  u32 *control_register)
115 {
116  u32 cr_for_kse;
117 
118  if (NULL == device_data || NULL == cryp_config)
119  return -EINVAL;
120 
121  *control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);
122 
123  /* Prepare key for decryption in AES_ECB and AES_CBC mode. */
124  if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
125  ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
126  (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
127  cr_for_kse = *control_register;
128  /*
129  * This seems a bit odd, but it is indeed needed to set this to
130  * encrypt even though it is a decryption that we are doing. It
131  * also mentioned in the design spec that you need to do this.
132  * After the keyprepartion for decrypting is done you should set
133  * algodir back to decryption, which is done outside this if
134  * statement.
135  *
136  * According to design specification we should set mode ECB
137  * during key preparation even though we might be running CBC
138  * when enter this function.
139  *
140  * Writing to KSE_ENABLED will drop CRYPEN when key preparation
141  * is done. Therefore we need to set CRYPEN again outside this
142  * if statement when running decryption.
143  */
144  cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
148 
149  writel_relaxed(cr_for_kse, &device_data->base->cr);
150  cryp_wait_until_done(device_data);
151  }
152 
153  *control_register |=
154  ((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
155  (cryp_config->algodir << CRYP_CR_ALGODIR_POS));
156 
157  return 0;
158 }
159 
167  struct cryp_protection_config *p_protect_config)
168 {
169  if (NULL == p_protect_config)
170  return -EINVAL;
171 
172  CRYP_WRITE_BIT(&device_data->base->cr,
173  (u32) p_protect_config->secure_access,
175  CRYP_PUT_BITS(&device_data->base->cr,
176  p_protect_config->privilege_access,
179 
180  return 0;
181 }
182 
187 int cryp_is_logic_busy(struct cryp_device_data *device_data)
188 {
189  return CRYP_TEST_BITS(&device_data->base->sr,
191 }
192 
198 void cryp_configure_for_dma(struct cryp_device_data *device_data,
199  enum cryp_dma_req_type dma_req)
200 {
201  CRYP_SET_BITS(&device_data->base->dmacr,
202  (u32) dma_req);
203 }
204 
212  enum cryp_key_reg_index key_reg_index,
213  struct cryp_key_value key_value)
214 {
215  while (cryp_is_logic_busy(device_data))
216  cpu_relax();
217 
218  switch (key_reg_index) {
219  case CRYP_KEY_REG_1:
220  writel_relaxed(key_value.key_value_left,
221  &device_data->base->key_1_l);
222  writel_relaxed(key_value.key_value_right,
223  &device_data->base->key_1_r);
224  break;
225  case CRYP_KEY_REG_2:
226  writel_relaxed(key_value.key_value_left,
227  &device_data->base->key_2_l);
228  writel_relaxed(key_value.key_value_right,
229  &device_data->base->key_2_r);
230  break;
231  case CRYP_KEY_REG_3:
232  writel_relaxed(key_value.key_value_left,
233  &device_data->base->key_3_l);
234  writel_relaxed(key_value.key_value_right,
235  &device_data->base->key_3_r);
236  break;
237  case CRYP_KEY_REG_4:
238  writel_relaxed(key_value.key_value_left,
239  &device_data->base->key_4_l);
240  writel_relaxed(key_value.key_value_right,
241  &device_data->base->key_4_r);
242  break;
243  default:
244  return -EINVAL;
245  }
246 
247  return 0;
248 }
249 
258  init_vector_index,
260  init_vector_value)
261 {
262  while (cryp_is_logic_busy(device_data))
263  cpu_relax();
264 
265  switch (init_vector_index) {
267  writel_relaxed(init_vector_value.init_value_left,
268  &device_data->base->init_vect_0_l);
269  writel_relaxed(init_vector_value.init_value_right,
270  &device_data->base->init_vect_0_r);
271  break;
273  writel_relaxed(init_vector_value.init_value_left,
274  &device_data->base->init_vect_1_l);
275  writel_relaxed(init_vector_value.init_value_right,
276  &device_data->base->init_vect_1_r);
277  break;
278  default:
279  return -EINVAL;
280  }
281 
282  return 0;
283 }
284 
292  struct cryp_device_context *ctx,
293  int cryp_mode)
294 {
295  enum cryp_algo_mode algomode;
296  struct cryp_register *src_reg = device_data->base;
297  struct cryp_config *config =
298  (struct cryp_config *)device_data->current_ctx;
299 
300  /*
301  * Always start by disable the hardware and wait for it to finish the
302  * ongoing calculations before trying to reprogram it.
303  */
304  cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
305  cryp_wait_until_done(device_data);
306 
307  if (cryp_mode == CRYP_MODE_DMA)
309 
310  if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
311  ctx->din = readl_relaxed(&src_reg->din);
312 
313  ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;
314 
315  switch (config->keysize) {
316  case CRYP_KEY_SIZE_256:
317  ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
318  ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
319 
320  case CRYP_KEY_SIZE_192:
321  ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
322  ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
323 
324  case CRYP_KEY_SIZE_128:
325  ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
326  ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
327 
328  default:
329  ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
330  ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
331  }
332 
333  /* Save IV for CBC mode for both AES and DES. */
334  algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
335  if (algomode == CRYP_ALGO_TDES_CBC ||
336  algomode == CRYP_ALGO_DES_CBC ||
337  algomode == CRYP_ALGO_AES_CBC) {
338  ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
339  ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
340  ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
341  ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
342  }
343 }
344 
352  struct cryp_device_context *ctx)
353 {
354  struct cryp_register *reg = device_data->base;
355  struct cryp_config *config =
356  (struct cryp_config *)device_data->current_ctx;
357 
358  /*
359  * Fall through for all items in switch statement. DES is captured in
360  * the default.
361  */
362  switch (config->keysize) {
363  case CRYP_KEY_SIZE_256:
364  writel_relaxed(ctx->key_4_l, &reg->key_4_l);
365  writel_relaxed(ctx->key_4_r, &reg->key_4_r);
366 
367  case CRYP_KEY_SIZE_192:
368  writel_relaxed(ctx->key_3_l, &reg->key_3_l);
369  writel_relaxed(ctx->key_3_r, &reg->key_3_r);
370 
371  case CRYP_KEY_SIZE_128:
372  writel_relaxed(ctx->key_2_l, &reg->key_2_l);
373  writel_relaxed(ctx->key_2_r, &reg->key_2_r);
374 
375  default:
376  writel_relaxed(ctx->key_1_l, &reg->key_1_l);
377  writel_relaxed(ctx->key_1_r, &reg->key_1_r);
378  }
379 
380  /* Restore IV for CBC mode for AES and DES. */
381  if (config->algomode == CRYP_ALGO_TDES_CBC ||
382  config->algomode == CRYP_ALGO_DES_CBC ||
383  config->algomode == CRYP_ALGO_AES_CBC) {
384  writel_relaxed(ctx->init_vect_0_l, &reg->init_vect_0_l);
385  writel_relaxed(ctx->init_vect_0_r, &reg->init_vect_0_r);
386  writel_relaxed(ctx->init_vect_1_l, &reg->init_vect_1_l);
387  writel_relaxed(ctx->init_vect_1_r, &reg->init_vect_1_r);
388  }
389 }