#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <asm/cpu_device_id.h>
#include <asm/i387.h>
	return crypto_shash_init(&dctx->fallback);
	return crypto_shash_export(&dctx->fallback, out);
	return crypto_shash_import(&dctx->fallback, in);
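	/*
	 * The init, export and import callbacks above simply hand off to the
	 * software fallback shash: the classic PadLock hash engine can only
	 * produce a finished digest in one operation, so all intermediate
	 * state is kept in the fallback and the hardware is used only on the
	 * finup/final paths below.
	 */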
static inline void padlock_output_block(uint32_t *src,
					uint32_t *dst, size_t count)
{
	while (count--)
		*dst++ = swab32(*src++);
}

static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
			      unsigned int count, u8 *out)
	unsigned int leftover;
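	/*
	 * Hardware path: the PadLock "rep xsha1" instruction takes the input
	 * buffer in %esi, the digest/state buffer in %edi and the message
	 * length in %ecx, and produces the final SHA-1 digest in one
	 * operation.  CR0.TS is cleared around it (irq_ts_save()/
	 * irq_ts_restore()) so touching the extended state does not raise a
	 * spurious device-not-available fault.
	 */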
	ts_state = irq_ts_save();
	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      :
		      : "c"((unsigned long)state.count + count),
			"a"((unsigned long)state.count),
			"S"(in), "D"(result));
	irq_ts_restore(ts_state);
static int padlock_sha1_final(struct shash_desc *desc, u8 *out)
{
	u8 buf[4];

	return padlock_sha1_finup(desc, buf, 0, out);
}
static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
				unsigned int count, u8 *out)
	unsigned int leftover;
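	/*
	 * Same pattern as the SHA-1 finup path above; the opcode byte 0xd0
	 * selects the PadLock "rep xsha256" operation instead of "rep xsha1".
	 */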
	ts_state = irq_ts_save();
	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      :
		      : "c"((unsigned long)state.count + count),
			"a"((unsigned long)state.count),
			"S"(in), "D"(result));
	irq_ts_restore(ts_state);
static int padlock_sha256_final(struct shash_desc *desc, u8 *out)
{
	u8 buf[4];

	return padlock_sha256_finup(desc, buf, 0, out);
}
static int padlock_cra_init(struct crypto_tfm *tfm)
	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
	if (IS_ERR(fallback_tfm)) {
		       fallback_driver_name);
		err = PTR_ERR(fallback_tfm);
	hash->descsize += crypto_shash_descsize(fallback_tfm);
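	/*
	 * The PadLock descriptor embeds the fallback's own shash_desc, whose
	 * size is only known once the fallback has been allocated, so it is
	 * added to descsize here at cra_init time.
	 */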
static void padlock_cra_exit(struct crypto_tfm *tfm)
	.init	=	padlock_sha_init,
	.update	=	padlock_sha_update,
	.finup	=	padlock_sha1_finup,
	.final	=	padlock_sha1_final,
	.export	=	padlock_sha_export,
	.import	=	padlock_sha_import,
		.cra_driver_name	=	"sha1-padlock",
		.cra_init		=	padlock_cra_init,
		.cra_exit		=	padlock_cra_exit,
	.init	=	padlock_sha_init,
	.update	=	padlock_sha_update,
	.finup	=	padlock_sha256_finup,
	.final	=	padlock_sha256_final,
	.export	=	padlock_sha_export,
	.import	=	padlock_sha_import,
		.cra_name		=	"sha256",
		.cra_driver_name	=	"sha256-padlock",
		.cra_init		=	padlock_cra_init,
		.cra_exit		=	padlock_cra_exit,
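/*
 * Both shash_alg definitions advertise the generic algorithm names with
 * PadLock-specific driver names, so the crypto API can pick the hardware
 * implementation transparently when it is registered with a higher priority
 * than the generic C versions.
 */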
static int padlock_sha1_init_nano(struct shash_desc *desc)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
static int padlock_sha1_update_nano(struct shash_desc *desc,
				    const u8 *data, unsigned int len)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int partial, done;
	partial = sctx->count & 0x3f;
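	/*
	 * 'partial' is the number of bytes already buffered from a previous
	 * update (count modulo the 64-byte block size).  If the buffer can be
	 * topped up to a full block, that block is hashed first; in the
	 * inline asm below %ecx holds the number of 64-byte blocks to process
	 * and %eax = -1 requests an intermediate (unpadded) operation from
	 * the Nano hash unit so hashing can continue across update calls.
	 */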
		ts_state = irq_ts_save();
		asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		: "+S"(src), "+D"(dst)
		: "a"((long)-1), "c"((unsigned long)1));
		irq_ts_restore(ts_state);
		ts_state = irq_ts_save();
		asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		: "+S"(src), "+D"(dst)
		: "a"((long)-1),
		  "c"((unsigned long)((len - done) / 64)));
		irq_ts_restore(ts_state);
static int padlock_sha1_final_nano(struct shash_desc *desc, u8 *out)
{
	struct sha1_state *state = (struct sha1_state *)shash_desc_ctx(desc);
	unsigned int partial, padlen;
	partial = state->count & 0x3f;
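	/*
	 * Standard SHA padding: append a 0x80 byte and enough zeroes so that
	 * the 8-byte bit-length field ends exactly on a 64-byte block
	 * boundary; if fewer than 56 bytes are free in the current block, the
	 * padding spills into one extra block.
	 */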
	padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
	padlock_sha1_update_nano(desc, padding, padlen);
	padlock_sha1_update_nano(desc, (const u8 *)&bits, sizeof(bits));
static int padlock_sha256_init_nano(struct shash_desc *desc)
static int padlock_sha256_update_nano(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial, done;
	partial = sctx->count & 0x3f;
		ts_state = irq_ts_save();
		asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		: "+S"(src), "+D"(dst)
		: "a"((long)-1), "c"((unsigned long)1));
		irq_ts_restore(ts_state);
		ts_state = irq_ts_save();
		asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		: "+S"(src), "+D"(dst)
		: "a"((long)-1),
		  "c"((unsigned long)((len - done) / 64)));
		irq_ts_restore(ts_state);
		done += ((len - done) - (len - done) % 64);
	memcpy(sctx->buf + partial, src, len - done);
static int padlock_sha256_final_nano(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state =
		(struct sha256_state *)shash_desc_ctx(desc);
	unsigned int partial, padlen;
	static const u8 padding[64] = { 0x80, };
	partial = state->count & 0x3f;
	padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
	padlock_sha256_update_nano(desc, padding, padlen);
	padlock_sha256_update_nano(desc, (const u8 *)&bits, sizeof(bits));
static int padlock_sha_export_nano(struct shash_desc *desc,
				   void *out)
{
	int statesize = crypto_shash_statesize(desc->tfm);
	void *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, statesize);
static int padlock_sha_import_nano(struct shash_desc *desc,
				   const void *in)
{
	int statesize = crypto_shash_statesize(desc->tfm);
	void *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, statesize);
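	/*
	 * On Nano the shash descriptor is just the raw sha1_state /
	 * sha256_state, so export and import reduce to plain copies of
	 * statesize bytes.
	 */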
static struct shash_alg sha1_alg_nano = {
	.init	=	padlock_sha1_init_nano,
	.update	=	padlock_sha1_update_nano,
	.final	=	padlock_sha1_final_nano,
	.export	=	padlock_sha_export_nano,
	.import	=	padlock_sha_import_nano,
		.cra_driver_name	=	"sha1-padlock-nano",
static struct shash_alg sha256_alg_nano = {
	.init	=	padlock_sha256_init_nano,
	.update	=	padlock_sha256_update_nano,
	.final	=	padlock_sha256_final_nano,
	.export	=	padlock_sha_export_nano,
	.import	=	padlock_sha_import_nano,
		.cra_name		=	"sha256",
		.cra_driver_name	=	"sha256-padlock-nano",
static struct x86_cpu_id padlock_sha_ids[] = {
static int __init padlock_init(void)
		sha256 = &sha256_alg;
	} else {
		sha1 = &sha1_alg_nano;
		sha256 = &sha256_alg_nano;
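	/*
	 * The assignments above select between the classic PadLock
	 * implementations and the *_nano variants: padlock_sha_ids matches
	 * CPUs advertising the PHE (PadLock Hash Engine) feature, and the CPU
	 * model then decides whether the multi-part capable Nano paths can be
	 * used.
	 */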
static void __exit padlock_fini(void)