11 #include <crypto/aes.h>
13 #include <linux/module.h>
15 #include <linux/types.h>
16 #include <linux/errno.h>
18 #include <linux/kernel.h>
21 #include <linux/slab.h>
23 #include <asm/byteorder.h>
24 #include <asm/processor.h>
/*
 * How many AES blocks the PadLock xcrypt instruction is handed per
 * "rep" iteration.  The *_fetch_bytes macros derive the corresponding
 * byte counts from the current block counts (AES_BLOCK_SIZE each).
 * NOTE(review): lowercase function-like macros match this file's
 * existing style; leave as-is for consistency.
 */
31 static unsigned int ecb_fetch_blocks = 2;
32 #define MAX_ECB_FETCH_BLOCKS (8)
33 #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
35 static unsigned int cbc_fetch_blocks = 1;
36 #define MAX_CBC_FETCH_BLOCKS (4)
37 #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
/*
 * Return the aes_ctx embedded in a tfm context, aligned as required.
 * NOTE(review): this extraction is missing lines (the computation of
 * 'align' and the rounding of 'addr' are not visible) -- TODO recover
 * the full text from the original file before relying on it.
 */
84 static inline struct aes_ctx *aes_ctx_common(
void *
ctx)
86 unsigned long addr = (
unsigned long)ctx;
/* If the generic ctx alignment already satisfies ours, no fixup needed. */
89 if (align <= crypto_tfm_ctx_alignment())
/* Cipher-tfm accessor: wraps aes_ctx_common over crypto_tfm_ctx(). */
96 return aes_ctx_common(crypto_tfm_ctx(tfm));
/* Blkcipher-tfm accessor: same, but via crypto_blkcipher_ctx(). */
101 return aes_ctx_common(crypto_blkcipher_ctx(tfm));
/*
 * aes_set_key - program the per-tfm control words from the user key.
 *
 * Derives the PadLock control-word fields from key_len:
 *   rounds = 10 + (key_len - 16) / 4   (10/12/14 for 128/192/256-bit)
 *   ksize  = (key_len - 16) / 8        (0/1/2 encoding of the key size)
 * and mirrors them into both the encrypt and decrypt cwords; only the
 * decrypt cword gets encdec = 1.  If the hardware can expand this key
 * length itself, keygen is enabled on both cwords.
 * NOTE(review): lines are missing from this extraction (key-length
 * validation, the software key-expansion fallback, and the return
 * value are not visible) -- TODO recover from the original file.
 */
104 static int aes_set_key(
struct crypto_tfm *tfm,
const u8 *in_key,
105 unsigned int key_len)
/* Mark the decrypt control word as a decryption operation. */
133 ctx->
cword.decrypt.encdec = 1;
134 ctx->
cword.encrypt.rounds = 10 + (key_len - 16) / 4;
135 ctx->
cword.decrypt.rounds = ctx->
cword.encrypt.rounds;
136 ctx->
cword.encrypt.ksize = (key_len - 16) / 8;
137 ctx->
cword.decrypt.ksize = ctx->
cword.encrypt.ksize;
/* Hardware key schedule available for this key length? */
140 if (aes_hw_extkey_available(key_len))
/* Point the decrypt key at its storage and let hardware generate it. */
143 ctx->
D = ctx->d_data;
144 ctx->
cword.encrypt.keygen = 1;
145 ctx->
cword.decrypt.keygen = 1;
/*
 * padlock_reset_key fragment: if the control word about to be used is
 * not the one last used on this CPU, force the PadLock unit to reload
 * its key state by rewriting EFLAGS (pushf/popf), using the 32- or
 * 64-bit form depending on CONFIG_X86_64.
 * NOTE(review): the enclosing function header and per-cpu bookkeeping
 * lines are missing from this extraction -- TODO confirm against the
 * original file.
 */
171 if (cword !=
per_cpu(paes_last_cword, cpu))
172 #ifndef CONFIG_X86_64
173 asm volatile (
"pushfl; popfl");
175 asm volatile (
"pushfq; popfq");
/*
 * padlock_store_cword - record the cword just used (body not visible
 * here; presumably updates paes_last_cword -- verify upstream).
 */
179 static inline void padlock_store_cword(
struct cword *cword)
/*
 * rep_xcrypt_ecb - issue the PadLock "rep xcryptecb" instruction
 * (opcode bytes 0xf3 0x0f 0xa7 0xc8) on 'count' blocks.
 * ESI/EDI (input/output) are read-write operands, as the insn
 * advances them.
 * NOTE(review): the input-operand list binding key/control_word/count
 * to EBX/EDX/ECX, plus the cword/count parameters and braces, are
 * missing from this extraction -- TODO recover from the original.
 */
190 static inline void rep_xcrypt_ecb(
const u8 *
input,
u8 *output,
void *key,
193 asm volatile (
".byte 0xf3,0x0f,0xa7,0xc8"
194 :
"+S"(
input),
"+D"(output)
/*
 * rep_xcrypt_cbc - issue the PadLock "rep xcryptcbc" instruction
 * (opcode bytes 0xf3 0x0f 0xa7 0xd0) on 'count' blocks.
 * Register contract visible below: ESI = input, EDI = output,
 * EAX = iv (all updated by the insn), EDX = control word,
 * EBX = key, ECX = block count.
 * NOTE(review): the function braces and the trailing "return iv;"
 * implied by the u8 * return type are missing from this extraction --
 * TODO recover from the original file.
 */
198 static inline u8 *rep_xcrypt_cbc(
const u8 *input,
u8 *output,
void *key,
199 u8 *
iv,
struct cword *control_word,
int count)
201 asm volatile (
".byte 0xf3,0x0f,0xa7,0xd0"
202 :
"+S" (
input),
"+D" (output),
"+a" (
iv)
203 :
"d" (control_word),
"b" (
key),
"c" (count));
/*
 * ecb_crypt_copy / cbc_crypt_copy - run xcrypt on a bounce buffer
 * 'tmp' instead of the caller's 'in' (presumably to keep the engine's
 * over-reading prefetch inside a safe buffer -- confirm against the
 * original file's comment).
 * NOTE(review): the tmp declaration, the memcpy into it, and the whole
 * cbc_crypt_copy signature line are missing from this extraction.
 */
207 static void ecb_crypt_copy(
const u8 *
in,
u8 *
out,
u32 *key,
208 struct cword *cword,
int count)
218 rep_xcrypt_ecb(tmp, out, key, cword, count);
/* cbc_crypt_copy tail: same bounce-buffer scheme, returns the new iv. */
222 u8 *iv,
struct cword *cword,
int count)
232 return rep_xcrypt_cbc(tmp, out, key, iv, cword, count);
/*
 * ecb_crypt - ECB dispatch: take the copying slow path when the
 * direct xcrypt would be unsafe, otherwise operate in place on 'in'.
 * NOTE(review): the condition selecting between the two calls is
 * missing from this extraction (upstream guards against the engine
 * reading past the source buffer) -- TODO confirm.
 */
235 static inline void ecb_crypt(
const u8 *
in,
u8 *
out,
u32 *key,
236 struct cword *cword,
int count)
/* Slow path: bounce through a temporary buffer. */
242 ecb_crypt_copy(in, out, key, cword, count);
/* Fast path: feed the caller's buffer straight to the engine. */
246 rep_xcrypt_ecb(in, out, key, cword, count);
/*
 * cbc_crypt - CBC dispatch mirroring ecb_crypt: copying slow path
 * versus direct xcryptcbc; both return the updated iv pointer.
 * NOTE(review): the selecting condition is missing from this
 * extraction -- TODO recover from the original file.
 */
249 static inline u8 *cbc_crypt(
const u8 *in,
u8 *out,
u32 *key,
250 u8 *iv,
struct cword *cword,
int count)
254 return cbc_crypt_copy(in, out, key, iv, cword, count);
256 return rep_xcrypt_cbc(in, out, key, iv, cword, count);
/*
 * padlock_xcrypt_ecb - bulk ECB: small requests (< ecb_fetch_blocks)
 * go through ecb_crypt; otherwise the leftover 'initial' blocks
 * (count mod ecb_fetch_blocks, computed with a mask, so
 * ecb_fetch_blocks is presumably a power of two -- confirm) and then
 * the remaining full groups are issued as two rep-xcryptecb sequences.
 * NOTE(review): the input-operand lists of both asm statements and the
 * early return after ecb_crypt are missing from this extraction.
 */
259 static inline void padlock_xcrypt_ecb(
const u8 *input,
u8 *output,
void *key,
260 void *control_word,
u32 count)
262 u32 initial = count & (ecb_fetch_blocks - 1);
264 if (count < ecb_fetch_blocks) {
265 ecb_crypt(input, output, key, control_word, count);
/* First pass: the 'initial' odd blocks. */
270 asm volatile (
".byte 0xf3,0x0f,0xa7,0xc8"
271 :
"+S"(
input),
"+D"(output)
/* Second pass: the remaining count - initial blocks. */
274 asm volatile (
".byte 0xf3,0x0f,0xa7,0xc8"
275 :
"+S"(
input),
"+D"(output)
/*
 * padlock_xcrypt_cbc - bulk CBC analogue of padlock_xcrypt_ecb:
 * small requests fall back to cbc_crypt; larger ones run the odd
 * 'initial' blocks and then the remaining count-initial blocks as two
 * rep-xcryptcbc sequences.  EAX carries the chaining iv across both
 * asm statements; the function's u8 * return is presumably that iv --
 * the return statement is not visible in this extraction.
 * NOTE(review): braces and the early-exit flow are also missing --
 * TODO recover from the original file.
 */
279 static inline u8 *padlock_xcrypt_cbc(
const u8 *input,
u8 *output,
void *key,
280 u8 *iv,
void *control_word,
u32 count)
282 u32 initial = count & (cbc_fetch_blocks - 1);
284 if (count < cbc_fetch_blocks)
285 return cbc_crypt(input, output, key, iv, control_word, count);
/* First pass: the 'initial' odd blocks, chaining through EAX (iv). */
288 asm volatile (
".byte 0xf3,0x0f,0xa7,0xd0"
289 :
"+S" (
input),
"+D" (output),
"+a" (
iv)
290 :
"d" (control_word),
"b" (
key),
"c" (initial));
/* Second pass: the remaining full groups. */
292 asm volatile (
".byte 0xf3,0x0f,0xa7,0xd0"
293 :
"+S" (
input),
"+D" (output),
"+a" (
iv)
294 :
"d" (control_word),
"b" (
key),
"c" (count-initial));
/*
 * aes_encrypt - single-block cipher encrypt callback.
 * Sequence: make sure this CPU's PadLock state matches our encrypt
 * cword, save/restore the TS flag around the xcrypt (irq_ts_save /
 * irq_ts_restore), run one block through ecb_crypt with the encrypt
 * key (ctx->E), then record the cword as last-used.
 * NOTE(review): the ctx lookup and ts_state declaration are missing
 * from this extraction.
 */
298 static void aes_encrypt(
struct crypto_tfm *tfm,
u8 *out,
const u8 *in)
303 padlock_reset_key(&ctx->
cword.encrypt);
304 ts_state = irq_ts_save();
305 ecb_crypt(in, out, ctx->
E, &ctx->
cword.encrypt, 1);
306 irq_ts_restore(ts_state);
307 padlock_store_cword(&ctx->
cword.encrypt);
/*
 * aes_decrypt - single-block cipher decrypt callback.
 * Mirrors aes_encrypt but runs the block with the decrypt key (ctx->D)
 * and decrypt cword.  Note it resets/stores &ctx->cword.encrypt, not
 * .decrypt -- this matches the convention used elsewhere in this file
 * (see aes_encrypt); presumably intentional per-cpu cword tracking --
 * verify against the original driver before changing.
 */
310 static void aes_decrypt(
struct crypto_tfm *tfm,
u8 *out,
const u8 *in)
315 padlock_reset_key(&ctx->
cword.encrypt);
316 ts_state = irq_ts_save();
317 ecb_crypt(in, out, ctx->
D, &ctx->
cword.decrypt, 1);
318 irq_ts_restore(ts_state);
319 padlock_store_cword(&ctx->
cword.encrypt);
/*
 * Plain AES cipher registration ("aes-padlock"): context size is the
 * aligned aes_ctx, with setkey/encrypt/decrypt wired to the functions
 * above.  NOTE(review): most initializer fields (.cra_name, flags,
 * blocksize, priority) are missing from this extraction.
 */
324 .cra_driver_name =
"aes-padlock",
328 .cra_ctxsize =
sizeof(
struct aes_ctx),
335 .cia_setkey = aes_set_key,
336 .cia_encrypt = aes_encrypt,
337 .cia_decrypt = aes_decrypt,
/*
 * ecb_aes_encrypt fragment - blkcipher ECB encrypt: reset the per-cpu
 * cword, walk the scatterlists (blkcipher_walk_init + walk loop),
 * and feed each contiguous run to padlock_xcrypt_ecb with the encrypt
 * key/cword, all under irq_ts_save/restore.  Ends by recording the
 * encrypt cword as last-used.
 * NOTE(review): the function header, ctx/walk declarations, the
 * nbytes bookkeeping inside the loop, and the return value are
 * missing from this extraction.
 */
351 padlock_reset_key(&ctx->
cword.encrypt);
353 blkcipher_walk_init(&walk, dst, src, nbytes);
356 ts_state = irq_ts_save();
357 while ((nbytes = walk.nbytes)) {
358 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
359 ctx->
E, &ctx->
cword.encrypt,
364 irq_ts_restore(ts_state);
366 padlock_store_cword(&ctx->
cword.encrypt);
/*
 * ecb_aes_decrypt fragment - mirror of ecb_aes_encrypt using the
 * decrypt key (ctx->D) and decrypt cword for the bulk operation;
 * the final padlock_store_cword records the encrypt cword, matching
 * aes_decrypt's convention -- verify intentional against the original.
 * NOTE(review): header, declarations, loop bookkeeping, and return
 * are missing from this extraction.
 */
380 padlock_reset_key(&ctx->
cword.decrypt);
382 blkcipher_walk_init(&walk, dst, src, nbytes);
385 ts_state = irq_ts_save();
386 while ((nbytes = walk.nbytes)) {
387 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
388 ctx->
D, &ctx->
cword.decrypt,
393 irq_ts_restore(ts_state);
395 padlock_store_cword(&ctx->
cword.encrypt);
/*
 * ECB blkcipher registration ("ecb(aes)" / "ecb-aes-padlock") wiring
 * the walk-based bulk handlers above.  NOTE(review): remaining
 * initializer fields are missing from this extraction.
 */
401 .cra_name =
"ecb(aes)",
402 .cra_driver_name =
"ecb-aes-padlock",
406 .cra_ctxsize =
sizeof(
struct aes_ctx),
414 .setkey = aes_set_key,
415 .encrypt = ecb_aes_encrypt,
416 .decrypt = ecb_aes_decrypt,
/*
 * cbc_aes_encrypt fragment - blkcipher CBC encrypt: walks the
 * scatterlists and chains each run through padlock_xcrypt_cbc with
 * the encrypt key/cword, carrying walk.iv; the returned iv presumably
 * feeds the next iteration (the copy-back line is not visible here).
 * NOTE(review): the final padlock_store_cword uses &ctx->cword.decrypt
 * while the sibling paths store .encrypt -- flagged as an
 * inconsistency to confirm against the original driver; not changed
 * here because surrounding lines are missing from this extraction.
 */
430 padlock_reset_key(&ctx->
cword.encrypt);
432 blkcipher_walk_init(&walk, dst, src, nbytes);
435 ts_state = irq_ts_save();
436 while ((nbytes = walk.nbytes)) {
437 u8 *iv = padlock_xcrypt_cbc(walk.src.virt.addr,
438 walk.dst.virt.addr, ctx->
E,
439 walk.iv, &ctx->
cword.encrypt,
445 irq_ts_restore(ts_state);
447 padlock_store_cword(&ctx->
cword.decrypt);
/*
 * cbc_aes_decrypt fragment - blkcipher CBC decrypt: walks the
 * scatterlists and runs each contiguous span through
 * padlock_xcrypt_cbc with the decrypt key (ctx->D) and decrypt cword.
 * Both the reset and the final store use the encrypt cword, matching
 * aes_decrypt's convention -- verify intentional against the original.
 * NOTE(review): header, declarations, loop bookkeeping, and return
 * are missing from this extraction.
 */
461 padlock_reset_key(&ctx->
cword.encrypt);
463 blkcipher_walk_init(&walk, dst, src, nbytes);
466 ts_state = irq_ts_save();
467 while ((nbytes = walk.nbytes)) {
468 padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr,
469 ctx->
D, walk.iv, &ctx->
cword.decrypt,
475 irq_ts_restore(ts_state);
477 padlock_store_cword(&ctx->
cword.encrypt);
/*
 * CBC blkcipher registration ("cbc(aes)" / "cbc-aes-padlock") wiring
 * the CBC bulk handlers above.  NOTE(review): remaining initializer
 * fields (flags, blocksize, ivsize, priority) are missing from this
 * extraction.
 */
483 .cra_name =
"cbc(aes)",
484 .cra_driver_name =
"cbc-aes-padlock",
488 .cra_ctxsize =
sizeof(
struct aes_ctx),
497 .setkey = aes_set_key,
498 .encrypt = cbc_aes_encrypt,
499 .decrypt = cbc_aes_decrypt,
/*
 * padlock_init - module entry: bails out early when the CPU does not
 * have the xcrypt engine enabled.  NOTE(review): the registration of
 * the three algorithms and the error-unwind path are missing from
 * this extraction.
 */
510 static int __init padlock_init(
void)
518 if (!cpu_has_xcrypt_enabled) {
/*
 * padlock_fini - module exit; body (algorithm unregistration) is not
 * visible in this extraction.
 */
552 static void __exit padlock_fini(
void)