author    | Herbert Xu <herbert@gondor.apana.org.au> | 2006-05-16 22:09:29 +1000
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2006-06-26 17:34:39 +1000
commit    | 6c2bb98bc33ae33c7a33a133a4cd5a06395fece5 (patch)
tree      | 96684cd2c473cd05d651ce1fa3dd72b1b4b19b09 /arch
parent    | 43600106e32809a4dead79fec67a63e9860e3d5d (diff)
[CRYPTO] all: Pass tfm instead of ctx to algorithms
Up until now algorithms have been happy to get a context pointer since
they know everything that's in the tfm already (e.g., alignment, block
size).
However, once we have parameterised algorithms, such information will
be specific to each tfm. So the algorithm API needs to be changed to
pass the tfm structure instead of the context pointer.
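
As an illustration of the new calling convention, a minimal sketch of a
converted setkey routine follows (example_ctx and example_setkey are
placeholder names, not part of this patch):

#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/string.h>

struct example_ctx {			/* hypothetical per-tfm state */
	u8 key[32];
	unsigned int key_len;
};

/* Old interface: the core handed the algorithm its private context:
 *   static int example_setkey(void *ctx_arg, const u8 *in_key,
 *                             unsigned int key_len, u32 *flags);
 * New interface: the tfm is passed and the context is derived from it. */
static int example_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			  unsigned int key_len, u32 *flags)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len > sizeof(ctx->key)) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	memcpy(ctx->key, in_key, key_len);
	ctx->key_len = key_len;
	return 0;
}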
This patch is basically a text substitution. The only tricky bit is
the assembly routines that need to get the context pointer offset
through asm-offsets.h.
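
For reference, asm-offsets works by compiling a C file to assembly and
post-processing it into asm-offsets.h, so a structure offset becomes a plain
assembler constant.  A condensed sketch of the i386 pattern (DEFINE is the
helper macro those files already provide):

#include <linux/crypto.h>
#include <linux/stddef.h>

#define DEFINE(sym, val) \
	asm volatile("\n->" #sym " %0 " #val : : "i" (val))

void foo(void)
{
	/* emitted into asm-offsets.h; the AES assembly adds this constant
	 * to the tfm pointer to reach its key schedule */
	DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
}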
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch')
-rw-r--r-- | arch/i386/crypto/aes-i586-asm.S     | 28
-rw-r--r-- | arch/i386/crypto/aes.c              | 10
-rw-r--r-- | arch/i386/kernel/asm-offsets.c      |  3
-rw-r--r-- | arch/s390/crypto/aes_s390.c         | 14
-rw-r--r-- | arch/s390/crypto/des_s390.c         | 42
-rw-r--r-- | arch/s390/crypto/sha1_s390.c        | 15
-rw-r--r-- | arch/s390/crypto/sha256_s390.c      | 13
-rw-r--r-- | arch/x86_64/crypto/aes-x86_64-asm.S | 18
-rw-r--r-- | arch/x86_64/crypto/aes.c            | 10
-rw-r--r-- | arch/x86_64/kernel/asm-offsets.c    |  3
10 files changed, 83 insertions, 73 deletions
diff --git a/arch/i386/crypto/aes-i586-asm.S b/arch/i386/crypto/aes-i586-asm.S
index 2851f7fe51e6..f942f0c8f630 100644
--- a/arch/i386/crypto/aes-i586-asm.S
+++ b/arch/i386/crypto/aes-i586-asm.S
@@ -36,19 +36,19 @@
 .file "aes-i586-asm.S"
 .text

-#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)
-
-// offsets to parameters with one register pushed onto stack
+#include <asm/asm-offsets.h>

-#define in_blk 16 // input byte array address parameter
-#define out_blk 12 // output byte array address parameter
-#define ctx 8 // AES context structure
+#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)

-// offsets in context structure
+/* offsets to parameters with one register pushed onto stack */
+#define tfm 8
+#define out_blk 12
+#define in_blk 16

-#define ekey 0 // encryption key schedule base address
-#define nrnd 256 // number of rounds
-#define dkey 260 // decryption key schedule base address
+/* offsets in crypto_tfm structure */
+#define ekey (crypto_tfm_ctx_offset + 0)
+#define nrnd (crypto_tfm_ctx_offset + 256)
+#define dkey (crypto_tfm_ctx_offset + 260)

 // register mapping for encrypt and decrypt subroutines
@@ -217,7 +217,7 @@
 	do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */

 // AES (Rijndael) Encryption Subroutine
-/* void aes_enc_blk(void *ctx, u8 *out_blk, const u8 *in_blk) */
+/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */

 .global aes_enc_blk
@@ -228,7 +228,7 @@ aes_enc_blk:
 	push %ebp
-	mov ctx(%esp),%ebp // pointer to context
+	mov tfm(%esp),%ebp

 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
@@ -293,7 +293,7 @@ aes_enc_blk:
 	ret

 // AES (Rijndael) Decryption Subroutine
-/* void aes_dec_blk(void *ctx, u8 *out_blk, const u8 *in_blk) */
+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */

 .global aes_dec_blk
@@ -304,7 +304,7 @@ aes_enc_blk:
 aes_dec_blk:
 	push %ebp
-	mov ctx(%esp),%ebp // pointer to context
+	mov tfm(%esp),%ebp

 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
diff --git a/arch/i386/crypto/aes.c b/arch/i386/crypto/aes.c
index a0e033510a3b..b9c7d99160f1 100644
--- a/arch/i386/crypto/aes.c
+++ b/arch/i386/crypto/aes.c
@@ -45,8 +45,8 @@
 #include <linux/crypto.h>
 #include <linux/linkage.h>

-asmlinkage void aes_enc_blk(void *ctx, u8 *dst, const u8 *src);
-asmlinkage void aes_dec_blk(void *ctx, u8 *dst, const u8 *src);
+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

 #define AES_MIN_KEY_SIZE 16
 #define AES_MAX_KEY_SIZE 32
@@ -378,12 +378,12 @@ static void gen_tabs(void)
 	k[8*(i)+11] = ss[3]; \
 }

-static int
-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
 	int i;
 	u32 ss[8];
-	struct aes_ctx *ctx = ctx_arg;
+	struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 	const __le32 *key = (const __le32 *)in_key;

 	/* encryption schedule */
diff --git a/arch/i386/kernel/asm-offsets.c b/arch/i386/kernel/asm-offsets.c
index 36d66e2077d0..1c3a809e6421 100644
--- a/arch/i386/kernel/asm-offsets.c
+++ b/arch/i386/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * to extract and format the required data.
  */

+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/signal.h>
 #include <linux/personality.h>
@@ -69,4 +70,6 @@ void foo(void)
 	DEFINE(PAGE_SIZE_asm, PAGE_SIZE);
 	DEFINE(VSYSCALL_BASE, __fix_to_virt(FIX_VSYSCALL));
+
+	OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx);
 }
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index c5ca2dc5d428..5713c7e5bd16 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -37,10 +37,10 @@ struct s390_aes_ctx {
 	int key_len;
 };

-static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len,
-		       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
-	struct s390_aes_ctx *sctx = ctx;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 	switch (key_len) {
 	case 16:
@@ -70,9 +70,9 @@ fail:
 	return -EINVAL;
 }

-static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct s390_aes_ctx *sctx = ctx;
+	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 	switch (sctx->key_len) {
 	case 16:
@@ -90,9 +90,9 @@ static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
 	}
 }

-static void aes_decrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct s390_aes_ctx *sctx = ctx;
+	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 	switch (sctx->key_len) {
 	case 16:
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index e3c37aa0a199..b3f7496a79b4 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -44,10 +44,10 @@ struct crypt_s390_des3_192_ctx {
 	u8 key[DES3_192_KEY_SIZE];
 };

-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
-		      u32 *flags)
+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
+		      unsigned int keylen, u32 *flags)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
 	int ret;

 	/* test if key is valid (not a weak key) */
@@ -57,16 +57,16 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return ret;
 }

-static void des_encrypt(void *ctx, u8 *out, const u8 *in)
+static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }

-static void des_decrypt(void *ctx, u8 *out, const u8 *in)
+static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }
@@ -166,11 +166,11 @@ static struct crypto_alg des_alg = {
 *	Implementers MUST reject keys that exhibit this property.
 *
 */
-static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
-			   u32 *flags)
+static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
+			   unsigned int keylen, u32 *flags)
 {
 	int i, ret;
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
 	const u8* temp_key = key;

 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
@@ -186,17 +186,17 @@ static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return 0;
 }

-static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
 		      DES3_128_BLOCK_SIZE);
 }

-static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
 		      DES3_128_BLOCK_SIZE);
@@ -302,11 +302,11 @@ static struct crypto_alg des3_128_alg = {
 *	property.
 *
 */
-static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
-			   u32 *flags)
+static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
+			   unsigned int keylen, u32 *flags)
 {
 	int i, ret;
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
 	const u8* temp_key = key;

 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
@@ -325,17 +325,17 @@ static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return 0;
 }

-static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
 		      DES3_192_BLOCK_SIZE);
 }

-static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);

 	crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
 		      DES3_192_BLOCK_SIZE);
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 36bb5346a8c4..9d34a35b1aa5 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -40,9 +40,9 @@ struct crypt_s390_sha1_ctx {
 	u8 buffer[2 * SHA1_BLOCK_SIZE];
 };

-static void sha1_init(void *ctx_arg)
+static void sha1_init(struct crypto_tfm *tfm)
 {
-	struct crypt_s390_sha1_ctx *ctx = ctx_arg;
+	struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm);
 	static const u32 initstate[5] = {
 		0x67452301,
 		0xEFCDAB89,
@@ -56,13 +56,13 @@ static void sha1_init(void *ctx_arg)
 	ctx->buf_len = 0;
 }

-static void
-sha1_update(void *ctx, const u8 *data, unsigned int len)
+static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
+			unsigned int len)
 {
 	struct crypt_s390_sha1_ctx *sctx;
 	long imd_len;

-	sctx = ctx;
+	sctx = crypto_tfm_ctx(tfm);
 	sctx->count += len * 8; //message bit length

 	//anything in buffer yet? -> must be completed
@@ -111,10 +111,9 @@ pad_message(struct crypt_s390_sha1_ctx* sctx)
 }

 /* Add padding and return the message digest. */
-static void
-sha1_final(void* ctx, u8 *out)
+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 {
-	struct crypt_s390_sha1_ctx *sctx = ctx;
+	struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);

 	//must perform manual padding
 	pad_message(sctx);
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 2c76e7bee41c..f573df30f31d 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -31,9 +31,9 @@ struct s390_sha256_ctx {
 	u8 buf[2 * SHA256_BLOCK_SIZE];
 };

-static void sha256_init(void *ctx)
+static void sha256_init(struct crypto_tfm *tfm)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);

 	sctx->state[0] = 0x6a09e667;
 	sctx->state[1] = 0xbb67ae85;
@@ -46,9 +46,10 @@ static void sha256_init(void *ctx)
 	sctx->count = 0;
 }

-static void sha256_update(void *ctx, const u8 *data, unsigned int len)
+static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
+			  unsigned int len)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
 	unsigned int index;
 	int ret;

@@ -107,9 +108,9 @@ static void pad_message(struct s390_sha256_ctx* sctx)
 }

 /* Add padding and return the message digest */
-static void sha256_final(void* ctx, u8 *out)
+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);

 	/* must perform manual padding */
 	pad_message(sctx);
diff --git a/arch/x86_64/crypto/aes-x86_64-asm.S b/arch/x86_64/crypto/aes-x86_64-asm.S
index 483cbb23ab8d..f3ba643e144d 100644
--- a/arch/x86_64/crypto/aes-x86_64-asm.S
+++ b/arch/x86_64/crypto/aes-x86_64-asm.S
@@ -15,6 +15,10 @@

 .text

+#include <asm/asm-offsets.h>
+
+#define BASE crypto_tfm_ctx_offset
+
 #define R1	%rax
 #define R1E	%eax
 #define R1X	%ax
@@ -46,19 +50,19 @@
 #define R10	%r10
 #define R11	%r11

-#define prologue(FUNC,BASE,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
+#define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
 	.global	FUNC;			\
 	.type	FUNC,@function;		\
 	.align	8;			\
 FUNC:	movq	r1,r2;			\
 	movq	r3,r4;			\
-	leaq	BASE+52(r8),r9;		\
+	leaq	BASE+KEY+52(r8),r9;	\
 	movq	r10,r11;		\
 	movl	(r7),r5 ## E;		\
 	movl	4(r7),r1 ## E;		\
 	movl	8(r7),r6 ## E;		\
 	movl	12(r7),r7 ## E;		\
-	movl	(r8),r10 ## E;		\
+	movl	BASE(r8),r10 ## E;	\
 	xorl	-48(r9),r5 ## E;	\
 	xorl	-44(r9),r1 ## E;	\
 	xorl	-40(r9),r6 ## E;	\
@@ -128,8 +132,8 @@ FUNC:	movq	r1,r2;			\
 	movl	r3 ## E,r1 ## E;	\
 	movl	r4 ## E,r2 ## E;

-#define entry(FUNC,BASE,B128,B192) \
-	prologue(FUNC,BASE,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
+#define entry(FUNC,KEY,B128,B192) \
+	prologue(FUNC,KEY,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)

 #define return epilogue(R8,R2,R9,R7,R5,R6,R3,R4,R11)
@@ -147,7 +151,7 @@ FUNC:	movq	r1,r2;			\
 #define decrypt_final(TAB,OFFSET) \
 	round(TAB,OFFSET,R2,R1,R4,R3,R6,R5,R7,R10,R5,R6,R3,R4)

-/* void aes_encrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_encrypt(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */

 	entry(aes_encrypt,0,enc128,enc192)
 	encrypt_round(aes_ft_tab,-96)
@@ -166,7 +170,7 @@ enc128:	encrypt_round(aes_ft_tab,-32)
 	encrypt_final(aes_fl_tab,112)
 	return

-/* void aes_decrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) */

 	entry(aes_decrypt,240,dec128,dec192)
 	decrypt_round(aes_it_tab,-96)
diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c
index 6f77e7700d32..d6f8e0463b5d 100644
--- a/arch/x86_64/crypto/aes.c
+++ b/arch/x86_64/crypto/aes.c
@@ -227,10 +227,10 @@ static void __init gen_tabs(void)
 	t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t;	\
 }

-static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
-		       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
-	struct aes_ctx *ctx = ctx_arg;
+	struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 	const __le32 *key = (const __le32 *)in_key;
 	u32 i, j, t, u, v, w;

@@ -283,8 +283,8 @@ static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
 	return 0;
 }

-extern void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in);
-extern void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in);
+extern void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+extern void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);

 static struct crypto_alg aes_alg = {
 	.cra_name	= "aes",
diff --git a/arch/x86_64/kernel/asm-offsets.c b/arch/x86_64/kernel/asm-offsets.c
index 38834bbbae11..96687e2beb2c 100644
--- a/arch/x86_64/kernel/asm-offsets.c
+++ b/arch/x86_64/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * and format the required data.
  */

+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/stddef.h>
 #include <linux/errno.h>
@@ -68,5 +69,7 @@ int main(void)
 	DEFINE(pbe_next, offsetof(struct pbe, next));
 	BLANK();
 	DEFINE(TSS_ist, offsetof(struct tss_struct, ist));
+	BLANK();
+	DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
 	return 0;
 }