author     lloyd <[email protected]>    2011-04-22 13:08:05 +0000
committer  lloyd <[email protected]>    2011-04-22 13:08:05 +0000
commit     8b40f974e65b7cc7d21a8e72b5f18f6e14208e57 (patch)
tree       1192bdf14dbab29218db64abbef9ec0217ce30c8 /src/block/serpent_x86_32
parent     7b96a4844bf97b9c04a18565334e21dc89b8ba0b (diff)
Rename all ia32 references to x86-32 and all amd64 references to x86-64.

Back the reported version from 1.10.0 to 1.9.17 for the time being;
still on the fence about whether this will be 1.10.0 or another
release candidate.
Diffstat (limited to 'src/block/serpent_x86_32')

 src/block/serpent_x86_32/info.txt          |  12
 src/block/serpent_x86_32/serp_x86_32.cpp   |  84
 src/block/serpent_x86_32/serp_x86_32.h     |  31
 src/block/serpent_x86_32/serp_x86_32_imp.S | 669

 4 files changed, 796 insertions, 0 deletions
diff --git a/src/block/serpent_x86_32/info.txt b/src/block/serpent_x86_32/info.txt
new file mode 100644
index 000000000..b9c993546
--- /dev/null
+++ b/src/block/serpent_x86_32/info.txt
@@ -0,0 +1,12 @@
+define SERPENT_X86_32
+
+load_on asm_ok
+
+<arch>
+x86_32
+</arch>
+
+<requires>
+asm_x86_32
+serpent
+</requires>
diff --git a/src/block/serpent_x86_32/serp_x86_32.cpp b/src/block/serpent_x86_32/serp_x86_32.cpp
new file mode 100644
index 000000000..4cefe1d65
--- /dev/null
+++ b/src/block/serpent_x86_32/serp_x86_32.cpp
@@ -0,0 +1,84 @@
+/*
+* Serpent in x86-32
+* (C) 1999-2007 Jack Lloyd
+*
+* Distributed under the terms of the Botan license
+*/
+
+#include <botan/serp_x86_32.h>
+#include <botan/loadstor.h>
+
+namespace Botan {
+
+extern "C" {
+
+/**
+* Entry point for Serpent encryption in x86 asm
+* @param in the input block
+* @param out the output block
+* @param ks the key schedule
+*/
+void botan_serpent_x86_32_encrypt(const byte in[16],
+                                  byte out[16],
+                                  const u32bit ks[132]);
+
+/**
+* Entry point for Serpent decryption in x86 asm
+* @param in the input block
+* @param out the output block
+* @param ks the key schedule
+*/
+void botan_serpent_x86_32_decrypt(const byte in[16],
+                                  byte out[16],
+                                  const u32bit ks[132]);
+
+/**
+* Entry point for Serpent key schedule in x86 asm
+* @param ks holds the initial working key (padded), and is set to the
+  final key schedule
+*/
+void botan_serpent_x86_32_key_schedule(u32bit ks[140]);
+
+}
+
+/*
+* Serpent Encryption
+*/
+void Serpent_X86_32::encrypt_n(const byte in[], byte out[], size_t blocks) const
+   {
+   for(size_t i = 0; i != blocks; ++i)
+      {
+      botan_serpent_x86_32_encrypt(in, out, this->get_round_keys());
+      in += BLOCK_SIZE;
+      out += BLOCK_SIZE;
+      }
+   }
+
+/*
+* Serpent Decryption
+*/
+void Serpent_X86_32::decrypt_n(const byte in[], byte out[], size_t blocks) const
+   {
+   for(size_t i = 0; i != blocks; ++i)
+      {
+      botan_serpent_x86_32_decrypt(in, out, this->get_round_keys());
+      in += BLOCK_SIZE;
+      out += BLOCK_SIZE;
+      }
+   }
+
+/*
+* Serpent Key Schedule
+*/
+void Serpent_X86_32::key_schedule(const byte key[], size_t length)
+   {
+   SecureVector<u32bit> W(140);
+   for(size_t i = 0; i != length / 4; ++i)
+      W[i] = load_le<u32bit>(key, i);
+   W[length / 4] |= u32bit(1) << ((length%4)*8);
+
+   botan_serpent_x86_32_key_schedule(W);
+   this->set_round_keys(W + 8);
+   }
+
+}
diff --git a/src/block/serpent_x86_32/serp_x86_32.h b/src/block/serpent_x86_32/serp_x86_32.h
new file mode 100644
index 000000000..f6c4d564a
--- /dev/null
+++ b/src/block/serpent_x86_32/serp_x86_32.h
@@ -0,0 +1,31 @@
+/*
+* Serpent in x86-32 asm
+* (C) 1999-2007 Jack Lloyd
+*
+* Distributed under the terms of the Botan license
+*/
+
+#ifndef BOTAN_SERPENT_X86_32_H__
+#define BOTAN_SERPENT_X86_32_H__
+
+#include <botan/serpent.h>
+
+namespace Botan {
+
+/**
+* Serpent implementation in x86-32 assembly
+*/
+class BOTAN_DLL Serpent_X86_32 : public Serpent
+   {
+   public:
+      void encrypt_n(const byte in[], byte out[], size_t blocks) const;
+      void decrypt_n(const byte in[], byte out[], size_t blocks) const;
+
+      BlockCipher* clone() const { return new Serpent_X86_32; }
+   private:
+      void key_schedule(const byte[], size_t);
+   };
+
+}
+
+#endif
diff --git a/src/block/serpent_x86_32/serp_x86_32_imp.S b/src/block/serpent_x86_32/serp_x86_32_imp.S
new file mode 100644
index 000000000..e2549a099
--- /dev/null
+++ b/src/block/serpent_x86_32/serp_x86_32_imp.S
@@ -0,0 +1,669 @@
+/*
+* Serpent in x86-32 assembler
+* (C) 1999-2007 Jack Lloyd
+*
+* Distributed under the terms of the Botan license
+*/
+
+#include <botan/internal/asm_x86_32.h>
+
+START_LISTING(serp_x86_32.S)
+
+#define SBOX_E1(A, B, C, D, T) \
+   XOR(D, A) ; \
+   ASSIGN(T, B) ; \
+   AND(B, D) ; \
+   XOR(T, C) ; \
+   XOR(B, A) ; \
+   OR(A, D) ; \
+   XOR(A, T) ; \
+   XOR(T, D) ; \
+   XOR(D, C) ; \
+   OR(C, B) ; \
+   XOR(C, T) ; \
+   NOT(T) ; \
+   OR(T, B) ; \
+   XOR(B, D) ; \
+   XOR(B, T) ; \
+   OR(D, A) ; \
+   XOR(B, D) ; \
+   XOR(T, D) ; \
+   ASSIGN(D, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, T) ;
+
+#define SBOX_E2(A, B, C, D, T) \
+   NOT(A) ; \
+   NOT(C) ; \
+   ASSIGN(T, A) ; \
+   AND(A, B) ; \
+   XOR(C, A) ; \
+   OR(A, D) ; \
+   XOR(D, C) ; \
+   XOR(B, A) ; \
+   XOR(A, T) ; \
+   OR(T, B) ; \
+   XOR(B, D) ; \
+   OR(C, A) ; \
+   AND(C, T) ; \
+   XOR(A, B) ; \
+   AND(B, C) ; \
+   XOR(B, A) ; \
+   AND(A, C) ; \
+   XOR(T, A) ; \
+   ASSIGN(A, C) ; \
+   ASSIGN(C, D) ; \
+   ASSIGN(D, B) ; \
+   ASSIGN(B, T) ;
+
+#define SBOX_E3(A, B, C, D, T) \
+   ASSIGN(T, A) ; \
+   AND(A, C) ; \
+   XOR(A, D) ; \
+   XOR(C, B) ; \
+   XOR(C, A) ; \
+   OR(D, T) ; \
+   XOR(D, B) ; \
+   XOR(T, C) ; \
+   ASSIGN(B, D) ; \
+   OR(D, T) ; \
+   XOR(D, A) ; \
+   AND(A, B) ; \
+   XOR(T, A) ; \
+   XOR(B, D) ; \
+   XOR(B, T) ; \
+   NOT(T) ; \
+   ASSIGN(A, C) ; \
+   ASSIGN(C, B) ; \
+   ASSIGN(B, D) ; \
+   ASSIGN(D, T) ;
+
+#define SBOX_E4(A, B, C, D, T) \
+   ASSIGN(T, A) ; \
+   OR(A, D) ; \
+   XOR(D, B) ; \
+   AND(B, T) ; \
+   XOR(T, C) ; \
+   XOR(C, D) ; \
+   AND(D, A) ; \
+   OR(T, B) ; \
+   XOR(D, T) ; \
+   XOR(A, B) ; \
+   AND(T, A) ; \
+   XOR(B, D) ; \
+   XOR(T, C) ; \
+   OR(B, A) ; \
+   XOR(B, C) ; \
+   XOR(A, D) ; \
+   ASSIGN(C, B) ; \
+   OR(B, D) ; \
+   XOR(B, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, C) ; \
+   ASSIGN(C, D) ; \
+   ASSIGN(D, T) ;
+
+#define SBOX_E5(A, B, C, D, T) \
+   XOR(B, D) ; \
+   NOT(D) ; \
+   XOR(C, D) ; \
+   XOR(D, A) ; \
+   ASSIGN(T, B) ; \
+   AND(B, D) ; \
+   XOR(B, C) ; \
+   XOR(T, D) ; \
+   XOR(A, T) ; \
+   AND(C, T) ; \
+   XOR(C, A) ; \
+   AND(A, B) ; \
+   XOR(D, A) ; \
+   OR(T, B) ; \
+   XOR(T, A) ; \
+   OR(A, D) ; \
+   XOR(A, C) ; \
+   AND(C, D) ; \
+   NOT(A) ; \
+   XOR(T, C) ; \
+   ASSIGN(C, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, T) ;
+
+#define SBOX_E6(A, B, C, D, T) \
+   XOR(A, B) ; \
+   XOR(B, D) ; \
+   NOT(D) ; \
+   ASSIGN(T, B) ; \
+   AND(B, A) ; \
+   XOR(C, D) ; \
+   XOR(B, C) ; \
+   OR(C, T) ; \
+   XOR(T, D) ; \
+   AND(D, B) ; \
+   XOR(D, A) ; \
+   XOR(T, B) ; \
+   XOR(T, C) ; \
+   XOR(C, A) ; \
+   AND(A, D) ; \
+   NOT(C) ; \
+   XOR(A, T) ; \
+   OR(T, D) ; \
+   XOR(T, C) ; \
+   ASSIGN(C, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, D) ; \
+   ASSIGN(D, T) ;
+
+#define SBOX_E7(A, B, C, D, T) \
+   NOT(C) ; \
+   ASSIGN(T, D) ; \
+   AND(D, A) ; \
+   XOR(A, T) ; \
+   XOR(D, C) ; \
+   OR(C, T) ; \
+   XOR(B, D) ; \
+   XOR(C, A) ; \
+   OR(A, B) ; \
+   XOR(C, B) ; \
+   XOR(T, A) ; \
+   OR(A, D) ; \
+   XOR(A, C) ; \
+   XOR(T, D) ; \
+   XOR(T, A) ; \
+   NOT(D) ; \
+   AND(C, T) ; \
+   XOR(C, D) ; \
+   ASSIGN(D, C) ; \
+   ASSIGN(C, T) ;
+
+#define SBOX_E8(A, B, C, D, T) \
+   ASSIGN(T, B) ; \
+   OR(B, C) ; \
+   XOR(B, D) ; \
+   XOR(T, C) ; \
+   XOR(C, B) ; \
+   OR(D, T) ; \
+   AND(D, A) ; \
+   XOR(T, C) ; \
+   XOR(D, B) ; \
+   OR(B, T) ; \
+   XOR(B, A) ; \
+   OR(A, T) ; \
+   XOR(A, C) ; \
+   XOR(B, T) ; \
+   XOR(C, B) ; \
+   AND(B, A) ; \
+   XOR(B, T) ; \
+   NOT(C) ; \
+   OR(C, A) ; \
+   XOR(T, C) ; \
+   ASSIGN(C, B) ; \
+   ASSIGN(B, D) ; \
+   ASSIGN(D, A) ; \
+   ASSIGN(A, T) ;
+
+#define SBOX_D1(A, B, C, D, T) \
+   NOT(C) ; \
+   ASSIGN(T, B) ; \
+   OR(B, A) ; \
+   NOT(T) ; \
+   XOR(B, C) ; \
+   OR(C, T) ; \
+   XOR(B, D) ; \
+   XOR(A, T) ; \
+   XOR(C, A) ; \
+   AND(A, D) ; \
+   XOR(T, A) ; \
+   OR(A, B) ; \
+   XOR(A, C) ; \
+   XOR(D, T) ; \
+   XOR(C, B) ; \
+   XOR(D, A) ; \
+   XOR(D, B) ; \
+   AND(C, D) ; \
+   XOR(T, C) ; \
+   ASSIGN(C, B) ; \
+   ASSIGN(B, T) ;
+
+#define SBOX_D2(A, B, C, D, T) \
+   ASSIGN(T, B) ; \
+   XOR(B, D) ; \
+   AND(D, B) ; \
+   XOR(T, C) ; \
+   XOR(D, A) ; \
+   OR(A, B) ; \
+   XOR(C, D) ; \
+   XOR(A, T) ; \
+   OR(A, C) ; \
+   XOR(B, D) ; \
+   XOR(A, B) ; \
+   OR(B, D) ; \
+   XOR(B, A) ; \
+   NOT(T) ; \
+   XOR(T, B) ; \
+   OR(B, A) ; \
+   XOR(B, A) ; \
+   OR(B, T) ; \
+   XOR(D, B) ; \
+   ASSIGN(B, A) ; \
+   ASSIGN(A, T) ; \
+   ASSIGN(T, D) ; \
+   ASSIGN(D, C) ; \
+   ASSIGN(C, T) ;
+
+#define SBOX_D3(A, B, C, D, T) \
+   XOR(C, D) ; \
+   XOR(D, A) ; \
+   ASSIGN(T, D) ; \
+   AND(D, C) ; \
+   XOR(D, B) ; \
+   OR(B, C) ; \
+   XOR(B, T) ; \
+   AND(T, D) ; \
+   XOR(C, D) ; \
+   AND(T, A) ; \
+   XOR(T, C) ; \
+   AND(C, B) ; \
+   OR(C, A) ; \
+   NOT(D) ; \
+   XOR(C, D) ; \
+   XOR(A, D) ; \
+   AND(A, B) ; \
+   XOR(D, T) ; \
+   XOR(D, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, T) ;
+
+#define SBOX_D4(A, B, C, D, T) \
+   ASSIGN(T, C) ; \
+   XOR(C, B) ; \
+   XOR(A, C) ; \
+   AND(T, C) ; \
+   XOR(T, A) ; \
+   AND(A, B) ; \
+   XOR(B, D) ; \
+   OR(D, T) ; \
+   XOR(C, D) ; \
+   XOR(A, D) ; \
+   XOR(B, T) ; \
+   AND(D, C) ; \
+   XOR(D, B) ; \
+   XOR(B, A) ; \
+   OR(B, C) ; \
+   XOR(A, D) ; \
+   XOR(B, T) ; \
+   XOR(A, B) ; \
+   ASSIGN(T, A) ; \
+   ASSIGN(A, C) ; \
+   ASSIGN(C, D) ; \
+   ASSIGN(D, T) ;
+
+#define SBOX_D5(A, B, C, D, T) \
+   ASSIGN(T, C) ; \
+   AND(C, D) ; \
+   XOR(C, B) ; \
+   OR(B, D) ; \
+   AND(B, A) ; \
+   XOR(T, C) ; \
+   XOR(T, B) ; \
+   AND(B, C) ; \
+   NOT(A) ; \
+   XOR(D, T) ; \
+   XOR(B, D) ; \
+   AND(D, A) ; \
+   XOR(D, C) ; \
+   XOR(A, B) ; \
+   AND(C, A) ; \
+   XOR(D, A) ; \
+   XOR(C, T) ; \
+   OR(C, D) ; \
+   XOR(D, A) ; \
+   XOR(C, B) ; \
+   ASSIGN(B, D) ; \
+   ASSIGN(D, T) ;
+
+#define SBOX_D6(A, B, C, D, T) \
+   NOT(B) ; \
+   ASSIGN(T, D) ; \
+   XOR(C, B) ; \
+   OR(D, A) ; \
+   XOR(D, C) ; \
+   OR(C, B) ; \
+   AND(C, A) ; \
+   XOR(T, D) ; \
+   XOR(C, T) ; \
+   OR(T, A) ; \
+   XOR(T, B) ; \
+   AND(B, C) ; \
+   XOR(B, D) ; \
+   XOR(T, C) ; \
+   AND(D, T) ; \
+   XOR(T, B) ; \
+   XOR(D, T) ; \
+   NOT(T) ; \
+   XOR(D, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, T) ; \
+   ASSIGN(T, D) ; \
+   ASSIGN(D, C) ; \
+   ASSIGN(C, T) ;
+
+#define SBOX_D7(A, B, C, D, T) \
+   XOR(A, C) ; \
+   ASSIGN(T, C) ; \
+   AND(C, A) ; \
+   XOR(T, D) ; \
+   NOT(C) ; \
+   XOR(D, B) ; \
+   XOR(C, D) ; \
+   OR(T, A) ; \
+   XOR(A, C) ; \
+   XOR(D, T) ; \
+   XOR(T, B) ; \
+   AND(B, D) ; \
+   XOR(B, A) ; \
+   XOR(A, D) ; \
+   OR(A, C) ; \
+   XOR(D, B) ; \
+   XOR(T, A) ; \
+   ASSIGN(A, B) ; \
+   ASSIGN(B, C) ; \
+   ASSIGN(C, T) ;
+
+#define SBOX_D8(A, B, C, D, T) \
+   ASSIGN(T, C) ; \
+   XOR(C, A) ; \
+   AND(A, D) ; \
+   OR(T, D) ; \
+   NOT(C) ; \
+   XOR(D, B) ; \
+   OR(B, A) ; \
+   XOR(A, C) ; \
+   AND(C, T) ; \
+   AND(D, T) ; \
+   XOR(B, C) ; \
+   XOR(C, A) ; \
+   OR(A, C) ; \
+   XOR(T, B) ; \
+   XOR(A, D) ; \
+   XOR(D, T) ; \
+   OR(T, A) ; \
+   XOR(D, C) ; \
+   XOR(T, C) ; \
+   ASSIGN(C, B) ; \
+   ASSIGN(B, A) ; \
+   ASSIGN(A, D) ; \
+   ASSIGN(D, T) ;
+
+#define TRANSFORM(A, B, C, D, T) \
+   ROTL_IMM(A, 13) ; \
+   ROTL_IMM(C, 3) ; \
+   SHL2_3(T, A) ; \
+   XOR(B, A) ; \
+   XOR(D, C) ; \
+   XOR(B, C) ; \
+   XOR(D, T) ; \
+   ROTL_IMM(B, 1) ; \
+   ROTL_IMM(D, 7) ; \
+   ASSIGN(T, B) ; \
+   SHL_IMM(T, 7) ; \
+   XOR(A, B) ; \
+   XOR(C, D) ; \
+   XOR(A, D) ; \
+   XOR(C, T) ; \
+   ROTL_IMM(A, 5) ; \
+   ROTL_IMM(C, 22) ;
+
+#define I_TRANSFORM(A, B, C, D, T) \
+   ROTR_IMM(C, 22) ; \
+   ROTR_IMM(A, 5) ; \
+   ASSIGN(T, B) ; \
+   SHL_IMM(T, 7) ; \
+   XOR(A, B) ; \
+   XOR(C, D) ; \
+   XOR(A, D) ; \
+   XOR(C, T) ; \
+   ROTR_IMM(D, 7) ; \
+   ROTR_IMM(B, 1) ; \
+   SHL2_3(T, A) ; \
+   XOR(B, C) ; \
+   XOR(D, C) ; \
+   XOR(B, A) ; \
+   XOR(D, T) ; \
+   ROTR_IMM(C, 3) ; \
+   ROTR_IMM(A, 13) ;
+
+#define KEY_XOR(A, B, C, D, N) \
+   XOR(A, ARRAY4(EDI, (4*N  ))) ; \
+   XOR(B, ARRAY4(EDI, (4*N+1))) ; \
+   XOR(C, ARRAY4(EDI, (4*N+2))) ; \
+   XOR(D, ARRAY4(EDI, (4*N+3))) ;
+
+/*
+* Serpent Encryption
+*/
+START_FUNCTION(botan_serpent_x86_32_encrypt)
+   SPILL_REGS()
+#define PUSHED 4
+
+   ASSIGN(EBP, ARG(1)) /* input block */
+   ASSIGN(EAX, ARRAY4(EBP, 0))
+   ASSIGN(EBX, ARRAY4(EBP, 1))
+   ASSIGN(ECX, ARRAY4(EBP, 2))
+   ASSIGN(EDX, ARRAY4(EBP, 3))
+
+   ASSIGN(EDI, ARG(3)) /* round keys */
+   ZEROIZE(EBP)
+
+#define E_ROUND(A, B, C, D, T, N, SBOX) \
+   KEY_XOR(A, B, C, D, N) \
+   SBOX(A, B, C, D, T) \
+   TRANSFORM(A, B, C, D, T)
+
+
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 0, SBOX_E1)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 1, SBOX_E2)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 2, SBOX_E3)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 3, SBOX_E4)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 4, SBOX_E5)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 5, SBOX_E6)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 6, SBOX_E7)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 7, SBOX_E8)
+
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 8, SBOX_E1)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 9, SBOX_E2)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 10, SBOX_E3)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 11, SBOX_E4)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 12, SBOX_E5)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 13, SBOX_E6)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 14, SBOX_E7)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 15, SBOX_E8)
+
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 16, SBOX_E1)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 17, SBOX_E2)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 18, SBOX_E3)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 19, SBOX_E4)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 20, SBOX_E5)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 21, SBOX_E6)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 22, SBOX_E7)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 23, SBOX_E8)
+
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 24, SBOX_E1)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 25, SBOX_E2)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 26, SBOX_E3)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 27, SBOX_E4)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 28, SBOX_E5)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 29, SBOX_E6)
+   E_ROUND(EAX, EBX, ECX, EDX, EBP, 30, SBOX_E7)
+
+   KEY_XOR(EAX, EBX, ECX, EDX, 31)
+   SBOX_E8(EAX, EBX, ECX, EDX, EBP)
+   KEY_XOR(EAX, EBX, ECX, EDX, 32)
+
+   ASSIGN(EBP, ARG(2)) /* output block */
+   ASSIGN(ARRAY4(EBP, 0), EAX)
+   ASSIGN(ARRAY4(EBP, 1), EBX)
+   ASSIGN(ARRAY4(EBP, 2), ECX)
+   ASSIGN(ARRAY4(EBP, 3), EDX)
+
+   RESTORE_REGS()
+#undef PUSHED
+END_FUNCTION(botan_serpent_x86_32_encrypt)
+
+/*
+* Serpent Decryption
+*/
+START_FUNCTION(botan_serpent_x86_32_decrypt)
+   SPILL_REGS()
+#define PUSHED 4
+
+   ASSIGN(EBP, ARG(1)) /* input block */
+   ASSIGN(EAX, ARRAY4(EBP, 0))
+   ASSIGN(EBX, ARRAY4(EBP, 1))
+   ASSIGN(ECX, ARRAY4(EBP, 2))
+   ASSIGN(EDX, ARRAY4(EBP, 3))
+
+   ASSIGN(EDI, ARG(3)) /* round keys */
+
+   ZEROIZE(EBP)
+
+#define D_ROUND(A, B, C, D, T, N, SBOX) \
+   I_TRANSFORM(A, B, C, D, T) \
+   SBOX(A, B, C, D, T) \
+   KEY_XOR(A, B, C, D, N) \
+
+   KEY_XOR(EAX, EBX, ECX, EDX, 32)
+   SBOX_D8(EAX, EBX, ECX, EDX, EBP)
+   KEY_XOR(EAX, EBX, ECX, EDX, 31)
+
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 30, SBOX_D7)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 29, SBOX_D6)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 28, SBOX_D5)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 27, SBOX_D4)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 26, SBOX_D3)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 25, SBOX_D2)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 24, SBOX_D1)
+
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 23, SBOX_D8)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 22, SBOX_D7)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 21, SBOX_D6)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 20, SBOX_D5)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 19, SBOX_D4)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 18, SBOX_D3)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 17, SBOX_D2)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 16, SBOX_D1)
+
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 15, SBOX_D8)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 14, SBOX_D7)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 13, SBOX_D6)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 12, SBOX_D5)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 11, SBOX_D4)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 10, SBOX_D3)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 9, SBOX_D2)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 8, SBOX_D1)
+
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 7, SBOX_D8)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 6, SBOX_D7)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 5, SBOX_D6)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 4, SBOX_D5)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 3, SBOX_D4)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 2, SBOX_D3)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 1, SBOX_D2)
+   D_ROUND(EAX, EBX, ECX, EDX, EBP, 0, SBOX_D1)
+
+   ASSIGN(EBP, ARG(2)) /* output block */
+   ASSIGN(ARRAY4(EBP, 0), EAX)
+   ASSIGN(ARRAY4(EBP, 1), EBX)
+   ASSIGN(ARRAY4(EBP, 2), ECX)
+   ASSIGN(ARRAY4(EBP, 3), EDX)
+
+   RESTORE_REGS()
+#undef PUSHED
+END_FUNCTION(botan_serpent_x86_32_decrypt)
+
+/*
+* Serpent Key Schedule
+*/
+START_FUNCTION(botan_serpent_x86_32_key_schedule)
+   SPILL_REGS()
+#define PUSHED 4
+
+   ASSIGN(EDI, ARG(1)) /* round keys */
+   ASSIGN(ESI, IMM(8))
+   ADD_IMM(EDI, 32)
+
+START_LOOP(.L_SERP_EXPANSION)
+   ASSIGN(EAX, ARRAY4(EDI, -1))
+   ASSIGN(EBX, ARRAY4(EDI, -3))
+   ASSIGN(ECX, ARRAY4(EDI, -5))
+   ASSIGN(EDX, ARRAY4(EDI, -8))
+
+   ASSIGN(EBP, ESI)
+   SUB_IMM(EBP, 8)
+   XOR(EBP, IMM(0x9E3779B9))
+   XOR(EAX, EBX)
+   XOR(ECX, EDX)
+   XOR(EAX, EBP)
+   XOR(EAX, ECX)
+
+   ROTL_IMM(EAX, 11)
+
+   ASSIGN(ARRAY4(EDI, 0), EAX)
+
+   ADD_IMM(ESI, 1)
+   ADD_IMM(EDI, 4)
+LOOP_UNTIL_EQ(ESI, 140, .L_SERP_EXPANSION)
+
+   ASSIGN(EDI, ARG(1)) /* round keys */
+
+#define LOAD_AND_SBOX(MSG, SBOX) \
+   ASSIGN(EAX, ARRAY4(EDI, (4*MSG+ 8))) ; \
+   ASSIGN(EBX, ARRAY4(EDI, (4*MSG+ 9))) ; \
+   ASSIGN(ECX, ARRAY4(EDI, (4*MSG+10))) ; \
+   ASSIGN(EDX, ARRAY4(EDI, (4*MSG+11))) ; \
+   SBOX(EAX, EBX, ECX, EDX, EBP) ; \
+   ASSIGN(ARRAY4(EDI, (4*MSG+ 8)), EAX) ; \
+   ASSIGN(ARRAY4(EDI, (4*MSG+ 9)), EBX) ; \
+   ASSIGN(ARRAY4(EDI, (4*MSG+10)), ECX) ; \
+   ASSIGN(ARRAY4(EDI, (4*MSG+11)), EDX)
+
+   LOAD_AND_SBOX( 0, SBOX_E4)
+   LOAD_AND_SBOX( 1, SBOX_E3)
+   LOAD_AND_SBOX( 2, SBOX_E2)
+   LOAD_AND_SBOX( 3, SBOX_E1)
+
+   LOAD_AND_SBOX( 4, SBOX_E8)
+   LOAD_AND_SBOX( 5, SBOX_E7)
+   LOAD_AND_SBOX( 6, SBOX_E6)
+   LOAD_AND_SBOX( 7, SBOX_E5)
+   LOAD_AND_SBOX( 8, SBOX_E4)
+   LOAD_AND_SBOX( 9, SBOX_E3)
+   LOAD_AND_SBOX(10, SBOX_E2)
+   LOAD_AND_SBOX(11, SBOX_E1)
+
+   LOAD_AND_SBOX(12, SBOX_E8)
+   LOAD_AND_SBOX(13, SBOX_E7)
+   LOAD_AND_SBOX(14, SBOX_E6)
+   LOAD_AND_SBOX(15, SBOX_E5)
+   LOAD_AND_SBOX(16, SBOX_E4)
+   LOAD_AND_SBOX(17, SBOX_E3)
+   LOAD_AND_SBOX(18, SBOX_E2)
+   LOAD_AND_SBOX(19, SBOX_E1)
+
+   LOAD_AND_SBOX(20, SBOX_E8)
+   LOAD_AND_SBOX(21, SBOX_E7)
+   LOAD_AND_SBOX(22, SBOX_E6)
+   LOAD_AND_SBOX(23, SBOX_E5)
+   LOAD_AND_SBOX(24, SBOX_E4)
+   LOAD_AND_SBOX(25, SBOX_E3)
+   LOAD_AND_SBOX(26, SBOX_E2)
+   LOAD_AND_SBOX(27, SBOX_E1)
+
+   LOAD_AND_SBOX(28, SBOX_E8)
+   LOAD_AND_SBOX(29, SBOX_E7)
+   LOAD_AND_SBOX(30, SBOX_E6)
+   LOAD_AND_SBOX(31, SBOX_E5)
+   LOAD_AND_SBOX(32, SBOX_E4)
+
+   RESTORE_REGS()
+#undef PUSHED
+END_FUNCTION(botan_serpent_x86_32_key_schedule)
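Editor's note on the key schedule: the .L_SERP_EXPANSION loop above computes the standard Serpent prekey recurrence W[i] = (W[i-1] ^ W[i-3] ^ W[i-5] ^ W[i-8] ^ 0x9E3779B9 ^ (i-8)) rotated left by 11, for i = 8..139, where W[0..7] holds the padded user key and W+8 becomes the 132-word prekey that LOAD_AND_SBOX then runs through the sboxes. Below is a minimal standalone C++ sketch of that loop; the names serpent_expand_sketch and rotl11 are hypothetical and not part of the patch.

    #include <cstdint>
    #include <cstddef>

    // Rotate a 32-bit word left by 11 bits, as ROTL_IMM(EAX, 11) does above.
    static inline uint32_t rotl11(uint32_t x)
       {
       return (x << 11) | (x >> 21);
       }

    // Fill W[8..139] from the padded key in W[0..7]; W+8 is then the
    // 132-word prekey that the sbox pass turns into the round keys.
    void serpent_expand_sketch(uint32_t W[140])
       {
       const uint32_t PHI = 0x9E3779B9; // golden-ratio constant from the Serpent spec

       for(size_t i = 8; i != 140; ++i)
          W[i] = rotl11(W[i-1] ^ W[i-3] ^ W[i-5] ^ W[i-8] ^
                        PHI ^ static_cast<uint32_t>(i - 8));
       }

The padding in Serpent_X86_32::key_schedule (W[length / 4] |= u32bit(1) << ((length % 4) * 8)) sets the single 1 bit that the Serpent spec appends after keys shorter than 256 bits, so the asm entry point always sees a full 140-word buffer; for a full 256-bit key the bit lands in W[8], which the expansion loop immediately overwrites.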