path: root/src/modes/ctr
author     lloyd <[email protected]>    2009-08-12 20:44:09 +0000
committer  lloyd <[email protected]>    2009-08-12 20:44:09 +0000
commit     921ef0147666aca06693f04155e590e16dde9726 (patch)
tree       587fe2caa2d6fce98bd2dd4ac2d5f8ab27d923a8 /src/modes/ctr
parent     b00a8c0045344e0758e5b0989096cc5f6fe516da (diff)
Use a much faster counter increment system, giving noticeable speedups (~15%)
for both Serpent and AES-128 in CTR mode.
Diffstat (limited to 'src/modes/ctr')
-rw-r--r--  src/modes/ctr/ctr.cpp | 41
1 file changed, 21 insertions, 20 deletions
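
The patch replaces the old quadratic, byte-at-a-time counter update with a single
step: increment_counter() now adds PARALLEL_BLOCKS to the low (big-endian) byte of
each counter block and only walks up through the higher bytes when that addition
wraps. Below is a minimal standalone sketch of the same idea in plain C++; it is
not Botan's CTR_BE class, and BLOCK_SIZE, the vector type, and the demo values are
illustrative assumptions:

#include <cstdint>
#include <cstdio>
#include <vector>

// Standalone sketch (not Botan's CTR_BE): bump a big-endian block counter
// by `n` in one step, touching the higher bytes only when the low byte wraps.
// BLOCK_SIZE is an illustrative assumption, not taken from the patch.
static const size_t BLOCK_SIZE = 16;

void increment_counter_by(std::vector<uint8_t>& ctr, uint8_t n)
   {
   uint8_t last = ctr[BLOCK_SIZE-1];
   uint8_t sum = static_cast<uint8_t>(last + n);

   if(sum < last) // low byte wrapped: propagate the carry upward
      for(int j = static_cast<int>(BLOCK_SIZE) - 2; j >= 0; --j)
         if(++ctr[j])
            break;

   ctr[BLOCK_SIZE-1] = sum;
   }

int main()
   {
   std::vector<uint8_t> ctr(BLOCK_SIZE, 0);
   ctr[BLOCK_SIZE-1] = 250;

   increment_counter_by(ctr, 8); // 250 + 8 wraps, so the carry reaches byte 14

   std::printf("%02X %02X\n", ctr[BLOCK_SIZE-2], ctr[BLOCK_SIZE-1]); // prints 01 02
   return 0;
   }

Since the low byte can wrap at most once per call, the carry loop almost never
runs, which is presumably where the ~15% CTR-mode speedup comes from.
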
diff --git a/src/modes/ctr/ctr.cpp b/src/modes/ctr/ctr.cpp
index a3476c474..d458d7848 100644
--- a/src/modes/ctr/ctr.cpp
+++ b/src/modes/ctr/ctr.cpp
@@ -65,21 +65,24 @@ std::string CTR_BE::name() const
 */
 void CTR_BE::set_iv(const InitializationVector& iv)
    {
-   if(iv.length() != cipher->BLOCK_SIZE)
+   const u32bit BLOCK_SIZE = cipher->BLOCK_SIZE;
+
+   if(iv.length() != BLOCK_SIZE)
       throw Invalid_IV_Length(name(), iv.length());
 
    enc_buffer.clear();
    position = 0;
 
-   for(u32bit i = 0; i != PARALLEL_BLOCKS; ++i)
+   counter.copy(0, iv.begin(), iv.length());
+
+   for(u32bit i = 1; i != PARALLEL_BLOCKS; ++i)
       {
-      counter.copy(i*cipher->BLOCK_SIZE, iv.begin(), iv.length());
+      counter.copy(i*BLOCK_SIZE,
+                   counter.begin() + (i-1)*BLOCK_SIZE, BLOCK_SIZE);
 
-      // FIXME: this is stupid
-      for(u32bit j = 0; j != i; ++j)
-         for(s32bit k = cipher->BLOCK_SIZE - 1; k >= 0; --k)
-            if(++counter[i*cipher->BLOCK_SIZE+k])
-               break;
+      for(s32bit j = BLOCK_SIZE - 1; j >= 0; --j)
+         if(++counter[i*BLOCK_SIZE+j])
+            break;
       }
 
    cipher->encrypt_n(counter, enc_buffer, PARALLEL_BLOCKS);
@@ -122,19 +125,17 @@ void CTR_BE::increment_counter()
    {
    for(u32bit i = 0; i != PARALLEL_BLOCKS; ++i)
       {
-      // FIXME: Can do it in a single loop
-      /*
-      for(u32bit j = 1; j != cipher->BLOCK_SIZE; ++j)
-         {
-         byte carry = 0;
-         byte z = counter[(i+1)*cipher->BLOCK_SIZE-1] + PARALLEL_BLOCKS;
-
-         if(
-      */
-      for(u32bit j = 0; j != PARALLEL_BLOCKS; ++j)
-         for(s32bit k = cipher->BLOCK_SIZE - 1; k >= 0; --k)
-            if(++counter[i*cipher->BLOCK_SIZE+k])
+      byte* this_ctr = counter + i*cipher->BLOCK_SIZE;
+
+      byte last_byte = this_ctr[cipher->BLOCK_SIZE-1];
+      last_byte += PARALLEL_BLOCKS;
+
+      if(this_ctr[cipher->BLOCK_SIZE-1] > last_byte)
+         for(s32bit j = cipher->BLOCK_SIZE - 2; j >= 0; --j)
+            if(++this_ctr[j])
                break;
+
+      this_ctr[cipher->BLOCK_SIZE-1] = last_byte;
       }
 
    cipher->encrypt_n(counter, enc_buffer, PARALLEL_BLOCKS);
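
The set_iv() hunk applies the same idea to initialization: instead of copying the
IV into every slot and then incrementing slot i a total of i times (the loop the
removed FIXME complained about), the new code copies the IV once and derives each
subsequent block from its predecessor with a single big-endian +1. A rough
standalone sketch, assuming a fixed block size and PARALLEL_BLOCKS contiguous
blocks in one buffer (names and sizes here are illustrative, not Botan's):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Sketch of the set_iv() idea: lay out PARALLEL_BLOCKS consecutive counter
// blocks by copying the IV into block 0 and building block i from block i-1
// with one big-endian increment. Sizes and names are illustrative assumptions.
static const size_t BLOCK_SIZE = 16;
static const size_t PARALLEL_BLOCKS = 8;

std::vector<uint8_t> make_counter_blocks(const uint8_t iv[BLOCK_SIZE])
   {
   std::vector<uint8_t> counter(PARALLEL_BLOCKS * BLOCK_SIZE);

   std::memcpy(&counter[0], iv, BLOCK_SIZE);

   for(size_t i = 1; i != PARALLEL_BLOCKS; ++i)
      {
      // start from the previous block...
      std::memcpy(&counter[i*BLOCK_SIZE], &counter[(i-1)*BLOCK_SIZE], BLOCK_SIZE);

      // ...then add one, big-endian, stopping once a byte doesn't wrap
      for(int j = static_cast<int>(BLOCK_SIZE) - 1; j >= 0; --j)
         if(++counter[i*BLOCK_SIZE + j])
            break;
      }

   return counter;
   }

int main()
   {
   uint8_t iv[BLOCK_SIZE] = { 0 };
   iv[BLOCK_SIZE-1] = 0xFE;

   std::vector<uint8_t> blocks = make_counter_blocks(iv);

   // blocks 0..2 end in FE FF 00, and block 2 has carried into byte 14
   std::printf("%02X %02X %02X\n",
               blocks[BLOCK_SIZE-1],
               blocks[2*BLOCK_SIZE-1],
               blocks[3*BLOCK_SIZE-1]);
   return 0;
   }

This drops the setup cost from roughly PARALLEL_BLOCKS^2 byte increments to
PARALLEL_BLOCKS of them.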