/*
* Compression Transform
* (C) 2014 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/

#include <botan/compression.h>
#include <stdexcept>

namespace Botan {
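
// Discard any in-progress compression state; a new stream is created
// by the next call to start().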
void Stream_Compression::clear()
   {
   m_stream.reset();
   }
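
// Begin a new compressed stream. Compression does not use a nonce, so the
// nonce bytes themselves are ignored, but the length is still validated;
// no header bytes are returned.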
secure_vector<byte> Stream_Compression::start_raw(const byte[], size_t nonce_len)
   {
   if(!valid_nonce_length(nonce_len))
      throw Invalid_IV_Length(name(), nonce_len);

   clear();
   m_stream.reset(make_stream());
   return secure_vector<byte>();
   }
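
// Core compression loop: the bytes past 'offset' in buf are fed to the
// underlying stream, and the output buffer is grown (to slightly more
// than double its size) whenever the stream runs out of output space.
// The first 'offset' bytes of buf are preserved unchanged, and the
// result is swapped back into buf.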
void Stream_Compression::process(secure_vector<byte>& buf, size_t offset, u32bit flags)
   {
   BOTAN_ASSERT(m_stream, "Initialized");
   BOTAN_ASSERT(buf.size() >= offset, "Offset is sane");

   if(m_buffer.size() < buf.size() + offset)
      m_buffer.resize(buf.size() + offset);

   m_stream->next_in(&buf[offset], buf.size() - offset);
   m_stream->next_out(&m_buffer[offset], m_buffer.size() - offset);

   while(true)
      {
      m_stream->run(flags);

      if(m_stream->avail_out() == 0)
         {
         const size_t added = 8 + m_buffer.size();
         m_buffer.resize(m_buffer.size() + added);
         m_stream->next_out(&m_buffer[m_buffer.size() - added], added);
         }
      else if(m_stream->avail_in() == 0)
         {
         m_buffer.resize(m_buffer.size() - m_stream->avail_out());
         break;
         }
      }

   copy_mem(&m_buffer[0], &buf[0], offset);
   buf.swap(m_buffer);
   }
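
// update(), flush() and finish() differ only in the flag passed to the
// underlying stream; finish() additionally resets the stream once the
// final output has been produced.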
void Stream_Compression::update(secure_vector<byte>& buf, size_t offset)
   {
   process(buf, offset, m_stream->run_flag());
   }

void Stream_Compression::flush(secure_vector<byte>& buf, size_t offset)
   {
   process(buf, offset, m_stream->flush_flag());
   }

void Stream_Compression::finish(secure_vector<byte>& buf, size_t offset)
   {
   process(buf, offset, m_stream->finish_flag());
   clear();
   }
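
// Decompression mirrors the setup above: clear() drops the current stream
// and start_raw() checks the nonce length before allocating a fresh one.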
void Stream_Decompression::clear()
   {
   m_stream.reset();
   }

secure_vector<byte> Stream_Decompression::start_raw(const byte[], size_t nonce_len)
   {
   if(!valid_nonce_length(nonce_len))
      throw Invalid_IV_Length(name(), nonce_len);

   clear();
   m_stream.reset(make_stream());
   return secure_vector<byte>();
   }
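
// Core decompression loop. In addition to growing the output buffer as
// needed, this handles the case where the underlying stream signals
// end-of-stream before all input has been consumed: a new stream is
// started so that concatenated streams decompress as a single unit.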
void Stream_Decompression::process(secure_vector<byte>& buf, size_t offset, u32bit flags)
   {
   BOTAN_ASSERT(m_stream, "Initialized");
   BOTAN_ASSERT(buf.size() >= offset, "Offset is sane");

   if(m_buffer.size() < buf.size() + offset)
      m_buffer.resize(buf.size() + offset);

   m_stream->next_in(&buf[offset], buf.size() - offset);
   m_stream->next_out(&m_buffer[offset], m_buffer.size() - offset);

   while(true)
      {
      const bool stream_end = m_stream->run(flags);

      if(stream_end)
         {
         if(m_stream->avail_in() == 0) // all data consumed?
            {
            m_buffer.resize(m_buffer.size() - m_stream->avail_out());
            clear();
            break;
            }

         // More data follows: try to process as a following stream
         const size_t read = (buf.size() - offset) - m_stream->avail_in();
         start();
         m_stream->next_in(&buf[offset + read], buf.size() - offset - read);
         }

      if(m_stream->avail_out() == 0)
         {
         const size_t added = 8 + m_buffer.size();
         m_buffer.resize(m_buffer.size() + added);
         m_stream->next_out(&m_buffer[m_buffer.size() - added], added);
         }
      else if(m_stream->avail_in() == 0)
         {
         m_buffer.resize(m_buffer.size() - m_stream->avail_out());
         break;
         }
      }

   copy_mem(&m_buffer[0], &buf[0], offset);
   buf.swap(m_buffer);
   }
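
// Process a chunk of compressed input with the normal run flag.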
void Stream_Decompression::update(secure_vector<byte>& buf, size_t offset)
   {
   process(buf, offset, m_stream->run_flag());
   }
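
// Finish decompression. If the underlying stream is still allocated after
// the final input has been processed, end-of-stream was never reached,
// which indicates a truncated input.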
void Stream_Decompression::finish(secure_vector<byte>& buf, size_t offset)
   {
   if(buf.size() != offset || m_stream.get())
      process(buf, offset, m_stream->finish_flag());

   if(m_stream.get())
      throw std::runtime_error(name() + " finished but not at stream end");
   }

}
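
/*
* Usage sketch (illustrative only, not part of this file): a concrete
* subclass providing make_stream() is driven through start(), update()
* and finish(). Construction of such a subclass (e.g. a zlib- or
* bzip2-backed compressor) is assumed to be defined elsewhere.
*
*   Botan::secure_vector<Botan::byte> buf = load_input(); // hypothetical helper
*   compressor->start();                                   // allocate the stream
*   compressor->update(buf, 0);                            // compress in place
*   compressor->finish(buf, 0);                            // write trailer, reset stream
*/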