/*
* Memory Scrubbing
* (C) 2012,2015,2016 Jack Lloyd
*
* Botan is released under the Simplified BSD License (see license.txt)
*/
#include <botan/mem_ops.h>
#include <cstring> // std::memset

#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY)
  #include <windows.h>
#endif
namespace Botan {

void secure_scrub_memory(void* ptr, size_t n)
   {
#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY)
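   // RtlSecureZeroMemory is documented by Microsoft as a zeroing
   // routine that the compiler will not optimize away.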
   ::RtlSecureZeroMemory(ptr, n);

#elif defined(BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO) && (BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO == 1)
   /*
   Call memset through a static volatile pointer, which the compiler
   should not elide. This construct should be safe in conforming
   compilers, but who knows. I did confirm that on x86-64 GCC 6.1 and
   Clang 3.8 both create code that saves the memset address in the
   data segment and unconditionally loads and jumps to that address.
   */
   static void* (*const volatile memset_ptr)(void*, int, size_t) = std::memset;
   (memset_ptr)(ptr, 0, n);
#else
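   /*
   Fallback: write zeros through a pointer-to-volatile, so each store
   is an observable side effect that the optimizer must not remove as
   a dead store.
   */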
   volatile byte* p = reinterpret_cast<volatile byte*>(ptr);

   for(size_t i = 0; i != n; ++i)
      p[i] = 0;
#endif
   }

}
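
/*
Example usage (illustrative sketch only; the buffer name and size below
are hypothetical, not taken from the library):

   uint8_t master_key[32];
   // ... derive and use the key ...
   Botan::secure_scrub_memory(master_key, sizeof(master_key));
*/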