From e36ab67b80a0fb162df768d5895c413d26e2e5ca Mon Sep 17 00:00:00 2001 From: Jack Lloyd Date: Thu, 3 Nov 2016 11:33:27 -0400 Subject: Rename zero_mem to secure_scrub_memory --- .../utils/locking_allocator/locking_allocator.cpp | 10 ++--- src/lib/utils/mem_ops.cpp | 38 +++++++++++++++++++ src/lib/utils/mem_ops.h | 43 +++++++++++++++++----- src/lib/utils/os_utils.cpp | 4 +- src/lib/utils/zero_mem.cpp | 38 ------------------- 5 files changed, 77 insertions(+), 56 deletions(-) create mode 100644 src/lib/utils/mem_ops.cpp delete mode 100644 src/lib/utils/zero_mem.cpp (limited to 'src/lib/utils') diff --git a/src/lib/utils/locking_allocator/locking_allocator.cpp b/src/lib/utils/locking_allocator/locking_allocator.cpp index 770464d92..8b15b417e 100644 --- a/src/lib/utils/locking_allocator/locking_allocator.cpp +++ b/src/lib/utils/locking_allocator/locking_allocator.cpp @@ -128,12 +128,6 @@ bool mlock_allocator::deallocate(void* p, size_t num_elems, size_t elem_size) if(!m_pool) return false; - /* - We do not have to zero the memory here, as - secure_allocator::deallocate does that for all arguments before - invoking the deallocator (us or delete[]) - */ - size_t n = num_elems * elem_size; /* @@ -146,6 +140,8 @@ bool mlock_allocator::deallocate(void* p, size_t num_elems, size_t elem_size) if(!ptr_in_pool(m_pool, m_poolsize, p, n)) return false; + std::memset(p, 0, n); + lock_guard_type lock(m_mutex); const size_t start = static_cast<byte*>(p) - m_pool; @@ -216,7 +212,7 @@ mlock_allocator::~mlock_allocator() { if(m_pool) { - zero_mem(m_pool, m_poolsize); + secure_scrub_memory(m_pool, m_poolsize); OS::free_locked_pages(m_pool, m_poolsize); m_pool = nullptr; } diff --git a/src/lib/utils/mem_ops.cpp b/src/lib/utils/mem_ops.cpp new file mode 100644 index 000000000..461b03d6b --- /dev/null +++ b/src/lib/utils/mem_ops.cpp @@ -0,0 +1,38 @@ +/* +* Memory Scrubbing +* (C) 2012,2015,2016 Jack Lloyd +* +* Botan is released under the Simplified BSD License (see license.txt) +*/ + +#include <botan/mem_ops.h>
+ +#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY) + #include <windows.h> +#endif + +namespace Botan { + +void secure_scrub_memory(void* ptr, size_t n) + { +#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY) + ::RtlSecureZeroMemory(ptr, n); +#elif defined(BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO) && (BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO == 1) + /* + Call memset through a static volatile pointer, which the compiler + should not elide. This construct should be safe in conforming + compilers, but who knows. I did confirm that on x86-64 GCC 6.1 and + Clang 3.8 both create code that saves the memset address in the + data segment and unconditionally loads and jumps to that address. + */ + static void* (*const volatile memset_ptr)(void*, int, size_t) = std::memset; + (memset_ptr)(ptr, 0, n); +#else + volatile byte* p = reinterpret_cast<volatile byte*>(ptr); + + for(size_t i = 0; i != n; ++i) + p[i] = 0; +#endif + } + +} diff --git a/src/lib/utils/mem_ops.h b/src/lib/utils/mem_ops.h index 0d2d0dab0..b4cf7f76c 100644 --- a/src/lib/utils/mem_ops.h +++ b/src/lib/utils/mem_ops.h @@ -15,25 +15,50 @@ namespace Botan { /** -* Zeroize memory -* @param ptr a pointer to memory to zero out +* Scrub memory contents in a way that a compiler should not elide, +* using some system specific technique. Note that this function might +* not zero the memory (for example, in some hypothetical +* implementation it might combine the memory contents with the output +* of a system PRNG), but if you can detect any difference in behavior +* at runtime then the clearing is side-effecting and you can just +* use `clear_mem`. +* +* Use this function to scrub memory just before deallocating it, or on +* a stack buffer before returning from the function. 
+* +* @param ptr a pointer to memory to scrub * @param n the number of bytes pointed to by ptr */ -BOTAN_DLL void zero_mem(void* ptr, size_t n); +BOTAN_DLL void secure_scrub_memory(void* ptr, size_t n); /** -* Zeroize memory -* @param ptr a pointer to an array -* @param n the number of Ts pointed to by ptr +* Zero out some bytes +* @param ptr a pointer to memory to zero +* @param bytes the number of bytes to zero in ptr */ -template<typename T> inline void clear_mem(T* ptr, size_t n) +inline void clear_bytes(void* ptr, size_t bytes) { - if(n > 0) + if(bytes > 0) { - std::memset(ptr, 0, sizeof(T)*n); + std::memset(ptr, 0, bytes); } } +/** +* Zero memory before use. This simply calls memset and should not be +* used in cases where the compiler cannot see the call as a +* side-effecting operation (for example, if calling clear_mem before +* deallocating memory, the compiler would be allowed to omit the call +* to memset entirely under the as-if rule.) +* +* @param ptr a pointer to an array of Ts to zero +* @param n the number of Ts pointed to by ptr +*/ +template<typename T> inline void clear_mem(T* ptr, size_t n) + { + clear_bytes(ptr, sizeof(T)*n); + } + /** * Copy memory * @param out the destination array diff --git a/src/lib/utils/os_utils.cpp b/src/lib/utils/os_utils.cpp index 4020a4be3..f40426613 100644 --- a/src/lib/utils/os_utils.cpp +++ b/src/lib/utils/os_utils.cpp @@ -283,11 +283,11 @@ void free_locked_pages(void* ptr, size_t length) return; #if defined(BOTAN_TARGET_OS_HAS_POSIX_MLOCK) - zero_mem(ptr, length); + secure_scrub_memory(ptr, length); ::munlock(ptr, length); ::munmap(ptr, length); #elif defined BOTAN_TARGET_OS_HAS_VIRTUAL_LOCK - zero_mem(ptr, length); + secure_scrub_memory(ptr, length); ::VirtualUnlock(ptr, length); ::VirtualFree(ptr, 0, MEM_RELEASE); #else diff --git a/src/lib/utils/zero_mem.cpp b/src/lib/utils/zero_mem.cpp deleted file mode 100644 index df195048a..000000000 --- a/src/lib/utils/zero_mem.cpp +++ /dev/null @@ -1,38 +0,0 @@ - /* -* Zero Memory -* (C) 
2012,2015 Jack Lloyd -* -* Botan is released under the Simplified BSD License (see license.txt) -*/ - -#include <botan/mem_ops.h> - -#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY) - #include <windows.h> -#endif - -namespace Botan { - -void zero_mem(void* ptr, size_t n) - { -#if defined(BOTAN_TARGET_OS_HAS_RTLSECUREZEROMEMORY) - ::RtlSecureZeroMemory(ptr, n); -#elif defined(BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO) && (BOTAN_USE_VOLATILE_MEMSET_FOR_ZERO == 1) - /* - Call memset through a static volatile pointer, which the compiler - should not elide. This construct should be safe in conforming - compilers, but who knows. I did confirm that on x86-64 GCC 6.1 and - Clang 3.8 both create code that saves the memset address in the - data segment and uncondtionally loads and jumps to that address. - */ - static void* (*const volatile memset_ptr)(void*, int, size_t) = std::memset; - (memset_ptr)(ptr, 0, n); -#else - volatile byte* p = reinterpret_cast<volatile byte*>(ptr); - - for(size_t i = 0; i != n; ++i) - p[i] = 0; -#endif - } - -} -- cgit v1.2.3