aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.pylintrc12
-rwxr-xr-xconfigure.py441
-rw-r--r--doc/manual/building.rst16
-rw-r--r--src/build-data/os/openbsd.txt4
-rw-r--r--src/cli/speed.cpp12
-rw-r--r--src/lib/asn1/asn1_time.cpp4
-rw-r--r--src/lib/entropy/entropy_src.h6
-rw-r--r--src/lib/math/mp/mp_madd.h2
-rw-r--r--src/lib/utils/compiler.h4
-rw-r--r--src/lib/utils/os_utils.cpp34
-rw-r--r--src/lib/utils/os_utils.h3
-rw-r--r--src/lib/x509/x509path.h2
-rw-r--r--src/scripts/ci/appveyor.yml25
-rwxr-xr-xsrc/scripts/install.py16
-rw-r--r--src/tests/test_os_utils.cpp4
-rw-r--r--src/tests/unit_x509.cpp117
16 files changed, 439 insertions, 263 deletions
diff --git a/.pylintrc b/.pylintrc
index 58ddd65f1..f1de573b9 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -135,10 +135,10 @@ variable-rgx=[a-z_][a-z0-9_]{0,30}$
variable-name-hint=[a-z_][a-z0-9_]{0,30}$
# Regular expression matching correct attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
+attr-rgx=[a-z_][a-z0-9_]{1,30}$
# Naming hint for attribute names
-attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+attr-name-hint=[a-z_][a-z0-9_]{1,30}$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
@@ -227,7 +227,7 @@ ignored-modules=
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set). This supports can work
# with qualified names.
-ignored-classes=
+ignored-classes=LexResult
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
@@ -339,7 +339,7 @@ exclude-protected=_asdict,_fields,_replace,_source,_make
[DESIGN]
# Maximum number of arguments for function / method
-max-args=5
+max-args=7
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
@@ -361,10 +361,10 @@ max-statements=50
max-parents=7
# Maximum number of attributes for a class (see R0902).
-max-attributes=7
+max-attributes=15
# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
+min-public-methods=0
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
diff --git a/configure.py b/configure.py
index 7e40545dd..ed021ad3e 100755
--- a/configure.py
+++ b/configure.py
@@ -209,7 +209,7 @@ class BuildConfigurationInformation(object):
return 'botan-%d.pc' % (self.version_major)
-def process_command_line(args):
+def process_command_line(args): # pylint: disable=too-many-locals
"""
Handle command line options
"""
@@ -388,7 +388,8 @@ def process_command_line(args):
dest='with_valgrind', action='store_true', default=False)
build_group.add_option('--with-bakefile', action='store_true',
- default=False, help='Generate bakefile which can be used to create Visual Studio or Xcode project files')
+ default=False,
+ help='Generate bakefile which can be used to create Visual Studio or Xcode project files')
build_group.add_option('--unsafe-fuzzer-mode', action='store_true', default=False,
help='disable essential checks for testing')
@@ -516,10 +517,16 @@ def process_command_line(args):
return options
-def lex_me_harder(infofile, to_obj, allowed_groups, name_val_pairs):
+
+class LexResult(object):
+ pass
+
+
+def lex_me_harder(infofile, allowed_groups, name_val_pairs):
"""
Generic lexer function for info.txt and src/build-data files
"""
+ out = LexResult()
# Format as a nameable Python variable
def py_var(group):
@@ -538,9 +545,9 @@ def lex_me_harder(infofile, to_obj, allowed_groups, name_val_pairs):
lexer.wordchars += '|:.<>/,-!+' # handle various funky chars in info.txt
for group in allowed_groups:
- to_obj.__dict__[py_var(group)] = []
+ out.__dict__[py_var(group)] = []
for (key, val) in name_val_pairs.items():
- to_obj.__dict__[key] = val
+ out.__dict__[key] = val
def lexed_tokens(): # Convert to an interator
token = lexer.get_token()
@@ -563,15 +570,15 @@ def lex_me_harder(infofile, to_obj, allowed_groups, name_val_pairs):
token = lexer.get_token()
while token != end_marker:
- to_obj.__dict__[py_var(group)].append(token)
+ out.__dict__[py_var(group)].append(token)
token = lexer.get_token()
if token is None:
raise LexerError('Group "%s" not terminated' % (group),
lexer.lineno)
elif token in name_val_pairs.keys():
- if isinstance(to_obj.__dict__[token], list):
- to_obj.__dict__[token].append(lexer.get_token())
+ if isinstance(out.__dict__[token], list):
+ out.__dict__[token].append(lexer.get_token())
# Dirty hack
if token == 'define':
@@ -580,13 +587,15 @@ def lex_me_harder(infofile, to_obj, allowed_groups, name_val_pairs):
raise LexerError('No version set for API', lexer.lineno)
if not re.match('^[0-9]{8}$', nxt):
raise LexerError('Bad API rev "%s"' % (nxt), lexer.lineno)
- to_obj.__dict__[token].append(nxt)
+ out.__dict__[token].append(nxt)
else:
- to_obj.__dict__[token] = lexer.get_token()
+ out.__dict__[token] = lexer.get_token()
else: # No match -> error
raise LexerError('Bad token "%s"' % (token), lexer.lineno)
+ return out
+
def force_to_dict(l):
"""
Convert a lex'ed map (from build-data files) from a list to a dict
@@ -622,14 +631,25 @@ class ModuleInfo(InfoObject):
def __init__(self, infofile):
super(ModuleInfo, self).__init__(infofile)
- lex_me_harder(infofile, self,
- ['header:internal', 'header:public',
- 'header:external', 'requires', 'os', 'arch',
- 'cc', 'libs', 'frameworks', 'comment',
- 'warning'],
- {'load_on': 'auto',
- 'define': [],
- 'need_isa': ''})
+ lex = lex_me_harder(
+ infofile,
+ [
+ 'header:internal', 'header:public', 'header:external', 'requires',
+ 'os', 'arch', 'cc', 'libs', 'frameworks', 'comment', 'warning'
+ ],
+ {
+ 'load_on': 'auto',
+ 'define': [],
+ 'need_isa': ''
+ })
+
+ def check_header_duplicates(header_list_public, header_list_internal):
+ pub_header = set(header_list_public)
+ int_header = set(header_list_internal)
+ if not pub_header.isdisjoint(int_header):
+ logging.error("Module %s header contains same header in public and internal sections" % self.infofile)
+
+ check_header_duplicates(lex.header_public, lex.header_internal)
all_source_files = []
all_header_files = []
@@ -642,20 +662,14 @@ class ModuleInfo(InfoObject):
self.source = all_source_files
- if self.need_isa == '':
- self.need_isa = []
- else:
- self.need_isa = self.need_isa.split(',')
-
# If not entry for the headers, all are assumed public
- if self.header_internal == [] and self.header_public == []:
+ if lex.header_internal == [] and lex.header_public == []:
self.header_public = list(all_header_files)
+ self.header_internal = []
else:
- pub_header = set(self.header_public)
- int_header = set(self.header_internal)
-
- if not pub_header.isdisjoint(int_header):
- logging.error("Module %s header contains same header in public and internal sections" % (infofile))
+ self.header_public = lex.header_public
+ self.header_internal = lex.header_internal
+ self.header_external = lex.header_external
# Coerce to more useful types
def convert_lib_list(l):
@@ -673,8 +687,18 @@ class ModuleInfo(InfoObject):
result[target] = result.setdefault(target, []) + vals
return result
- self.libs = convert_lib_list(self.libs)
- self.frameworks = convert_lib_list(self.frameworks)
+ # Convert remaining lex result to members
+ self.arch = lex.arch
+ self.cc = lex.cc
+ self.comment = ' '.join(lex.comment) if lex.comment else None
+ self.define = lex.define
+ self.frameworks = convert_lib_list(lex.frameworks)
+ self.libs = convert_lib_list(lex.libs)
+ self.load_on = lex.load_on
+ self.need_isa = lex.need_isa.split(',') if lex.need_isa else []
+ self.os = lex.os
+ self.requires = lex.requires
+ self.warning = ' '.join(lex.warning) if lex.warning else None
def add_dir_name(filename):
if filename.count(':') == 0:
@@ -687,25 +711,18 @@ class ModuleInfo(InfoObject):
return os.path.join(os.path.split(self.lives_in)[0],
*filename.split(':'))
+ # Modify members
self.source = [add_dir_name(s) for s in self.source]
self.header_internal = [add_dir_name(s) for s in self.header_internal]
self.header_public = [add_dir_name(s) for s in self.header_public]
self.header_external = [add_dir_name(s) for s in self.header_external]
+ # Filesystem read access check
for src in self.source + self.header_internal + self.header_public + self.header_external:
if not os.access(src, os.R_OK):
logging.error("Missing file %s in %s" % (src, infofile))
- if self.comment != []:
- self.comment = ' '.join(self.comment)
- else:
- self.comment = None
-
- if self.warning != []:
- self.warning = ' '.join(self.warning)
- else:
- self.warning = None
-
+ # Check for duplicates
def intersect_check(type_a, list_a, type_b, list_b):
intersection = set.intersection(set(list_a), set(list_b))
if len(intersection) > 0:
@@ -797,14 +814,20 @@ class ModuleInfo(InfoObject):
return 0
return 1
+
class ModulePolicyInfo(InfoObject):
def __init__(self, infofile):
super(ModulePolicyInfo, self).__init__(infofile)
- lex_me_harder(infofile, self,
- ['required', 'if_available', 'prohibited'], {})
+ lex = lex_me_harder(
+ infofile,
+ ['required', 'if_available', 'prohibited'],
+ {})
- def cross_check(self, modules):
+ self.if_available = lex.if_available
+ self.required = lex.required
+ self.prohibited = lex.prohibited
+ def cross_check(self, modules):
def check(tp, lst):
for mod in lst:
if mod not in modules:
@@ -819,19 +842,24 @@ class ModulePolicyInfo(InfoObject):
class ArchInfo(InfoObject):
def __init__(self, infofile):
super(ArchInfo, self).__init__(infofile)
- lex_me_harder(infofile, self,
- ['aliases', 'submodels', 'submodel_aliases', 'isa_extensions'],
- {'endian': None,
- 'family': None,
- 'unaligned': 'no',
- 'wordsize': 32
- })
-
- self.submodel_aliases = force_to_dict(self.submodel_aliases)
-
- self.unaligned_ok = (1 if self.unaligned == 'ok' else 0)
-
- self.wordsize = int(self.wordsize)
+ lex = lex_me_harder(
+ infofile,
+ ['aliases', 'submodels', 'submodel_aliases', 'isa_extensions'],
+ {
+ 'endian': None,
+ 'family': None,
+ 'unaligned': 'no',
+ 'wordsize': 32
+ })
+
+ self.aliases = lex.aliases
+ self.endian = lex.endian
+ self.family = lex.family
+ self.isa_extensions = lex.isa_extensions
+ self.unaligned_ok = (1 if lex.unaligned == 'ok' else 0)
+ self.submodels = lex.submodels
+ self.submodel_aliases = force_to_dict(lex.submodel_aliases)
+ self.wordsize = int(lex.wordsize)
def all_submodels(self):
"""
@@ -904,56 +932,74 @@ class ArchInfo(InfoObject):
class CompilerInfo(InfoObject):
def __init__(self, infofile):
super(CompilerInfo, self).__init__(infofile)
- lex_me_harder(infofile, self,
- ['so_link_commands', 'binary_link_commands', 'mach_opt', 'mach_abi_linking', 'isa_flags'],
- {'binary_name': None,
- 'linker_name': None,
- 'macro_name': None,
- 'output_to_option': '-o ',
- 'add_include_dir_option': '-I',
- 'add_lib_dir_option': '-L',
- 'add_lib_option': '-l',
- 'add_framework_option': '-framework ',
- 'compile_flags': '',
- 'debug_info_flags': '',
- 'optimization_flags': '',
- 'size_optimization_flags': '',
- 'coverage_flags': '',
- 'sanitizer_flags': '',
- 'stack_protector_flags': '',
- 'shared_flags': '',
- 'lang_flags': '',
- 'warning_flags': '',
- 'maintainer_warning_flags': '',
- 'visibility_build_flags': '',
- 'visibility_attribute': '',
- 'ar_command': None,
- 'makefile_style': ''
- })
-
- self.so_link_commands = force_to_dict(self.so_link_commands)
- self.binary_link_commands = force_to_dict(self.binary_link_commands)
- self.mach_abi_linking = force_to_dict(self.mach_abi_linking)
- self.isa_flags = force_to_dict(self.isa_flags)
+ lex = lex_me_harder(
+ infofile,
+ ['so_link_commands', 'binary_link_commands', 'mach_opt', 'mach_abi_linking', 'isa_flags'],
+ {
+ 'binary_name': None,
+ 'linker_name': None,
+ 'macro_name': None,
+ 'output_to_option': '-o ',
+ 'add_include_dir_option': '-I',
+ 'add_lib_dir_option': '-L',
+ 'add_lib_option': '-l',
+ 'add_framework_option': '-framework ',
+ 'compile_flags': '',
+ 'debug_info_flags': '',
+ 'optimization_flags': '',
+ 'size_optimization_flags': '',
+ 'coverage_flags': '',
+ 'sanitizer_flags': '',
+ 'stack_protector_flags': '',
+ 'shared_flags': '',
+ 'lang_flags': '',
+ 'warning_flags': '',
+ 'maintainer_warning_flags': '',
+ 'visibility_build_flags': '',
+ 'visibility_attribute': '',
+ 'ar_command': None,
+ 'makefile_style': ''
+ })
+
+ self.add_framework_option = lex.add_framework_option
+ self.add_include_dir_option = lex.add_include_dir_option
+ self.add_lib_option = lex.add_lib_option
+ self.ar_command = lex.ar_command
+ self.binary_link_commands = force_to_dict(lex.binary_link_commands)
+ self.binary_name = lex.binary_name
+ self.compile_flags = lex.compile_flags
+ self.coverage_flags = lex.coverage_flags
+ self.debug_info_flags = lex.debug_info_flags
+ self.isa_flags = force_to_dict(lex.isa_flags)
+ self.lang_flags = lex.lang_flags
+ self.linker_name = lex.linker_name
+ self.mach_abi_linking = force_to_dict(lex.mach_abi_linking)
+ self.macro_name = lex.macro_name
+ self.maintainer_warning_flags = lex.maintainer_warning_flags
+ self.makefile_style = lex.makefile_style
+ self.optimization_flags = lex.optimization_flags
+ self.output_to_option = lex.output_to_option
+ self.sanitizer_flags = lex.sanitizer_flags
+ self.shared_flags = lex.shared_flags
+ self.size_optimization_flags = lex.size_optimization_flags
+ self.so_link_commands = force_to_dict(lex.so_link_commands)
+ self.stack_protector_flags = lex.stack_protector_flags
+ self.visibility_build_flags = lex.visibility_build_flags
+ self.visibility_attribute = lex.visibility_attribute
+ self.warning_flags = lex.warning_flags
self.mach_opt_flags = {}
+ while lex.mach_opt:
+ proc = lex.mach_opt.pop(0)
+ if lex.mach_opt.pop(0) != '->':
+ raise ConfigureError('Parsing err in %s mach_opt' % self.basename)
- while self.mach_opt != []:
- proc = self.mach_opt.pop(0)
- if self.mach_opt.pop(0) != '->':
- raise ConfigureError('Parsing err in %s mach_opt' % (self.basename))
-
- flags = self.mach_opt.pop(0)
+ flags = lex.mach_opt.pop(0)
regex = ''
-
- if len(self.mach_opt) > 0 and \
- (len(self.mach_opt) == 1 or self.mach_opt[1] != '->'):
- regex = self.mach_opt.pop(0)
-
+ if lex.mach_opt and (len(lex.mach_opt) == 1 or lex.mach_opt[1] != '->'):
+ regex = lex.mach_opt.pop(0)
self.mach_opt_flags[proc] = (flags, regex)
- del self.mach_opt
-
def isa_flags_for(self, isa, arch):
if isa in self.isa_flags:
return self.isa_flags[isa]
@@ -985,13 +1031,13 @@ class CompilerInfo(InfoObject):
Return the machine specific ABI flags
"""
- def all():
+ def all_group():
if options.with_debug_info and 'all-debug' in self.mach_abi_linking:
return 'all-debug'
return 'all'
abi_link = list()
- for what in [all(), options.os, options.arch, options.cpu]:
+ for what in [all_group(), options.os, options.arch, options.cpu]:
flag = self.mach_abi_linking.get(what)
if flag != None and flag != '' and flag not in abi_link:
abi_link.append(flag)
@@ -1067,7 +1113,8 @@ class CompilerInfo(InfoObject):
return (' '.join(gen_flags())).strip()
- def _so_link_search(self, osname, debug_info):
+ @staticmethod
+ def _so_link_search(osname, debug_info):
if debug_info:
return [osname + '-debug', 'default-debug']
else:
@@ -1104,50 +1151,73 @@ class CompilerInfo(InfoObject):
return ['BUILD_COMPILER_IS_' + self.macro_name]
+
class OsInfo(InfoObject):
def __init__(self, infofile):
super(OsInfo, self).__init__(infofile)
- lex_me_harder(infofile, self,
- ['aliases', 'target_features'],
- {'os_type': None,
- 'program_suffix': '',
- 'obj_suffix': 'o',
- 'soname_suffix': '',
- 'soname_pattern_patch': '',
- 'soname_pattern_abi': '',
- 'soname_pattern_base': '',
- 'static_suffix': 'a',
- 'ar_command': 'ar crs',
- 'ar_needs_ranlib': False,
- 'install_root': '/usr/local',
- 'header_dir': 'include',
- 'bin_dir': 'bin',
- 'lib_dir': 'lib',
- 'doc_dir': 'share/doc',
- 'building_shared_supported': 'yes',
- 'install_cmd_data': 'install -m 644',
- 'install_cmd_exec': 'install -m 755'
- })
-
- if self.soname_pattern_base != '':
- if self.soname_pattern_patch == '' and self.soname_pattern_abi == '':
- self.soname_pattern_patch = self.soname_pattern_base
- self.soname_pattern_patch_abi = self.soname_pattern_base
-
- elif self.soname_pattern_abi != '' and self.soname_pattern_abi != '':
- pass # all 3 values set, nothing needs to happen here
+ lex = lex_me_harder(
+ infofile,
+ ['aliases', 'target_features'],
+ {
+ 'os_type': None,
+ 'program_suffix': '',
+ 'obj_suffix': 'o',
+ 'soname_suffix': '',
+ 'soname_pattern_patch': '',
+ 'soname_pattern_abi': '',
+ 'soname_pattern_base': '',
+ 'static_suffix': 'a',
+ 'ar_command': 'ar crs',
+ 'ar_needs_ranlib': False,
+ 'install_root': '/usr/local',
+ 'header_dir': 'include',
+ 'bin_dir': 'bin',
+ 'lib_dir': 'lib',
+ 'doc_dir': 'share/doc',
+ 'building_shared_supported': 'yes',
+ 'install_cmd_data': 'install -m 644',
+ 'install_cmd_exec': 'install -m 755'
+ })
+
+ if lex.soname_pattern_base:
+ self.soname_pattern_base = lex.soname_pattern_base
+ if lex.soname_pattern_patch == '' and lex.soname_pattern_abi == '':
+ self.soname_pattern_patch = lex.soname_pattern_base
+ self.soname_pattern_abi = lex.soname_pattern_base
+            elif lex.soname_pattern_patch != '' and lex.soname_pattern_abi != '':
+ self.soname_pattern_patch = lex.soname_pattern_patch
+ self.soname_pattern_abi = lex.soname_pattern_abi
else:
# base set, only one of patch/abi set
raise ConfigureError("Invalid soname_patterns in %s" % (self.infofile))
-
- if self.soname_pattern_base == '' and self.soname_suffix != '':
- self.soname_pattern_base = "libbotan-{version_major}.%s" % (self.soname_suffix)
- self.soname_pattern_abi = self.soname_pattern_base + ".{abi_rev}"
- self.soname_pattern_patch = self.soname_pattern_abi + ".{version_minor}.{version_patch}"
-
- self.ar_needs_ranlib = bool(self.ar_needs_ranlib)
-
- self.building_shared_supported = (True if self.building_shared_supported == 'yes' else False)
+ else:
+ if lex.soname_suffix:
+ self.soname_pattern_base = "libbotan-{version_major}.%s" % (lex.soname_suffix)
+ self.soname_pattern_abi = self.soname_pattern_base + ".{abi_rev}"
+ self.soname_pattern_patch = self.soname_pattern_abi + ".{version_minor}.{version_patch}"
+ else:
+ # Could not calculate soname_pattern_*
+ # This happens for OSs without shared library support (e.g. nacl, mingw, includeos, cygwin)
+ self.soname_pattern_base = None
+ self.soname_pattern_abi = None
+ self.soname_pattern_patch = None
+
+ self.aliases = lex.aliases
+ self.ar_command = lex.ar_command
+ self.ar_needs_ranlib = bool(lex.ar_needs_ranlib)
+ self.bin_dir = lex.bin_dir
+ self.building_shared_supported = (True if lex.building_shared_supported == 'yes' else False)
+ self.doc_dir = lex.doc_dir
+ self.header_dir = lex.header_dir
+ self.install_cmd_data = lex.install_cmd_data
+ self.install_cmd_exec = lex.install_cmd_exec
+ self.install_root = lex.install_root
+ self.lib_dir = lex.lib_dir
+ self.os_type = lex.os_type
+ self.obj_suffix = lex.obj_suffix
+ self.program_suffix = lex.program_suffix
+ self.static_suffix = lex.static_suffix
+ self.target_features = lex.target_features
def ranlib_command(self):
return 'ranlib' if self.ar_needs_ranlib else 'true'
@@ -1170,6 +1240,7 @@ class OsInfo(InfoObject):
r += sorted(feat_macros())
return r
+
def fixup_proc_name(proc):
proc = proc.lower().replace(' ', '')
for junk in ['(tm)', '(r)']:
@@ -1229,16 +1300,14 @@ def system_cpu_info():
return cpu_info
def guess_processor(archinfo):
- cpu_info = system_cpu_info()
-
- for input in cpu_info:
- if input != '':
- match = canon_processor(archinfo, input)
+ for info_part in system_cpu_info():
+ if info_part:
+ match = canon_processor(archinfo, info_part)
if match != None:
- logging.debug("Matched '%s' to processor '%s'" % (input, match))
+ logging.debug("Matched '%s' to processor '%s'" % (info_part, match))
return match
else:
- logging.debug("Failed to deduce CPU from '%s'" % (input))
+ logging.debug("Failed to deduce CPU from '%s'" % info_part)
raise ConfigureError('Could not determine target CPU; set with --cpu')
@@ -1276,17 +1345,17 @@ def gen_bakefile(build_config, options, external_libs):
def bakefile_sources(file, sources):
for src in sources:
- (dir, filename) = os.path.split(os.path.normpath(src))
- dir = dir.replace('\\', '/')
- _, dir = dir.split('src/', 1)
- file.write('\tsources { src/%s/%s } \n' % (dir, filename))
+ (directory, filename) = os.path.split(os.path.normpath(src))
+ directory = directory.replace('\\', '/')
+ _, directory = directory.split('src/', 1)
+ file.write('\tsources { src/%s/%s } \n' % (directory, filename))
def bakefile_cli_headers(file, headers):
for header in headers:
- (dir, filename) = os.path.split(os.path.normpath(header))
- dir = dir.replace('\\', '/')
- _, dir = dir.split('src/', 1)
- file.write('\theaders { src/%s/%s } \n' % (dir, filename))
+ (directory, filename) = os.path.split(os.path.normpath(header))
+ directory = directory.replace('\\', '/')
+ _, directory = directory.split('src/', 1)
+ file.write('\theaders { src/%s/%s } \n' % (directory, filename))
def bakefile_test_sources(file, sources):
for src in sources:
@@ -1391,9 +1460,9 @@ def gen_makefile_lists(var, build_config, options, modules, cc, arch, osinfo):
def objectfile_list(sources, obj_dir):
for src in sources:
- (dir, file) = os.path.split(os.path.normpath(src))
+ (directory, file) = os.path.split(os.path.normpath(src))
- parts = dir.split(os.sep)
+ parts = directory.split(os.sep)
if 'src' in parts:
parts = parts[parts.index('src')+2:]
elif 'tests' in parts:
@@ -1403,7 +1472,7 @@ def gen_makefile_lists(var, build_config, options, modules, cc, arch, osinfo):
elif file.find('botan_all') != -1:
parts = []
else:
- raise ConfigureError("Unexpected file '%s/%s'" % (dir, file))
+ raise ConfigureError("Unexpected file '%s/%s'" % (directory, file))
if parts != []:
@@ -1438,8 +1507,11 @@ def gen_makefile_lists(var, build_config, options, modules, cc, arch, osinfo):
"""
includes = cc.add_include_dir_option + build_config.include_dir
- includes += (' ' + cc.add_include_dir_option + build_config.external_include_dir) if build_config.external_headers else ''
- includes += (' ' + cc.add_include_dir_option + options.with_external_includedir) if options.with_external_includedir else ''
+ if build_config.external_headers:
+ includes += ' ' + cc.add_include_dir_option + build_config.external_include_dir
+ if options.with_external_includedir:
+ includes += ' ' + cc.add_include_dir_option + options.with_external_includedir
+
for (obj_file, src) in zip(objectfile_list(sources, obj_dir), sources):
yield '%s: %s\n\t$(CXX)%s $(%s_FLAGS) %s %s %s %s$@\n' % (
obj_file, src,
@@ -1539,7 +1611,7 @@ def create_template_vars(build_config, options, modules, cc, arch, osinfo):
return opts
- vars = {
+ variables = {
'version_major': build_config.version_major,
'version_minor': build_config.version_minor,
'version_patch': build_config.version_patch,
@@ -1614,7 +1686,10 @@ def create_template_vars(build_config, options, modules, cc, arch, osinfo):
'cli_link_cmd': cc.binary_link_command_for(osinfo.basename, options) + external_link_cmd(),
'test_link_cmd': cc.binary_link_command_for(osinfo.basename, options) + external_link_cmd(),
- 'link_to': ' '.join([cc.add_lib_option + lib for lib in link_to()] + [cc.add_framework_option + fw for fw in link_to_frameworks()]),
+ 'link_to': ' '.join(
+ [cc.add_lib_option + lib for lib in link_to()] +
+ [cc.add_framework_option + fw for fw in link_to_frameworks()]
+ ),
'module_defines': make_cpp_macros(sorted(flatten([m.defines() for m in modules]))),
@@ -1666,37 +1741,41 @@ def create_template_vars(build_config, options, modules, cc, arch, osinfo):
if options.os == 'darwin' and options.build_shared_lib:
# In order that these executables work from the build directory,
# we need to change the install names
- vars['cli_post_link_cmd'] = 'install_name_tool -change "$(INSTALLED_LIB_DIR)/$(SONAME_ABI)" "@executable_path/$(SONAME_ABI)" $(CLI)'
- vars['test_post_link_cmd'] = 'install_name_tool -change "$(INSTALLED_LIB_DIR)/$(SONAME_ABI)" "@executable_path/$(SONAME_ABI)" $(TEST)'
+ variables['cli_post_link_cmd'] = \
+ 'install_name_tool -change "$(INSTALLED_LIB_DIR)/$(SONAME_ABI)" "@executable_path/$(SONAME_ABI)" $(CLI)'
+ variables['test_post_link_cmd'] = \
+ 'install_name_tool -change "$(INSTALLED_LIB_DIR)/$(SONAME_ABI)" "@executable_path/$(SONAME_ABI)" $(TEST)'
else:
- vars['cli_post_link_cmd'] = ''
- vars['test_post_link_cmd'] = ''
+ variables['cli_post_link_cmd'] = ''
+ variables['test_post_link_cmd'] = ''
- gen_makefile_lists(vars, build_config, options, modules, cc, arch, osinfo)
+ gen_makefile_lists(variables, build_config, options, modules, cc, arch, osinfo)
if options.os != 'windows':
- vars['botan_pkgconfig'] = prefix_with_build_dir(os.path.join(build_config.build_dir,
- build_config.pkg_config_file()))
+ variables['botan_pkgconfig'] = prefix_with_build_dir(
+ os.path.join(build_config.build_dir, build_config.pkg_config_file()))
# 'botan' or 'botan-2'. Used in Makefile and install script
# This can be made consistent over all platforms in the future
- vars['libname'] = 'botan-%d' % (build_config.version_major)
+ variables['libname'] = 'botan-%d' % (build_config.version_major)
else:
if options.with_debug_info:
- vars['libname'] = 'botand'
+ variables['libname'] = 'botand'
else:
- vars['libname'] = 'botan'
+ variables['libname'] = 'botan'
- vars["header_in"] = process_template(os.path.join(options.makefile_dir, 'header.in'), vars)
+ variables["header_in"] = process_template(os.path.join(options.makefile_dir, 'header.in'), variables)
- if vars["makefile_style"] == "gmake":
- vars["gmake_commands_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_commands.in'), vars)
- vars["gmake_dso_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_dso.in'), vars) \
+ if variables["makefile_style"] == "gmake":
+ variables["gmake_commands_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_commands.in'),
+ variables)
+ variables["gmake_dso_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_dso.in'), variables) \
if options.build_shared_lib else ''
- vars["gmake_coverage_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_coverage.in'), vars) \
- if options.with_coverage_info else ''
+ variables["gmake_coverage_in"] = process_template(os.path.join(options.makefile_dir, 'gmake_coverage.in'),
+ variables) \
+ if options.with_coverage_info else ''
- return vars
+ return variables
def choose_modules_to_use(modules, module_policy, archinfo, ccinfo, options):
"""
diff --git a/doc/manual/building.rst b/doc/manual/building.rst
index 9c0dc0ab1..804b421f0 100644
--- a/doc/manual/building.rst
+++ b/doc/manual/building.rst
@@ -61,10 +61,11 @@ we might see lines like::
INFO: Skipping, requires external dependency - boost bzip2 lzma sqlite3 tpm
The ones that are skipped because they require an external
-depedency have to be explicitly asked for, because they rely on third
+dependency have to be explicitly asked for, because they rely on third
party libraries which your system might not have or that you might not
want the resulting binary to depend on. For instance to enable zlib
support, add ``--with-zlib`` to your invocation of ``configure.py``.
+All available modules can be listed with ``--list-modules``.
You can control which algorithms and modules are built using the
options ``--enable-modules=MODS`` and ``--disable-modules=MODS``, for
@@ -84,7 +85,16 @@ For instance::
will set up a build that only includes RSA, OAEP, PSS along with any
required dependencies. A small subset of core features, including AES,
SHA-2, HMAC, and the multiple precision integer library, are always
-loaded.
+loaded. Note that a minimized build does not include any random number
+generator, which is needed for example to generate keys, nonces and IVs.
+See :doc:`rng` on which random number generators are available.
+
+The option ``--module-policy=POL`` enables modules required by and
+disables modules prohibited by a text policy in ``src/build-data/policy``.
+Additional modules can be enabled if not prohibited by the policy.
+Currently available policies include ``bsi``, ``nist`` and ``modern``::
+
+ $ ./configure.py --module-policy=bsi --enable-modules=tls,xts
The script tries to guess what kind of makefile to generate, and it
almost always guesses correctly (basically, Visual C++ uses NMAKE with
@@ -310,7 +320,7 @@ Multiple Builds
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It may be useful to run multiple builds with different configurations.
-Specify ``--build-dir=<dir>`` to set up a build environment in a
+Specify ``--with-build-dir=<dir>`` to set up a build environment in a
different directory.
Setting Distribution Info
diff --git a/src/build-data/os/openbsd.txt b/src/build-data/os/openbsd.txt
index 0e60abd2e..89d291ddc 100644
--- a/src/build-data/os/openbsd.txt
+++ b/src/build-data/os/openbsd.txt
@@ -1,6 +1,8 @@
os_type unix
-soname_suffix "so"
+soname_pattern_base "libbotan-{version_major}.so"
+soname_pattern_abi "libbotan-{version_major}.so.{abi_rev}"
+soname_pattern_patch "libbotan-{version_major}.so.{abi_rev}.{version_minor}"
<target_features>
clock_gettime
diff --git a/src/cli/speed.cpp b/src/cli/speed.cpp
index 772cd6a46..6ea70d3d1 100644
--- a/src/cli/speed.cpp
+++ b/src/cli/speed.cpp
@@ -45,6 +45,10 @@
#include <botan/hmac_rng.h>
#endif
+#if defined(BOTAN_HAS_RDRAND_RNG)
+ #include <botan/rdrand_rng.h>
+#endif
+
#if defined(BOTAN_HAS_FPE_FE1)
#include <botan/fpe_fe1.h>
#endif
@@ -535,6 +539,14 @@ class Speed final : public Command
bench_rng(Botan::system_rng(), "System_RNG", msec, buf_size);
#endif
+#if defined(BOTAN_HAS_RDRAND_RNG)
+ if(Botan::CPUID::has_rdrand())
+ {
+ Botan::RDRAND_RNG rdrand;
+ bench_rng(rdrand, "RDRAND", msec, buf_size);
+ }
+#endif
+
#if defined(BOTAN_HAS_HMAC_DRBG)
for(std::string hash : { "SHA-256", "SHA-384", "SHA-512" })
{
diff --git a/src/lib/asn1/asn1_time.cpp b/src/lib/asn1/asn1_time.cpp
index ef259740b..5890ac7ca 100644
--- a/src/lib/asn1/asn1_time.cpp
+++ b/src/lib/asn1/asn1_time.cpp
@@ -171,12 +171,12 @@ void X509_Time::set_to(const std::string& t_spec, ASN1_Tag spec_tag)
if(spec_tag == GENERALIZED_TIME)
{
- if(t_spec.size() != 13 && t_spec.size() != 15)
+ if(t_spec.size() != 15)
throw Invalid_Argument("Invalid GeneralizedTime string: '" + t_spec + "'");
}
else if(spec_tag == UTC_TIME)
{
- if(t_spec.size() != 11 && t_spec.size() != 13)
+ if(t_spec.size() != 13)
throw Invalid_Argument("Invalid UTCTime string: '" + t_spec + "'");
}
diff --git a/src/lib/entropy/entropy_src.h b/src/lib/entropy/entropy_src.h
index 94c67a18e..fe177657c 100644
--- a/src/lib/entropy/entropy_src.h
+++ b/src/lib/entropy/entropy_src.h
@@ -25,11 +25,11 @@ class RandomNumberGenerator;
class BOTAN_DLL Entropy_Source
{
public:
- /*
+ /**
* Return a new entropy source of a particular type, or null
* Each entropy source may require substantial resources (eg, a file handle
* or socket instance), so try to share them among multiple RNGs, or just
- * use the preconfigured global list accessed by global_entropy_sources()
+ * use the preconfigured global list accessed by Entropy_Sources::global_sources()
*/
static std::unique_ptr<Entropy_Source> create(const std::string& type);
@@ -41,7 +41,7 @@ class BOTAN_DLL Entropy_Source
/**
* Perform an entropy gathering poll
* @param rng will be provided with entropy via calls to add_entropy
- @ @return conservative estimate of actual entropy added to rng during poll
+ * @return conservative estimate of actual entropy added to rng during poll
*/
virtual size_t poll(RandomNumberGenerator& rng) = 0;
diff --git a/src/lib/math/mp/mp_madd.h b/src/lib/math/mp/mp_madd.h
index 2fa1d88ce..e72c16ecb 100644
--- a/src/lib/math/mp/mp_madd.h
+++ b/src/lib/math/mp/mp_madd.h
@@ -40,7 +40,7 @@ namespace Botan {
#if defined(BOTAN_USE_GCC_INLINE_ASM)
#define BOTAN_MP_USE_X86_32_ASM
#define ASM(x) x "\n\t"
- #elif defined(BOTAN_TARGET_COMPILER_IS_MSVC)
+ #elif defined(BOTAN_BUILD_COMPILER_IS_MSVC)
#define BOTAN_MP_USE_X86_32_MSVC_ASM
#endif
diff --git a/src/lib/utils/compiler.h b/src/lib/utils/compiler.h
index ed8fad9de..898412501 100644
--- a/src/lib/utils/compiler.h
+++ b/src/lib/utils/compiler.h
@@ -140,8 +140,8 @@
#define BOTAN_PARALLEL_SIMD_FOR _Pragma("simd") for
#elif defined(BOTAN_TARGET_HAS_OPENMP)
#define BOTAN_PARALLEL_SIMD_FOR _Pragma("omp simd") for
-#elif defined(BOTAN_TARGET_COMPILER_IS_GCC)
- #define BOTAN_PARALLEL_FOR _Pragma("GCC ivdep") for
+#elif defined(BOTAN_BUILD_COMPILER_IS_GCC)
+ #define BOTAN_PARALLEL_SIMD_FOR _Pragma("GCC ivdep") for
#else
#define BOTAN_PARALLEL_SIMD_FOR for
#endif
diff --git a/src/lib/utils/os_utils.cpp b/src/lib/utils/os_utils.cpp
index fc401c3c1..fe45ad82f 100644
--- a/src/lib/utils/os_utils.cpp
+++ b/src/lib/utils/os_utils.cpp
@@ -360,26 +360,17 @@ int OS::run_cpu_instruction_probe(std::function<int ()> probe_fn)
if(rc != 0)
throw Exception("run_cpu_instruction_probe sigaction failed");
- try
- {
- rc = ::sigsetjmp(g_sigill_jmp_buf, /*save sigs*/1);
+ rc = ::sigsetjmp(g_sigill_jmp_buf, /*save sigs*/1);
- if(rc == 0)
- {
- // first call to sigsetjmp
- probe_result = probe_fn();
- }
- else if(rc == 1)
- {
- // non-local return from siglongjmp in signal handler: return error
- probe_result = -1;
- }
- else
- throw Exception("run_cpu_instruction_probe unexpected sigsetjmp return value");
+ if(rc == 0)
+ {
+ // first call to sigsetjmp
+ probe_result = probe_fn();
}
- catch(...)
+ else if(rc == 1)
{
- probe_result = -2;
+ // non-local return from siglongjmp in signal handler: return error
+ probe_result = -1;
}
// Restore old SIGILL handler, if any
@@ -392,14 +383,7 @@ int OS::run_cpu_instruction_probe(std::function<int ()> probe_fn)
// Windows SEH
__try
{
- try
- {
- probe_result = probe_fn();
- }
- catch(...)
- {
- probe_result = -2;
- }
+ probe_result = probe_fn();
}
__except(::GetExceptionCode() == EXCEPTION_ILLEGAL_INSTRUCTION ?
EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH)
diff --git a/src/lib/utils/os_utils.h b/src/lib/utils/os_utils.h
index a1693bcc5..cae1192f1 100644
--- a/src/lib/utils/os_utils.h
+++ b/src/lib/utils/os_utils.h
@@ -98,9 +98,10 @@ void free_locked_pages(void* ptr, size_t length);
* thread safe. It should only be called in a single-threaded context
* (ie, at static init time).
*
+* If probe_fn throws an exception the result is undefined.
+*
* Return codes:
* -1 illegal instruction detected
-* -2 exception thrown
*/
int BOTAN_DLL run_cpu_instruction_probe(std::function<int ()> probe_fn);
diff --git a/src/lib/x509/x509path.h b/src/lib/x509/x509path.h
index a193ebe55..6dd3c21f7 100644
--- a/src/lib/x509/x509path.h
+++ b/src/lib/x509/x509path.h
@@ -229,7 +229,7 @@ Path_Validation_Result BOTAN_DLL x509_path_validate(
* @param hostname if not empty, compared against the DNS name in end_cert
* @param usage if not set to UNSPECIFIED, compared against the key usage in end_cert
* @param validation_time what reference time to use for validation
-* @param ocsp_timeout timeoutput for OCSP operations, 0 disables OCSP check
+* @param ocsp_timeout timeout for OCSP operations, 0 disables OCSP check
* @param ocsp_resp additional OCSP responses to consider (eg from peer)
* @return result of the path validation
*/
diff --git a/src/scripts/ci/appveyor.yml b/src/scripts/ci/appveyor.yml
index bcc5b118a..eef1e758c 100644
--- a/src/scripts/ci/appveyor.yml
+++ b/src/scripts/ci/appveyor.yml
@@ -1,10 +1,8 @@
-os: Visual Studio 2015
-
# Let's call MSVS 2015 the default compiler, 64 bit the default architecture,
# release the default configuration and --enable-shared the default mode.
#
# Build jobs
-# 1. four basic builds: 32/64bit on MSVS2013/2015
+# 1. six basic builds: 32/64bit on MSVS2013/2015/2017
# 2. add static lib and amalgamation
# 3. add a debug build on MSVS2013/2015
#
@@ -18,36 +16,54 @@ environment:
PLATFORM: x86
CONFIG:
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- MSVS: 2013
PLATFORM: x86_amd64
CONFIG:
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- MSVS: 2015
PLATFORM: x86
CONFIG:
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- MSVS: 2015
PLATFORM: x86_amd64
CONFIG:
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
+ - MSVS: 2017
+ PLATFORM: x86
+ CONFIG:
+ MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+ - MSVS: 2017
+ PLATFORM: x86_amd64
+ CONFIG:
+ MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
# 2
- MSVS:
PLATFORM:
CONFIG:
MODE: --disable-shared
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- MSVS:
PLATFORM:
CONFIG:
MODE: --amalgamation
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
# 3
- MSVS: 2013
PLATFORM:
CONFIG: Debug
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- MSVS: 2015
PLATFORM:
CONFIG: Debug
MODE:
+ APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
install:
# Set defaults
@@ -66,6 +82,9 @@ install:
- if %MSVS% == 2015 (
call "%ProgramFiles(x86)%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %PLATFORM%
)
+ - if %MSVS% == 2017 (
+ call "%ProgramFiles(x86)%\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" %PLATFORM%
+ )
- cl # check compiler version
- appveyor DownloadFile http://download.qt.io/official_releases/jom/jom.zip -FileName jom.zip
diff --git a/src/scripts/install.py b/src/scripts/install.py
index 3384534d6..0a9683ca0 100755
--- a/src/scripts/install.py
+++ b/src/scripts/install.py
@@ -176,14 +176,14 @@ def main(args = None):
copy_executable(os.path.join(out_dir, soname_patch),
os.path.join(lib_dir, soname_patch))
- prev_cwd = os.getcwd()
-
- try:
- os.chdir(lib_dir)
- force_symlink(soname_patch, soname_abi)
- force_symlink(soname_patch, soname_base)
- finally:
- os.chdir(prev_cwd)
+ if target_os != "openbsd":
+ prev_cwd = os.getcwd()
+ try:
+ os.chdir(lib_dir)
+ force_symlink(soname_patch, soname_abi)
+ force_symlink(soname_patch, soname_base)
+ finally:
+ os.chdir(prev_cwd)
copy_executable(os.path.join(out_dir, app_exe), os.path.join(bin_dir, app_exe))
diff --git a/src/tests/test_os_utils.cpp b/src/tests/test_os_utils.cpp
index a077b529f..5153338d7 100644
--- a/src/tests/test_os_utils.cpp
+++ b/src/tests/test_os_utils.cpp
@@ -147,10 +147,6 @@ class OS_Utils_Tests : public Test
result.confirm("Correct result returned by working probe fn", run_rc == 5);
- std::function<int ()> throw_fn = []() -> int { throw 3.14159; return 5; };
- const int throw_rc = Botan::OS::run_cpu_instruction_probe(throw_fn);
- result.confirm("Error return if probe function threw exception", throw_rc < 0);
-
std::function<int ()> crash_probe;
#if defined(BOTAN_TARGET_COMPILER_IS_MSVC)
diff --git a/src/tests/unit_x509.cpp b/src/tests/unit_x509.cpp
index b51914ee8..e23017738 100644
--- a/src/tests/unit_x509.cpp
+++ b/src/tests/unit_x509.cpp
@@ -57,8 +57,8 @@ Botan::X509_Cert_Options req_opts1(const std::string& algo)
opts.dns = "botan.randombit.net";
opts.email = "[email protected]";
- opts.not_before("1601012000Z");
- opts.not_after("3001012000Z");
+ opts.not_before("160101200000Z");
+ opts.not_after("300101200000Z");
if(algo == "RSA")
{
@@ -170,23 +170,21 @@ Test::Result test_x509_dates()
Botan::X509_Time time;
result.confirm("unset time not set", !time.time_is_set());
- time = Botan::X509_Time("0802011822Z", Botan::ASN1_Tag::UTC_TIME);
+ time = Botan::X509_Time("080201182200Z", Botan::ASN1_Tag::UTC_TIME);
result.confirm("time set after construction", time.time_is_set());
result.test_eq("time readable_string", time.readable_string(), "2008/02/01 18:22:00 UTC");
- const std::vector<std::string> valid = {
- "0802010000Z",
- "0802011724Z",
- "0406142334Z",
- "9906142334Z",
- "0006142334Z",
+ time = Botan::X509_Time("200305100350Z", Botan::ASN1_Tag::UTC_TIME);
+ result.test_eq("UTC_TIME readable_string", time.readable_string(), "2020/03/05 10:03:50 UTC");
- "080201000000Z",
- "080201172412Z",
- "040614233433Z",
- "990614233444Z",
- "000614233455Z",
- };
+ time = Botan::X509_Time("200305100350Z", Botan::ASN1_Tag::UTC_OR_GENERALIZED_TIME);
+ result.test_eq("UTC_OR_GENERALIZED_TIME from UTC_TIME readable_string", time.readable_string(), "2020/03/05 10:03:50 UTC");
+
+ time = Botan::X509_Time("20200305100350Z", Botan::ASN1_Tag::UTC_OR_GENERALIZED_TIME);
+ result.test_eq("UTC_OR_GENERALIZED_TIME from GENERALIZED_TIME readable_string", time.readable_string(), "2020/03/05 10:03:50 UTC");
+
+ time = Botan::X509_Time("20200305100350Z", Botan::ASN1_Tag::GENERALIZED_TIME);
+ result.test_eq("GENERALIZED_TIME readable_string", time.readable_string(), "2020/03/05 10:03:50 UTC");
// Dates that are valid per X.500 but rejected as unsupported
const std::vector<std::string> valid_but_unsup = {
@@ -205,7 +203,16 @@ Test::Result test_x509_dates()
"000614233455+0530",
};
- const std::vector<std::string> invalid = {
+ // valid length 13
+ const std::vector<std::string> valid_utc = {
+ "080201000000Z",
+ "080201172412Z",
+ "040614233433Z",
+ "990614233444Z",
+ "000614233455Z",
+ };
+
+ const std::vector<std::string> invalid_utc = {
"",
" ",
"2008`02-01",
@@ -213,6 +220,13 @@ Test::Result test_x509_dates()
"2000-02-01 17",
"999921",
+ // No seconds
+ "0802010000Z",
+ "0802011724Z",
+ "0406142334Z",
+ "9906142334Z",
+ "0006142334Z",
+
// valid length 13 -> range check
"080201000061Z", // seconds too big (61)
"080201000060Z", // seconds too big (60, leap seconds not covered by the standard)
@@ -247,25 +261,84 @@ Test::Result test_x509_dates()
"2\n2211221122Z",
// wrong time zone
- "0802010000",
- "0802010000z"
+ "080201000000",
+ "080201000000z",
+
+ // Fractional seconds
+ "170217180154.001Z",
+
+ // Timezone offset
+ "170217180154+0100",
+
+ // Extra digits
+ "17021718015400Z",
+
+ // Non-digits
+ "17021718015aZ",
+
+ // Trailing garbage
+ "170217180154Zlongtrailinggarbage",
+
+ // Swapped type
+ "20170217180154Z",
+ };
+
+ // valid length 15
+ const std::vector<std::string> valid_generalized_time = {
+ "20000305100350Z",
+ };
+
+ const std::vector<std::string> invalid_generalized = {
+ // No trailing Z
+ "20000305100350",
+
+ // No seconds
+ "200003051003Z",
+
+ // Fractional seconds
+ "20000305100350.001Z",
+
+ // Timezone offset
+ "20170217180154+0100",
+
+ // Extra digits
+ "2017021718015400Z",
+
+ // Non-digits
+ "2017021718015aZ",
+
+ // Trailing garbage
+ "20170217180154Zlongtrailinggarbage",
+
+ // Swapped type
+ "170217180154Z",
};
- for(auto&& v : valid)
+ for(auto&& v : valid_but_unsup)
+ {
+ result.test_throws("valid but unsupported", [v]() { Botan::X509_Time t(v, Botan::ASN1_Tag::UTC_TIME); });
+ }
+
+ for(auto&& v : valid_utc)
{
Botan::X509_Time t(v, Botan::ASN1_Tag::UTC_TIME);
}
- for(auto&& v : valid_but_unsup)
+ for(auto&& v : valid_generalized_time)
{
- result.test_throws("valid but unsupported", [v]() { Botan::X509_Time t(v, Botan::ASN1_Tag::UTC_TIME); });
+ Botan::X509_Time t(v, Botan::ASN1_Tag::GENERALIZED_TIME);
}
- for(auto&& v : invalid)
+ for(auto&& v : invalid_utc)
{
result.test_throws("invalid", [v]() { Botan::X509_Time t(v, Botan::ASN1_Tag::UTC_TIME); });
}
+ for (auto&& v : invalid_generalized)
+ {
+ result.test_throws("invalid", [v]() { Botan::X509_Time t(v, Botan::ASN1_Tag::GENERALIZED_TIME); });
+ }
+
return result;
}