/*
 * Copyright 2016-2017 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "../testutil.h"

#include <string.h>
#include <assert.h>
#include <stdarg.h>
#include <stdlib.h>

#include "../../e_os.h"
#include <openssl/bio.h>
#include <openssl/crypto.h>
#include <openssl/err.h>

/*
 * Declares the structures needed to register each test case function.
 */
typedef struct test_info {
    const char *test_case_name;
    int (*test_fn) ();
    int (*param_test_fn)(int idx);
    int num;

    /* flags */
    unsigned int subtest:1;
} TEST_INFO;

static TEST_INFO all_tests[1024];
static int num_tests = 0;
/*
 * A parameterised test runs a loop of test cases. |num_test_cases|
 * counts the total number of test cases across all tests.
 */
static int num_test_cases = 0;

void add_test(const char *test_case_name, int (*test_fn) ())
{
    assert(num_tests != OSSL_NELEM(all_tests));
    all_tests[num_tests].test_case_name = test_case_name;
    all_tests[num_tests].test_fn = test_fn;
    all_tests[num_tests].num = -1;
    ++num_tests;
    ++num_test_cases;
}

void add_all_tests(const char *test_case_name, int (*test_fn)(int idx),
                   int num, int subtest)
{
    assert(num_tests != OSSL_NELEM(all_tests));
    all_tests[num_tests].test_case_name = test_case_name;
    all_tests[num_tests].param_test_fn = test_fn;
    all_tests[num_tests].num = num;
    all_tests[num_tests].subtest = subtest;
    ++num_tests;
    num_test_cases += num;
}

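/*
 * Usage sketch (hypothetical caller, not part of this file): testutil.h
 * wraps these registration functions in the ADD_TEST and ADD_ALL_TESTS
 * macros, so a typical test binary looks roughly like this:
 *
 *     static int test_foo(void)
 *     {
 *         return 1;                         (1 == pass, 0 == fail)
 *     }
 *
 *     static int test_bar(int idx)          (called once per index)
 *     {
 *         return idx >= 0;
 *     }
 *
 *     int main(int argc, char *argv[])
 *     {
 *         setup_test();
 *         ADD_TEST(test_foo);               (add_test("test_foo", ...))
 *         ADD_ALL_TESTS(test_bar, 3);       (three parameterised cases)
 *         return finish_test(run_tests(argv[0]));
 *     }
 */
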
static int level = 0;

int subtest_level(void)
{
    return level;
}

#ifndef OPENSSL_NO_CRYPTO_MDEBUG
static int should_report_leaks(void)
{
    /*
     * When compiled with enable-crypto-mdebug, OPENSSL_DEBUG_MEMORY=0
     * can be used to disable leak checking at runtime.
     * Note this only works when running the test binary manually;
     * the test harness always enables OPENSSL_DEBUG_MEMORY.
     */
    char *mem_debug_env = getenv("OPENSSL_DEBUG_MEMORY");

    return mem_debug_env == NULL
        || (strcmp(mem_debug_env, "0") && strcmp(mem_debug_env, ""));
}
#endif

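/*
 * For example (manual runs only; assumes a build configured with
 * enable-crypto-mdebug, and a hypothetical test binary name):
 *
 *     OPENSSL_DEBUG_MEMORY=0 ./foo_test     (leak checking disabled)
 *     OPENSSL_DEBUG_MEMORY=1 ./foo_test     (leak checking enabled)
 *     ./foo_test                            (unset: enabled by default)
 */
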
static int err_cb(const char *str, size_t len, void *u)
{
    return test_puts_stderr(str);
}

void setup_test(void)
{
    char *TAP_levels = getenv("HARNESS_OSSL_LEVEL");

    test_open_streams();

    level = TAP_levels != NULL ? 4 * atoi(TAP_levels) : 0;

#ifndef OPENSSL_NO_CRYPTO_MDEBUG
    if (should_report_leaks()) {
        CRYPTO_set_mem_debug(1);
        CRYPTO_mem_ctrl(CRYPTO_MEM_CHECK_ON);
    }
#endif
}

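/*
 * Worked example of the level computation above: each HARNESS_OSSL_LEVEL
 * step indents this binary's TAP output by four spaces, nesting it one
 * level deeper in the harness's own TAP stream; e.g. HARNESS_OSSL_LEVEL=2
 * yields level == 8, so every TAP line below is prefixed with 8 spaces.
 */
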
int finish_test(int ret)
{
#ifndef OPENSSL_NO_CRYPTO_MDEBUG
    if (should_report_leaks() && CRYPTO_mem_leaks_cb(err_cb, NULL) <= 0)
        return EXIT_FAILURE;
#endif

    test_close_streams();

    return ret;
}

static void finalize(int success)
{
    if (success)
        ERR_clear_error();
    else
        ERR_print_errors_cb(err_cb, NULL);
}

static void helper_printf_stdout(const char *fmt, ...)
{
    va_list ap;

    va_start(ap, fmt);
    test_vprintf_stdout(fmt, ap);
    va_end(ap);
}

int run_tests(const char *test_prog_name)
{
    int num_failed = 0;
    char *verdict = NULL;
    int i, j;

    helper_printf_stdout("%*s%d..%d\n", level, "", 1, num_tests);
    test_flush_stdout();

    for (i = 0; i != num_tests; ++i) {
        if (all_tests[i].num == -1) {
            int ret = all_tests[i].test_fn();

            verdict = "ok";
            if (!ret) {
                verdict = "not ok";
                ++num_failed;
            }
            helper_printf_stdout("%*s%s %d - %s\n", level, "", verdict, i + 1,
                                 all_tests[i].test_case_name);
            test_flush_stdout();
            finalize(ret);
        } else {
            int num_failed_inner = 0;

            level += 4;
            if (all_tests[i].subtest) {
                helper_printf_stdout("%*s# Subtest: %s\n", level, "",
                                     all_tests[i].test_case_name);
                helper_printf_stdout("%*s%d..%d\n", level, "", 1,
                                     all_tests[i].num);
                test_flush_stdout();
            }

            for (j = 0; j < all_tests[i].num; j++) {
                int ret = all_tests[i].param_test_fn(j);

                if (!ret)
                    ++num_failed_inner;

                finalize(ret);

                if (all_tests[i].subtest) {
                    /*
                     * The failure was already counted above; only emit the
                     * per-case TAP verdict here.
                     */
                    verdict = "ok";
                    if (!ret)
                        verdict = "not ok";
                    helper_printf_stdout("%*s%s %d\n", level, "", verdict,
                                         j + 1);
                    test_flush_stdout();
                }
            }

            level -= 4;
            verdict = "ok";
            if (num_failed_inner) {
                verdict = "not ok";
                ++num_failed;
            }
            helper_printf_stdout("%*s%s %d - %s\n", level, "", verdict, i + 1,
                                 all_tests[i].test_case_name);
            test_flush_stdout();
        }
    }

    if (num_failed != 0)
        return EXIT_FAILURE;
    return EXIT_SUCCESS;
}

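/*
 * Illustrative TAP stream from the loop above, for a binary that registered
 * one plain test and one three-case subtest (test names are hypothetical):
 *
 *     1..2
 *     ok 1 - test_foo
 *         # Subtest: test_bar
 *         1..3
 *         ok 1
 *         ok 2
 *         ok 3
 *     ok 2 - test_bar
 */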