Add TEST_ASSUME macro to allow skipping tests at runtime

This commit adds a macro TEST_ASSUME to the test infrastructure
which allows skipping tests at runtime when a required condition
turns out not to be met.
This commit is contained in:
Hanno Becker 2019-07-05 13:31:30 +01:00
parent 66b7edb108
commit e69d0150d7
3 changed files with 43 additions and 7 deletions

View file

@@ -116,6 +116,21 @@ typedef enum
} \
} while( 0 )
/**
 * \brief This macro evaluates the expression passed to it and, if the
 * expression doesn't evaluate to 'true', records the condition and
 * its source location via test_skip() and jumps to the enclosing
 * 'exit' label, marking the currently running test as skipped
 * (rather than failed).
 *
 * NOTE(review): the 'exit' label must exist in the calling test
 * function — presumably guaranteed by the test-code generator.
 *
 * \param TEST The test expression to be tested.
 */
#define TEST_ASSUME( TEST ) \
do { \
if( ! (TEST) ) \
{ \
test_skip( #TEST, __LINE__, __FILE__ ); \
goto exit; \
} \
} while( 0 )
#if defined(MBEDTLS_CHECK_PARAMS) && !defined(MBEDTLS_PARAM_FAILED_ALT)
/**
* \brief This macro tests the statement passed to it as a test step or
@@ -249,10 +264,17 @@ typedef enum
/*----------------------------------------------------------------------------*/
/* Global variables */
/* Outcome of a single test case. Values are fixed explicitly so that
 * 'success' is guaranteed to be zero. */
typedef enum
{
    TEST_RESULT_SUCCESS = 0, /* Test ran to completion and passed.   */
    TEST_RESULT_FAILED  = 1, /* Test ran and at least one check failed. */
    TEST_RESULT_SKIPPED = 2  /* Test was skipped, e.g. via TEST_ASSUME. */
} test_result_t;
static struct
{
paramfail_test_state_t paramfail_test_state;
int failed;
test_result_t result;
const char *test;
const char *filename;
int line_no;
@@ -288,7 +310,15 @@ jmp_buf jmp_tmp;
/* Record a test failure: mark the current test as failed in the global
 * test_info state and store the failing expression text together with
 * its source location for later reporting.
 * NOTE(review): both the old flag-based line (test_info.failed) and the
 * new enum-based line (test_info.result) appear here because this is a
 * rendered diff; in the post-commit source only the 'result' assignment
 * remains — confirm against the repository. */
void test_fail( const char *test, int line_no, const char* filename )
{
test_info.failed = 1;
test_info.result = TEST_RESULT_FAILED;
test_info.test = test;
test_info.line_no = line_no;
test_info.filename = filename;
}
void test_skip( const char *test, int line_no, const char* filename )
{
test_info.result = TEST_RESULT_SKIPPED;
test_info.test = test;
test_info.line_no = line_no;
test_info.filename = filename;
@@ -327,7 +357,7 @@ void mbedtls_param_failed( const char *failure_condition,
/* Record the location of the failure, but not as a failure yet, in case
* it was part of the test */
test_fail( failure_condition, line, file );
test_info.failed = 0;
test_info.result = TEST_RESULT_SUCCESS;
longjmp( param_fail_jmp, 1 );
}

View file

@@ -498,7 +498,8 @@ int execute_tests( int argc , const char ** argv )
if( ( ret = get_line( file, buf, sizeof(buf) ) ) != 0 )
break;
mbedtls_fprintf( stdout, "%s%.66s", test_info.failed ? "\n" : "", buf );
mbedtls_fprintf( stdout, "%s%.66s",
test_info.result == TEST_RESULT_FAILED ? "\n" : "", buf );
mbedtls_fprintf( stdout, " " );
for( i = strlen( buf ) + 1; i < 67; i++ )
mbedtls_fprintf( stdout, "." );
@@ -545,7 +546,7 @@ int execute_tests( int argc , const char ** argv )
// If there are no unmet dependencies execute the test
if( unmet_dep_count == 0 )
{
test_info.failed = 0;
test_info.result = TEST_RESULT_SUCCESS;
test_info.paramfail_test_state = PARAMFAIL_TESTSTATE_IDLE;
#if defined(__unix__) || (defined(__APPLE__) && defined(__MACH__))
@@ -610,10 +611,15 @@ int execute_tests( int argc , const char ** argv )
}
else if( ret == DISPATCH_TEST_SUCCESS )
{
if( test_info.failed == 0 )
if( test_info.result == TEST_RESULT_SUCCESS )
{
mbedtls_fprintf( stdout, "PASS\n" );
}
else if( test_info.result == TEST_RESULT_SKIPPED )
{
mbedtls_fprintf( stdout, "----\n" );
total_skipped++;
}
else
{
total_errors++;

View file

@@ -159,7 +159,7 @@ void execute_function_ptr(TestWrapper_t fp, void **params)
else
{
/* Unexpected parameter validation error */
test_info.failed = 1;
test_info.result = TEST_RESULT_FAILED;
}
memset( param_fail_jmp, 0, sizeof(jmp_buf) );