    // proxy.cpp
    // NOTE: This is a generic file. Actual unit tests are located in
    //       unit_tests.cpp.
    // By Jack Toole for CS 225 spring 2011
    
    // For strsignal:
    #ifndef _GNU_SOURCE
    	#define _GNU_SOURCE
    #endif
    
#include <errno.h>
#include <signal.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <sys/time.h>
#include <unistd.h>

#include <climits>
#include <iomanip>
#include <limits>
    
    #include "memcheck.h"
    #include "monad_shared.h"
    #include "pipestream.h"
    #include "proxy.h"
    #include "util.h"
    #include "valgrind.h"
    
    using std::string;
    using std::vector;
    using std::pair;
    
    using namespace util;
    using namespace monad_shared;
    
    
    namespace opts
    {
    
    	bool verbose = false;
    
    	bool redirect_test_output = true;
    }
    
    
    
    OUTPUT_CHECK(equals)
    {
    	return output == expected;
    }
    
    
    OUTPUT_CHECK(contains)
    {
    	return output.find(expected) != string::npos;
    }
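// Output checks are registered by name (via add_output_check below) and run
// against a test's captured terminal output in after_test_success(). As a
// hypothetical sketch, assuming the OUTPUT_CHECK macro expands to a function
// taking (output, expected) string parameters as the two checks above suggest,
// a prefix check could be added the same way:
//
//     OUTPUT_CHECK(starts_with)
//     {
//         return output.compare(0, expected.length(), expected) == 0;
//     }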
    
    
    
    namespace proxy
    {
    	vector<unit_test> * global_tests = NULL;
    	output_check_map * global_output_checks = NULL;
    
    	double runtime_ratio[TIME_COUNT] =
    	{
    		1.0,
    		2.0,
    		2.30103, // for 200/100
    
    //		2.82842712,
    
    		4.0,
    		std::numeric_limits<double>::max()
    	};
    	const char * runtime_str[TIME_COUNT] =
    	{
    		"O(1)",
    		"O(n)",
    		"O(nlogn)",
    
    //		"O(nrootn)",
    
    		"O(n^2)",
    		"O(infinity)"
    	};
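	// These ratios appear to be f(200)/f(100) for each complexity class f,
	// i.e. the expected slowdown when the input size doubles from 100 to 200:
	//   O(1):      1
	//   O(n):      200/100                          = 2
	//   O(nlogn):  (200*log 200)/(100*log 100)     ~= 2.30103
	//   O(nrootn): (200*sqrt(200))/(100*sqrt(100))  = 2*sqrt(2) ~= 2.82843 (disabled above)
	//   O(n^2):    (200*200)/(100*100)              = 4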
    }
    
    
    
    
    int main(int argc, char ** argv)
    {
    	using namespace proxy;
    
    	// set up EXIT_IF_ERROR messages
    	output::set_error_message();
    
    	// Set up run-time environment
    	RunTimeEnvironment env(global_tests, global_output_checks);
    
    	// Set up the tests
    	RunTests runner(argc, argv, env);
    
    	// Execute
    	return runner.execute();
    }
    
    
namespace proxy
{
    
    // class add_unit_test
    
add_unit_test::add_unit_test(const char * name, unit_test::function func,
                             int32_t points_in_part, int32_t points_in_total, long timeout,
                                 bool is_valgrind)
    {
    	lazy_init_global_tests();
    	int32_t points = get_points(points_in_total, points_in_part);
    	// Add to global tests vector
    
    	global_tests->push_back(unit_test(name, func, points, timeout, is_valgrind));
    
    }
    
    
// Lazily create the global tests vector on first use
    void add_unit_test::lazy_init_global_tests()
    {
    	if (global_tests == NULL)
    		global_tests = new std::vector<unit_test>;
    }
    		
    
// Select which points value applies: per-part or whole-MP total
    int32_t add_unit_test::get_points(int32_t points_in_total, int32_t points_in_part)
    {
    	#if MP_PART(NO_MP_PART)
    		return points_in_total;
    	#else
    		return points_in_part;
    	#endif
    }
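// Note: MP_PART is presumably a compile-time switch. A build covering the
// whole MP (NO_MP_PART) weights each test by its share of the total
// assignment; a single-part build uses the per-part weighting instead.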
    
    
    
    // class add_output_check
    add_output_check::add_output_check(const char * name, output_check func)
    {
    	if (global_output_checks == NULL)
    		global_output_checks = new output_check_map;
    	(*global_output_checks)[name] = func;
    }
    
    
    
// class RunTimeEnvironment
RunTimeEnvironment::RunTimeEnvironment(vector<unit_test> *& init_tests,
                                       output_check_map *& init_output_checks)
	: timeout_signum0(SIGPROF),
	  timeout_signum1(SIGALRM),
    	  max_output_length(8*1024), //arbitrary
    	  single_test_passed_string("Result: passed"),
    	  heap_tests(init_tests),
    	  output_checks(init_output_checks)
    {
    	// Copy globals to the RunTimeEnvironment space
    	// And remove them from the global scope
    	static int8_t singleton = 0;
    	EXIT_IF_ERROR(singleton++ != 0, "There may only be one runtime environment");
    	EXIT_IF_ERROR(heap_tests == NULL, "No test cases found");
    	if (output_checks == NULL)
    		output_checks = new output_check_map;
    	
    	init_tests = NULL;
    	init_output_checks = NULL;
    }
    
    
    int RunTimeEnvironment::cleanup_globals()
    {
    	if (heap_tests    != NULL) delete heap_tests;
    	if (output_checks != NULL) delete output_checks;
    	heap_tests    = NULL;
    	output_checks = NULL;
    	return 0;
    }
    
    
    
    // class RunTests
    RunTests::RunTests(int argc, char ** argv, RunTimeEnvironment & env)
    	: environment(env)
    {
    	process_args(argc, argv); // sets up mode and test_arg
    
//	redirect_glibc_to_stderr();
    }
    
    
    void RunTests::redirect_glibc_to_stderr()
    {
	// Turn off glibc's default write-to-terminal behaviour for fatal errors,
	// which bypasses our stderr capture and surfaces only as an abort;
	// LIBC_FATAL_STDERR_ redirects those reports to stderr so the pipe sees them.
	// Unfortunately, setenv leaves still-reachable memory under valgrind,
	// so skip it when running under valgrind.
    	if (RUNNING_ON_VALGRIND == 0)
    		setenv("LIBC_FATAL_STDERR_","1",1);
    		//setenv("MALLOC_CHECK_","2",1);
    }
    
    
    int32_t RunTests::execute()
    {
    	int32_t return_code = execute_by_mode();
    	environment.cleanup_globals();
    	return return_code;
    }
    
    
    int32_t RunTests::execute_by_mode()
    {
    	if (mode == SINGLE_TEST)
    		return run_single_test(test_arg);
    	else // if (mode == ALL_TESTS)
    		return run_all_tests();
    }
    
    
    void RunTests::process_args(int argc, char ** argv)
    {
    	if (argc > 2)
    	{
    		cout << "Usage: " << argv[0] << "[testname]" << endl;
    		exit(0);
    	}
    
    	if (argc == 2 && strcasecmp(argv[1], "--info") == 0)
    	{
    		printInfo();
    		exit(0);
    	}
    
    	if (argc == 1 || strcmp(argv[1], "all") == 0)
    		mode = ALL_TESTS;
    	else
    	{
    		mode = SINGLE_TEST;
    		test_arg = argv[1];
    	}
    }
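// Accepted invocations, per the checks above:
//   ./proxy              run all registered tests
//   ./proxy all          same as the no-argument form
//   ./proxy <testname>   run a single test (also used by the recursive valgrind exec)
//   ./proxy --info       call printInfo() and exit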
    
    
    int32_t RunTests::run_single_test(const char * testname)
    {
    	vector<unit_test> & tests = *environment.heap_tests;
    
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    		if (strcmp(tests[test_i].name, testname) == 0)
    			return run_single_test(tests[test_i]);
    
    	cout << "Test not found" << endl;
    	exit(-1);
    }
    
    
    int32_t RunTests::run_single_test(unit_test & curr_test)
    {
    	cout << "Running " << curr_test.name << " [worth "
    		 << curr_test.points << " points, output below]" << endl;
    
    
    	bool is_parent_process = execute_test(curr_test, false);
    
    	if (!is_parent_process)
    		return environment.cleanup_globals();
    
    	string & error  = curr_test.errormsg;
    
    	handle_single_test_output(curr_test.output);
    
    	if (error == "")
    		error = "Unexpectedly Aborted";
    
    	if (curr_test.passed())
    		cout << environment.single_test_passed_string << endl;
    	else
    		cout << "Result: FAILED:" << endl << error << endl;
    
    	return curr_test.valgrind_flags;
    }
    
    
    void RunTests::handle_single_test_output(const string & output)
    {
    	if (output != "")
    	{
    		cout << output;
    		if (output[output.size()-1] != '\n')
    			cout << endl;
    	}
    }
    
    
    int RunTests::run_all_tests()
    {
    	vector<unit_test> & tests = *environment.heap_tests;
    
    	output::header("Running tests");
    
    	int32_t points_sum = get_sum_points();
    	int32_t max_testname_len = get_max_testname_length();
    	int32_t max_points_len   = get_max_points_length();
    
    
    	if (points_sum != 0 && points_sum < 100)
    		output::warning("Unit test scores sum to " + to_string(points_sum) +
    
    toole1's avatar
    toole1 committed
    		                ", should be at least 100");
    
    	if (points_sum > 125)
    		output::warning("Unit test scores sum to " + to_string(points_sum) +
    		                ", this will overflow the return value. Should be <= 125");
    
    
    	int32_t score = 0;
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    	{
    		unit_test & curr_test = tests[test_i];
    		output::testname(curr_test, max_testname_len, max_points_len);
    
    
    		bool is_parent_process = execute_test(curr_test, true);
    
    		// Check for the child process
    		// This is unfortunately necessary (instead of an exit) to clean up
    		// all the memory in use in main and the global space for valgrind
    		if (!is_parent_process)
    			return environment.cleanup_globals();
    
    		// Check for success
    		if (curr_test.passed())
    			score += curr_test.points;
    		output_single_test_passfail(curr_test);
    	}
    
    	cout << endl << endl;
    	output_detailed_info_if_any_failed(score);
    
    	output::total_score(score, get_sum_points());
    
    	
    	return score;
    }
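// Note that the score doubles as the process exit status, which the shell
// truncates to 8 bits; this is why the warning above asks for totals <= 125.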
    
    int32_t RunTests::get_sum_points()
    {
    
    	static int32_t cached_sum = INT_MIN;
    	if (cached_sum == INT_MIN)
    
    	{
    		vector<unit_test> & tests = *environment.heap_tests;
    		int32_t points_sum = 0;
    		for (size_t test_i = 0; test_i < tests.size(); test_i++)
    			points_sum += tests[test_i].points;
    		cached_sum = points_sum;
    	}
    	return cached_sum;
    
    }
    
    int32_t RunTests::get_max_testname_length()
    {
    	vector<unit_test> & tests = *environment.heap_tests;
    	int32_t max_testname_len = 0;
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    	{
    		int32_t currlen = strlen(tests[test_i].name) + (int)tests[test_i].is_valgrind * 11; // strlen(" (valgrind)");
    
    		if (currlen > max_testname_len)
    			max_testname_len = currlen;
    	}
    	return max_testname_len;
    }
    
    int32_t RunTests::get_max_points_length()
    {
    	vector<unit_test> & tests = *environment.heap_tests;
    	int32_t max_points_len = 0;
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    	{
    		if (tests[test_i].points >= 100)
    			max_points_len = 3;
    		else if (tests[test_i].points >= 10)
    			max_points_len = 2;
    	}
    	return max_points_len;
    }
    
    void RunTests::output_detailed_info_if_any_failed(int32_t score)
    {
    	vector<unit_test> & tests = *environment.heap_tests;
    	
    	bool any_failed = false;
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    		if (!tests[test_i].passed())
    			any_failed = true;
    	
    
    	if (any_failed || opts::verbose)
    
    		output_detailed_tests_info(score);
    }
    	
    	
    void RunTests::output_detailed_tests_info(int32_t score)
    {
    
    	output::total_score(score, get_sum_points());
    
    	cout << endl << endl;
    	
    	output::header("Detailed test output");
    	
    	vector<unit_test> & tests = *environment.heap_tests;
    	for (size_t test_i = 0; test_i < tests.size(); test_i++)
    
    		if (!tests[test_i].passed() || opts::verbose)
    
    			output::detailed_info(tests[test_i]);
    	
    	cout << endl << "--------------------------------" << endl;
    }
    
    
void RunTests::output_single_test_passfail(const unit_test & curr_test)
{
	if (curr_test.passed())
		std::cout << output::passed_string() << endl;
	else
		std::cout << output::failed_string() << ": " << curr_test.errormsg << endl;
}
    
    test_execution::test_execution(unit_test & _test, RunTimeEnvironment & env, bool enable_valgrind_call)
    	: test(_test), environment(env)
    {
    	do_valgrind = enable_valgrind_call && test.is_valgrind;
    	if (!do_valgrind)
    		test.checkstream = new pipestream;
    }
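// The forked child and the parent communicate over three pipes: fmsg_pipe
// carries the failure message string, cout_pipe the test's captured
// stdout/stderr, and nums_pipe the elapsed time followed by the valgrind
// flag bits (written in child_test(), read back in after_test_success()).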
    
    void test_execution::child()
    {
    	fmsg_pipe.close_read();
    	cout_pipe.close_read();
    	nums_pipe.close_read();
    
    	// Redirect stdout/stderr to pipe
    
    	if (opts::redirect_test_output)
    	{
    		cout_pipe.steal_output(STDOUT_FILENO);
    		cout_pipe.steal_output(STDERR_FILENO);
    	}
    
    
    	if (do_valgrind)
    	{
    		child_valgrind();
    	}
    	else // if (!test.is_valgrind)
    	{
    		child_test();
    	}
    }
    
    void test_execution::parent()
    {
    	fmsg_pipe.close_write();
    	cout_pipe.close_write();
    	nums_pipe.close_write();
    	if (test.checkstream != NULL)
    		test.checkstream->close_write();
    
    	// Read stdout/stderr pipe while process is running
    
    	if (opts::redirect_test_output)
    		cout_pipe >> setmax(environment.max_output_length) >> test.output;
    	else
    		test.output = "Test output was displayed above instead of being buffered\n";
    	
    
    	cout_pipe.close_read();
    }
    
    void test_execution::after_success(int8_t return_code)
    {
    	if (do_valgrind)
    		after_valgrind_success(return_code);
    	else
    		after_test_success();
    }
    
    void test_execution::after_failure(int8_t signal_number)
    {
    	fmsg_pipe.close_read();
    	nums_pipe.close_read();
    	if (environment.is_timeout_signal(signal_number))
    	{
    		test.errormsg = string("Timed out") + " (" + to_string(test.timeout) + "ms)";
    		test.time = test.timeout;
    	}
    	else
    		test.errormsg = strsignal(signal_number);
    }
    
    
    
bool RunTests::execute_test(unit_test & test, bool enable_valgrind_call)
{
    	cout << std::flush;
    	test_execution executor(test, environment, enable_valgrind_call);
    	return fork_execute(executor);
    }
    
    template <typename F>
    bool fork_execute(F & executor)
    {
    	// Fork
    	pid_t process_id;
    	process_id = fork();
    	EXIT_IF_ERROR(process_id < 0, "Could not fork application");
    
    	if (process_id == 0)
    	{
    		executor.child();
    		// Unfortunately necessary to use a return stack instead of
    		// exit() to get rid of valgrind errors
    		// (which is important if we use valgrind ./proxy recursively)
    		return false; // previously exit(0);
    	}
    	else // if (process_id > 0)
    	{
    		executor.parent();
    
    		int child_status;
    		pid_t ws = waitpid(process_id, &child_status, 0); //should return immediately
    		EXIT_IF_ERROR(ws == -1);
    
    		if (WIFEXITED(child_status)) /* exit code in child_status */
    			executor.after_success(WEXITSTATUS(child_status));
    		else if (WIFSIGNALED(child_status)) /* killed */
    			executor.after_failure(WTERMSIG(child_status));
    		else
    			executor.after_failure(SIGSTOP);
    
    		return true;
    	}
    }
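// WIFEXITED/WEXITSTATUS recover the child's exit code (for a valgrind re-exec
// this is the packed valgrind flag bits returned by run_single_test), while
// WIFSIGNALED/WTERMSIG identify crashes and timer signals, which
// after_failure() maps to a readable error message.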
    
    
    void test_execution::child_valgrind()
    {
    	// We're giving up control to valgrind, so we can't
    	// Use anything but the cout pipe now
    	fmsg_pipe.close_write();
    	nums_pipe.close_write();
    	
    	start_timeout();
    	exec("valgrind", "--trace-children=yes", /*"--log-fd=-1",*/ "-q", "./proxy", test.name, NULL);
    }
    
    
    void test_execution::child_test()
    {
    	test.checkstream->close_read();
    	// Execute test
    	start_timeout();
    	string * error_msg = new unit_test::return_type(test.func(test)); // execute function
    	long test_time = end_timeout();
    
    	// Write failure message to pipe
    	fmsg_pipe << *error_msg;
    	fmsg_pipe.close();
    
    	// write time and valgrind flags to pipe
    	bool test_failed = (*error_msg != unit_test::pass_string);
    	delete error_msg;
    	delete test.checkstream;
    	environment.cleanup_globals();
    	int32_t valgrind_flags = get_valgrind_flags(test_failed);
    	nums_pipe << test_time;
    	nums_pipe << valgrind_flags;
    	nums_pipe.close();
    }
    
    
    void test_execution::after_valgrind_success(int8_t return_code)
    {
    	fmsg_pipe.close_read();
    	nums_pipe.close_read();
    
    	size_t last_endl = findNthLast(test.output, '\n', 2);
    	if (last_endl == string::npos)
    
    	{
    		if (opts::redirect_test_output)
    			test.errormsg = "Valgrind test did not complete";
    		else
    			test.errormsg = "Valgrind test output was not redirected to pipe because opts::redirect_test_output was set.";
    	}
    
    	else
    	{
    		test.errormsg = test.output.substr(last_endl + 1,
    							test.output.length() - last_endl - 2);
    
    		if (test.errormsg == "")
    			test.errormsg = "Exception Thrown / Aborted";
    
    		test.valgrind_flags = return_code;
    		if (test.errormsg == environment.single_test_passed_string)
    			test.errormsg = get_valgrind_string(test.valgrind_flags);
    
    			// This will always be unit_test::pass_string unless someone tried to hack monad, in which case
    			// basing our passing on the return code (valgrind flags) rather than string parsing is the
    			// right thing to do
    
    	}
    }
    
    
    void test_execution::after_test_success()
    {
    	fmsg_pipe >> test.errormsg;
    	fmsg_pipe.close();
    	nums_pipe >> test.time;
    	nums_pipe >> test.valgrind_flags;
    	nums_pipe.close();
    	
    	// Check for output's correctness, if that was a condition of passing
    	if (test.passed())
    	{
    		while (!test.checkstream->eof())
    		{
    			string checkname;
    			string checkstr;
    			*test.checkstream >> checkname;
    			if (test.checkstream->eof()) break;
    			*test.checkstream >> checkstr;
    			if (test.checkstream->eof()) break;
    
    			output_check check_function = (*environment.output_checks)[checkname];
    			if (check_function == NULL)
    			{
    				cerr << "Internal Error: in test " << test.name << ": "
    				     << checkname << " is not a registered OUTPUT_CHECK function" << endl;
    				exit(-2);
    			}
    
    			if (!check_function(test.output, checkstr))
    				test.errormsg = "Incorrect Terminal Output";
    		}
    	}
    
    
    	if (test.passed())
    		test.errormsg = get_valgrind_string(test.valgrind_flags);
    
    
    	delete test.checkstream;
    }
    
    
    int32_t get_valgrind_flags(bool test_failed)
    {
    	// Check for valgrind errors or leaks (if running under valgrind)
    	unsigned long errors     = 0;
    	unsigned long leaked     = 0;
    	unsigned long dubious    = 0;
    	unsigned long reachable  = 0;
    	unsigned long suppressed = 0;
    
    	errors = VALGRIND_COUNT_ERRORS;
    	VALGRIND_DO_LEAK_CHECK; //QUICK
    	VALGRIND_COUNT_LEAK_BLOCKS(leaked, dubious, reachable, suppressed);
    
    	return bitflags(test_failed, errors, leaked, dubious, reachable);
    }
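// bitflags() evidently packs each argument into successive bits, matching the
// decoding in get_valgrind_string() below:
//   bit 0: test_failed   bit 1: errors    bit 2: leaked
//   bit 3: dubious       bit 4: reachable
// e.g. a passing test that only leaves still-reachable blocks produces
// flags == 1<<4 == 16, which get_valgrind_string() currently treats as a pass.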
    
    
    const char * get_valgrind_string(int32_t flags)
    {
    	if (flags == 0) return unit_test::pass_string;
    
    	bool test_failed = bitflag(flags, 0);
    	bool errors      = bitflag(flags, 1);
    	bool leaked      = bitflag(flags, 2);
    	bool dubious     = bitflag(flags, 3);
    	bool reachable   = bitflag(flags, 4);
    
    	if (test_failed) return "Test failed (see output)";
    	if (errors)      return "Invalid read/write errors";
    	if (leaked)      return "Directly lost memory leaks";
    	if (dubious)     return "Possibly lost memory leaks";
    
    	// For now we will ignore reachable errors, as they are always present on Mac
    	if (reachable)   return unit_test::pass_string; //"Still-reachable memory leaks";
    
    	return "Unknown memory errors";
    }
    
    
    bool test_execution::prof_timeout_enabled()
    {
    	struct itimerval temp;
    	if (getitimer(ITIMER_PROF, &temp) == 0)
    		return true;
    	if (errno == EINVAL)
    		return false;
    	cerr << __FILE__ << ":" << __LINE__ << ": ERROR: getitimer failed" << endl;
    	exit(-1);
    }
    
    
    void test_execution::start_timeout()
    {
    
    	static const bool prof_enabled = prof_timeout_enabled();
    
    
    	struct itimerval timeout;
    	timeout.it_interval.tv_sec  = 0;
    	timeout.it_interval.tv_usec = 0;
    	timeout.it_value.tv_sec  = test.timeout/1000;
    	timeout.it_value.tv_usec = (test.timeout%1000) * 1000;
    
    
    	if (prof_enabled)
    	{
    		EXIT_IF_ERROR(setitimer(ITIMER_PROF, &timeout, NULL));
    		// second real time signal in case the student calls a blocking call
    		timeout.it_value.tv_sec *= 10;
    		EXIT_IF_ERROR(setitimer(ITIMER_REAL, &timeout, NULL));
    	}
    	else
    	{
    		EXIT_IF_ERROR(setitimer(ITIMER_REAL, &timeout, NULL));
    	}
    
    }
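// ITIMER_PROF counts CPU time consumed by the process, so a test spinning in
// an infinite loop gets SIGPROF after test.timeout ms of computation, while a
// test blocked in, say, I/O accrues no CPU time at all; the ITIMER_REAL timer
// set to 10x the limit acts as a wall-clock backstop (SIGALRM) for that case.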
    
    
    long test_execution::end_timeout()
    {
    
    	static const bool prof_enabled = prof_timeout_enabled();
    
    
    	struct itimerval timeout;
    	timeout.it_interval.tv_sec  = 0;
    	timeout.it_interval.tv_usec = 0;
    	timeout.it_value.tv_sec  = 0;
    	timeout.it_value.tv_usec = 0;
    	struct itimerval remaining;
    
    
    	if (prof_enabled)
    	{
    		EXIT_IF_ERROR(setitimer(ITIMER_PROF, &timeout, &remaining));
    		EXIT_IF_ERROR(setitimer(ITIMER_REAL, &timeout, NULL));
    	}
    	else
    	{
    		EXIT_IF_ERROR(setitimer(ITIMER_REAL, &timeout, &remaining));
    	}
    
    
    	// There seems to be a strange -1 error here. I may just be tired,
    	// but I can't figure out why right now
    	long time = test.timeout - remaining.it_value.tv_sec*1000 - remaining.it_value.tv_usec/1000;
    	return (time < 0) ? 0 : time;
    }
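// Elapsed time is the configured timeout minus what was left on the timer.
// For example, with test.timeout = 1000 ms and 600 ms remaining
// (it_value = {0 s, 600000 us}), the test ran for 1000 - 0 - 600 = 400 ms.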
    
    
    
    } // namespace proxy