Tests: add performance tests

Author: Jan Mrna
Date:   2025-10-16 12:31:40 +02:00
Parent: b25d006b9e
Commit: 3a8dce8996
2 changed files with 130 additions and 8 deletions

CMakeLists.txt

@@ -134,31 +134,47 @@ add_custom_command(TARGET pathfinding_demo POST_BUILD
     $<TARGET_FILE_DIR:pathfinding_demo>/resources
 )
 
-# Test executable
-add_executable(tests cpp/test/test.cpp)
+# Unit tests executable
+add_executable(unit_tests
+    cpp/test/test.cpp
+)
 if(WIN32)
-    target_link_libraries(tests GTest::gtest GTest::gtest_main GTest::gmock GTest::gmock_main)
+    target_link_libraries(unit_tests GTest::gtest GTest::gtest_main GTest::gmock GTest::gmock_main)
 else()
-    target_link_libraries(tests GTest::gtest GTest::gtest_main)
+    target_link_libraries(unit_tests GTest::gtest GTest::gtest_main)
+endif()
+
+# Performance tests executable
+add_executable(performance_tests
+    cpp/test/collision_performance.cpp
+)
+if(WIN32)
+    target_link_libraries(performance_tests GTest::gtest GTest::gtest_main GTest::gmock GTest::gmock_main)
+else()
+    target_link_libraries(performance_tests GTest::gtest GTest::gtest_main)
 endif()
 
 # Enable testing
 enable_testing()
-add_test(NAME unit_tests COMMAND tests)
+add_test(NAME unit_tests COMMAND unit_tests)
+add_test(NAME performance_tests COMMAND performance_tests)
 
 # Compiler-specific options with MSVC support
 if(MSVC)
     # MSVC-specific flags: disable permissive mode, enable high warning level
     target_compile_options(pathfinding_demo PRIVATE /W4 /permissive-)
-    target_compile_options(tests PRIVATE /W4 /permissive-)
+    target_compile_options(unit_tests PRIVATE /W4 /permissive-)
+    target_compile_options(performance_tests PRIVATE /W4 /permissive-)
 
     # Additional MSVC flags for C++23 and modern standards
     target_compile_options(pathfinding_demo PRIVATE /Zc:__cplusplus /Zc:preprocessor)
-    target_compile_options(tests PRIVATE /Zc:__cplusplus /Zc:preprocessor)
+    target_compile_options(unit_tests PRIVATE /Zc:__cplusplus /Zc:preprocessor)
+    target_compile_options(performance_tests PRIVATE /Zc:__cplusplus /Zc:preprocessor)
 elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
     # GCC/Clang flags
     target_compile_options(pathfinding_demo PRIVATE -Wall -Wextra -Wpedantic)
-    target_compile_options(tests PRIVATE -Wall -Wextra -Wpedantic)
+    target_compile_options(unit_tests PRIVATE -Wall -Wextra -Wpedantic)
+    target_compile_options(performance_tests PRIVATE -Wall -Wextra -Wpedantic)
 endif()
 
 # Platform-specific build configurations

cpp/test/collision_performance.cpp (new file)

@@ -0,0 +1,106 @@
#include <gtest/gtest.h>
#include <chrono>
#include <iostream>
#include <iomanip>
#include <string>

// TODO: Add necessary includes for collision testing
// #include "collision_shapes.hpp"
// #include "entities.hpp"
/**
* @file collision_performance.cpp
* @brief Performance tests for collision detection systems
*
* This file contains performance benchmarks and tests for various
* collision detection algorithms and optimizations.
*/
/**
* @brief Helper class to measure and print execution time
*/
class PerformanceTimer {
public:
    using Clock = std::chrono::high_resolution_clock;
    using TimePoint = std::chrono::time_point<Clock>;
    using Duration = std::chrono::duration<double, std::milli>;

    PerformanceTimer(const std::string& name) : name_(name) {
        start_ = Clock::now();
    }

    ~PerformanceTimer() {
        auto end = Clock::now();
        Duration duration = end - start_;
        std::cout << std::fixed << std::setprecision(3)
                  << "[PERF] " << name_ << ": "
                  << duration.count() << " ms" << std::endl;
    }

    double elapsed_ms() const {
        auto end = Clock::now();
        Duration duration = end - start_;
        return duration.count();
    }

private:
    std::string name_;
    TimePoint start_;
};
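For reference, a minimal usage sketch of the RAII timer above; the scope name and the loop body are placeholders for illustration, not call sites that exist in this repository:

// Illustrative only: time a scope with PerformanceTimer (placeholder work inside).
void example_timed_scope() {
    PerformanceTimer timer("broad-phase sweep (placeholder)");
    volatile int checks = 0;
    for (int i = 0; i < 100000; ++i) {
        checks += i % 7;  // stand-in for real collision checks
    }
    // "[PERF] broad-phase sweep (placeholder): X ms" is printed when timer leaves scope;
    // timer.elapsed_ms() can be polled earlier for an intermediate reading.
}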
/**
* @brief Run a function multiple times and measure average execution time
* @param name Name of the test for output
* @param iterations Number of iterations to run
* @param func Function to benchmark
*/
template<typename Func>
void benchmark_function(const std::string& name, int iterations, Func func) {
    auto start = PerformanceTimer::Clock::now();
    for (int i = 0; i < iterations; ++i) {
        func();
    }
    auto end = PerformanceTimer::Clock::now();

    PerformanceTimer::Duration total_duration = end - start;
    double avg_duration = total_duration.count() / iterations;

    std::cout << std::fixed << std::setprecision(6)
              << "[BENCHMARK] " << name << ":\n"
              << " Total: " << total_duration.count() << " ms\n"
              << " Iterations: " << iterations << "\n"
              << " Average: " << avg_duration << " ms\n"
              << " Throughput: " << (iterations / (total_duration.count() / 1000.0))
              << " ops/sec" << std::endl;
}
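benchmark_function also accepts lambdas, which becomes convenient once real collision fixtures exist; a small hedged sketch with a placeholder workload:

// Illustrative only: benchmarking an inline lambda instead of a named function.
void example_lambda_benchmark() {
    benchmark_function("AABB overlap (placeholder lambda)", 1000, []() {
        volatile bool hit = false;
        for (int i = 0; i < 500; ++i) {
            hit = hit || (i % 13 == 0);  // stand-in for an overlap test
        }
    });
}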
// Example test function 1
void test_function_1() {
    // TODO: Implement actual collision test
    volatile int sum = 0;
    for (int i = 0; i < 1000; ++i) {
        sum += i;
    }
}

// Example test function 2
void test_function_2() {
    // TODO: Implement actual collision test
    volatile int product = 1;
    for (int i = 1; i < 100; ++i) {
        product *= (i % 10 + 1);
    }
}
TEST(CollisionPerformance, CompareAlgorithms) {
    std::cout << "\n=== Collision Performance Comparison ===\n" << std::endl;

    const int iterations = 10000;

    benchmark_function("Algorithm 1 (test_function_1)", iterations, test_function_1);
    benchmark_function("Algorithm 2 (test_function_2)", iterations, test_function_2);

    std::cout << "\n======================================\n" << std::endl;
    SUCCEED();
}
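Once the collision headers from the TODO block are wired in, a first real benchmark could be shaped roughly like the self-contained sketch below. The perf_sketch namespace, Circle struct, and overlaps() helper are invented stand-ins for the types expected from collision_shapes.hpp / entities.hpp, and the circle counts and iteration numbers are arbitrary:

// Illustrative only: a self-contained stand-in for a future collision benchmark.
// The placeholder shapes would be replaced by the real ones from collision_shapes.hpp.
#include <cstddef>
#include <random>
#include <vector>

namespace perf_sketch {

struct Circle {
    double x, y, r;
};

inline bool overlaps(const Circle& a, const Circle& b) {
    const double dx = a.x - b.x;
    const double dy = a.y - b.y;
    const double rr = a.r + b.r;
    return dx * dx + dy * dy <= rr * rr;
}

}  // namespace perf_sketch

TEST(CollisionPerformance, BruteForcePairsSketch) {
    using perf_sketch::Circle;
    using perf_sketch::overlaps;

    // Fixed seed so every benchmark iteration performs identical work.
    std::mt19937 rng(42);
    std::uniform_real_distribution<double> pos(0.0, 1000.0);
    std::vector<Circle> circles(200);
    for (auto& c : circles) {
        c = Circle{pos(rng), pos(rng), 5.0};
    }

    benchmark_function("Brute-force pair overlap (placeholder shapes)", 100, [&]() {
        volatile int hits = 0;
        for (std::size_t i = 0; i < circles.size(); ++i) {
            for (std::size_t j = i + 1; j < circles.size(); ++j) {
                if (overlaps(circles[i], circles[j])) {
                    hits = hits + 1;
                }
            }
        }
    });
}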