#include "benchmarkConfig.h"
#include "unwindStats.h"
#include <algorithm>
#include <chrono>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iostream>
#include <random>
#include <string>
#include <vector>

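// As used below, benchmarkConfig.h supplies BenchmarkConfig (warmup_iterations,
// measurement_iterations, debug, csv_file, json_file) and BenchmarkResult (name,
// total_time_ns, iterations, avg_time_ns); unwindStats.h supplies UnwindFailures,
// UnwindFailureKind, and UNWIND_FAILURE_STUB.
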
// Test data - using JVM stub function names
const char *TEST_NAMES[] = {"Java_java_lang_String_toString", "Java_java_lang_Object_hashCode",
                            "Java_java_lang_System_arraycopy",
                            "Java_java_lang_Thread_currentThread", "Java_java_lang_Class_getName"};
const int NUM_NAMES = sizeof(TEST_NAMES) / sizeof(TEST_NAMES[0]);

// Global variables
std::vector<BenchmarkResult> results;
BenchmarkConfig config;

// Pre-generated random values for benchmarking
struct RandomValues {
    std::vector<std::string> names;
    std::vector<UnwindFailureKind> kinds;
    std::vector<int> name_indices;

    void generate(int count) {
        std::mt19937 rng(42); // Fixed seed for reproducibility
        names.resize(count);
        kinds.resize(count);
        name_indices.resize(count);

        for (int i = 0; i < count; i++) {
            name_indices[i] = rng() % NUM_NAMES;
            names[i] = TEST_NAMES[name_indices[i]];
            kinds[i] = static_cast<UnwindFailureKind>(rng() % 3);
        }
    }
};

RandomValues random_values;
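// Note: random_values is currently unused; the benchmarks below draw values inline from
// their own RNG and subtract the measured RNG overhead instead of using pre-generated inputs.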

void exportResultsToCSV(const std::string &filename) {
    std::ofstream file(filename);
    if (!file.is_open()) {
        std::cerr << "Failed to open file: " << filename << std::endl;
        return;
    }

    // Write header
    file << "Benchmark,Total Time (ns),Iterations,Average Time (ns)\n";

    // Write data
    for (const auto &result : results) {
        file << result.name << "," << result.total_time_ns << "," << result.iterations << ","
             << result.avg_time_ns << "\n";
    }

    file.close();
    std::cout << "Results exported to CSV: " << filename << std::endl;
}
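
// The CSV contains one row per benchmark, e.g. (values are illustrative):
//   Record Mixed Failures,52000000,1000000,52
// The JSON exporter below writes the same four fields per benchmark as an array of objects.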

void exportResultsToJSON(const std::string &filename) {
    std::ofstream file(filename);
    if (!file.is_open()) {
        std::cerr << "Failed to open file: " << filename << std::endl;
        return;
    }

    file << "{\n \"benchmarks\": [\n";
    for (size_t i = 0; i < results.size(); ++i) {
        const auto &result = results[i];
        file << " {\n"
             << " \"name\": \"" << result.name << "\",\n"
             << " \"total_time_ns\": " << result.total_time_ns << ",\n"
             << " \"iterations\": " << result.iterations << ",\n"
             << " \"avg_time_ns\": " << result.avg_time_ns << "\n"
             << " }" << (i < results.size() - 1 ? "," : "") << "\n";
    }
    file << " ]\n}\n";

    file.close();
    std::cout << "Results exported to JSON: " << filename << std::endl;
}

// Helper function to run a benchmark with warmup
template <typename F>
BenchmarkResult runBenchmark(const std::string &name, F &&func, double rng_overhead = 0.0) {
    std::cout << "\n--- Benchmark: " << name << " ---" << std::endl;

    // Warmup phase
    if (config.warmup_iterations > 0) {
        std::cout << "Warming up with " << config.warmup_iterations << " iterations..."
                  << std::endl;
        for (int i = 0; i < config.warmup_iterations; i++) {
            func(i);
        }
    }

    // Measurement phase
    std::cout << "Running " << config.measurement_iterations << " iterations..." << std::endl;
    auto start = std::chrono::high_resolution_clock::now();

    for (int i = 0; i < config.measurement_iterations; i++) {
        func(i);
        if (config.debug && i % 100000 == 0) {
            // Widen to 64 bits so the percentage cannot overflow for very large iteration counts
            std::cout << "Progress: "
                      << (static_cast<long long>(i) * 100 / config.measurement_iterations) << "%"
                      << std::endl;
        }
    }

    auto end = std::chrono::high_resolution_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(end - start);

    double avg_time = (double)duration.count() / config.measurement_iterations;
    if (rng_overhead > 0) {
        avg_time -= rng_overhead;
    }

    std::cout << "Total time: " << duration.count() << " ns" << std::endl;
    std::cout << "Average time per operation: " << avg_time << " ns" << std::endl;
    if (rng_overhead > 0) {
        std::cout << " (RNG overhead of " << rng_overhead << " ns has been subtracted)"
                  << std::endl;
    }

    // The reported total is recomputed from the overhead-adjusted average
    return {name, static_cast<long long>(avg_time * config.measurement_iterations),
            config.measurement_iterations, avg_time};
}

// Benchmark just the RNG overhead
BenchmarkResult measureRNGOverhead() {
    std::mt19937 rng(42);
    // Size the scratch buffers for the larger of the two phases so warmup calls with
    // i >= measurement_iterations cannot index out of bounds.
    const int buffer_size = std::max(config.warmup_iterations, config.measurement_iterations);
    std::vector<const char *> names(buffer_size);
    std::vector<UnwindFailureKind> kinds(buffer_size);
    std::vector<int> indices(buffer_size);

    return runBenchmark("RNG Overhead", [&](int i) {
        indices[i] = rng() % NUM_NAMES;
        names[i] = TEST_NAMES[indices[i]];
        kinds[i] = static_cast<UnwindFailureKind>(rng() % 3);
    });
}
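
// Note: each RNG-overhead iteration performs two rng() calls plus three stores, mirroring the
// inline work in the record/count benchmarks below, so subtracting it isolates the cost of the
// UnwindFailures calls themselves. Benchmarks that draw fewer random values per iteration
// (e.g. Find Name) are slightly over-corrected by the same subtraction.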

// Main benchmark function
void benchmarkUnwindFailures() {
    UnwindFailures failures;
    results.clear(); // Clear any previous results

    std::cout << "=== Benchmarking UnwindFailures ===" << std::endl;
    std::cout << "Configuration:" << std::endl;
    std::cout << " Warmup iterations: " << config.warmup_iterations << std::endl;
    std::cout << " Measurement iterations: " << config.measurement_iterations << std::endl;
    std::cout << " Number of test names: " << NUM_NAMES << std::endl;
    std::cout << " Debug mode: " << (config.debug ? "enabled" : "disabled") << std::endl;

    // First measure RNG overhead
    std::cout << "\nMeasuring RNG overhead..." << std::endl;
    auto rng_overhead = measureRNGOverhead();
    double overhead_per_op = rng_overhead.avg_time_ns;
    std::cout << "RNG overhead per operation: " << overhead_per_op << " ns" << std::endl;

    // Create RNG for actual benchmarks
    std::mt19937 rng(42);

    // Run actual benchmarks with RNG inline and overhead subtracted internally
    results.push_back(runBenchmark(
        "Record Single Failure Kind",
        [&](int) {
            int idx = rng() % NUM_NAMES;
            auto kind = static_cast<UnwindFailureKind>(rng() % 3);
            (void)kind; // kind is drawn but unused so the RNG cost matches the measured overhead
            failures.record(UNWIND_FAILURE_STUB, TEST_NAMES[idx]);
        },
        overhead_per_op));

    results.push_back(runBenchmark(
        "Record Mixed Failures",
        [&](int) {
            int idx = rng() % NUM_NAMES;
            auto kind = static_cast<UnwindFailureKind>(rng() % 3);
            failures.record(kind, TEST_NAMES[idx]);
        },
        overhead_per_op));

    results.push_back(runBenchmark(
        "Find Name",
        [&](int) {
            int idx = rng() % NUM_NAMES;
            failures.findName(TEST_NAMES[idx]);
        },
        overhead_per_op));

    results.push_back(runBenchmark(
        "Count Failures with Mixed Kinds",
        [&](int) {
            int idx = rng() % NUM_NAMES;
            auto kind = static_cast<UnwindFailureKind>(rng() % 3);
            failures.count(TEST_NAMES[idx], kind);
        },
        overhead_per_op));

    // For merge benchmark, we'll pre-populate the collections since that's not part of what we're
    // measuring
    UnwindFailures failures1;
    UnwindFailures failures2;
    // Use a smaller number of items for pre-population to avoid overflow
    const int prePopulateCount = std::min(1000, config.measurement_iterations / 2);
    for (int i = 0; i < prePopulateCount; i++) {
        int idx = rng() % NUM_NAMES;
        auto kind = static_cast<UnwindFailureKind>(rng() % 3);
        failures1.record(kind, TEST_NAMES[idx]);
        failures2.record(kind, TEST_NAMES[idx]);
    }

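    // No rng_overhead is subtracted for the merge benchmark because the lambda does no inline
    // RNG work. Note that, depending on merge() semantics, repeated merges may keep growing
    // failures1, making later iterations more expensive than earlier ones.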
    results.push_back(runBenchmark("Merge Failures", [&](int) {
        failures1.merge(failures2);
    }));

    std::cout << "\n=== Benchmark Complete ===" << std::endl;
}

void printUsage(const char *programName) {
    std::cout << "Usage: " << programName << " [options]\n"
              << "Options:\n"
              << " --csv <filename> Export results to CSV file\n"
              << " --json <filename> Export results to JSON file\n"
              << " --warmup <n> Number of warmup iterations (default: 100000)\n"
              << " --iterations <n> Number of measurement iterations (default: 1000000)\n"
              << " --debug Enable debug output\n"
              << " -h, --help Show this help message\n";
}
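
// Example invocations (the binary name is illustrative and depends on the build target):
//   ./unwind_stats_benchmark --iterations 500000 --warmup 50000 --csv results.csv
//   ./unwind_stats_benchmark --json results.json --debug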

int main(int argc, char *argv[]) {
    // Parse command line arguments
    for (int i = 1; i < argc; i++) {
        if (strcmp(argv[i], "--csv") == 0 && i + 1 < argc) {
            config.csv_file = argv[++i];
        } else if (strcmp(argv[i], "--json") == 0 && i + 1 < argc) {
            config.json_file = argv[++i];
        } else if (strcmp(argv[i], "--warmup") == 0 && i + 1 < argc) {
            config.warmup_iterations = std::atoi(argv[++i]);
        } else if (strcmp(argv[i], "--iterations") == 0 && i + 1 < argc) {
            config.measurement_iterations = std::atoi(argv[++i]);
        } else if (strcmp(argv[i], "--debug") == 0) {
            config.debug = true;
        } else if (strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "--help") == 0) {
            printUsage(argv[0]);
            return 0;
        } else {
            std::cerr << "Unknown option: " << argv[i] << std::endl;
            printUsage(argv[0]);
            return 1;
        }
    }

    std::cout << "Running UnwindFailures benchmark..." << std::endl;
    benchmarkUnwindFailures();

    // Export results if requested
    if (!config.csv_file.empty()) {
        exportResultsToCSV(config.csv_file);
    }
    if (!config.json_file.empty()) {
        exportResultsToJSON(config.json_file);
    }

    return 0;
}