# Enable the tests

find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)

# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
# strip -DNDEBUG from the default CMake flags when not building in DEBUG mode,
# since only DEBUG builds keep assertions enabled by default.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
            "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()

check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
set(BENCHMARK_O3_FLAG "")
if (BENCHMARK_HAS_O3_FLAG)
  set(BENCHMARK_O3_FLAG "-O3")
endif()
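# NOTE: BENCHMARK_O3_FLAG is not referenced again in this file; it is
# presumably consumed by AssemblyTests.cmake when assembly tests are enabled.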

# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
# they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()

add_library(output_test_helper STATIC output_test_helper.cc output_test.h)

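# compile_benchmark_test(<name>) builds <name>.cc into an executable and links
# it against benchmark::benchmark plus the platform thread library.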
macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)

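# compile_benchmark_test_with_main(<name>) is the same, but links
# benchmark::benchmark_main so the executable gets the library-provided main().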
macro(compile_benchmark_test_with_main name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark_main)
endmacro(compile_benchmark_test_with_main)

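# compile_output_test(<name>) builds an output-checking test, linking the
# static output_test_helper library in addition to benchmark::benchmark.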
macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark::benchmark
          ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
  add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)

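# Each add_filter_test call below supplies a --benchmark_filter expression
# together with the number of benchmarks it is expected to match; filter_test
# receives that count as its trailing argument and checks it.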
add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_simple_negative "-Foo" 2)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_suffix_negative "-BM_.*" 1)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_all_negative "-.*" 0)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_blank_negative "-" 0)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_none_negative "-monkey" 5)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_begin2_negative "-^N" 4)
add_filter_test(filter_regex_end ".*Ba$" 1)
add_filter_test(filter_regex_end_negative "-.*Ba$" 4)

compile_benchmark_test(options_test)
add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01)

compile_benchmark_test(basic_test)
add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01)

compile_benchmark_test(diagnostics_test)
add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01)

compile_benchmark_test(skip_with_error_test)
add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01)

compile_benchmark_test(donotoptimize_test)
# Some of the issues with DoNotOptimize only occur when optimization is enabled
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
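# (This repeats the earlier -O3 check; check_cxx_compiler_flag caches its
# result, so the second call is effectively a no-op.)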
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01)

compile_benchmark_test(fixture_test)
add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01)

compile_benchmark_test(register_benchmark_test)
add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(map_test)
add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01)

compile_benchmark_test(multiple_ranges_test)
add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01)

compile_benchmark_test(args_product_test)
add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01)

compile_benchmark_test_with_main(link_main_test)
add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01)

compile_output_test(reporter_output_test)
add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01)

compile_output_test(templated_fixture_test)
add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01)

compile_output_test(user_counters_test)
add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01)

compile_output_test(internal_threading_test)
add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01)

compile_output_test(report_aggregates_only_test)
add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(display_aggregates_only_test)
add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(user_counters_tabular_test)
add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)

compile_output_test(user_counters_thousands_test)
add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01)

compile_output_test(memory_manager_test)
add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01)

check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
      PROPERTIES
      CXX_STANDARD 98
      CXX_STANDARD_REQUIRED YES)
  # libstdc++ provides different definitions within <map> between dialects. When
  # LTO is enabled and -Werror is specified, GCC diagnoses this ODR violation,
  # causing the test to fail to compile. To prevent this we explicitly disable
  # the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
    set_target_properties(cxx03_test
        PROPERTIES
        LINK_FLAGS "-Wno-odr")
  endif()
  add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01)
endif()

# Attempt to work around flaky test failures when running on Appveyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()
compile_output_test(complexity_test)
add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

###############################################################################
# GoogleTest Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_GTEST_TESTS)
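  # compile_gtest(<name>) builds <name>.cc against gmock_main (which supplies
  # main()); add_gtest(<name>) compiles it and registers the binary with CTest.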
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    target_link_libraries(${name} benchmark::benchmark
        gmock_main ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    add_test(NAME ${name} COMMAND ${name})
  endmacro()

  add_gtest(benchmark_gtest)
  add_gtest(benchmark_name_gtest)
  add_gtest(commandlineflags_gtest)
  add_gtest(statistics_gtest)
  add_gtest(string_util_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

###############################################################################
# Assembly Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
  if (NOT LLVM_FILECHECK_EXE)
    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
  endif()
  include(AssemblyTests.cmake)
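  # add_filecheck_test is defined in AssemblyTests.cmake; as used here it is
  # assumed to compile the named test to assembly and verify the result with
  # LLVM FileCheck.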
  add_filecheck_test(donotoptimize_assembly_test)
  add_filecheck_test(state_assembly_test)
  add_filecheck_test(clobber_memory_assembly_test)
endif()

###############################################################################
# Code Coverage Configuration
###############################################################################

# Add the coverage command(s)
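# The custom command below zeroes the existing gcov counters, captures a
# baseline, runs the full test suite via ctest, captures post-run data, merges
# the two captures, strips the test sources themselves from the result, and
# renders an HTML report with genhtml under ${CMAKE_BINARY_DIR}/lcov.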
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
            "Coverage not available:\n"
            "  gcov: ${GCOV}\n"
            "  lcov: ${LCOV}\n"
            "  genhtml: ${GENHTML}\n"
            "  ctest: ${CTEST}\n"
            "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()