# externals/catch/tests/CMakeLists.txt
include(CatchMiscFunctions)

if (CATCH_BUILD_SURROGATES)
  message(STATUS "Configuring targets for surrogate TUs")

  # If the folder does not exist before we ask for output redirection
  # to a file, the redirection won't work.
  file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/surrogates)

  # Creates a target that generates the surrogate TU for the provided header.
  # Returns the path to the generated file.
  function(createSurrogateFileTarget sourceHeader pathToFile)
    set(pathPrefix ${PROJECT_SOURCE_DIR}/src)

    file(RELATIVE_PATH includePath ${pathPrefix} ${sourceHeader})

    get_filename_component(basicFileName "${sourceHeader}" NAME_WE)

    set(surrogateFilePath ${CMAKE_CURRENT_BINARY_DIR}/surrogates/surrogate_${basicFileName}.cpp)

    add_custom_command(
      OUTPUT ${surrogateFilePath}
      COMMAND ${CMAKE_COMMAND} -E echo "\#include <${includePath}>" > "${surrogateFilePath}"
      VERBATIM
    )

    set(${pathToFile} ${surrogateFilePath} PARENT_SCOPE)
  endfunction()
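
  # For illustration (the header path is illustrative): for a header at
  # src/catch2/catch_approx.hpp, the generated surrogate TU contains the
  # single line
  #   #include <catch2/catch_approx.hpp>
  # so compiling it by itself checks that the header is self-sufficient.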

  # Extracts all headers from the Catch2 target, skipping the convenience
  # "catch all" headers (e.g. catch_all.hpp), and returns them through
  # the argument.
  function(ExtractCatch2Headers OutArg)
    get_target_property(targetSources Catch2 SOURCES)
    foreach(Source ${targetSources})
      string(REGEX MATCH "^.*\\.hpp$" isHeader ${Source})
      string(REGEX MATCH "_all.hpp" isAllHeader ${Source})
      if(isHeader AND NOT isAllHeader)
        list(APPEND AllHeaders ${Source})
      endif()
    endforeach()
    set(${OutArg} ${AllHeaders} PARENT_SCOPE)
  endfunction()
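
  # As an example of the filtering above: a header like
  # catch2/catch_test_macros.hpp (name illustrative) is kept, while any
  # *_all.hpp aggregate header is skipped.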


  ExtractCatch2Headers(mainHeaders)

  if (NOT mainHeaders)
    message(FATAL_ERROR "No headers in the main target were detected. Something is broken.")
  endif()

  foreach(header ${mainHeaders})
    createSurrogateFileTarget(${header} pathToGeneratedFile)
    list(APPEND surrogateFiles ${pathToGeneratedFile})
  endforeach()


  add_executable(Catch2SurrogateTarget
    ${surrogateFiles}
  )
  target_link_libraries(Catch2SurrogateTarget PRIVATE Catch2WithMain)

endif(CATCH_BUILD_SURROGATES)

####
# Temporary workaround for VS toolset changes in 2017.
# We need to disable the <UseFullPaths> property, but CMake did not
# support doing so until 3.13.
####
if (MSVC)
  configure_file(${CATCH_DIR}/tools/misc/SelfTest.vcxproj.user
                 ${CMAKE_BINARY_DIR}/tests
                 COPYONLY)
endif(MSVC) # Temporary workaround


# Define the sources of the self test.
# Please keep these ordered alphabetically.
set(TEST_SOURCES
        ${SELF_TEST_DIR}/TestRegistrations.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Algorithms.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/AssertionHandler.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Clara.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/CmdLine.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/CmdLineHelpers.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/ColourImpl.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Details.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/FloatingPoint.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/GeneratorsImpl.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Integer.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/InternalBenchmark.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Json.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Parse.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/PartTracker.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/RandomNumberGeneration.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Reporters.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Sharding.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Stream.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/String.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/StringManip.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Tag.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/TestCaseInfoHasher.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/TestSpec.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/TestSpecParser.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/TextFlow.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/ToString.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Traits.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/UniquePtr.tests.cpp
        ${SELF_TEST_DIR}/IntrospectiveTests/Xml.tests.cpp
        ${SELF_TEST_DIR}/helpers/parse_test_spec.cpp
        ${SELF_TEST_DIR}/TimingTests/Sleep.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Approx.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/BDD.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Benchmark.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Class.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Compilation.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Condition.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Decomposition.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/EnumToString.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Exception.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Generators.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Matchers.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/MatchersRanges.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Message.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Misc.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Skip.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringByte.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringChrono.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringGeneral.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringOptional.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringPair.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringTuple.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringVariant.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringVector.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/ToStringWhich.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/Tricky.tests.cpp
        ${SELF_TEST_DIR}/UsageTests/VariadicMacros.tests.cpp
        )

set(TEST_HEADERS
  ${SELF_TEST_DIR}/helpers/parse_test_spec.hpp
  ${SELF_TEST_DIR}/helpers/range_test_helpers.hpp
  ${SELF_TEST_DIR}/helpers/type_with_lit_0_comparisons.hpp
)


# Specify the headers, too, so CLion recognises them as project files
set(HEADERS
        ${TOP_LEVEL_HEADERS}
        ${EXTERNAL_HEADERS}
        ${INTERNAL_HEADERS}
        ${REPORTER_HEADERS}
        ${BENCHMARK_HEADERS}
        ${BENCHMARK_SOURCES}
)

# Provide some groupings for IDEs
#SOURCE_GROUP("benchmark" FILES ${BENCHMARK_HEADERS} ${BENCHMARK_SOURCES})
#SOURCE_GROUP("Tests" FILES ${TEST_SOURCES})

include(CTest)

add_executable(SelfTest ${TEST_SOURCES} ${TEST_HEADERS})
target_include_directories(SelfTest PRIVATE ${SELF_TEST_DIR})
target_link_libraries(SelfTest PRIVATE Catch2WithMain)
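# When Catch2 is built as shared libraries on Windows, the DLLs have to sit
# next to the test binary for the loader to find them at runtime, so we copy
# them in before linking.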
if (BUILD_SHARED_LIBS AND WIN32)
    add_custom_command(TARGET SelfTest PRE_LINK
        COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:Catch2>
        $<TARGET_FILE:Catch2WithMain> $<TARGET_FILE_DIR:SelfTest>
    )
endif()

if (CATCH_ENABLE_COVERAGE)
    set(ENABLE_COVERAGE ON CACHE BOOL "Enable coverage build." FORCE)
    find_package(codecov)
    add_coverage(SelfTest)
    list(APPEND LCOV_REMOVE_PATTERNS "'/usr/*'")
    coverage_evaluate()
endif()

# Configure unit tests via CTest.
add_test(NAME RunTests COMMAND $<TARGET_FILE:SelfTest> --order rand --rng-seed time)
set_tests_properties(RunTests PROPERTIES
    FAIL_REGULAR_EXPRESSION "Filters:"
    COST 15
)
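
# Note: COST is a CTest scheduling hint; when tests run in parallel, CTest
# starts higher-cost tests first, which improves overall suite runtime.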

# Because CTest does not allow us to check both the return code _and_ the
# expected output in one test, we run these commands twice: the first time
# we check the output, the second time the exit code.
add_test(NAME List::Tests::Output COMMAND $<TARGET_FILE:SelfTest> --list-tests --verbosity high)
set_tests_properties(List::Tests::Output PROPERTIES
    PASS_REGULAR_EXPRESSION "[0-9]+ test cases"
    FAIL_REGULAR_EXPRESSION "Hidden Test"
)
# This should be equivalent to the old --list-test-names-only and be usable
# with --input-file.
add_test(NAME List::Tests::Quiet COMMAND $<TARGET_FILE:SelfTest> --list-tests --verbosity quiet)
# Sadly we cannot ask for start-of-line and end-of-line in a CTest regex,
# so instead we fail if we see a space/tab at the start.
set_tests_properties(List::Tests::Quiet PROPERTIES
    PASS_REGULAR_EXPRESSION "\"#1905 -- test spec parser properly clears internal state between compound tests\"[\r\n]"
    FAIL_REGULAR_EXPRESSION "[ \t]\"#1905 -- test spec parser properly clears internal state between compound tests\""
)
add_test(NAME List::Tests::ExitCode COMMAND $<TARGET_FILE:SelfTest> --list-tests --verbosity high)
add_test(NAME List::Tests::XmlOutput COMMAND $<TARGET_FILE:SelfTest> --list-tests --verbosity high -r xml)
set_tests_properties(List::Tests::XmlOutput PROPERTIES
    PASS_REGULAR_EXPRESSION "<Line>[0-9]+</Line>"
    FAIL_REGULAR_EXPRESSION "[0-9]+ test cases"
)

add_test(NAME List::Tags::Output COMMAND $<TARGET_FILE:SelfTest> --list-tags)
set_tests_properties(List::Tags::Output PROPERTIES
    PASS_REGULAR_EXPRESSION "[0-9]+ tags"
    FAIL_REGULAR_EXPRESSION "\\[\\.\\]")
add_test(NAME List::Tags::ExitCode COMMAND $<TARGET_FILE:SelfTest> --list-tags)
add_test(NAME List::Tags::XmlOutput COMMAND $<TARGET_FILE:SelfTest> --list-tags -r xml)
set_tests_properties(List::Tags::XmlOutput PROPERTIES
    PASS_REGULAR_EXPRESSION "<Count>18</Count>"
    FAIL_REGULAR_EXPRESSION "[0-9]+ tags"
)


add_test(NAME List::Reporters::Output COMMAND $<TARGET_FILE:SelfTest> --list-reporters)
set_tests_properties(List::Reporters::Output PROPERTIES PASS_REGULAR_EXPRESSION "Available reporters:")
add_test(NAME List::Reporters::ExitCode COMMAND $<TARGET_FILE:SelfTest> --list-reporters)
add_test(NAME List::Reporters::XmlOutput COMMAND $<TARGET_FILE:SelfTest> --list-reporters -r xml)
set_tests_properties(List::Reporters::XmlOutput PROPERTIES
    PASS_REGULAR_EXPRESSION "<Name>compact</Name>"
    FAIL_REGULAR_EXPRESSION "Available reporters:"
)

add_test(NAME List::Listeners::Output
  COMMAND
    $<TARGET_FILE:SelfTest> --list-listeners
)
set_tests_properties(List::Listeners::Output
  PROPERTIES
    PASS_REGULAR_EXPRESSION "Registered listeners:"
)
add_test(NAME List::Listeners::ExitCode
  COMMAND
    $<TARGET_FILE:SelfTest> --list-listeners
)
add_test(NAME List::Listeners::XmlOutput
  COMMAND
    $<TARGET_FILE:SelfTest>
      --list-listeners
      --reporter xml
)
set_tests_properties(List::Listeners::XmlOutput
  PROPERTIES
    PASS_REGULAR_EXPRESSION "<RegisteredListeners>"
    FAIL_REGULAR_EXPRESSION "Registered listeners:"
)

add_test(NAME NoAssertions COMMAND $<TARGET_FILE:SelfTest> -w NoAssertions "An empty test with no assertions")
set_tests_properties(NoAssertions PROPERTIES PASS_REGULAR_EXPRESSION "No assertions in test case")

# We cannot combine a regular-expression check on output with a return-code
# check in one test, so we register two tests instead of writing a checking
# script; the runtime overhead is small enough.
add_test(NAME TestSpecs::CombiningMatchingAndNonMatchingIsOk-1 COMMAND $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___")

add_test(NAME TestSpecs::CombiningMatchingAndNonMatchingIsOk-2 COMMAND $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___")
set_tests_properties(TestSpecs::CombiningMatchingAndNonMatchingIsOk-2 PROPERTIES
    PASS_REGULAR_EXPRESSION "No test cases matched '\"___nonexistent_test___\"'"
    FAIL_REGULAR_EXPRESSION "No tests ran"
)

add_test(NAME TestSpecs::NoMatchedTestsFail
  COMMAND $<TARGET_FILE:SelfTest> "___nonexistent_test___"
)
set_tests_properties(TestSpecs::NoMatchedTestsFail
  PROPERTIES
    WILL_FAIL ON
)
add_test(NAME TestSpecs::OverrideFailureWithNoMatchedTests
  COMMAND $<TARGET_FILE:SelfTest> "___nonexistent_test___" --allow-running-no-tests
)

add_test(NAME TestSpecs::OverrideAllSkipFailure
  COMMAND $<TARGET_FILE:SelfTest> "tests can be skipped dynamically at runtime" --allow-running-no-tests
)

add_test(NAME TestSpecs::NonMatchingTestSpecIsRoundTrippable
  COMMAND $<TARGET_FILE:SelfTest> Tracker, "this test does not exist" "[nor does this tag]"
)
set_tests_properties(TestSpecs::NonMatchingTestSpecIsRoundTrippable
  PROPERTIES
    PASS_REGULAR_EXPRESSION "No test cases matched '\"this test does not exist\" \\[nor does this tag\\]'"
)

add_test(NAME Warnings::UnmatchedTestSpecIsAccepted
  COMMAND $<TARGET_FILE:SelfTest> Tracker --warn UnmatchedTestSpec
)
set_tests_properties(Warnings::UnmatchedTestSpecIsAccepted
  PROPERTIES
    FAIL_REGULAR_EXPRESSION "Unrecognised warning option: "
)

add_test(NAME Warnings::MultipleWarningsCanBeSpecified
  COMMAND
    $<TARGET_FILE:SelfTest> Tracker
      --warn NoAssertions
      --warn UnmatchedTestSpec
)

add_test(NAME TestSpecs::WarnUnmatchedTestSpecFailsWithUnmatchedTestSpec
  COMMAND
    $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___" --warn UnmatchedTestSpec
)
set_tests_properties(TestSpecs::WarnUnmatchedTestSpecFailsWithUnmatchedTestSpec
  PROPERTIES
    WILL_FAIL ON
)

add_test(NAME UnmatchedOutputFilter COMMAND $<TARGET_FILE:SelfTest> [this-tag-does-not-exist])
set_tests_properties(UnmatchedOutputFilter
  PROPERTIES
    PASS_REGULAR_EXPRESSION "No test cases matched '\\[this-tag-does-not-exist\\]'"
)

add_test(NAME FilteredSection-1 COMMAND $<TARGET_FILE:SelfTest> \#1394 -c RunSection)
set_tests_properties(FilteredSection-1 PROPERTIES FAIL_REGULAR_EXPRESSION "No tests ran")
add_test(NAME FilteredSection-2 COMMAND $<TARGET_FILE:SelfTest> \#1394\ nested -c NestedRunSection -c s1)
set_tests_properties(FilteredSection-2 PROPERTIES FAIL_REGULAR_EXPRESSION "No tests ran")

add_test(
  NAME
    FilteredSection::GeneratorsDontCauseInfiniteLoop-1
  COMMAND
    $<TARGET_FILE:SelfTest> "#2025: original repro" -c "fov_0"
)
set_tests_properties(FilteredSection::GeneratorsDontCauseInfiniteLoop-1
  PROPERTIES
    PASS_REGULAR_EXPRESSION "inside with fov: 0" # This should happen
    FAIL_REGULAR_EXPRESSION "inside with fov: 1" # This would mean there was no filtering
)

# GENERATE between filtered sections (both are selected)
add_test(
  NAME
    FilteredSection::GeneratorsDontCauseInfiniteLoop-2
  COMMAND
    $<TARGET_FILE:SelfTest> "#2025: same-level sections"
    -c "A"
    -c "B"
    --colour-mode none
)
set_tests_properties(FilteredSection::GeneratorsDontCauseInfiniteLoop-2
  PROPERTIES
    PASS_REGULAR_EXPRESSION "All tests passed \\(4 assertions in 1 test case\\)"
)

# AppVeyor has Python 2.7 on PATH, but .py files are not set up as directly
# runnable there, so we invoke the interpreter explicitly.
add_test(NAME ApprovalTests
  COMMAND
    ${PYTHON_EXECUTABLE}
    ${CATCH_DIR}/tools/scripts/approvalTests.py
    $<TARGET_FILE:SelfTest>
    "${CMAKE_CURRENT_BINARY_DIR}"
)

set_tests_properties(ApprovalTests
  PROPERTIES
    FAIL_REGULAR_EXPRESSION "Results differed"

    # This is the most expensive test in the basic test suite, so we give it
    # a high cost estimate so that CI runs it as one of the first ones,
    # for better parallelization.
    COST 30
    LABELS "uses-python"
)

add_test(NAME RegressionCheck-1670 COMMAND $<TARGET_FILE:SelfTest> "#1670 regression check" -c A -r compact)
set_tests_properties(RegressionCheck-1670 PROPERTIES PASS_REGULAR_EXPRESSION "All tests passed \\(2 assertions in 1 test case\\)")

add_test(NAME VersionCheck COMMAND $<TARGET_FILE:SelfTest> -h)
set_tests_properties(VersionCheck PROPERTIES PASS_REGULAR_EXPRESSION "Catch2 v${PROJECT_VERSION}")

add_test(NAME LibIdentityTest COMMAND $<TARGET_FILE:SelfTest> --libidentify)
set_tests_properties(LibIdentityTest PROPERTIES PASS_REGULAR_EXPRESSION "description:    A Catch2 test executable")

add_test(NAME FilenameAsTagsTest COMMAND $<TARGET_FILE:SelfTest> -\# --list-tags)
set_tests_properties(FilenameAsTagsTest PROPERTIES PASS_REGULAR_EXPRESSION "\\[#Approx.tests\\]")
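
# Note: the backslash in "-\#" keeps CMake from parsing the unquoted "#" as
# the start of a comment; the test binary still receives a plain "-#".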

# Check that the filename tags can also be matched against (#2064)
add_test(NAME FilenameAsTagsMatching COMMAND $<TARGET_FILE:SelfTest> -\# --list-tags [\#Approx.tests])
set_tests_properties(FilenameAsTagsMatching
  PROPERTIES
    PASS_REGULAR_EXPRESSION "\\[#Approx.tests\\]"
    # Avoids false positives by looking for start of line (newline) before the 0
    FAIL_REGULAR_EXPRESSION "[\r\n]0 tag"
)

add_test(NAME EscapeSpecialCharactersInTestNames COMMAND $<TARGET_FILE:SelfTest> "Test with special\\, characters \"in name")
set_tests_properties(EscapeSpecialCharactersInTestNames PROPERTIES PASS_REGULAR_EXPRESSION "1 assertion in 1 test case")

add_test(NAME NegativeSpecNoHiddenTests COMMAND $<TARGET_FILE:SelfTest> --list-tests ~[approval])
set_tests_properties(NegativeSpecNoHiddenTests PROPERTIES FAIL_REGULAR_EXPRESSION "\\[\\.\\]")

add_test(NAME TestsInFile::SimpleSpecs COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/plain-old-tests.input")
set_tests_properties(TestsInFile::SimpleSpecs PROPERTIES PASS_REGULAR_EXPRESSION "6 assertions in 2 test cases")

add_test(NAME TestsInFile::EscapeSpecialCharacters COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/special-characters-in-file.input")
set_tests_properties(TestsInFile::EscapeSpecialCharacters PROPERTIES PASS_REGULAR_EXPRESSION "1 assertion in 1 test case")

add_test(NAME TestsInFile::InvalidTestNames-1 COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/invalid-test-names.input")
set_tests_properties(TestsInFile::InvalidTestNames-1
  PROPERTIES
    PASS_REGULAR_EXPRESSION "Invalid Filter: \"Test with special, characters in \\\\\" name\""
    FAIL_REGULAR_EXPRESSION "No tests ran"
)

add_test(NAME TagAlias COMMAND $<TARGET_FILE:SelfTest> [@tricky] --list-tests)
set_tests_properties(TagAlias PROPERTIES
  PASS_REGULAR_EXPRESSION "[0-9]+ matching test cases"
  FAIL_REGULAR_EXPRESSION "0 matching test cases"
)

add_test(NAME RandomTestOrdering COMMAND ${PYTHON_EXECUTABLE}
  ${CATCH_DIR}/tests/TestScripts/testRandomOrder.py $<TARGET_FILE:SelfTest>)
set_tests_properties(RandomTestOrdering
  PROPERTIES
    LABELS "uses-python"
)

add_test(NAME CheckConvenienceHeaders
  COMMAND
    ${PYTHON_EXECUTABLE} ${CATCH_DIR}/tools/scripts/checkConvenienceHeaders.py
)
set_tests_properties(CheckConvenienceHeaders
  PROPERTIES
    LABELS "uses-python"
)

add_test(NAME "Benchmarking::SkipBenchmarkMacros"
  COMMAND
    $<TARGET_FILE:SelfTest> "Skip benchmark macros"
      --reporter console
      --skip-benchmarks
      --colour-mode none
)
set_tests_properties("Benchmarking::SkipBenchmarkMacros"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "All tests passed \\(2 assertions in 1 test case\\)"
    FAIL_REGULAR_EXPRESSION "benchmark name"
)


add_test(NAME "Benchmarking::FailureReporting::OptimizedOut"
  COMMAND
    $<TARGET_FILE:SelfTest> "Failing benchmarks" -c "empty" -r xml
  # This test only makes sense with the optimizer being enabled when
  # the tests are being compiled.
  CONFIGURATIONS Release
)
set_tests_properties("Benchmarking::FailureReporting::OptimizedOut"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "could not measure benchmark\, maybe it was optimized away"
    FAIL_REGULAR_EXPRESSION "successes=\"1\""
)

add_test(NAME "Benchmarking::FailureReporting::ThrowingBenchmark"
  COMMAND
    $<TARGET_FILE:SelfTest> "Failing benchmarks" -c "throw" -r xml
)
set_tests_properties("Benchmarking::FailureReporting::ThrowingBenchmark"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "<failed message=\"just a plain literal"
    FAIL_REGULAR_EXPRESSION "successes=\"1\""
)

add_test(NAME "Benchmarking::FailureReporting::FailedAssertion"
  COMMAND
    $<TARGET_FILE:SelfTest> "Failing benchmarks" -c "assert" -r xml
)
set_tests_properties("Benchmarking::FailureReporting::FailedAssertion"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "<Expression success=\"false\""
    FAIL_REGULAR_EXPRESSION "successes=\"1\""
)

add_test(NAME "Benchmarking::FailureReporting::FailMacro"
  COMMAND
    $<TARGET_FILE:SelfTest> "Failing benchmarks" -c "fail" -r xml
)
set_tests_properties("Benchmarking::FailureReporting::FailMacro"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "This benchmark only fails\, nothing else"
    FAIL_REGULAR_EXPRESSION "successes=\"1\""
)

add_test(NAME "Benchmarking::FailureReporting::ShouldFailIsRespected"
  COMMAND
    $<TARGET_FILE:SelfTest> "Failing benchmark respects should-fail"
)
set_tests_properties("Benchmarking::FailureReporting::ShouldFailIsRespected"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "1 failed as expected"
)

add_test(NAME "ErrorHandling::InvalidTestSpecExitsEarly"
  COMMAND
    $<TARGET_FILE:SelfTest> "[aa,a]"
)
set_tests_properties("ErrorHandling::InvalidTestSpecExitsEarly"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "Invalid Filter: \\[aa\,a\\]"
    FAIL_REGULAR_EXPRESSION "No tests ran"
)

if (MSVC)
  set(_NullFile "NUL")
else()
  set(_NullFile "/dev/null")
endif()
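
# Reporter specs below use Catch2's "name::key=value" syntax, e.g.
# "--reporter xml::out=${_NullFile}" selects the XML reporter and redirects
# its output to the null device.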

# This test checks that the process writes nothing to stdout, but if CTest
# runs the tests under Valgrind or a similar tool, the tool writes its own
# output to stdout and the test would fail.
if (NOT MEMORYCHECK_COMMAND)
  add_test(NAME "MultiReporter::CapturingReportersDontPropagateStdOut"
    COMMAND
      $<TARGET_FILE:SelfTest> "Sends stuff to stdout and stderr"
        --reporter xml::out=${_NullFile}
        --reporter junit::out=${_NullFile}
  )
  set_tests_properties("MultiReporter::CapturingReportersDontPropagateStdOut"
    PROPERTIES
      FAIL_REGULAR_EXPRESSION ".+"
  )
endif()

add_test(NAME "MultiReporter::NonCapturingReportersPropagateStdout"
  COMMAND
    $<TARGET_FILE:SelfTest> "Sends stuff to stdout and stderr"
      --reporter xml::out=${_NullFile}
      --reporter console::out=${_NullFile}
)
set_tests_properties("MultiReporter::NonCapturingReportersPropagateStdout"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "A string sent to stderr via clog"
)

add_test(NAME "Outputs::DashAsOutLocationSendsOutputToStdout"
  COMMAND
    $<TARGET_FILE:SelfTest> "Factorials are computed"
      --out=-
      --colour-mode none
)
set_tests_properties("Outputs::DashAsOutLocationSendsOutputToStdout"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "All tests passed \\(5 assertions in 1 test case\\)"
)

add_test(NAME "Reporters::DashAsLocationInReporterSpecSendsOutputToStdout"
  COMMAND
    $<TARGET_FILE:SelfTest> "Factorials are computed"
      --reporter console::out=-
      --colour-mode none
)
set_tests_properties("Reporters::DashAsLocationInReporterSpecSendsOutputToStdout"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "All tests passed \\(5 assertions in 1 test case\\)"
)

add_test(NAME "Reporters::ReporterSpecificColourOverridesDefaultColour"
  COMMAND
    $<TARGET_FILE:SelfTest> "Factorials are computed"
      --reporter console::colour-mode=ansi
      --colour-mode none
)
set_tests_properties("Reporters::ReporterSpecificColourOverridesDefaultColour"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "\\[1\;32mAll tests passed"
)

add_test(NAME "Reporters::UnrecognizedOptionInSpecCausesError"
  COMMAND
    $<TARGET_FILE:SelfTest> "Factorials are computed"
      --reporter console::bad-option=ansi
)
set_tests_properties("Reporters::UnrecognizedOptionInSpecCausesError"
  PROPERTIES
    WILL_FAIL ON
)

add_test(NAME "Colours::ColourModeCanBeExplicitlySetToAnsi"
  COMMAND
    $<TARGET_FILE:SelfTest> "Factorials are computed"
      --reporter console
      --colour-mode ansi
)
set_tests_properties("Colours::ColourModeCanBeExplicitlySetToAnsi"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "\\[1\;32mAll tests passed"
)

add_test(NAME "Reporters::JUnit::NamespacesAreNormalized"
  COMMAND
    $<TARGET_FILE:SelfTest>
      --reporter junit
      "A TEST_CASE_METHOD testing junit classname normalization"
)
set_tests_properties("Reporters::JUnit::NamespacesAreNormalized"
  PROPERTIES
    PASS_REGULAR_EXPRESSION "testcase classname=\"SelfTest(\.exe)?\\.A\\.B\\.TestClass\""
)

if (CATCH_ENABLE_CONFIGURE_TESTS)
    foreach(testName "DefaultReporter" "Disable" "DisableStringification"
                     "ExperimentalRedirect")
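        # For example, the "Disable" entry runs TestScripts/testConfigureDisable.py.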

        add_test(NAME "CMakeConfig::${testName}"
          COMMAND
            "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_LIST_DIR}/TestScripts/testConfigure${testName}.py" "${CATCH_DIR}" "${CMAKE_CURRENT_BINARY_DIR}"
        )
        set_tests_properties("CMakeConfig::${testName}"
          PROPERTIES
            COST 240
            LABELS "uses-python"
        )

    endforeach()
endif()

if (CATCH_ENABLE_CMAKE_HELPER_TESTS)
    add_test(NAME "CMakeHelper::DiscoverTests"
      COMMAND
        "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_LIST_DIR}/TestScripts/DiscoverTests/VerifyRegistration.py" "${CATCH_DIR}" "${CMAKE_CURRENT_BINARY_DIR}"
    )
    set_tests_properties("CMakeHelper::DiscoverTests"
      PROPERTIES
        COST 240
        LABELS "uses-python"
    )
endif()

foreach (reporterName # "Automake" - the simple .trs format does not support any kind of comments/metadata
                      "compact"
                      "console"
                      "JUnit"
                      "SonarQube"
                      "TAP"
                      # "TeamCity" - does not seem to support test suite-level metadata/comments
                      "XML"
                      "JSON")

    add_test(NAME "Reporters:Filters:${reporterName}"
      COMMAND
        $<TARGET_FILE:SelfTest> [comparisons][string-case] "CaseInsensitiveLess is case insensitive"
          --reporter ${reporterName}
    )
    # These reporters need different regexes, because the quotes around the
    # test name end up escaped (as XML entities, or with backslashes in JSON).
    if (reporterName MATCHES "JUnit|XML")
      set(testCaseNameFormat "&quot;CaseInsensitiveLess is case insensitive&quot;")
    elseif(reporterName MATCHES "JSON")
      set(testCaseNameFormat "\\\\\"CaseInsensitiveLess is case insensitive\\\\\"")
    else()
      set(testCaseNameFormat "\"CaseInsensitiveLess is case insensitive\"")
    endif()
    set_tests_properties("Reporters:Filters:${reporterName}"
      PROPERTIES
        PASS_REGULAR_EXPRESSION "[fF]ilters.+\\[comparisons\\] \\[string-case\\] ${testCaseNameFormat}"
    )

    add_test(NAME "Reporters:RngSeed:${reporterName}"
      COMMAND
        $<TARGET_FILE:SelfTest> "Factorials are computed"
          --reporter ${reporterName}
          --rng-seed 18181818
    )
    set_tests_properties("Reporters:RngSeed:${reporterName}"
      PROPERTIES
        PASS_REGULAR_EXPRESSION "18181818"
    )

endforeach()


list(APPEND CATCH_WARNING_TARGETS SelfTest)
set(CATCH_WARNING_TARGETS ${CATCH_WARNING_TARGETS} PARENT_SCOPE)