summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorMickael Savinaud <savmickael@users.noreply.github.com>2012-08-30 17:14:39 +0000
committerMickael Savinaud <savmickael@users.noreply.github.com>2012-08-30 17:14:39 +0000
commitd5bb3b0039159a61c7e9b2ae157a7b6ec2c0aef5 (patch)
tree8114a5c8fef51e462e9baea495ad0fe210c3b3a6 /tests
parentf16216e2708c3b480f726d64b589f092bbc48b02 (diff)
[trunk] Convert CMake-language commands to lower case
Ancient CMake versions required upper-case commands. Later command names became case-insensitive. Now the preferred style is lower-case. This also changes all the key words to lower case. The primary reason for changing key words is that all documentation for CMakeLists.txt now shows the key words as lower case. Even the printed “Mastering CMake v5” uses lower case. Thanks to Hans Johnson
Diffstat (limited to 'tests')
-rw-r--r--tests/CMakeLists.txt82
-rw-r--r--tests/conformance/CMakeLists.txt244
-rw-r--r--tests/nonregression/CMakeLists.txt254
3 files changed, 290 insertions, 290 deletions
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index a5ada2f8..0b1ce71b 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -1,5 +1,5 @@
# Tests
-INCLUDE_DIRECTORIES(
+include_directories(
${OPENJPEG_SOURCE_DIR}/libopenjpeg
${OPENJPEG_SOURCE_DIR}/applications/codec
${OPENJPEG_SOURCE_DIR}/applications/common
@@ -9,72 +9,72 @@ INCLUDE_DIRECTORIES(
)
# First thing define the common source:
-SET(comparePGXimages_SRCS comparePGXimages.c
+set(comparePGXimages_SRCS comparePGXimages.c
${OPENJPEG_SOURCE_DIR}/applications/codec/convert.c
${OPENJPEG_SOURCE_DIR}/applications/common/opj_getopt.c
)
-SET(compare_dump_files_SRCS compare_dump_files.c
+set(compare_dump_files_SRCS compare_dump_files.c
${OPENJPEG_SOURCE_DIR}/applications/common/opj_getopt.c)
-SET(compareRAWimages_SRCS compareRAWimages.c
+set(compareRAWimages_SRCS compareRAWimages.c
${OPENJPEG_SOURCE_DIR}/applications/common/opj_getopt.c)
-ADD_EXECUTABLE(comparePGXimages ${comparePGXimages_SRCS})
-TARGET_LINK_LIBRARIES(comparePGXimages
+add_executable(comparePGXimages ${comparePGXimages_SRCS})
+target_link_libraries(comparePGXimages
${OPENJPEG_LIBRARY_NAME}
${PNG_LIBNAME} ${TIFF_LIBNAME}
)
# To support universal exe:
-IF(ZLIB_FOUND AND APPLE)
- TARGET_LINK_LIBRARIES(comparePGXimages z)
+if(ZLIB_FOUND AND APPLE)
+ target_link_libraries(comparePGXimages z)
-ELSE(ZLIB_FOUND AND APPLE)
+else(ZLIB_FOUND AND APPLE)
- TARGET_LINK_LIBRARIES(comparePGXimages ${Z_LIBNAME})
-ENDIF(ZLIB_FOUND AND APPLE)
+ target_link_libraries(comparePGXimages ${Z_LIBNAME})
+endif(ZLIB_FOUND AND APPLE)
-ADD_EXECUTABLE(compare_dump_files ${compare_dump_files_SRCS})
+add_executable(compare_dump_files ${compare_dump_files_SRCS})
-ADD_EXECUTABLE(j2k_random_tile_access j2k_random_tile_access.c)
-TARGET_LINK_LIBRARIES(j2k_random_tile_access ${OPENJPEG_LIBRARY_NAME})
+add_executable(j2k_random_tile_access j2k_random_tile_access.c)
+target_link_libraries(j2k_random_tile_access ${OPENJPEG_LIBRARY_NAME})
-ADD_EXECUTABLE(compareRAWimages ${compareRAWimages_SRCS})
+add_executable(compareRAWimages ${compareRAWimages_SRCS})
-ADD_EXECUTABLE(test_tile_encoder test_tile_encoder.c)
-TARGET_LINK_LIBRARIES(test_tile_encoder ${OPENJPEG_LIBRARY_NAME})
+add_executable(test_tile_encoder test_tile_encoder.c)
+target_link_libraries(test_tile_encoder ${OPENJPEG_LIBRARY_NAME})
# Let's try a couple of possibilities:
-ADD_TEST(tte0 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder)
-ADD_TEST(tte1 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 3 2048 2048 1024 1024 8 1 tte1.j2k)
-ADD_TEST(tte2 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 3 2048 2048 1024 1024 8 1 tte2.jp2)
-ADD_TEST(tte3 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 2048 2048 1024 1024 8 1 tte3.j2k)
-ADD_TEST(tte4 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 256 256 128 128 8 0 tte4.j2k)
-ADD_TEST(tte5 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 512 512 256 256 8 0 tte5.j2k)
+add_test(tte0 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder)
+add_test(tte1 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 3 2048 2048 1024 1024 8 1 tte1.j2k)
+add_test(tte2 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 3 2048 2048 1024 1024 8 1 tte2.jp2)
+add_test(tte3 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 2048 2048 1024 1024 8 1 tte3.j2k)
+add_test(tte4 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 256 256 128 128 8 0 tte4.j2k)
+add_test(tte5 ${EXECUTABLE_OUTPUT_PATH}/test_tile_encoder 1 512 512 256 256 8 0 tte5.j2k)
-ADD_EXECUTABLE(test_tile_decoder test_tile_decoder.c)
-TARGET_LINK_LIBRARIES(test_tile_decoder ${OPENJPEG_LIBRARY_NAME})
+add_executable(test_tile_decoder test_tile_decoder.c)
+target_link_libraries(test_tile_decoder ${OPENJPEG_LIBRARY_NAME})
-ADD_TEST(ttd0 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder)
+add_test(ttd0 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder)
set_property(TEST ttd0 APPEND PROPERTY DEPENDS tte0)
-ADD_TEST(ttd1 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder 0 0 1024 1024 tte1.j2k)
+add_test(ttd1 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder 0 0 1024 1024 tte1.j2k)
set_property(TEST ttd1 APPEND PROPERTY DEPENDS tte1)
-ADD_TEST(ttd2 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder 0 0 1024 1024 tte2.jp2)
+add_test(ttd2 ${EXECUTABLE_OUTPUT_PATH}/test_tile_decoder 0 0 1024 1024 tte2.jp2)
set_property(TEST ttd2 APPEND PROPERTY DEPENDS tte2)
# No image send to the dashboard if lib PNG is not available.
-IF(NOT HAVE_LIBPNG)
- MESSAGE(WARNING "Lib PNG seems to be not available: if you want run the non-regression tests with images reported to the dashboard, you need it (try BUILD_THIRDPARTY)")
-ENDIF(NOT HAVE_LIBPNG)
+if(NOT HAVE_LIBPNG)
+ message(WARNING "Lib PNG seems to be not available: if you want run the non-regression tests with images reported to the dashboard, you need it (try BUILD_THIRDPARTY)")
+endif(NOT HAVE_LIBPNG)
-ADD_SUBDIRECTORY(conformance)
-ADD_SUBDIRECTORY(nonregression)
+add_subdirectory(conformance)
+add_subdirectory(nonregression)
-IF(BUILD_JPIP)
- IF(JPIP_SERVER)
- #SET(s "http://jpip.example.com/myFCGI?target=16.jp2&fsiz=170,170&cnew=http&type=jpp-stream")
- SET(s "${JPIP_SERVER}?target=16.jp2&fsiz=170,170&cnew=http&type=jpp-stream")
- SET(p "${CMAKE_CURRENT_BINARY_DIR}/jpip.dat")
- SET(md5 "62b00c620fb0a600c5ffd413cada4674")
- ADD_TEST(TestJPIP1 ${CMAKE_COMMAND} -DD_URL:STRING=${s} -DD_FILE:PATH=${p}
+if(BUILD_JPIP)
+ if(JPIP_SERVER)
+ #set(s "http://jpip.example.com/myFCGI?target=16.jp2&fsiz=170,170&cnew=http&type=jpp-stream")
+ set(s "${JPIP_SERVER}?target=16.jp2&fsiz=170,170&cnew=http&type=jpp-stream")
+ set(p "${CMAKE_CURRENT_BINARY_DIR}/jpip.dat")
+ set(md5 "62b00c620fb0a600c5ffd413cada4674")
+ add_test(TestJPIP1 ${CMAKE_COMMAND} -DD_URL:STRING=${s} -DD_FILE:PATH=${p}
-DEXPECTED_MD5=${md5} -P ${PROJECT_SOURCE_DIR}/CMake/JPIPTestDriver.cmake)
- ENDIF(JPIP_SERVER)
-ENDIF(BUILD_JPIP)
+ endif(JPIP_SERVER)
+endif(BUILD_JPIP)
diff --git a/tests/conformance/CMakeLists.txt b/tests/conformance/CMakeLists.txt
index d23cdb1d..9dba0dd2 100644
--- a/tests/conformance/CMakeLists.txt
+++ b/tests/conformance/CMakeLists.txt
@@ -1,19 +1,19 @@
# CONFORMANCE TESTS AND NON-REGRESSION ON THIS DATASET
-FILE(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
+file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
-SET(TEMP ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
-SET(BASELINE_CONF ${OPJ_DATA_ROOT}/baseline/conformance)
-SET(BASELINE_NR ${OPJ_DATA_ROOT}/baseline/nonregression)
-SET(INPUT_CONF ${OPJ_DATA_ROOT}/input/conformance)
+set(TEMP ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
+set(BASELINE_CONF ${OPJ_DATA_ROOT}/baseline/conformance)
+set(BASELINE_NR ${OPJ_DATA_ROOT}/baseline/nonregression)
+set(INPUT_CONF ${OPJ_DATA_ROOT}/input/conformance)
# List of components by file (normaly p0_13.j2k have 257 components but for this
#set of test we consider only 4)
-SET( CP0_nbC_list "not_used;1;1;1;3;4;4;3;3;1;3;1;1;4;3;1;1")
-SET( CP1_nbC_list "not_used;1;3;4;1;3;3;2")
+set( CP0_nbC_list "not_used;1;1;1;3;4;4;3;3;1;3;1;1;4;3;1;1")
+set( CP1_nbC_list "not_used;1;3;4;1;3;3;2")
-SET(COMMENTCODEVAR FALSE)
+set(COMMENTCODEVAR FALSE)
#--------------------------------------------------------------------------
@@ -23,20 +23,20 @@ SET(COMMENTCODEVAR FALSE)
# non regression comparison
# Parameters and tolerances given by Table C.1
-SET( C0P0_ResFactor_list "not_used;0;0;0;3;3;3;0;5;2;0;0;0;0;2;0;0")
-SET( C0P0_PEAK_list "not_used;0;0;0;33;54;109;10;7;4;10;0;0;0;0;0;0")
-SET( C0P0_MSE_list "not_used;0;0;0;55.8;68;743;0.34;6.72;1.47;2.84;0;0;0;0;0;0")
+set( C0P0_ResFactor_list "not_used;0;0;0;3;3;3;0;5;2;0;0;0;0;2;0;0")
+set( C0P0_PEAK_list "not_used;0;0;0;33;54;109;10;7;4;10;0;0;0;0;0;0")
+set( C0P0_MSE_list "not_used;0;0;0;55.8;68;743;0.34;6.72;1.47;2.84;0;0;0;0;0;0")
-FOREACH(numFileC0P0 RANGE 1 16)
+foreach(numFileC0P0 RANGE 1 16)
# Build filenames
- IF(${numFileC0P0} LESS 10)
- SET( filenameInput p0_0${numFileC0P0}.j2k )
- SET( filenameRef c0p0_0${numFileC0P0}.pgx )
- ELSE(${numFileC0P0} LESS 10)
- SET( filenameInput p0_${numFileC0P0}.j2k )
- SET( filenameRef c0p0_${numFileC0P0}.pgx )
- ENDIF(${numFileC0P0} LESS 10)
+ if(${numFileC0P0} LESS 10)
+ set( filenameInput p0_0${numFileC0P0}.j2k )
+ set( filenameRef c0p0_0${numFileC0P0}.pgx )
+ else(${numFileC0P0} LESS 10)
+ set( filenameInput p0_${numFileC0P0}.j2k )
+ set( filenameRef c0p0_${numFileC0P0}.pgx )
+ endif(${numFileC0P0} LESS 10)
# Get corresponding tests parameters
list(GET C0P0_ResFactor_list ${numFileC0P0} ResFactor)
@@ -45,18 +45,18 @@ FOREACH(numFileC0P0 RANGE 1 16)
list(GET C0P0_MSE_list ${numFileC0P0} MSE_limit)
# Manage cases which need to try different resolution reduction
- IF (numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
+ if (numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
get_filename_component(filenameRefSub ${filenameRef} NAME_WE)
#r = 0
- ADD_TEST(ETS-C0P0-${filenameInput}-r0-decode
+ add_test(ETS-C0P0-${filenameInput}-r0-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}-r0.pgx
-r 0
)
- ADD_TEST(ETS-C0P0-${filenameInput}-r0-compare2ref
+ add_test(ETS-C0P0-${filenameInput}-r0-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRefSub}r0.pgx
-t ${TEMP}/c0${filenameInput}-r0.pgx
@@ -66,11 +66,11 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P0-${filenameInput}-r0-compare2ref
+ set_tests_properties(ETS-C0P0-${filenameInput}-r0-compare2ref
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-r0-decode)
- ADD_TEST(NR-C0P0-${filenameInput}-r0-compare2base
+ add_test(NR-C0P0-${filenameInput}-r0-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRefSub}-r0.pgx
-t ${TEMP}/c0${filenameInput}-r0.pgx
@@ -79,19 +79,19 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P0-${filenameInput}-r0-compare2base
+ set_tests_properties(NR-C0P0-${filenameInput}-r0-compare2base
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-r0-decode)
#r = 1
- ADD_TEST(ETS-C0P0-${filenameInput}-r1-decode
+ add_test(ETS-C0P0-${filenameInput}-r1-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}-r1.pgx
-r 1
)
- ADD_TEST(ETS-C0P0-${filenameInput}-r1-compare2ref
+ add_test(ETS-C0P0-${filenameInput}-r1-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRefSub}r1.pgx
-t ${TEMP}/c0${filenameInput}-r1.pgx
@@ -101,11 +101,11 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P0-${filenameInput}-r1-compare2ref
+ set_tests_properties(ETS-C0P0-${filenameInput}-r1-compare2ref
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-r1-decode)
- ADD_TEST(NR-C0P0-${filenameInput}-r1-compare2base
+ add_test(NR-C0P0-${filenameInput}-r1-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRefSub}-r1.pgx
-t ${TEMP}/c0${filenameInput}-r1.pgx
@@ -114,20 +114,20 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P0-${filenameInput}-r1-compare2base
+ set_tests_properties(NR-C0P0-${filenameInput}-r1-compare2base
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-r1-decode)
- ELSE(numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
+ else(numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
- ADD_TEST(ETS-C0P0-${filenameInput}-decode
+ add_test(ETS-C0P0-${filenameInput}-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}.pgx
-r ${ResFactor}
)
- ADD_TEST(ETS-C0P0-${filenameInput}-compare2ref
+ add_test(ETS-C0P0-${filenameInput}-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRef}
-t ${TEMP}/c0${filenameInput}.pgx
@@ -137,11 +137,11 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P0-${filenameInput}-compare2ref
+ set_tests_properties(ETS-C0P0-${filenameInput}-compare2ref
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-decode)
- ADD_TEST(NR-C0P0-${filenameInput}-compare2base
+ add_test(NR-C0P0-${filenameInput}-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRef}
-t ${TEMP}/c0${filenameInput}.pgx
@@ -150,13 +150,13 @@ FOREACH(numFileC0P0 RANGE 1 16)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P0-${filenameInput}-compare2base
+ set_tests_properties(NR-C0P0-${filenameInput}-compare2base
PROPERTIES DEPENDS
ETS-C0P0-${filenameInput}-decode)
- ENDIF(numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
+ endif(numFileC0P0 EQUAL 3 OR numFileC0P0 EQUAL 15)
-ENDFOREACH(numFileC0P0)
+endforeach(numFileC0P0)
#--------------------------------------------------------------------------
# Tests about class 0 profile 1
@@ -165,15 +165,15 @@ ENDFOREACH(numFileC0P0)
# non regression comparison
# Parameters and tolerances given by Table C.4
-SET( C0P1_ResFactor_list "not_used;0;3;3;0;4;1;0")
-SET( C0P1_PEAK_list "not_used;0;35;28;2;128;128;0")
-SET( C0P1_MSE_list "not_used;0;74;18.8;0.550;16384;16384;0")
+set( C0P1_ResFactor_list "not_used;0;3;3;0;4;1;0")
+set( C0P1_PEAK_list "not_used;0;35;28;2;128;128;0")
+set( C0P1_MSE_list "not_used;0;74;18.8;0.550;16384;16384;0")
-FOREACH(numFileC0P1 RANGE 1 7)
+foreach(numFileC0P1 RANGE 1 7)
# Build filenames
- SET( filenameInput p1_0${numFileC0P1}.j2k )
- SET( filenameRef c0p1_0${numFileC0P1}.pgx )
+ set( filenameInput p1_0${numFileC0P1}.j2k )
+ set( filenameRef c0p1_0${numFileC0P1}.pgx )
# Get corresponding tests parameters
list(GET C0P1_ResFactor_list ${numFileC0P1} ResFactor)
@@ -182,18 +182,18 @@ FOREACH(numFileC0P1 RANGE 1 7)
list(GET C0P1_MSE_list ${numFileC0P1} MSE_limit)
# Manage cases which need to try different resolution reduction
- IF (numFileC0P1 EQUAL 4 )
+ if (numFileC0P1 EQUAL 4 )
get_filename_component(filenameRefSub ${filenameRef} NAME_WE)
#r = 0
- ADD_TEST(ETS-C0P1-${filenameInput}-r0-decode
+ add_test(ETS-C0P1-${filenameInput}-r0-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}-r0.pgx
-r 0
)
- ADD_TEST(ETS-C0P1-${filenameInput}-r0-compare2ref
+ add_test(ETS-C0P1-${filenameInput}-r0-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRefSub}r0.pgx
-t ${TEMP}/c0${filenameInput}-r0.pgx
@@ -203,11 +203,11 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P1-${filenameInput}-r0-compare2ref
+ set_tests_properties(ETS-C0P1-${filenameInput}-r0-compare2ref
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-r0-decode)
- ADD_TEST(NR-C0P1-${filenameInput}-r0-compare2base
+ add_test(NR-C0P1-${filenameInput}-r0-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRefSub}-r0.pgx
-t ${TEMP}/c0${filenameInput}-r0.pgx
@@ -216,19 +216,19 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P1-${filenameInput}-r0-compare2base
+ set_tests_properties(NR-C0P1-${filenameInput}-r0-compare2base
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-r0-decode)
#r = 3
- ADD_TEST(ETS-C0P1-${filenameInput}-r3-decode
+ add_test(ETS-C0P1-${filenameInput}-r3-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}-r3.pgx
-r 3
)
- ADD_TEST(ETS-C0P1-${filenameInput}-r3-compare2ref
+ add_test(ETS-C0P1-${filenameInput}-r3-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRefSub}r3.pgx
-t ${TEMP}/c0${filenameInput}-r3.pgx
@@ -238,11 +238,11 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P1-${filenameInput}-r3-compare2ref
+ set_tests_properties(ETS-C0P1-${filenameInput}-r3-compare2ref
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-r3-decode)
- ADD_TEST(NR-C0P1-${filenameInput}-r3-compare2base
+ add_test(NR-C0P1-${filenameInput}-r3-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRefSub}-r3.pgx
-t ${TEMP}/c0${filenameInput}-r3.pgx
@@ -251,20 +251,20 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P1-${filenameInput}-r3-compare2base
+ set_tests_properties(NR-C0P1-${filenameInput}-r3-compare2base
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-r3-decode)
- ELSE(numFileC0P1 EQUAL 4)
+ else(numFileC0P1 EQUAL 4)
- ADD_TEST(ETS-C0P1-${filenameInput}-decode
+ add_test(ETS-C0P1-${filenameInput}-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c0${filenameInput}.pgx
-r ${ResFactor}
)
- ADD_TEST(ETS-C0P1-${filenameInput}-compare2ref
+ add_test(ETS-C0P1-${filenameInput}-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRef}
-t ${TEMP}/c0${filenameInput}.pgx
@@ -274,11 +274,11 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s t_
)
- SET_TESTS_PROPERTIES(ETS-C0P1-${filenameInput}-compare2ref
+ set_tests_properties(ETS-C0P1-${filenameInput}-compare2ref
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-decode)
- ADD_TEST(NR-C0P1-${filenameInput}-compare2base
+ add_test(NR-C0P1-${filenameInput}-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRef}
-t ${TEMP}/c0${filenameInput}.pgx
@@ -287,13 +287,13 @@ FOREACH(numFileC0P1 RANGE 1 7)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C0P1-${filenameInput}-compare2base
+ set_tests_properties(NR-C0P1-${filenameInput}-compare2base
PROPERTIES DEPENDS
ETS-C0P1-${filenameInput}-decode)
- ENDIF(numFileC0P1 EQUAL 4)
+ endif(numFileC0P1 EQUAL 4)
-ENDFOREACH(numFileC0P1)
+endforeach(numFileC0P1)
#--------------------------------------------------------------------------
# Tests about class 1 profile 0
@@ -302,20 +302,20 @@ ENDFOREACH(numFileC0P1)
# non regression comparison
# Parameters and tolerances given by Table C.6
-SET( C1P0_ResFactor_list "not_used;0;0;0;0;0;0;0;1;0;0;0;0;0;0;0;0")
-SET( C1P0_PEAK_list "not_used;0;0;0;5:4:6;2:2:2:0;635:403:378:0;0:0:0;0:0:0;0;0:0:0;0;0;0:0:0:0;0:0:0;0;0")
-SET( C1P0_MSE_list "not_used;0;0;0;0.776:0.626:1.070;0.302:0.307:0.269:0;11287:6124:3968:0;0:0:0;0:0:0;0;0:0:0;0;0;0:0:0:0;0:0:0;0;0")
+set( C1P0_ResFactor_list "not_used;0;0;0;0;0;0;0;1;0;0;0;0;0;0;0;0")
+set( C1P0_PEAK_list "not_used;0;0;0;5:4:6;2:2:2:0;635:403:378:0;0:0:0;0:0:0;0;0:0:0;0;0;0:0:0:0;0:0:0;0;0")
+set( C1P0_MSE_list "not_used;0;0;0;0.776:0.626:1.070;0.302:0.307:0.269:0;11287:6124:3968:0;0:0:0;0:0:0;0;0:0:0;0;0;0:0:0:0;0:0:0;0;0")
-FOREACH(numFileC1P0 RANGE 1 16)
+foreach(numFileC1P0 RANGE 1 16)
# Build filenames
- IF(${numFileC1P0} LESS 10)
- SET( filenameInput p0_0${numFileC1P0}.j2k )
- SET( filenameRef c1p0_0${numFileC1P0}.pgx )
- ELSE(${numFileC1P0} LESS 10)
- SET( filenameInput p0_${numFileC1P0}.j2k )
- SET( filenameRef c1p0_${numFileC1P0}.pgx )
- ENDIF(${numFileC1P0} LESS 10)
+ if(${numFileC1P0} LESS 10)
+ set( filenameInput p0_0${numFileC1P0}.j2k )
+ set( filenameRef c1p0_0${numFileC1P0}.pgx )
+ else(${numFileC1P0} LESS 10)
+ set( filenameInput p0_${numFileC1P0}.j2k )
+ set( filenameRef c1p0_${numFileC1P0}.pgx )
+ endif(${numFileC1P0} LESS 10)
# Get corresponding tests parameters
list(GET CP0_nbC_list ${numFileC1P0} nbComponents)
@@ -323,14 +323,14 @@ FOREACH(numFileC1P0 RANGE 1 16)
list(GET C1P0_PEAK_list ${numFileC1P0} PEAK_limit)
list(GET C1P0_MSE_list ${numFileC1P0} MSE_limit)
- ADD_TEST(ETS-C1P0-${filenameInput}-decode
+ add_test(ETS-C1P0-${filenameInput}-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c1${filenameInput}.pgx
-r ${ResFactor}
)
- ADD_TEST(ETS-C1P0-${filenameInput}-compare2ref
+ add_test(ETS-C1P0-${filenameInput}-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRef}
-t ${TEMP}/c1${filenameInput}.pgx
@@ -340,11 +340,11 @@ FOREACH(numFileC1P0 RANGE 1 16)
-s b_t_
)
- SET_TESTS_PROPERTIES(ETS-C1P0-${filenameInput}-compare2ref
+ set_tests_properties(ETS-C1P0-${filenameInput}-compare2ref
PROPERTIES DEPENDS
ETS-C1P0-${filenameInput}-decode)
- ADD_TEST(NR-C1P0-${filenameInput}-compare2base
+ add_test(NR-C1P0-${filenameInput}-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRef}
-t ${TEMP}/c1${filenameInput}.pgx
@@ -353,11 +353,11 @@ FOREACH(numFileC1P0 RANGE 1 16)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C1P0-${filenameInput}-compare2base
+ set_tests_properties(NR-C1P0-${filenameInput}-compare2base
PROPERTIES DEPENDS
ETS-C1P0-${filenameInput}-decode)
-ENDFOREACH(numFileC1P0)
+endforeach(numFileC1P0)
#--------------------------------------------------------------------------
# Tests about class 1 profile 1
@@ -366,28 +366,28 @@ ENDFOREACH(numFileC1P0)
# non regression comparison
# Parameters and tolerances given by Table C.7
-SET( C1P1_PEAK_list "not_used;0;5:4:6;2:2:1:0;624;40:40:40;2:2:2;0:0")
-SET( C1P1_MSE_list "not_used;0;0.765:0.616:1.051;0.3:0.210:0.200:0;3080;8.458:9.816:10.154;0.6:0.6:0.6;0:0")
+set( C1P1_PEAK_list "not_used;0;5:4:6;2:2:1:0;624;40:40:40;2:2:2;0:0")
+set( C1P1_MSE_list "not_used;0;0.765:0.616:1.051;0.3:0.210:0.200:0;3080;8.458:9.816:10.154;0.6:0.6:0.6;0:0")
-FOREACH(numFileC1P1 RANGE 1 7)
+foreach(numFileC1P1 RANGE 1 7)
# Build filenames
- SET( filenameInput p1_0${numFileC1P1}.j2k )
- SET( filenameRef c1p1_0${numFileC1P1}.pgx )
+ set( filenameInput p1_0${numFileC1P1}.j2k )
+ set( filenameRef c1p1_0${numFileC1P1}.pgx )
# Get corresponding tests parameters
list(GET CP1_nbC_list ${numFileC1P1} nbComponents)
list(GET C1P1_PEAK_list ${numFileC1P1} PEAK_limit)
list(GET C1P1_MSE_list ${numFileC1P1} MSE_limit)
- ADD_TEST(ETS-C1P1-${filenameInput}-decode
+ add_test(ETS-C1P1-${filenameInput}-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/c1${filenameInput}.pgx
-r 0
)
- ADD_TEST(ETS-C1P1-${filenameInput}-compare2ref
+ add_test(ETS-C1P1-${filenameInput}-compare2ref
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_CONF}/${filenameRef}
-t ${TEMP}/c1${filenameInput}.pgx
@@ -397,11 +397,11 @@ FOREACH(numFileC1P1 RANGE 1 7)
-s b_t_
)
- SET_TESTS_PROPERTIES(ETS-C1P1-${filenameInput}-compare2ref
+ set_tests_properties(ETS-C1P1-${filenameInput}-compare2ref
PROPERTIES DEPENDS
ETS-C1P1-${filenameInput}-decode)
- ADD_TEST(NR-C1P1-${filenameInput}-compare2base
+ add_test(NR-C1P1-${filenameInput}-compare2base
${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
-b ${BASELINE_NR}/opj_${filenameRef}
-t ${TEMP}/c1${filenameInput}.pgx
@@ -410,11 +410,11 @@ FOREACH(numFileC1P1 RANGE 1 7)
-s b_t_
)
- SET_TESTS_PROPERTIES(NR-C1P1-${filenameInput}-compare2base
+ set_tests_properties(NR-C1P1-${filenameInput}-compare2base
PROPERTIES DEPENDS
ETS-C1P1-${filenameInput}-decode)
-ENDFOREACH(numFileC1P1)
+endforeach(numFileC1P1)
#--------------------------------------------------------------------------
# Tests about JP2 file
@@ -424,16 +424,16 @@ ENDFOREACH(numFileC1P1)
# Tolerances given by Table G.1
-FOREACH(numFileJP2 RANGE 1 9)
+foreach(numFileJP2 RANGE 1 9)
# Build filenames
- SET( filenameInput "file${numFileJP2}.jp2" )
- SET( filenameRef jp2_${numFileJP2}.tif )
+ set( filenameInput "file${numFileJP2}.jp2" )
+ set( filenameRef jp2_${numFileJP2}.tif )
# Get corresponding tests parameters
list(GET JP2_PEAK_list ${numFileJP2} PEAK_limit)
- ADD_TEST(ETS-JP2-${filenameInput}-decode
+ add_test(ETS-JP2-${filenameInput}-decode
${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/${filenameInput}.tif
@@ -441,29 +441,29 @@ FOREACH(numFileJP2 RANGE 1 9)
#
# FIXME: Need to implement a compare tif images function
#
-# ADD_TEST(ETS-JP2-${filenameInput}-compare2ref
+# add_test(ETS-JP2-${filenameInput}-compare2ref
# ${EXECUTABLE_OUTPUT_PATH}/compareTIFimages
# -b ${BASELINE_CONF}/${filenameRef_tif}
# -t ${TEMP}/${filenameInput}.pgx
# -p 4
# )
#
-# SET_TESTS_PROPERTIES(ETS-JP2-${filenameInput}-compare2ref
+# set_tests_properties(ETS-JP2-${filenameInput}-compare2ref
# PROPERTIES DEPENDS
# ETS-JP2-${filenameInput}-decode)
#
-# ADD_TEST(NR-JP2-${filenameInput}-compare2base
+# add_test(NR-JP2-${filenameInput}-compare2base
# ${EXECUTABLE_OUTPUT_PATH}/compareTIFimages
# -b ${BASELINE_NR}/opj_${filenameRef}
# -t ${TEMP}/${filenameInput}.tif
# -d
# )
#
-# SET_TESTS_PROPERTIES(NR-JP2-${filenameInput}-compare2base
+# set_tests_properties(NR-JP2-${filenameInput}-compare2base
# PROPERTIES DEPENDS
# ETS-JP2-${filenameInput}-decode)
-ENDFOREACH(numFileJP2)
+endforeach(numFileJP2)
#--------------------------------------------------------------------------#
#--------------------------------------------------------------------------#
@@ -474,89 +474,89 @@ ENDFOREACH(numFileJP2)
# try to dump image and codestream informations into a file
# non regression comparison this file to the baseline
-FOREACH(numFileP0 RANGE 1 16)
+foreach(numFileP0 RANGE 1 16)
# Build filenames
- IF(${numFileP0} LESS 10)
- SET( filenameInput p0_0${numFileP0}.j2k )
- ELSE(${numFileP0} LESS 10)
- SET( filenameInput p0_${numFileP0}.j2k )
- ENDIF(${numFileP0} LESS 10)
+ if(${numFileP0} LESS 10)
+ set( filenameInput p0_0${numFileP0}.j2k )
+ else(${numFileP0} LESS 10)
+ set( filenameInput p0_${numFileP0}.j2k )
+ endif(${numFileP0} LESS 10)
get_filename_component(filenameRefSub ${filenameInput} NAME_WE)
- ADD_TEST(NR-${filenameInput}-dump
+ add_test(NR-${filenameInput}-dump
${EXECUTABLE_OUTPUT_PATH}/j2k_dump
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/${filenameInput}.txt
)
- ADD_TEST(NR-${filenameInput}-compare_dump2base
+ add_test(NR-${filenameInput}-compare_dump2base
${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
-b ${BASELINE_NR}/opj_v2_${filenameRefSub}.txt
-t ${TEMP}/${filenameInput}.txt
)
- SET_TESTS_PROPERTIES(NR-${filenameInput}-compare_dump2base
+ set_tests_properties(NR-${filenameInput}-compare_dump2base
PROPERTIES DEPENDS
NR-${filenameInput}-dump)
-ENDFOREACH(numFileP0)
+endforeach(numFileP0)
#--------------------------------------------------------------------------
# Tests about dump of profile 1 file
# try to dump image and codestream informations into a file
# non regression comparison this file to the baseline
-FOREACH(numFileP1 RANGE 1 7)
+foreach(numFileP1 RANGE 1 7)
# Build filenames
- SET( filenameInput p1_0${numFileP1}.j2k )
+ set( filenameInput p1_0${numFileP1}.j2k )
get_filename_component(filenameInputSub ${filenameInput} NAME_WE)
- ADD_TEST(NR-${filenameInput}-dump
+ add_test(NR-${filenameInput}-dump
${EXECUTABLE_OUTPUT_PATH}/j2k_dump
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/${filenameInput}.txt
)
- ADD_TEST(NR-${filenameInput}-compare_dump2base
+ add_test(NR-${filenameInput}-compare_dump2base
${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
-b ${BASELINE_NR}/opj_v2_${filenameInputSub}.txt
-t ${TEMP}/${filenameInput}.txt
)
- SET_TESTS_PROPERTIES(NR-${filenameInput}-compare_dump2base
+ set_tests_properties(NR-${filenameInput}-compare_dump2base
PROPERTIES DEPENDS
NR-${filenameInput}-dump)
-ENDFOREACH(numFileP1)
+endforeach(numFileP1)
#--------------------------------------------------------------------------
# Tests about dump of JP2 file
# try to dump image and codestream informations into a file
# non regression comparison this file to the baseline
-FOREACH(numFileJP2 RANGE 1 9)
+foreach(numFileJP2 RANGE 1 9)
# Build filenames
- SET( filenameInput "file${numFileJP2}.jp2" )
+ set( filenameInput "file${numFileJP2}.jp2" )
get_filename_component(filenameInputSub ${filenameInput} NAME_WE)
- ADD_TEST(NR-${filenameInput}-dump
+ add_test(NR-${filenameInput}-dump
${EXECUTABLE_OUTPUT_PATH}/j2k_dump
-i ${INPUT_CONF}/${filenameInput}
-o ${TEMP}/${filenameInput}.txt
)
- ADD_TEST(NR-${filenameInput}-compare_dump2base
+ add_test(NR-${filenameInput}-compare_dump2base
${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
-b ${BASELINE_NR}/opj_v2_${filenameInputSub}.txt
-t ${TEMP}/${filenameInput}.txt
)
- SET_TESTS_PROPERTIES(NR-${filenameInput}-compare_dump2base
+ set_tests_properties(NR-${filenameInput}-compare_dump2base
PROPERTIES DEPENDS
NR-${filenameInput}-dump)
-ENDFOREACH(numFileJP2)
\ No newline at end of file
+endforeach(numFileJP2)
\ No newline at end of file
diff --git a/tests/nonregression/CMakeLists.txt b/tests/nonregression/CMakeLists.txt
index 8a743e7d..f3f859bc 100644
--- a/tests/nonregression/CMakeLists.txt
+++ b/tests/nonregression/CMakeLists.txt
@@ -1,15 +1,15 @@
# NON-REGRESSION TESTS ON THIS DATASET LOCATED ${OPJ_DATA_ROOT}/input/nonregression
-FILE(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
+file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
-SET(TEMP ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
-SET(BASELINE_NR ${OPJ_DATA_ROOT}/baseline/nonregression)
-SET(INPUT_NR ${OPJ_DATA_ROOT}/input/nonregression)
+set(TEMP ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
+set(BASELINE_NR ${OPJ_DATA_ROOT}/baseline/nonregression)
+set(INPUT_NR ${OPJ_DATA_ROOT}/input/nonregression)
-SET(INPUT_NR_PATH ${INPUT_NR})
-SET(TEMP_PATH ${TEMP})
-SET(INPUT_CONF_PATH ${OPJ_DATA_ROOT}/input/conformance)
+set(INPUT_NR_PATH ${INPUT_NR})
+set(TEMP_PATH ${TEMP})
+set(INPUT_CONF_PATH ${OPJ_DATA_ROOT}/input/conformance)
# need kdu_expand if possible
find_package(KAKADU)
@@ -19,49 +19,49 @@ find_package(KAKADU)
# Dump all files with the selected extension inside the input directory
# Define a list of file which should be gracefully rejected:
-SET(BLACKLIST_JPEG2000
+set(BLACKLIST_JPEG2000
empty
)
-FILE(GLOB_RECURSE OPJ_DATA_NR_LIST
+file(GLOB_RECURSE OPJ_DATA_NR_LIST
"${INPUT_NR}/*.j2k"
"${INPUT_NR}/*.j2c"
"${INPUT_NR}/*.jp2"
#"${INPUT_NR}/*.jpx"
)
-FOREACH(INPUT_FILENAME ${OPJ_DATA_NR_LIST})
- GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME ${INPUT_FILENAME} NAME)
- GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME_WE ${INPUT_FILENAME_NAME} NAME_WE)
- STRING(REGEX MATCH ${INPUT_FILENAME_NAME} bad_jpeg2000 ${BLACKLIST_JPEG2000})
+foreach(INPUT_FILENAME ${OPJ_DATA_NR_LIST})
+ get_filename_component(INPUT_FILENAME_NAME ${INPUT_FILENAME} NAME)
+ get_filename_component(INPUT_FILENAME_NAME_WE ${INPUT_FILENAME_NAME} NAME_WE)
+ string(REGEX MATCH ${INPUT_FILENAME_NAME} bad_jpeg2000 ${BLACKLIST_JPEG2000})
# Dump the input image
- ADD_TEST(NR-${INPUT_FILENAME_NAME}-dump
+ add_test(NR-${INPUT_FILENAME_NAME}-dump
${EXECUTABLE_OUTPUT_PATH}/j2k_dump
-i ${INPUT_FILENAME}
-o ${TEMP}/${INPUT_FILENAME_NAME}.txt
-v
)
- IF(bad_jpeg2000)
- SET_TESTS_PROPERTIES(NR-${INPUT_FILENAME_NAME}-dump
+ if(bad_jpeg2000)
+ set_tests_properties(NR-${INPUT_FILENAME_NAME}-dump
PROPERTIES WILL_FAIL TRUE)
- ELSE(bad_jpeg2000)
+ else(bad_jpeg2000)
# Compare the dump output with the baseline
- ADD_TEST(NR-${INPUT_FILENAME_NAME}-compare_dump2base
+ add_test(NR-${INPUT_FILENAME_NAME}-compare_dump2base
${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
-b ${BASELINE_NR}/opj_v2_${INPUT_FILENAME_NAME_WE}.txt
-t ${TEMP}/${INPUT_FILENAME_NAME}.txt
)
- SET_TESTS_PROPERTIES(NR-${INPUT_FILENAME_NAME}-compare_dump2base
+ set_tests_properties(NR-${INPUT_FILENAME_NAME}-compare_dump2base
PROPERTIES DEPENDS
NR-${INPUT_FILENAME_NAME}-dump)
- ENDIF(bad_jpeg2000)
+ endif(bad_jpeg2000)
-ENDFOREACH(INPUT_FILENAME)
+endforeach(INPUT_FILENAME)
#########################################################################
# Each test suite file holds one command line per row, which tells us
# which files are processed and with which options.
# Configure the official test suite file:
configure_file("test_suite.ctest.in"
  "${CMAKE_CURRENT_BINARY_DIR}/test_suite.ctest"
  @ONLY)
# Read the file into a list
file(STRINGS ${CMAKE_CURRENT_BINARY_DIR}/test_suite.ctest OPJ_TEST_CMD_LINE_LIST)
# Try to find, configure and read any additional test suite file
file(GLOB TEST_SUITE_FILES *.ctest.in)
if(TEST_SUITE_FILES)
  foreach(TEST_SUITE_FILE ${TEST_SUITE_FILES})
    # Avoid processing the official test suite a second time.
    set(FILE_ALREADY_READ 0)
    get_filename_component(TEST_SUITE_FILENAME ${TEST_SUITE_FILE} NAME)
    # Dots are escaped so the pattern matches the literal filename only
    # (previously the unescaped '.' matched any character).
    string(REGEX MATCH "^test_suite\\.ctest\\.in$" FILE_ALREADY_READ ${TEST_SUITE_FILENAME})
    if(NOT FILE_ALREADY_READ)
      # Configure the additional test suite file:
      get_filename_component(TEST_SUITE_FILE_SUB ${TEST_SUITE_FILE} NAME_WE)
      configure_file("${TEST_SUITE_FILE}"
        "${CMAKE_CURRENT_BINARY_DIR}/${TEST_SUITE_FILE_SUB}.ctest"
        @ONLY)
      # Read the additional file into a list
      file(STRINGS ${CMAKE_CURRENT_BINARY_DIR}/${TEST_SUITE_FILE_SUB}.ctest OPJ_TEST_CMD_LINE_LIST_TEMP)
      # Append its commands to the global command list
      list(APPEND OPJ_TEST_CMD_LINE_LIST ${OPJ_TEST_CMD_LINE_LIST_TEMP})
    endif()
  endforeach()
else()
  message(FATAL_ERROR "One test suite should be available (test_suite.ctest.in) !!!")
endif()
# Parse the command line found in the test suite file(s)
set(IT_TEST_ENC 0)
set(IT_TEST_DEC 0)
foreach(OPJ_TEST_CMD_LINE ${OPJ_TEST_CMD_LINE_LIST})
  set(IGNORE_LINE_FOUND 0)
  # BUGFIX: reset the encoder/decoder classification for every line.
  # Previously ENC_TEST_FOUND was never set on the '!'-prefixed path and
  # never reset, so an expected-fail line silently inherited the
  # classification of the previous command line.
  set(ENC_TEST_FOUND 0)
  # Replace space by ; to generate a list
  string(REPLACE " " ";" CMD_ARG_LIST ${OPJ_TEST_CMD_LINE})
  # Check if the first argument begins with the comment sign
  list(GET CMD_ARG_LIST 0 EXE_NAME)
  if(EXE_NAME)
    string(REGEX MATCH "^#" IGNORE_LINE_FOUND ${EXE_NAME})
  endif()
  if(IGNORE_LINE_FOUND OR NOT EXE_NAME)
    # Comment or empty line: nothing to register.
    #message(STATUS "Current line is ignored: ${OPJ_TEST_CMD_LINE}")
  else()
    # A leading '!' marks a test that is expected to fail.
    set(FAILED_TEST_FOUND 0)
    string(REGEX MATCH "^!" FAILED_TEST_FOUND ${EXE_NAME})
    if(FAILED_TEST_FOUND)
      # Strip the executable token, whether the '!' is attached to it or
      # stands alone as the first list element.
      set(FAILED_TEST_FOUND_1 0)
      string(REGEX MATCH "^!image_to_j2k$|^!j2k_to_image$" FAILED_TEST_FOUND_1 ${EXE_NAME})
      if(FAILED_TEST_FOUND_1)
        list(REMOVE_AT CMD_ARG_LIST 0)
      else()
        set(FAILED_TEST_FOUND_2 0)
        list(GET CMD_ARG_LIST 1 EXE_NAME)
        string(REGEX MATCH "^image_to_j2k$|^j2k_to_image$" FAILED_TEST_FOUND_2 ${EXE_NAME})
        if(FAILED_TEST_FOUND_2)
          list(REMOVE_AT CMD_ARG_LIST 0)
          list(REMOVE_AT CMD_ARG_LIST 0)
        else()
          message( FATAL_ERROR "${EXE_NAME} is not the right executable name to encode file (try to use image_to_j2k or j2k_to_image)")
        endif()
      endif()
      # BUGFIX: classify this failed line as encoder/decoder from its own
      # executable name instead of reusing the previous iteration's value.
      string(REGEX MATCH "image_to_j2k" ENC_TEST_FOUND ${EXE_NAME})
    else()
      # Check that the first argument is a known executable name.
      string(REGEX MATCH "^image_to_j2k$|^j2k_to_image$" EXE_NAME_FOUND ${EXE_NAME})
      if(EXE_NAME_FOUND)
        string(REGEX MATCH "image_to_j2k" ENC_TEST_FOUND ${EXE_NAME})
      else()
        message( FATAL_ERROR "${EXE_NAME} is not the right executable name to encode file (try to use image_to_j2k)")
      endif()
      list(REMOVE_AT CMD_ARG_LIST 0)
    endif()
    # Parse the argument list to find the input and output filenames.
    # ARG_POS is incremented before the comparison, so the recorded position
    # is the 0-based index of the value that FOLLOWS -i / -o.
    set(CMD_ARG_LIST_2 "")
    set(ARG_POS 0)
    set(INPUT_ARG_POS 0)
    set(OUTPUT_ARG_POS 0)
    foreach(CMD_ARG_ELT ${CMD_ARG_LIST})
      math(EXPR ARG_POS "${ARG_POS}+1" )
      string(COMPARE EQUAL ${CMD_ARG_ELT} "-i" INPUT_ARG_FOUND)
      if(INPUT_ARG_FOUND)
        set(INPUT_ARG_POS ${ARG_POS})
        set(INPUT_ARG_FOUND 0)
      endif()
      string(COMPARE EQUAL ${CMD_ARG_ELT} "-o" OUTPUT_ARG_FOUND)
      if(OUTPUT_ARG_FOUND)
        set(OUTPUT_ARG_POS ${ARG_POS})
        set(OUTPUT_ARG_FOUND 0)
      endif()
      list(APPEND CMD_ARG_LIST_2 ${CMD_ARG_ELT})
    endforeach()
    list(GET CMD_ARG_LIST_2 ${INPUT_ARG_POS} INPUT_FILENAME)
    get_filename_component(INPUT_FILENAME_NAME ${INPUT_FILENAME} NAME)
    get_filename_component(INPUT_FILENAME_NAME_WE ${INPUT_FILENAME_NAME} NAME_WE)
    list(GET CMD_ARG_LIST_2 ${OUTPUT_ARG_POS} OUTPUT_FILENAME)
    get_filename_component(OUTPUT_FILENAME_NAME_WE ${OUTPUT_FILENAME} NAME_WE)
    #-----
    # Now we can add the test suite corresponding to a line command in the file
    #-----
    # ENCODER TEST SUITE
    if(ENC_TEST_FOUND)
      math(EXPR IT_TEST_ENC "${IT_TEST_ENC}+1" )
      # Encode an image into the jpeg2000 format
      add_test(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode
        ${EXECUTABLE_OUTPUT_PATH}/image_to_j2k
        ${CMD_ARG_LIST_2}
        )
      if(FAILED_TEST_FOUND)
        set_tests_properties(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode PROPERTIES WILL_FAIL TRUE)
      else()
        # Dump the encoded file
        add_test(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump
          ${EXECUTABLE_OUTPUT_PATH}/j2k_dump
          -i ${OUTPUT_FILENAME}
          -o ${OUTPUT_FILENAME}-ENC-${IT_TEST_ENC}.txt
          )
        set_tests_properties(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump
          PROPERTIES DEPENDS
          NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode)
        # Compare the dump file with the baseline
        add_test(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dump2base
          ${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
          -b ${BASELINE_NR}/opj_v2_${OUTPUT_FILENAME_NAME_WE}-ENC-${IT_TEST_ENC}.txt
          -t ${OUTPUT_FILENAME}-ENC-${IT_TEST_ENC}.txt
          )
        set_tests_properties(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dump2base
          PROPERTIES DEPENDS
          NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump)
        # Decode the encoded file with the kakadu expand command
        if(KDU_EXPAND_EXECUTABLE)
          add_test(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref
            ${KDU_EXPAND_EXECUTABLE}
            -i ${OUTPUT_FILENAME}
            -o ${OUTPUT_FILENAME}.raw
            )
          set_tests_properties(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref
            PROPERTIES DEPENDS
            NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode)
          # Compare the decoded file with the baseline generated from kdu_expand and baseline.j2k
          add_test(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dec-ref-out2base
            ${EXECUTABLE_OUTPUT_PATH}/compareRAWimages
            -b ${BASELINE_NR}/opj_${OUTPUT_FILENAME_NAME_WE}-ENC-${IT_TEST_ENC}.raw
            -t ${OUTPUT_FILENAME}.raw
            )
          set_tests_properties(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dec-ref-out2base
            PROPERTIES DEPENDS
            NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref)
        endif()
      endif()
    # DECODER TEST SUITE
    else()
      math(EXPR IT_TEST_DEC "${IT_TEST_DEC}+1" )
      # Decode the input image
      add_test(NR-DEC-${INPUT_FILENAME_NAME}-${IT_TEST_DEC}-decode
        ${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
        ${CMD_ARG_LIST_2}
        )
      if(FAILED_TEST_FOUND)
        set_tests_properties(NR-DEC-${INPUT_FILENAME_NAME}-${IT_TEST_DEC}-decode PROPERTIES WILL_FAIL TRUE)
      else()
        # FIXME: add a compare2base function based on raw data which
        # can output png diff files if necessary
#        add_test(NR-$(unknown)-compare2base
#          ${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
#          -b ${BASELINE_NR}/opj_${filenameRef}
#          -t ${TEMP}/$(unknown).pgx
#          -s b_t_
#          )
#        set_tests_properties(NR-$(unknown)-compare2base
#          PROPERTIES DEPENDS
#          NR-$(unknown)-decode)
      endif()
    endif()
  endif()
endforeach()