1 # NON-REGRESSION TESTS ON THIS DATASET LOCATED ${OPJ_DATA_ROOT}/input/nonregression
# Scratch directory for every file generated while the tests run.
3 FILE(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
5 SET(TEMP ${CMAKE_CURRENT_BINARY_DIR}/Temporary)
# Reference (baseline) and input dataset roots inside the OpenJPEG data tree.
6 SET(BASELINE_NR ${OPJ_DATA_ROOT}/baseline/nonregression)
7 SET(INPUT_NR ${OPJ_DATA_ROOT}/input/nonregression)
# Aliases substituted into the *.ctest.in test-suite templates by the
# CONFIGURE_FILE calls further below.
10 SET(INPUT_NR_PATH ${INPUT_NR})
11 SET(TEMP_PATH ${TEMP})
12 SET(INPUT_CONF_PATH ${OPJ_DATA_ROOT}/input/conformance)
14 # need kdu_expand if possible
17 #########################################################################
18 # GENERATION OF THE TEST SUITE (DUMP)
19 # Dump all files with the selected extension inside the input directory
21 # Define a list of file which should be gracefully rejected:
# Files named in this list are expected to make j2k_dump fail; the dump test
# for them is registered with WILL_FAIL TRUE below.
22 SET(BLACKLIST_JPEG2000
# NOTE(review): the blacklist entries and the GLOB_RECURSE patterns are not
# visible in this view of the file — confirm them against the full file.
26 FILE(GLOB_RECURSE OPJ_DATA_NR_LIST
# For every non-regression input image: dump it with j2k_dump and diff the
# dump against the stored baseline.  Blacklisted files are expected to fail.
33 FOREACH(INPUT_FILENAME ${OPJ_DATA_NR_LIST})
34 GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME ${INPUT_FILENAME} NAME)
35 GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME_WE ${INPUT_FILENAME_NAME} NAME_WE)
# string(REGEX MATCH <pattern> <out-var> <input>): the file name is used as
# the pattern against the blacklist, so bad_jpeg2000 ends up non-empty for
# blacklisted files.  NOTE(review): '.' in file names is an unescaped regex
# metacharacter, so this is an approximate match — confirm that is acceptable.
36 STRING(REGEX MATCH ${INPUT_FILENAME_NAME} bad_jpeg2000 ${BLACKLIST_JPEG2000})
38 # Dump the input image
39 ADD_TEST(NR-${INPUT_FILENAME_NAME}-dump
40 ${EXECUTABLE_OUTPUT_PATH}/j2k_dump
42 -o ${TEMP}/${INPUT_FILENAME_NAME}.txt
# Blacklisted inputs must be rejected by the dumper, hence the inverted pass
# condition on the test.
47 SET_TESTS_PROPERTIES(NR-${INPUT_FILENAME_NAME}-dump
48 PROPERTIES WILL_FAIL TRUE)
52 # Compare the dump output with the baseline
53 ADD_TEST(NR-${INPUT_FILENAME_NAME}-compare_dump2base
54 ${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
55 -b ${BASELINE_NR}/opj_v2_${INPUT_FILENAME_NAME_WE}.txt
56 -t ${TEMP}/${INPUT_FILENAME_NAME}.txt
# The comparison may only run once the dump test has produced its output file.
59 SET_TESTS_PROPERTIES(NR-${INPUT_FILENAME_NAME}-compare_dump2base
61 NR-${INPUT_FILENAME_NAME}-dump)
64 ENDFOREACH(INPUT_FILENAME)
67 #########################################################################
68 # GENERATION OF THE TEST SUITE (DECODE AND ENCODE)
69 # Read one and more input file(s) (located in ${OPJ_DATA_ROOT}/input/nonregression)
70 # to know which files processed and with which options.
72 # Configure the test suite file:
# Expands the placeholders (INPUT_NR_PATH, TEMP_PATH, INPUT_CONF_PATH, ...)
# that were set at the top of this file.
73 CONFIGURE_FILE("test_suite.ctest.in"
74 "${CMAKE_CURRENT_BINARY_DIR}/test_suite.ctest"
77 # Read the file into a list
# One list element per line of the configured test suite.
78 FILE(STRINGS ${CMAKE_CURRENT_BINARY_DIR}/test_suite.ctest OPJ_TEST_CMD_LINE_LIST)
80 # Try to find and configure and read another test suite file
81 FILE(GLOB TEST_SUITE_FILES *.ctest.in)
# NOTE(review): the IF(TEST_SUITE_FILES) guard matching the ELSE below is not
# visible in this view of the file.
83 FOREACH(TEST_SUITE_FILE ${TEST_SUITE_FILES})
85 # Avoid to process the official test suite
86 SET(FILE_ALREADY_READ 0)
87 GET_FILENAME_COMPONENT(TEST_SUITE_FILENAME ${TEST_SUITE_FILE} NAME)
# FILE_ALREADY_READ becomes the matched (non-empty) string for the official
# suite, which was already read above, and stays 0 otherwise.
88 STRING(REGEX MATCH "^test_suite.ctest.in$" FILE_ALREADY_READ ${TEST_SUITE_FILENAME})
90 IF(NOT FILE_ALREADY_READ)
91 # Configure the additional test suite file:
92 GET_FILENAME_COMPONENT(TEST_SUITE_FILE_SUB ${TEST_SUITE_FILE} NAME_WE)
93 CONFIGURE_FILE("${TEST_SUITE_FILE}"
94 "${CMAKE_CURRENT_BINARY_DIR}/${TEST_SUITE_FILE_SUB}.ctest"
96 # Read the additional file into a list
97 FILE(STRINGS ${CMAKE_CURRENT_BINARY_DIR}/${TEST_SUITE_FILE_SUB}.ctest OPJ_TEST_CMD_LINE_LIST_TEMP)
98 # Append the list of command
99 SET(OPJ_TEST_CMD_LINE_LIST ${OPJ_TEST_CMD_LINE_LIST} ${OPJ_TEST_CMD_LINE_LIST_TEMP})
101 ENDIF(NOT FILE_ALREADY_READ)
103 ENDFOREACH(TEST_SUITE_FILE)
105 ELSE(TEST_SUITE_FILES)
# At least the official suite must exist; abort the configure step otherwise.
107 MESSAGE(FATAL_ERROR "One test suite should be available (test_suite.ctest.in) !!!")
109 ENDIF(TEST_SUITE_FILES)
112 # Parse the command line found in the file(s)
# Each suite line has the form
#   [#][!]<image_to_j2k|j2k_to_image> <args...>
# where a leading '#' marks a comment line and a leading '!' marks a test
# that is expected to fail.
115 FOREACH(OPJ_TEST_CMD_LINE ${OPJ_TEST_CMD_LINE_LIST})
117 SET(IGNORE_LINE_FOUND 0)
119 # Replace space by ; to generate a list
120 STRING(REPLACE " " ";" CMD_ARG_LIST ${OPJ_TEST_CMD_LINE})
122 # Check if the first argument begin by the comment sign
123 LIST(GET CMD_ARG_LIST 0 EXE_NAME)
126 STRING(REGEX MATCH "^#" IGNORE_LINE_FOUND ${EXE_NAME})
129 IF(IGNORE_LINE_FOUND OR NOT EXE_NAME)
131 #MESSAGE( STATUS "Current line is ignored: ${OPJ_TEST_CMD_LINE}")
133 ELSE(IGNORE_LINE_FOUND OR NOT EXE_NAME)
135 # Check if the first argument begin by the failed sign
136 SET(FAILED_TEST_FOUND 0)
137 STRING(REGEX MATCH "^!" FAILED_TEST_FOUND ${EXE_NAME})
139 IF (FAILED_TEST_FOUND)
140 # Manage the different cases with the failed sign to remove the first argument which must be image_to_j2k
# Case 1: '!' fused with the executable name ("!image_to_j2k ..."):
# drop one token.
141 SET(FAILED_TEST_FOUND_1 0)
142 STRING(REGEX MATCH "^!image_to_j2k$|^!j2k_to_image$" FAILED_TEST_FOUND_1 ${EXE_NAME})
144 IF (FAILED_TEST_FOUND_1)
146 LIST(REMOVE_AT CMD_ARG_LIST 0)
148 ELSE (FAILED_TEST_FOUND_1)
# Case 2: '!' as a separate token followed by the executable name:
# drop both tokens.
150 SET(FAILED_TEST_FOUND_2 0)
151 LIST(GET CMD_ARG_LIST 1 EXE_NAME)
152 STRING(REGEX MATCH "^image_to_j2k$|^j2k_to_image$" FAILED_TEST_FOUND_2 ${EXE_NAME})
154 IF (FAILED_TEST_FOUND_2)
156 LIST(REMOVE_AT CMD_ARG_LIST 0)
157 LIST(REMOVE_AT CMD_ARG_LIST 0)
159 ELSE (FAILED_TEST_FOUND_2)
161 MESSAGE( FATAL_ERROR "${EXE_NAME} is not the right executable name to encode file (try to use image_to_j2k or j2k_to_image)")
163 ENDIF (FAILED_TEST_FOUND_2)
164 ENDIF (FAILED_TEST_FOUND_1)
166 ELSE (FAILED_TEST_FOUND)
167 # Check if the first argument is equal to image_to_j2k
168 STRING(REGEX MATCH "^image_to_j2k$|^j2k_to_image$" EXE_NAME_FOUND ${EXE_NAME})
# ENC_TEST_FOUND selects the encoder branch further down; empty means the
# line is a decoder (j2k_to_image) test.
172 STRING(REGEX MATCH "image_to_j2k" ENC_TEST_FOUND ${EXE_NAME})
176 MESSAGE( FATAL_ERROR "${EXE_NAME} is not the right executable name to encode file (try to use image_to_j2k)")
178 ENDIF(EXE_NAME_FOUND)
# Drop the executable name; CMD_ARG_LIST now holds only the arguments.
180 LIST(REMOVE_AT CMD_ARG_LIST 0)
182 ENDIF (FAILED_TEST_FOUND)
184 # Parse the argument list to find the input filename and output filename
185 SET(CMD_ARG_LIST_2 "")
188 SET(OUTPUT_ARG_POS 0)
# Scan the arguments, remembering where -i and -o occur.
# NOTE(review): ARG_POS is incremented before the comparison, so the stored
# position reads one past the flag's 0-based index — i.e. the index of the
# flag's VALUE in CMD_ARG_LIST_2.  This relies on the ARG_POS initialisation,
# which is not visible in this view; confirm against the full file.
190 FOREACH(CMD_ARG_ELT ${CMD_ARG_LIST})
192 math(EXPR ARG_POS "${ARG_POS}+1" )
194 STRING(COMPARE EQUAL ${CMD_ARG_ELT} "-i" INPUT_ARG_FOUND)
196 SET(INPUT_ARG_POS ${ARG_POS})
197 SET(INPUT_ARG_FOUND 0)
198 ENDIF(INPUT_ARG_FOUND)
200 STRING(COMPARE EQUAL ${CMD_ARG_ELT} "-o" OUTPUT_ARG_FOUND)
202 SET(OUTPUT_ARG_POS ${ARG_POS})
203 SET(OUTPUT_ARG_FOUND 0)
204 ENDIF(OUTPUT_ARG_FOUND)
206 LIST (APPEND CMD_ARG_LIST_2 ${CMD_ARG_ELT})
208 ENDFOREACH(CMD_ARG_ELT)
# Extract the input/output file names and their extension-less variants; the
# baseline file names below are derived from these.
210 LIST(GET CMD_ARG_LIST_2 ${INPUT_ARG_POS} INPUT_FILENAME)
211 GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME ${INPUT_FILENAME} NAME)
212 GET_FILENAME_COMPONENT(INPUT_FILENAME_NAME_WE ${INPUT_FILENAME_NAME} NAME_WE)
213 LIST(GET CMD_ARG_LIST_2 ${OUTPUT_ARG_POS} OUTPUT_FILENAME)
214 GET_FILENAME_COMPONENT(OUTPUT_FILENAME_NAME_WE ${OUTPUT_FILENAME} NAME_WE)
217 # Now we can add the test suite corresponding to a line command in the file
# --- Encoder branch (image_to_j2k): encode, dump the result, compare the
# --- dump to the baseline, and (when kdu_expand is available) cross-check
# --- the produced codestream against Kakadu's decoder.
222 math(EXPR IT_TEST_ENC "${IT_TEST_ENC}+1" )
224 # Encode an image into the jpeg2000 format
225 ADD_TEST(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode
226 ${EXECUTABLE_OUTPUT_PATH}/image_to_j2k
230 IF(FAILED_TEST_FOUND)
# A '!' line: the encode itself must fail, and no follow-up tests are added.
231 SET_TESTS_PROPERTIES(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode PROPERTIES WILL_FAIL TRUE)
232 ELSE(FAILED_TEST_FOUND)
234 # Dump the encoding file
235 ADD_TEST(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump
236 ${EXECUTABLE_OUTPUT_PATH}/j2k_dump
237 -i ${OUTPUT_FILENAME}
238 -o ${OUTPUT_FILENAME}-ENC-${IT_TEST_ENC}.txt
# Each follow-up test chains onto its producer through the DEPENDS property.
240 SET_TESTS_PROPERTIES(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump
242 NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode)
244 # Compare the dump file with the baseline
245 ADD_TEST(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dump2base
246 ${EXECUTABLE_OUTPUT_PATH}/compare_dump_files
247 -b ${BASELINE_NR}/opj_v2_${OUTPUT_FILENAME_NAME_WE}-ENC-${IT_TEST_ENC}.txt
248 -t ${OUTPUT_FILENAME}-ENC-${IT_TEST_ENC}.txt
251 SET_TESTS_PROPERTIES(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dump2base
253 NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-dump)
255 # Decode the encoding file with kakadu expand command
256 IF (KDU_EXPAND_EXECUTABLE)
257 ADD_TEST(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref
258 ${KDU_EXPAND_EXECUTABLE}
259 -i ${OUTPUT_FILENAME}
260 -o ${OUTPUT_FILENAME}.raw
263 SET_TESTS_PROPERTIES(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref
265 NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-encode)
267 # Compare the decoding file with baseline generated from the kdu_expand and baseline.j2k
268 ADD_TEST(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dec-ref-out2base
269 ${EXECUTABLE_OUTPUT_PATH}/compareRAWimages
270 -b ${BASELINE_NR}/opj_${OUTPUT_FILENAME_NAME_WE}-ENC-${IT_TEST_ENC}.raw
271 -t ${OUTPUT_FILENAME}.raw
274 SET_TESTS_PROPERTIES(NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-compare_dec-ref-out2base
276 NR-ENC-${INPUT_FILENAME_NAME}-${IT_TEST_ENC}-decode-ref)
279 ENDIF(FAILED_TEST_FOUND)
# --- Decoder branch (j2k_to_image): decode only; the comparison of the
# --- decoded output against a baseline is still missing (see FIXME below).
283 math(EXPR IT_TEST_DEC "${IT_TEST_DEC}+1" )
285 # Decode the input image
286 ADD_TEST(NR-DEC-${INPUT_FILENAME_NAME}-${IT_TEST_DEC}-decode
287 ${EXECUTABLE_OUTPUT_PATH}/j2k_to_image
291 IF(FAILED_TEST_FOUND)
293 SET_TESTS_PROPERTIES(NR-DEC-${INPUT_FILENAME_NAME}-${IT_TEST_DEC}-decode PROPERTIES WILL_FAIL TRUE)
295 ELSE(FAILED_TEST_FOUND)
297 # FIXME: add a compare2base function based on raw which
298 # can output png diff files if necessary
299 # ADD_TEST(NR-$(unknown)-compare2base
300 # ${EXECUTABLE_OUTPUT_PATH}/comparePGXimages
301 # -b ${BASELINE_NR}/opj_${filenameRef}
302 # -t ${TEMP}/$(unknown).pgx
308 # SET_TESTS_PROPERTIES(NR-$(unknown)-compare2base
310 # NR-$(unknown)-decode)
312 ENDIF(FAILED_TEST_FOUND)
314 ENDIF(ENC_TEST_FOUND)
316 ENDIF(IGNORE_LINE_FOUND OR NOT EXE_NAME)
318 ENDFOREACH(OPJ_TEST_CMD_LINE)