Hi, Sergey,
thanks for the patch! See my comments.
Sergey
This commit introduces CMake building scripts for the benches introduced
before. The benchmarks are enabled only if `LUAJIT_ENABLE_PERF` option
is set. For each suite (LuaJIT-benches in this patch set)
`AddBenchTarget()` macro generates 2 targets:
* Target to run all benches and store results in the
perf/output/<suite_name> directory.
* Target to run all benches via CTest and inspect results in the
console.
For the LuaJIT-benches there are 2 generated files:
* FASTA_5000000 -- is used as an input for <k-nucleotide.lua> and
<revcomp.lua>.
* SUMCOL_5000.txt -- is used as an input for <sum-file.lua>.
These files and the <perf/output> directory are added to the .gitignore file.
---
.gitignore | 5 ++
CMakeLists.txt | 11 ++++
perf/CMakeLists.txt | 99 ++++++++++++++++++++++++++++++
perf/LuaJIT-benches/CMakeLists.txt | 52 ++++++++++++++++
4 files changed, 167 insertions(+)
create mode 100644 perf/CMakeLists.txt
create mode 100644 perf/LuaJIT-benches/CMakeLists.txt
diff --git a/.gitignore b/.gitignore
index c26a7eb8..bfc7d401 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,3 +28,8 @@ luajit-parse-memprof
luajit-parse-sysprof
luajit.pc
*.c_test
+
+# Generated by the performance tests.
+FASTA_5000000
+SUMCOL_5000.txt
+perf/output/
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c0da4362..73f46835 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -464,6 +464,17 @@ if(LUAJIT_USE_TEST)
endif()
add_subdirectory(test)
+# --- Benchmarks source tree ---------------------------------------------------
+
+# The option to enable performance tests for the LuaJIT.
+# Disabled by default, since commonly it is used only by LuaJIT
+# developers and run in the CI with the specially set-up machine.
+option(LUAJIT_ENABLE_PERF "Generate <perf> target" OFF)
+
+if(LUAJIT_ENABLE_PERF)
The option name is a bit confusing because of the `perf` utility.
I would rename it to something like "LUAJIT_ENABLE_PERF_TESTS".
Feel free to ignore.
+  add_subdirectory(perf)
+endif()
+
 # --- Misc rules ---------------------------------------------------------------

 # XXX: Implement <uninstall> target using the following recipe:
diff --git a/perf/CMakeLists.txt b/perf/CMakeLists.txt
new file mode 100644
index 00000000..cc3c312f
--- /dev/null
+++ b/perf/CMakeLists.txt
@@ -0,0 +1,99 @@
+# Running various bench suites against LuaJIT.
+
+include(MakeLuaPath)
+
+if(CMAKE_BUILD_TYPE STREQUAL "Debug")
+  message(WARNING "LuaJIT and perf tests are built in the Debug mode."
s/mode."/mode. "/
Missing whitespace after the dot -- the two string literals are
concatenated, so the message would read "Debug mode.Timings may be
affected.".
+ "Timings may be affected.")
+endif()
+
+set(PERF_OUTPUT_DIR ${PROJECT_BINARY_DIR}/perf/output)
+file(MAKE_DIRECTORY ${PERF_OUTPUT_DIR})
+
+# List of paths that will be used for each suite.
+make_lua_path(LUA_PATH_BENCH_BASE
+ PATHS
+ # Use of the bench module.
+ ${CMAKE_CURRENT_SOURCE_DIR}/utils/?.lua
+ # Simple usage with `jit.dump()`, etc.
+ ${LUAJIT_SOURCE_DIR}/?.lua
+ ${LUAJIT_BINARY_DIR}/?.lua
+)
+
+make_lua_path(LUA_CPATH
+ PATHS
+ # XXX: Some arches may have installed the cjson module here.
+ /usr/lib64/lua/5.1/?.so
+)
+
+# Produce the pair:
+# Target to run for reporting and target to inspect from the
+# console, runnable by the CTest.
+macro(AddBenchTarget perf_suite)
+ file(MAKE_DIRECTORY "${PERF_OUTPUT_DIR}/${perf_suite}/")
+ message(STATUS "Add perf suite ${perf_suite}")
+ add_custom_target(${perf_suite})
+ add_custom_target(${perf_suite}-console
+ COMMAND ${CMAKE_CTEST_COMMAND}
+ -L ${perf_suite}
+ --parallel 1
+ --verbose
+ --output-on-failure
+ --no-tests=error
Maybe also add --schedule-random and --timeout XXX (the default
timeout is 10000000)?
+ )
+ add_dependencies(${perf_suite}-console luajit-main)
+endmacro()
+
+# Add the bench to the pair of targets created by the call above.
+macro(AddBench bench_name bench_path perf_suite LUA_PATH)
+ set(bench_title "perf/${perf_suite}/${bench_name}")
+ get_filename_component(bench_name_stripped ${bench_name} NAME_WE)
+ set(bench_out_file
+ ${PERF_OUTPUT_DIR}/${perf_suite}/${bench_name_stripped}.json
+ )
+ set(bench_command "${LUAJIT_BINARY} ${bench_path}")
+ if(${ARGC} GREATER 4)
+ set(input_file ${ARGV4})
+ set(bench_command "${bench_command} < ${input_file}")
+ endif()
+ set(BENCH_FLAGS
+ "--benchmark_out_format=json --benchmark_out=${bench_out_file}"
+ )
+ set(bench_command_flags ${bench_command} ${BENCH_FLAGS})
+ separate_arguments(bench_command_separated UNIX_COMMAND ${bench_command})
+ add_custom_command(
+ COMMAND ${CMAKE_COMMAND} -E env
+ LUA_PATH="${LUA_PATH}"
+ LUA_CPATH="${LUA_CPATH}"
+ ${bench_command_separated}
+ --benchmark_out_format=json
+ --benchmark_out="${bench_out_file}"
previous two lines can be replaced with ${BENCH_FLAGS}, right?
+ OUTPUT ${bench_out_file}
+ DEPENDS luajit-main
+ COMMENT
+ "Running benchmark ${bench_title} saving results in ${bench_out_file}."
+ )
+ add_custom_target(${bench_name} DEPENDS ${bench_out_file})
+ add_dependencies(${perf_suite} ${bench_name})
+
+ # Report in the console.
+ add_test(NAME ${bench_title}
+ COMMAND sh -c "${bench_command}"
+ )
+ set_tests_properties(${bench_title} PROPERTIES
+ ENVIRONMENT "LUA_PATH=${LUA_PATH}"
+ LABELS ${perf_suite}
+ DEPENDS luajit-main
+ )
+ unset(input_file)
+endmacro()
+
+add_subdirectory(LuaJIT-benches)
+
+add_custom_target(${PROJECT_NAME}-perf
+ DEPENDS LuaJIT-benches
Missing a COMMENT field.
+)
+
+add_custom_target(${PROJECT_NAME}-perf-console
+ DEPENDS LuaJIT-benches-console
Missing a COMMENT field.
+)
diff --git a/perf/LuaJIT-benches/CMakeLists.txt b/perf/LuaJIT-benches/CMakeLists.txt
new file mode 100644
index 00000000..d9909f36
--- /dev/null
+++ b/perf/LuaJIT-benches/CMakeLists.txt
@@ -0,0 +1,52 @@
+set(PERF_SUITE_NAME LuaJIT-benches)
+set(LUA_BENCH_SUFFIX .lua)
It is not a bench-specific suffix. Maybe LUA_SUFFIX?
+
+AddBenchTarget(${PERF_SUITE_NAME})
+
+# Input for the k-nucleotide and revcomp benchmarks.
+set(FASTA_NAME ${CMAKE_CURRENT_BINARY_DIR}/FASTA_5000000)
+add_custom_target(FASTA_5000000
+ COMMAND ${LUAJIT_BINARY}
+ ${CMAKE_CURRENT_SOURCE_DIR}/libs/fasta.lua 5000000 > ${FASTA_NAME}
FASTA_5000000 is a plain text file. I propose adding the .txt extension to
its full name and probably the "_autogenerated" suffix, as we do for
SUMCOL_5000 and SUMCOL_1.
+ OUTPUT ${FASTA_NAME}
+ DEPENDS luajit-main
+ COMMENT "Generate ${FASTA_NAME}."
+)
+
+make_lua_path(LUA_PATH
+ PATHS
+ ${LUA_PATH_BENCH_BASE}
+ ${CMAKE_CURRENT_SOURCE_DIR}/libs/?.lua
+)
+
+# Input for the <sum-file.lua> benchmark.
+set(SUM_NAME ${CMAKE_CURRENT_BINARY_DIR}/SUMCOL_5000.txt)
+# Remove possibly existing file.
+file(REMOVE ${SUM_NAME})
Why do we need to regenerate the file on every CMake configuration?
I propose skipping the generation if the file already exists, or
regenerating it only if its SHA256 does not match.
+
+set(SUMCOL_FILE ${CMAKE_CURRENT_SOURCE_DIR}/SUMCOL_1.txt)
+file(READ ${SUMCOL_FILE} SUMCOL_CONTENT)
+foreach(_unused RANGE 4999)
+ file(APPEND ${SUM_NAME} "${SUMCOL_CONTENT}")
+endforeach()
+
+file(GLOB benches "${CMAKE_CURRENT_SOURCE_DIR}/*${LUA_BENCH_SUFFIX}")
+foreach(bench_path ${benches})
+ file(RELATIVE_PATH bench_name ${CMAKE_CURRENT_SOURCE_DIR} ${bench_path})
+ set(bench_title "perf/${PERF_SUITE_NAME}/${bench_name}")
+ if(bench_name MATCHES "k-nucleotide" OR bench_name MATCHES "revcomp")
+ AddBench(${bench_name}
+ ${bench_path} ${PERF_SUITE_NAME} "${LUA_PATH}" ${FASTA_NAME}
+ )
+ add_dependencies(${bench_name} FASTA_5000000)
+ elseif(bench_name MATCHES "sum-file")
+ AddBench(${bench_name}
+ ${bench_path} ${PERF_SUITE_NAME} "${LUA_PATH}" ${SUM_NAME}
+ )
+ else()
+ AddBench(${bench_name} ${bench_path} ${PERF_SUITE_NAME} "${LUA_PATH}")
+ endif()
+endforeach()
+
+# We need to generate the file before we run tests.
+add_dependencies(${PERF_SUITE_NAME}-console FASTA_5000000)