# Test report related configuration.
# NOTE(review): WORKSPACE and DATE are expected to be exported by the CI job
# environment before this file is sourced — confirm against the job template.
TEST_REPORT_PARTIAL="*_test_report.rst"
# Results live inside the opnfvdocs directory layout.
TEST_REPORT_DIR="${WORKSPACE}/docs/testing/developer/devguide/results"
TEST_REPORT_INDEX="${TEST_REPORT_DIR}/index.rst"
TEST_REPORT_LINK_OLD="https://wiki.opnfv.org/wiki/vsperf_results"
# Rendered HTML report; path mirrors TEST_REPORT_DIR after the docs build.
TEST_REPORT_FILE="${WORKSPACE}/docs_output/testing_developer_devguide_results/index.html"
TEST_REPORT_TARBALL="vswitchperf_logs_${DATE}.tar.gz"
if [[ "x${BRANCH}" == "xmaster" ]]; then
# Print a PASS/FAIL summary line for every test case listed in $TESTCASES.
#
# Globals:   TESTCASES (read)      - whitespace separated list of test case names
#            EXIT (written)        - set to $EXIT_TC_FAILED on any failure
#            EXIT_TC_FAILED (read) - exit code reserved for test case failures
# Arguments: $1 - directory with results
# Outputs:   one formatted status line per test case on stdout
function print_results() {
    for i in $TESTCASES ; do
        # A missing results directory means nothing was produced at all.
        if [ ! -e "$1" ] ; then
            printf " %-70s %-6s\n" "result_${i}" "FAILED"
            EXIT=$EXIT_TC_FAILED
        else
            # Result file is named result_<testcase>_<vswitch>.csv.
            RES_FILE=$(ls -1 "$1" | grep -E "result_${i}_[0-9a-zA-Z-]+.csv")

            if [ -n "$RES_FILE" ] && [ -e "${1}/${RES_FILE}" ]; then
                # A line starting with FAILED inside the CSV marks a failed run.
                if grep -q '^FAILED' "${1}/${RES_FILE}" ; then
                    printf " %-70s %-6s\n" "result_${i}" "FAILED"
                    EXIT=$EXIT_TC_FAILED
                else
                    printf " %-70s %-6s\n" "result_${i}" "OK"
                fi
            else
                # No matching result file: treat the test case as failed.
                printf " %-70s %-6s\n" "result_${i}" "FAILED"
                EXIT=$EXIT_TC_FAILED
            fi
        fi
    done
}
# Prepare final tarball with all logs...
# (word-splitting of the $(find ...) output is intentional: each top level
# directory becomes a separate tar argument)
tar --exclude "${TEST_REPORT_TARBALL}" -czf "${TEST_REPORT_LOG_DIR}/${TEST_REPORT_TARBALL}" $(find "${TEST_REPORT_LOG_DIR}" -mindepth 1 -maxdepth 1 -type d)
# ...and move original log files to the archive directory...
find "${TEST_REPORT_LOG_DIR}" -maxdepth 2 -name "results_*" -type d -exec mv \{\} "${RESULTS_ARCHIVE}" \;
# ...and remove the rest (already captured in the tarball above)
find "${TEST_REPORT_LOG_DIR}" -mindepth 1 -maxdepth 1 -type d -exec rm -rf \{\} \;
# clone opnfvdocs repository
# NOTE(review): the actual clone command and target branch are outside this
# chunk — verify they follow immediately after this banner.
echo "Cloning opnfvdocs repository..."
# generates graphs from recent test results
function generate_and_push_graphs() {
# create graphs from results in archive directory
- ./ci/plot-results.sh "phy2phy_tput back2back pvp_tput pvvp_tput" ",OvsDpdkVhost," $RESULTS_ARCHIVE
+ ./ci/plot-results.sh "$1" "$2" "$RESULTS_ARCHIVE"
# push graphs into artifactory
if ls *png &> /dev/null ; then
# Configure 2MB hugepages on NUMA node0 and zero the pool on node1 (if present).
#
# Ensures at least HP_REQUESTED free hugepages are available on node0,
# growing the pool only as much as needed, capped at HP_MAX total pages.
# Globals:   none read; HP_* are script-global scratch vars (matches file style)
# Outputs:   none; writes hugepage counters to sysfs via sudo
function configure_hugepages() {
    HP_MAX=8192         # hard cap for the node0 hugepage pool
    HP_REQUESTED=3072   # free hugepages required by the test run
    HP_NR=$(cat /sys/devices/system/node/node0/hugepages/hugepages-2048kB/nr_hugepages)
    HP_FREE=$(cat /sys/devices/system/node/node0/hugepages/hugepages-2048kB/free_hugepages)
    # Check if HP must be (re)configured: grow the pool only when too few
    # pages are currently free.
    if [ "$HP_FREE" -lt "$HP_REQUESTED" ] ; then
        HP_NR_NEW=$(( HP_NR + (HP_REQUESTED - HP_FREE) ))
        if [ "$HP_NR_NEW" -gt "$HP_MAX" ] ; then
            HP_NR_NEW=$HP_MAX
        fi
        sudo bash -c "echo $HP_NR_NEW > /sys/devices/system/node/node0/hugepages/hugepages-2048kB/nr_hugepages"
    fi

    # Single-socket machines have no node1 entry, so guard before writing.
    if [ -f /sys/devices/system/node/node1/hugepages/hugepages-2048kB/nr_hugepages ] ; then
        sudo bash -c "echo 0 > /sys/devices/system/node/node1/hugepages/hugepages-2048kB/nr_hugepages"
    fi
}
# dump hugepages configuration
initialize_logdir

# configure hugepages
echo "Configure hugepages"
echo "==================="
configure_hugepages
# NOTE(review): hugepages_info is defined elsewhere in this file; the grep
# drops its '--' separator lines from the console output — confirm.
hugepages_info | grep -v '^--'
echo
# execute job based on passed parameter
case $1 in
push_results_to_artifactory
- generate_and_push_graphs
+ generate_and_push_graphs "$TESTCASES_DAILY" ",OvsDpdkVhost,"
+ generate_and_push_graphs "$TESTCASES_DAILY" ",OvsVanilla,"
+ generate_and_push_graphs "$TESTCASES_DAILY_VPP" ",VppDpdkVhost,"
+ generate_and_push_graphs "$TESTCASES_SRIOV" ",none,"
cleanup