@@ -339,6 +339,12 @@ function check_cli_parameters() {
     PG_VERSION="$POSTGRES_VERSION_DEFAULT"
   fi
 
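+  # pg_current_xlog_location() was renamed to pg_current_wal_lsn() in Postgres 10,
+  # so pick the LSN function matching the server version under test.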
+  if [[ "$PG_VERSION" = "9.6" ]]; then
+    CURRENT_LSN_FUNCTION="pg_current_xlog_location()"
+  else
+    CURRENT_LSN_FUNCTION="pg_current_wal_lsn()"
+  fi
+
   if [[ -z ${TMP_PATH+x} ]]; then
     TMP_PATH="/tmp"
     err "NOTICE: The directory for temporary files is not specified. Default will be used: ${TMP_PATH}."
@@ -1821,7 +1827,7 @@ function apply_postgres_configuration() {
 # Prepare to start workload.
 # Save restore db log, vacuumdb, clear log
 # Globals:
-#   ARTIFACTS_DIRNAME, MACHINE_HOME, DB_NAME
+#   ARTIFACTS_DIRNAME, MACHINE_HOME, DB_NAME, CURRENT_LSN_FUNCTION
 # Arguments:
 #   $1 - run number
 # Returns:
@@ -1840,9 +1846,11 @@ function prepare_start_workload() {
     docker_exec bash -c "gzip -c $LOG_PATH > $MACHINE_HOME/$ARTIFACTS_DIRNAME/postgresql.prepare.log.gz"
   fi
 
-  msg "Reset pg_stat_*** and Postgres log"
+  dbg "Resetting pg_stat_*** and Postgres log and remembering current LSN..."
   (docker_exec psql -U postgres $DB_NAME -f - <<EOF
 select pg_stat_reset(), pg_stat_statements_reset(), pg_stat_kcache_reset(), pg_stat_reset_shared('archiver'), pg_stat_reset_shared('bgwriter');
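+-- Remember the current LSN and timestamp so that collect_results() can later
+-- measure how much WAL the workload generated.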
+drop table if exists pg_stat_nancy_lsn;
+create table pg_stat_nancy_lsn as select now() as created_at, ${CURRENT_LSN_FUNCTION} as lsn;
 EOF
   ) > /dev/null
   docker_exec bash -c "echo '' > $LOG_PATH"
@@ -1988,7 +1996,8 @@ function save_artifacts() {
 #######################################
 # Collect results of workload execution
 # Globals:
-#   CONTAINER_HASH, MACHINE_HOME, ARTIFACTS_DESTINATION, PG_STAT_TOTAL_TIME
+#   CONTAINER_HASH, MACHINE_HOME, ARTIFACTS_DESTINATION, PG_STAT_TOTAL_TIME,
+#   CURRENT_LSN_FUNCTION
 # Arguments:
 #   $1 - run number
 # Returns:
@@ -2030,9 +2039,20 @@ function collect_results() {
20302039 " pg_stat_user_functions order by schemaname, funcname" \
20312040 " pg_stat_xact_user_functions order by schemaname, funcname" \
20322041 ; do
2033- docker_exec bash -c " psql -U postgres $DB_NAME -b -c \" copy (select * from $table2export ) to stdout with csv header delimiter ',';\" > /$MACHINE_HOME /$ARTIFACTS_DIRNAME /\$ (echo \" $table2export \" | awk '{print \$ 1}').$run_number .csv"
2042+ docker_exec bash -c " psql -U postgres $DB_NAME -b -c \" copy (select * from $table2export ) to stdout with csv header delimiter ',';\" > /$MACHINE_HOME /$ARTIFACTS_DIRNAME /\$ (echo \" $table2export \" | awk '{print \$ 1}').$run_number .csv"
20342043 done
20352044
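+  # Export WAL statistics: subtracting the remembered LSN from the current one
+  # (pg_lsn - pg_lsn) yields the number of WAL bytes written during the run;
+  # dividing by the elapsed time gives an average generation rate per hour.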
+  docker_exec bash -c "
+    psql -U postgres $DB_NAME -b -c \"
+      copy (
+        select
+          ${CURRENT_LSN_FUNCTION} - lsn as wal_bytes_generated,
+          pg_size_pretty(${CURRENT_LSN_FUNCTION} - lsn) wal_pretty_generated,
+          pg_size_pretty(3600 * round(((${CURRENT_LSN_FUNCTION} - lsn) / extract(epoch from now() - created_at))::numeric, 2)) || '/h' as wal_avg_per_h
+        from pg_stat_nancy_lsn
+      ) to stdout with csv header delimiter ',';\" > /$MACHINE_HOME/$ARTIFACTS_DIRNAME/wal_stats.${run_number}.csv
+  "
+
   docker_exec bash -c "gzip -c $LOG_PATH > $MACHINE_HOME/$ARTIFACTS_DIRNAME/postgresql.workload.$run_number.log.gz"
   docker_exec bash -c "cp /etc/postgresql/$PG_VERSION/main/postgresql.conf $MACHINE_HOME/$ARTIFACTS_DIRNAME/postgresql.$run_number.conf"
 
@@ -2369,7 +2389,6 @@ if [[ ! -z ${AWS_ZFS+x} ]]; then
   zfs_create_snapshot
 fi
 
-msg "Start runs..."
 runs_count=${#RUNS[*]}
 let runs_count=runs_count/3
 i=0
@@ -2378,7 +2397,7 @@ while : ; do
   d=$j+1
   u=$j+2
   let num=$i+1
-  msg "Start run #$num."
+  msg "Experimental run (sequential number): #$num."
   delta_config=${RUNS[$j]}
   delta_ddl_do=${RUNS[$d]}
   delta_ddl_undo=${RUNS[$u]}
@@ -2454,25 +2473,45 @@ while : ; do
     echo -e "${MSG_PREFIX}Query groups: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/pgbadger.$num.json | jq '.normalyzed_info | length')
     echo -e "${MSG_PREFIX}Errors: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/pgbadger.$num.json | jq '.overall_stat.errors_number')
     echo -e "${MSG_PREFIX}Errors groups: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/pgbadger.$num.json | jq '.error_info | length')
-    if [[ ! -z ${WORKLOAD_PGBENCH+x} ]]; then
-      tps_string=$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/workload_output.$num.txt | grep "including connections establishing")
-      tps=${tps_string//[!0-9.]/}
-      if [[ ! -z "$tps" ]]; then
-        echo -e "${MSG_PREFIX}TPS: $tps (including connections establishing)"
-      fi
-    fi
-    if [[ ! -z ${WORKLOAD_REAL+x} ]]; then
-      avg_num_con_string=$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/workload_output.$num.txt | grep "Average number of concurrent connections")
-      avg_num_con=${avg_num_con_string//[!0-9.]/}
-      if [[ ! -z "$avg_num_con" ]]; then
-        echo -e "${MSG_PREFIX}Avg. connection number: $avg_num_con"
-      fi
-    fi
   else
     if [[ ! -z ${PG_STAT_TOTAL_TIME+x} ]]; then
       echo -e "${MSG_PREFIX}Total query time: $PG_STAT_TOTAL_TIME ms"
     fi
   fi
+
+  if [[ ! -z ${WORKLOAD_PGBENCH+x} ]]; then
+    tps_string=$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/workload_output.$num.txt | grep "including connections establishing")
+    tps=${tps_string//[!0-9.]/}
+    if [[ ! -z "$tps" ]]; then
+      echo -e "${MSG_PREFIX}TPS: $tps (including connections establishing)"
+    fi
+  fi
+
+  if [[ ! -z ${WORKLOAD_REAL+x} ]]; then
+    avg_num_con_string=$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_DIRNAME/workload_output.$num.txt | grep "Average number of concurrent connections")
+    avg_num_con=${avg_num_con_string//[!0-9.]/}
+    if [[ ! -z "$avg_num_con" ]]; then
+      echo -e "${MSG_PREFIX}Avg. connection number: $avg_num_con"
+    fi
+  fi
+
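+  # wal_stats.$num.csv columns (written in collect_results):
+  # wal_bytes_generated, wal_pretty_generated, wal_avg_per_h.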
+  echo -e "${MSG_PREFIX}WAL: $(docker_exec tail -1 $MACHINE_HOME/$ARTIFACTS_DIRNAME/wal_stats.$num.csv | awk -F',' '{print $1" bytes generated ("$2"), avg tput: "$3}')"
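+  # pg_stat_bgwriter CSV columns 1-5: checkpoints_timed, checkpoints_req,
+  # checkpoint_write_time (ms), checkpoint_sync_time (ms), buffers_checkpoint.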
+  checkpoint_data=$(docker_exec tail -1 $MACHINE_HOME/$ARTIFACTS_DIRNAME/pg_stat_bgwriter.$num.csv)
+  echo -e "${MSG_PREFIX}Checkpoints: $(echo $checkpoint_data | awk -F',' '{print $1}') planned (timed)"
+  echo -e "${MSG_PREFIX}$(echo $checkpoint_data | awk -F',' '{print $2}') forced (requested)"
+  checkpoint_buffers=$(echo $checkpoint_data | awk -F',' '{print $5}')
+  checkpoint_write_t=$(echo $checkpoint_data | awk -F',' '{print $3}')
+  checkpoint_sync_t=$(echo $checkpoint_data | awk -F',' '{print $4}')
+  checkpoint_t=$((checkpoint_write_t + checkpoint_sync_t))
+  checkpoint_mb=$((checkpoint_buffers * 8 / 1024))
+
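+  # Average checkpoint write throughput, assuming the default 8 KiB block size:
+  # (buffers * 8 / 1024) MiB written over (checkpoint_t / 1000) seconds.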
+  if [[ $checkpoint_t > 0 ]]; then
+    checkpoint_mbps=$((checkpoint_buffers * 8000 / (1024 * checkpoint_t)))
+  else
+    checkpoint_mbps=0
+  fi
+  echo -e "${MSG_PREFIX}${checkpoint_buffers} buffers (${checkpoint_mb} MiB), took ${checkpoint_t} ms, avg tput: ${checkpoint_mbps} MiB/s"
+
   echo -e "------------------------------------------------------------------------------"
 
   # revert delta