#!/bin/bash
# version: 1.1
# Generates 10 SQL files (10,000 INSERT statements each), loads each file into
# its own table of the test_benchmark database, and measures how long the
# import takes. Requires root (uses `su - postgres`) and the helper scripts
# ./postgres_info.sh and ./postgres_info_usage.sh in the current directory.
#
# usage:
#   su - root
#   chmod +x ./script.sh
#   ./script.sh
set -euo pipefail

# Benchmark parameters
readonly NUM_RECORDS=10000
readonly DBNAME="test_benchmark"
readonly FILENAME=$DBNAME
readonly NUM_TABLES=10   # table0 .. table9 — must match generated file count

# --- Generate the SQL files --------------------------------------------------
for ((j = 0; j < NUM_TABLES; j++)); do
  echo "Generating SQL file $FILENAME-table-$j.sql with $NUM_RECORDS records..."
  {
    for ((i = 1; i <= NUM_RECORDS; i++)); do
      echo "INSERT INTO table$j (firstname, lastname, code, place) VALUES ('bro', 'code', '1234567', 'earth');"
    done
  } > "$FILENAME-table-$j.sql"
  echo "SQL file $FILENAME-table-$j.sql generated successfully with the size of:"
  du -hs "$FILENAME-table-$j.sql"
done

# --- Create database and tables ----------------------------------------------
echo "... creating database $DBNAME"
su - postgres -c "psql -d postgres -c \"CREATE DATABASE $DBNAME;\""

echo "... creating table0 to table$((NUM_TABLES - 1)) in database $DBNAME"
# FIX: the original looped {0..10}, creating an 11th table (table10) that no
# generated SQL file ever targets; loop now matches the 10 generated files.
for ((i = 0; i < NUM_TABLES; i++)); do
  su - postgres -c "psql -d $DBNAME -c \"CREATE TABLE table$i (id SERIAL PRIMARY KEY, firstname VARCHAR(50), lastname VARCHAR(50), code VARCHAR(20), place VARCHAR(50));\""
done

# --- Benchmark ----------------------------------------------------------------
# Locate the postgres data directory via the helper script: third line after
# the data_directory match; xargs trims surrounding whitespace.
POSTGRES_FILES=$(./postgres_info.sh | grep -A2 data_directory | sed -n '3p' | xargs)
./postgres_info_usage.sh
echo "... size of postgres dir: $POSTGRES_FILES"
du -hs "$POSTGRES_FILES"

echo "... starting benchmark: import 10x dump.sql into 10x tables of database $DBNAME"
start_time=$(date +%s%3N)   # milliseconds (GNU date extension)
for ((i = 0; i < NUM_TABLES; i++)); do
  # feed the dump directly to psql instead of a useless `cat | psql`
  psql -q -d "$DBNAME" -U postgres -f "./$FILENAME-table-$i.sql"
done
end_time=$(date +%s%3N)
duration=$(( end_time - start_time ))
echo "=== benchmark finished in: $duration ms"

./postgres_info_usage.sh
echo "... size of postgres dir: $POSTGRES_FILES"
du -hs "$POSTGRES_FILES"

# --- Cleanup ------------------------------------------------------------------
echo "... tidy up deleting $DBNAME"
su - postgres -c "psql -d postgres -c \"DROP DATABASE $DBNAME;\""
rm -f -- "$FILENAME"-table-*.sql   # plain files; no need for -r