#!/bin/bash

# SPDX-FileCopyrightText: 2021-2024 Ole Tange, http://ole.tange.dk and Free Software Foundation, Inc.
#
# SPDX-License-Identifier: GPL-3.0-or-later

# GNU Parallel SQL tests
# The tests must be able to run in parallel

export SQLITE=sqlite3:///%2Frun%2Fshm%2Fparallel.db
export PG=pg://`whoami`:`whoami`@localhost/`whoami`
export MYSQL=mysql://`whoami`:`whoami`@localhost/`whoami`
export CSV=csv:///%2Frun%2Fshm
export INFLUX=influx:///parallel
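
# The DBURLs above follow the general GNU sql/parallel form, roughly
# vendor://[user[:password]@][host[:port]]/[database[/table]], with path
# components percent-encoded (so %2Frun%2Fshm%2Fparallel.db is /run/shm/parallel.db).
# A further backend would be added the same way, e.g. (illustration only,
# not used by this test):
#   export ORACLE=oracle://`whoami`:`whoami`@localhost/`whoami`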

export DEBUG=false

rm -f /run/shm/parallel.db
mkdir -p /run/shm/csv

overlay_mysql() {
    # MySQL is ridiculously slow: Force it to work in RAM
    sudo service mysql stop
    mysqldir=/var/lib/mysql
    upper=/dev/shm/mysql
    work=/dev/shm/mysql-work
    sudo umount $mysqldir 2>/dev/null
    mkdir -p $upper $work
    sudo mount -t overlay overlay -o lowerdir=$mysqldir,upperdir=$upper,workdir=$work $mysqldir
    sudo chown mysql:mysql $mysqldir
    sudo service mysql start
}
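
# The overlay keeps MySQL's writes in /dev/shm, leaving the real data in
# /var/lib/mysql untouched. Reverting it would look roughly like this
# (sketch, not run by the tests):
#   sudo service mysql stop; sudo umount /var/lib/mysql; sudo service mysql start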

p_showsqlresult() {
    # print results stored in $SERVERURL/$TABLE
    SERVERURL=$1
    TABLE=$2
    sql $SERVERURL "select Host,Command,V1,V2,Stdout,Stderr from $TABLE order by seq;"
}
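
# p_wrapper calls this at the end of every test, i.e.:
#   p_showsqlresult $SERVERURL $TABLE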

p_wrapper() {
    INNER=$1
    SERVERURL=$(eval echo $2)
    # Use a random table for each test
    TABLE=TBL$RANDOM
    DBURL=$SERVERURL/$TABLE
    T1=$(mktemp)
    T2=$(mktemp)
    # Run $INNER (all the par_* functions)
    eval "$INNER"
    echo Exit=$?
    # $INNER can start background processes - wait for those
    wait
    echo Exit=$?
    # For debugging show the tempfiles
    $DEBUG && sort -u "$T1" "$T2";
    rm "$T1" "$T2"
    p_showsqlresult $SERVERURL $TABLE
    # Drop the table if not debugging
    $DEBUG || sql $SERVERURL "drop table $TABLE;" >/dev/null 2>/dev/null
}
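
# Every par_* test below runs via p_wrapper, so it can rely on $SERVERURL,
# $TABLE, $DBURL (= $SERVERURL/$TABLE) and the tempfiles $T1/$T2 being set.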

p_template() {
    # Run the jobs with both master and worker
    (
        # Make sure there is work to be done
        sleep 6;
        parallel --sqlworker $DBURL "$@" sleep .3\;echo >"$T1"
    ) &
    parallel --sqlandworker $DBURL "$@" sleep .3\;echo ::: {1..5} ::: {a..e} >"$T2";
}
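
# Division of labour (as described in GNU parallel's documentation of the SQL
# options): --sqlandworker both fills the job table in $DBURL and executes
# jobs, while the backgrounded --sqlworker only pulls jobs from that table,
# so the two processes drain the same queue.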

par_sqlandworker() {
    p_template
}

par_sqlandworker_lo() {
    p_template -S lo
}

par_sqlandworker_results() {
    p_template --results /tmp/out--sql
}

par_sqlandworker_linebuffer() {
    p_template --linebuffer
}

par_sqlandworker_tag() {
    p_template --tag
}

par_sqlandworker_linebuffer_tag() {
    p_template --linebuffer --tag
}

par_sqlandworker_compress_linebuffer_tag() {
    p_template --compress --linebuffer --tag
}

par_sqlandworker_unbuffer() {
    p_template -u
}

par_sqlandworker_total_jobs() {
    p_template echo {#} of '{=1 $_=total_jobs(); =};'
}

par_append() {
    parallel --sqlmaster $DBURL sleep .3\;echo ::: {1..5} ::: {a..e} >"$T2";
    parallel --sqlmaster +$DBURL sleep .3\;echo ::: {11..15} ::: {A..E} >>"$T2";
    parallel --sqlworker $DBURL sleep .3\;echo >"$T1"
}
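
# The second --sqlmaster call uses +$DBURL; according to the GNU parallel
# documentation the leading '+' appends the new jobs to the existing table
# instead of recreating it, which is what this append test exercises.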

par_shuf() {
    MD5=$(echo $SERVERURL | md5sum | perl -pe 's/(...).*/$1/')
    T=/tmp/parallel-bug49791-$MD5
    [ -e $T ] && rm -rf $T
    export PARALLEL="--shuf --result $T"
    parallel --sqlandworker $DBURL sleep .3\;echo \
        ::: {1..5} ::: {a..e} >"$T2";
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    unset PARALLEL
    wait;
    # Did it compute correctly?
    cat $T/1/*/*/*/stdout
    # Did it shuffle? (Compare the job table to a non-shuffled run)
    SHUF=$(sql $SERVERURL "select Host,Command,V1,V2,Stdout,Stderr from $TABLE order by seq;")
    export PARALLEL="--result $T"
    parallel --sqlandworker $DBURL sleep .3\;echo \
        ::: {1..5} ::: {a..e} >"$T2";
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    parallel --sqlworker $DBURL sleep .3\;echo >"$T2" &
    unset PARALLEL
    wait;
    NOSHUF=$(sql $SERVERURL "select Host,Command,V1,V2,Stdout,Stderr from $TABLE order by seq;")
    DIFFSIZE=$(diff <(echo "$SHUF") <(echo "$NOSHUF") | wc -c)
    if [ $DIFFSIZE -gt 2500 ]; then
        echo OK: Diff bigger than 2500 char
    fi
    [ -e $T ] && rm -rf $T
    touch "$T1"
}

par_empty() {
    echo Do nothing: TBL99999 does not exist because it is not created
    true;
}

par_sql_joblog() {
    echo '### should only give a single --joblog heading'
    echo '### --sqlmaster/--sqlworker'
    parallel -k --joblog - --sqlmaster $DBURL --wait sleep .3\;echo ::: {1..5} ::: {a..e} |
        perl -pe 's/\d+\.\d+/999.999/g' | sort -n &
    sleep 0.5
    T=$(mktemp)
    parallel -k --joblog - --sqlworker $DBURL > "$T"
    wait
    # Needed because of a race condition
    cat "$T"; rm "$T"
    echo '### --sqlandworker'
    parallel -k --joblog - --sqlandworker $DBURL sleep .3\;echo ::: {1..5} ::: {a..e} |
        perl -pe 's/\d+\.\d+/999.999/g' | sort -n
    # TODO --sqlandworker --wait
}

par_no_table() {
    echo 'bug #50018: --dburl without table dies'
    parallel --sqlworker $SERVERURL
    echo $?
    parallel --sqlandworker $SERVERURL echo ::: no_output
    echo $?
    parallel --sqlmaster $SERVERURL echo ::: no_output
    echo $?
    # Create the table so p_wrapper has a table to remove
    parallel --sqlandworker $DBURL true ::: dummy ::: dummy
}

export -f $(compgen -A function | grep p_)
export -f $(compgen -A function | G par_ "$@")
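
# export -f is needed because GNU parallel runs these functions in child
# shells; without it p_wrapper and the par_* tests would be undefined there.
# G is assumed to be the testsuite's grep-like helper that picks the par_*
# tests requested in "$@".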

# Run the DBURLs in parallel, but run only one test at a time per DBURL

joblog=/tmp/jl-`basename $0`
true > $joblog
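
# $joblog is truncated once here; do_dburl then passes --joblog +$joblog,
# which (per the GNU parallel docs) appends to the existing log instead of
# starting a new one, so all DBURLs share a single joblog.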

do_dburl() {
    export dbvar=$1
    hostname=`hostname`
    # Normalize table names, hostnames, and whitespace so the output is
    # reproducible across runs and across the different backends
    compgen -A function | G par_ | sort |
        stdout parallel -vj1 -k --tag --joblog +$joblog p_wrapper {} \$$dbvar |
        perl -pe 's/tbl\d+/TBL99999/gi;' |
        perl -pe 's/(from TBL99999 order) .*/$1/g' |
        perl -pe 's/ *\b'"$hostname"'\b */hostname/g' |
        grep -v -- --------------- |
        perl -pe 's/ *\bhost\b */host/g' |
        perl -pe 's/ +/ /g' |
        # SQLITE par_empty Error: near line 1: in prepare, no such table: TBL99999 (1)
        # SQLITE par_empty Parse error near line 1: no such table: TBL99999
        perl -pe 's/Error: near line 1: in prepare, (.*) .../Parse error near line 1: /'
}

export -f do_dburl
parallel -vk --tag do_dburl ::: CSV INFLUX MYSQL PG SQLITE