Merge pull request #252 from JelteF/fix-q28
Fix running of Q28 by not removing important backslashes from regex
rschu1ze authored Nov 15, 2024
2 parents d8a9b59 + 0d89d4f commit 39b20b5
Showing 65 changed files with 67 additions and 67 deletions.
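
Why the change matters: bash's read builtin treats a backslash in its input as an escape character unless -r is given, so a query line containing regex escapes (as Q28 does) reaches the client with its backslashes stripped. Switching every runner loop to read -r passes each line through verbatim. A minimal sketch of the difference, using a hypothetical pattern rather than the real Q28 text:

#!/bin/bash
# Write a one-line query containing regex backslashes to a temp file
# (illustrative pattern only, not the actual ClickBench Q28 query).
printf '%s\n' "SELECT count(*) FROM hits WHERE match(URL, '\\.php\\?')" > /tmp/demo_queries.sql

# Without -r, read strips the backslashes: the pattern arrives as '.php?'.
while read query; do echo "plain read: $query"; done < /tmp/demo_queries.sql

# With -r, the line is passed through verbatim and the regex stays intact.
while read -r query; do echo "read -r:    $query"; done < /tmp/demo_queries.sql

The same one-line change is applied mechanically to each run.sh below.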
2 changes: 1 addition & 1 deletion alloydb/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion athena/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 for i in $(seq 1 $TRIES); do
 aws athena --output json start-query-execution --query-execution-context 'Database=test' --result-configuration "OutputLocation=${OUTPUT}" --query-string "${query}" | jq '.QueryExecutionId'
 done
2 changes: 1 addition & 1 deletion aurora-mysql/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 for i in $(seq 1 $TRIES); do
 mysql -h "${FQDN}" -u admin --password="${PASSWORD}" test -vvv -e "${query}"
 done;
2 changes: 1 addition & 1 deletion aurora-postgresql/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 echo "$query";
 for i in $(seq 1 $TRIES); do
 psql -U postgres -h "${FQDN}" test -t -c '\timing' -c "$query" | grep 'Time'
2 changes: 1 addition & 1 deletion bigquery/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 echo "$query";
 for i in $(seq 1 $TRIES); do
 time bq query --use_legacy_sql=false --use_cache=false <<< "$query"
2 changes: 1 addition & 1 deletion byconity/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 [ -z "$FQDN" ] && sync
 [ -z "$FQDN" ] && echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion bytehouse/run.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 TRIES=3
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 for i in $(seq 1 $TRIES); do
 ./bytehouse-cli --user "$user" --account "$account" --password "$password" --region ap-southeast-1 --secure --warehouse "$warehouse" --database test --query "${query}"
 done
2 changes: 1 addition & 1 deletion chdb-parquet/run.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion chdb/run.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion citus/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion clickhouse-cloud/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 echo -n "["
 for i in $(seq 1 $TRIES); do
 clickhouse-client --host "${FQDN:=localhost}" --password "${PASSWORD:=}" ${PASSWORD:+--secure} --time --format=Null --query="$query" --progress 0 2>&1 |
2 changes: 1 addition & 1 deletion clickhouse-datalake/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 
 echo -n "["
 for i in $(seq 1 $TRIES); do
2 changes: 1 addition & 1 deletion clickhouse-parquet/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion clickhouse-web/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 clickhouse-client --query "SYSTEM DROP FILESYSTEM CACHE"
 
 echo -n "["
2 changes: 1 addition & 1 deletion clickhouse/extended/latency.sh
@@ -5,7 +5,7 @@ PASSWORD="$2"
 
 TRIES=10
 
-cat queries_latency.sql | while read query; do
+cat queries_latency.sql | while read -r query; do
 echo "$query"
 clickhouse-local --query "SELECT format(\$\$ $query \$\$, c1) FROM file('random_counters.tsv') ORDER BY rand() LIMIT ${TRIES} FORMAT TSV" |
 clickhouse-benchmark --concurrency 10 --iterations "${TRIES}" --delay 0 --secure --host "$FQDN" --password "$PASSWORD" 2>&1 | grep -F '50.000%'
2 changes: 1 addition & 1 deletion clickhouse/extended/throughput.sh
@@ -5,7 +5,7 @@ PASSWORD="$2"
 
 TRIES=3
 QUERY_NUM=1
-cat queries_throughput.sql | while read query; do
+cat queries_throughput.sql | while read -r query; do
 echo -n "["
 for i in $(seq 1 $TRIES); do
 RES=$(clickhouse-client --host "$FQDN" --password "$PASSWORD" --secure --time --format=Null --query="$query" 2>&1 ||:)
2 changes: 1 addition & 1 deletion clickhouse/run.sh
@@ -11,7 +11,7 @@ fi
 
 TRIES=3
 QUERY_NUM=1
-cat queries"$SUFFIX".sql | while read query; do
+cat queries"$SUFFIX".sql | while read -r query; do
 [ -z "$FQDN" ] && sync
 [ -z "$FQDN" ] && echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion cloudberry/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion cratedb/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion databend/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 [ -z "$FQDN" ] && sync
 [ -z "$FQDN" ] && echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion datafusion/run.sh
@@ -19,7 +19,7 @@ fi
 TRIES=3
 QUERY_NUM=1
 echo $1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion datafusion/run2.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion druid/run.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 TRIES=3
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 for i in $(seq 1 100); do
 CHECK=$(curl -o /dev/null -w '%{http_code}' -s -XPOST -H'Content-Type: application/json' http://localhost:8888/druid/v2/sql/ -d @check.json })
2 changes: 1 addition & 1 deletion duckdb-parquet/run.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion duckdb/run.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion generate-results.sh
@@ -14,7 +14,7 @@ fi
 sed '/^const data = \[$/q' index.html
 
 FIRST=1
-LANG="" ls -1 */results/*.json | while read file
+LANG="" ls -1 */results/*.json | while read -r file
 do
 [[ $file =~ ^(hardware|versions|gravitons)/ ]] && continue;
 
2 changes: 1 addition & 1 deletion glaredb/benchmark.sh
@@ -7,7 +7,7 @@ curl https://glaredb.com/install.sh | sh
 
 wget https://clickhouse-public-datasets.s3.eu-central-1.amazonaws.com/hits_compatible/athena/hits.parquet
 
-cat queries.sql | while read query
+cat queries.sql | while read -r query
 do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
2 changes: 1 addition & 1 deletion gravitons/generate-results.sh
@@ -7,7 +7,7 @@
 sed '/^const data = \[$/q' index.html
 
 FIRST=1
-ls -1 results/*.json | while read file
+ls -1 results/*.json | while read -r file
 do
 [ "${FIRST}" = "0" ] && echo -n ','
 jq --compact-output ". += {\"source\": \"${file}\"}" "${file}"
2 changes: 1 addition & 1 deletion greenplum/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion hardware/benchmark-chyt.sh
@@ -4,7 +4,7 @@ QUERIES_FILE="queries.sql"
 TABLE=$1
 TRIES=3
 
-cat "$QUERIES_FILE" | sed "s|{table}|\"${TABLE}\"|g" | while read query; do
+cat "$QUERIES_FILE" | sed "s|{table}|\"${TABLE}\"|g" | while read -r query; do
 
 echo -n "["
 for i in $(seq 1 $TRIES); do
2 changes: 1 addition & 1 deletion hardware/benchmark-cloud.sh
@@ -20,7 +20,7 @@ fi
 QUERY_ID_PREFIX="benchmark_$RANDOM"
 QUERY_NUM=1
 
-cat "$QUERIES_FILE" | sed "s/{table}/${TABLE}/g" | while read query
+cat "$QUERIES_FILE" | sed "s/{table}/${TABLE}/g" | while read -r query
 do
 for i in $(seq 1 $TRIES)
 do
2 changes: 1 addition & 1 deletion hardware/benchmark-new.sh
@@ -15,7 +15,7 @@ else
 exit 1
 fi
 
-cat "$QUERIES_FILE" | sed "s/{table}/${TABLE}/g" | while read query; do
+cat "$QUERIES_FILE" | sed "s/{table}/${TABLE}/g" | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion hardware/benchmark-yql.sh
@@ -4,7 +4,7 @@ QUERIES_FILE="queries.sql"
 TABLE=$1
 TRIES=3
 
-cat "$QUERIES_FILE" | sed "s|{table}|\"${TABLE}\"|g" | while read query; do
+cat "$QUERIES_FILE" | sed "s|{table}|\"${TABLE}\"|g" | while read -r query; do
 
 echo -n "["
 for i in $(seq 1 $TRIES); do
2 changes: 1 addition & 1 deletion hardware/generate-results.sh
@@ -14,7 +14,7 @@ fi
 sed '/^const data = \[$/q' index.html
 
 FIRST=1
-ls -1 results/*.json | while read file
+ls -1 results/*.json | while read -r file
 do
 [ "${FIRST}" = "0" ] && echo -n ','
 jq --compact-output ". += {\"source\": \"${file}\"}" "${file}"
2 changes: 1 addition & 1 deletion hardware/hardware.sh
@@ -63,7 +63,7 @@ echo
 >result.csv
 QUERY_NUM=1
 
-cat "$QUERIES_FILE" | sed "s/{table}/hits/g" | while read query; do
+cat "$QUERIES_FILE" | sed "s/{table}/hits/g" | while read -r query; do
 sync
 if [ "${OS}" = "Darwin" ]
 then
2 changes: 1 addition & 1 deletion heavyai/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion hydra/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion infobright/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion kinetica/run.sh
@@ -4,7 +4,7 @@ export KI_PWD=admin
 
 TRIES=3
 QUERY_NUM=1
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 [ -z "$FQDN" ] && sync
 [ -z "$FQDN" ] && echo 3 | sudo tee /proc/sys/vm/drop_caches >/dev/null
 
2 changes: 1 addition & 1 deletion mariadb-columnstore/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion mariadb/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion monetdb/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion motherduck/run.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 ./query.py <<< "${query}"
 done
2 changes: 1 addition & 1 deletion mysql-myisam/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 
2 changes: 1 addition & 1 deletion mysql/run.sh
@@ -2,7 +2,7 @@
 
 TRIES=3
 
-cat queries.sql | while read query; do
+cat queries.sql | while read -r query; do
 sync
 echo 3 | sudo tee /proc/sys/vm/drop_caches
 