Fixing docker_update.sh script to not add context. (#5820)

* Fixing docker_update.sh script to not add context.

Use docker build from a higher level to not load massive context (see the sketch below the file stats).

* Adding a custom postgresql for testing.

* Fixing docker update

* Fixing dir.

* Fix rust log

* Add bash / shell file format checking.

* Adding shfmt package.

* Running bash format.

* Fixing bash fmt.

* Fixing bash fmt 2.

* Adding pgtune note.
Dessalines, 2025-06-26 03:48:39 -04:00 (committed via GitHub)
parent 78fc703ebd
commit 9ccd647e02
17 changed files with 192 additions and 140 deletions
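The "higher level" build from the first bullet shows up in the docker-compose change below: compose now builds with context: ../ and an explicit dockerfile path, so the build context is the repository root instead of the docker/ directory, which carries large runtime data such as docker/volumes. The roughly equivalent manual invocation would be (a sketch; the image tag here is illustrative):

    # Run from the repository root so docker/volumes never enters the context;
    # -f points at the Dockerfile that lives under docker/.
    cd "$(git rev-parse --show-toplevel)"
    docker build -f docker/Dockerfile -t dessalines/lemmy:dev .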

View file

@@ -48,6 +48,15 @@ steps:
     when:
       - event: pull_request

+  bash_fmt:
+    image: alpine:3
+    commands:
+      - apk add shfmt
+      - shfmt -i 2 -d */**.bash
+      - shfmt -i 2 -d */**.sh
+    when:
+      - event: pull_request
+
   toml_fmt:
     image: tamasfe/taplo:0.9.3
     commands:
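The CI step only reports differences (-d exits non-zero when files are unformatted); the same check can be run locally, and -w applies the formatting instead of printing a diff (a sketch, assuming shfmt is installed):

    shfmt -i 2 -d */**.bash */**.sh   # report formatting differences, 2-space indent
    shfmt -i 2 -w */**.bash */**.sh   # rewrite the files in place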

View file

@@ -0,0 +1,25 @@
+# You can use https://pgtune.leopard.in.ua to tune this for your system.
+# DB Version: 16
+# OS Type: linux
+# DB Type: web
+# Total Memory (RAM): 12 GB
+# CPUs num: 16
+# Data Storage: ssd
+
+max_connections = 200
+shared_buffers = 3GB
+effective_cache_size = 9GB
+maintenance_work_mem = 768MB
+checkpoint_completion_target = 0.9
+wal_buffers = 16MB
+default_statistics_target = 100
+random_page_cost = 1.1
+effective_io_concurrency = 200
+work_mem = 3932kB
+huge_pages = try
+min_wal_size = 1GB
+max_wal_size = 4GB
+max_worker_processes = 16
+max_parallel_workers_per_gather = 4
+max_parallel_workers = 16
+max_parallel_maintenance_workers = 4
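The header comment points at pgtune because these values fall out of its heuristics for a 12 GB, 16-core web workload: shared_buffers is a quarter of RAM, effective_cache_size is three quarters, and work_mem appears to be derived as (RAM - shared_buffers) / (max_connections * 3) / max_parallel_workers_per_gather. Checking that last value (a sketch of the heuristic, not an official formula):

    # (12 GB - 3 GB) in kB, over (200 connections * 3), over 4 parallel workers:
    echo "$(( (12 - 3) * 1024 * 1024 / (200 * 3) / 4 ))kB" # prints 3932kB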

View file

@@ -22,11 +22,13 @@ services:
     logging: *default-logging
   lemmy:
-    image: dessalines/lemmy:1.0.0-alpha.4
+    build:
+      context: ../
+      dockerfile: docker/Dockerfile
     hostname: lemmy
     restart: unless-stopped
     environment:
-      - RUST_LOG=extism=info,lemmy_api_common=info,error
+      - RUST_LOG=warn,extism=info,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug
     volumes:
       - ./lemmy.hjson:/config/config.hjson:Z
       - ./plugins:/plugins:Z
@@ -109,5 +111,6 @@ services:
       - POSTGRES_DB=lemmy
     volumes:
       - ./volumes/postgres:/var/lib/postgresql/data:Z
+      - ./customPostgresql.conf:/etc/postgresql.conf:Z
     restart: unless-stopped
     logging: *default-logging
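With compose building the image itself, bringing up the dev stack rebuilds from local sources instead of pulling the published tag, and the expanded RUST_LOG makes the per-crate debug output visible (a sketch; assumes the docker/ directory is the working directory):

    docker compose up -d --build   # rebuild lemmy from ../ and start the stack
    docker compose logs -f lemmy   # follow the now much chattier debug logs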

View file

@@ -1,2 +1,2 @@
 #!/usr/bin/env bash
-docker-compose exec postgres pg_dumpall -c -U lemmy > dump_`date +%Y-%m-%d"_"%H_%M_%S`.sql
+docker-compose exec postgres pg_dumpall -c -U lemmy >dump_$(date +%Y-%m-%d"_"%H_%M_%S).sql
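The dump is plain SQL from pg_dumpall, so restoring is just feeding it back through psql; roughly (a sketch; the filename is whatever the backup produced, and -T keeps docker-compose from allocating a TTY so stdin pipes through):

    cat dump_2025-06-26_12_00_00.sql | docker-compose exec -T postgres psql -U lemmy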

View file

@@ -1,8 +1,7 @@
 #!/bin/sh
 set -e

-Help()
-{
+Help() {
   # Display help
   echo "Usage: ./docker_update.sh [OPTIONS]"
   echo ""
@@ -15,12 +14,17 @@ Help()
 while getopts ":hu:" option; do
   case $option in
-    h) Help
-      exit;;
-    u) DOCKER_USER=$OPTARG
-      ;;
-    *) echo "Invalid option $OPTARG."
-      exit;;
+    h)
+      Help
+      exit
+      ;;
+    u)
+      DOCKER_USER=$OPTARG
+      ;;
+    *)
+      echo "Invalid option $OPTARG."
+      exit
+      ;;
   esac
 done
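The reformat leaves behavior untouched; the parser still supports exactly the two flags the help text documents (the username value here is hypothetical):

    ./docker_update.sh -h        # print the usage text
    ./docker_update.sh -u myuser # set DOCKER_USER=myuser for the rest of the script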

View file

@@ -3,7 +3,7 @@ set -e
 sudo docker compose down

-for Item in alpha beta gamma delta epsilon ; do
+for Item in alpha beta gamma delta epsilon; do
   sudo mkdir -p volumes/pictrs_$Item
   sudo chown -R 991:991 volumes/pictrs_$Item
 done

View file

@@ -3,7 +3,7 @@ set -e
 times=3
 duration=0

-for ((i=0; i < times; i++)) ; do
+for ((i = 0; i < times; i++)); do
   echo "Starting iteration $i"
   echo "cargo clean"
   # to benchmark incremental compilation time, do a full build with the same compiler version first,
@@ -14,10 +14,10 @@ for ((i=0; i < times; i++)) ; do
   start=$(date +%s.%N)
   RUSTC_WRAPPER='' cargo build -q
   end=$(date +%s.%N)
-  echo "Finished iteration $i after $(bc <<< "scale=0; $end - $start") seconds"
-  duration=$(bc <<< "$duration + $end - $start")
+  echo "Finished iteration $i after $(bc <<<"scale=0; $end - $start") seconds"
+  duration=$(bc <<<"$duration + $end - $start")
 done
-average=$(bc <<< "scale=0; $duration / $times")
+average=$(bc <<<"scale=0; $duration / $times")
 echo "Average compilation time over $times runs is $average seconds"

View file

@@ -25,22 +25,25 @@ ask_for_db_config() {
   default_config_final=0
   default_config_valid=0
-  while [ "$default_config_valid" == 0 ]
-  do
+  while [ "$default_config_valid" == 0 ]; do
     read -p "Use this configuration (y/n)? " default_config
     case "$default_config" in
-      [yY]* ) default_config_valid=1; default_config_final=1;;
-      [nN]* ) default_config_valid=1; default_config_final=0;;
-      * ) yes_no_prompt_invalid;;
+      [yY]*)
+        default_config_valid=1
+        default_config_final=1
+        ;;
+      [nN]*)
+        default_config_valid=1
+        default_config_final=0
+        ;;
+      *) yes_no_prompt_invalid ;;
     esac
     echo
   done

-  if [ "$default_config_final" == 0 ]
-  then
+  if [ "$default_config_final" == 0 ]; then
     config_ok_final=0
-    while [ "$config_ok_final" == 0 ]
-    do
+    while [ "$config_ok_final" == 0 ]; do
       read -p "Database name: " dbname
       read -p "Username: " username
       read -p "Password: password"
@@ -52,13 +55,18 @@ ask_for_db_config() {
       #echo
       config_ok_valid=0
-      while [ "$config_ok_valid" == 0 ]
-      do
+      while [ "$config_ok_valid" == 0 ]; do
        read -p "Use this configuration (y/n)? " config_ok
        case "$config_ok" in
-         [yY]* ) config_ok_valid=1; config_ok_final=1;;
-         [nN]* ) config_ok_valid=1; config_ok_final=0;;
-         * ) yes_no_prompt_invalid;;
+         [yY]*)
+           config_ok_valid=1
+           config_ok_final=1
+           ;;
+         [nN]*)
+           config_ok_valid=1
+           config_ok_final=0
+           ;;
+         *) yes_no_prompt_invalid ;;
        esac
        echo
      done
@@ -73,4 +81,3 @@ psql -c "CREATE DATABASE $dbname WITH OWNER $username;" -U postgres
 export LEMMY_DATABASE_URL=postgres://$username:$password@localhost:$port/$dbname
 echo "The database URL is $LEMMY_DATABASE_URL"

View file

@@ -17,18 +17,22 @@ yes_no_prompt_invalid() {
 ask_to_init_db() {
   init_db_valid=0
   init_db_final=0
-  while [ "$init_db_valid" == 0 ]
-  do
+  while [ "$init_db_valid" == 0 ]; do
     read -p "Initialize database (y/n)? " init_db
     case "$init_db" in
-      [yY]* ) init_db_valid=1; init_db_final=1;;
-      [nN]* ) init_db_valid=1; init_db_final=0;;
-      * ) yes_no_prompt_invalid;;
+      [yY]*)
+        init_db_valid=1
+        init_db_final=1
+        ;;
+      [nN]*)
+        init_db_valid=1
+        init_db_final=0
+        ;;
+      *) yes_no_prompt_invalid ;;
     esac
     echo
   done

-  if [ "$init_db_final" = 1 ]
-  then
+  if [ "$init_db_final" = 1 ]; then
     source ./db-init.sh
     read -n 1 -s -r -p "Press ANY KEY to continue execution of this script, press CTRL+C to quit..."
     echo
@@ -38,20 +42,24 @@ ask_to_init_db() {
 ask_to_auto_reload() {
   auto_reload_valid=0
   auto_reload_final=0
-  while [ "$auto_reload_valid" == 0 ]
-  do
+  while [ "$auto_reload_valid" == 0 ]; do
     echo "Automagically reload the project when source files are changed?"
     echo "ONLY ENABLE THIS FOR DEVELOPMENT!"
     read -p "(y/n) " auto_reload
     case "$auto_reload" in
-      [yY]* ) auto_reload_valid=1; auto_reload_final=1;;
-      [nN]* ) auto_reload_valid=1; auto_reload_final=0;;
-      * ) yes_no_prompt_invalid;;
+      [yY]*)
+        auto_reload_valid=1
+        auto_reload_final=1
+        ;;
+      [nN]*)
+        auto_reload_valid=1
+        auto_reload_final=0
+        ;;
+      *) yes_no_prompt_invalid ;;
     esac
     echo
   done

-  if [ "$auto_reload_final" = 1 ]
-  then
+  if [ "$auto_reload_final" = 1 ]; then
     cd ui && pnpm dev
     cd server && cargo watch -x run
   fi
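This validate-a-yes/no-answer loop now appears three times across these scripts (database config, database init, auto-reload); a shared helper, purely a sketch and not part of this patch, could replace each copy:

    # Returns 0 for yes, 1 for no; reprompts via yes_no_prompt_invalid until valid.
    ask_yes_no() {
      while true; do
        read -p "$1 (y/n)? " answer
        case "$answer" in
          [yY]*) return 0 ;;
          [nN]*) return 1 ;;
          *) yes_no_prompt_invalid ;;
        esac
      done
    }

    # e.g.: if ask_yes_no "Initialize database"; then source ./db-init.sh; fi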

View file

@@ -2,13 +2,13 @@
 set -e

 declare -a arr=(
   "https://mastodon.social/"
   "https://peertube.social/"
   "https://lemmy.ml/"
   "https://lemmy.ml/feeds/all.xml"
   "https://lemmy.ml/.well-known/nodeinfo"
   "https://fediverse.blog/.well-known/nodeinfo"
   "https://torrents-csv.ml/service/search?q=wheel&page=1&type_=torrent"
 )

 ## check if ab installed
@@ -18,9 +18,8 @@ if ! [ -x "$(command -v ab)" ]; then
 fi

 ## now loop through the above array
-for i in "${arr[@]}"
-do
-  ab -c 10 -t 10 "$i" > out.abtest
+for i in "${arr[@]}"; do
+  ab -c 10 -t 10 "$i" >out.abtest
   grep "Server Hostname:" out.abtest
   grep "Document Path:" out.abtest
   grep "Requests per second" out.abtest

View file

@@ -6,13 +6,13 @@ set -e
 DOMAIN=${1:-"http://127.0.0.1:8536"}

 declare -a arr=(
   "/api/v1/site"
   "/api/v1/categories"
   "/api/v1/modlog"
   "/api/v1/search?q=test&type_=Posts&sort=Hot"
   "/api/v1/community"
   "/api/v1/community/list?sort=Hot"
   "/api/v1/post/list?sort=Hot&type_=All"
 )

 ## check if ab installed
@@ -22,13 +22,12 @@ if ! [ -x "$(command -v ab)" ]; then
 fi

 ## now loop through the above array
-for path in "${arr[@]}"
-do
+for path in "${arr[@]}"; do
   URL="$DOMAIN$path"
   printf "\n\n\n"
   echo "testing $URL"
   curl --show-error --fail --silent "$URL" >/dev/null
-  ab -c 64 -t 10 "$URL" > out.abtest
+  ab -c 64 -t 10 "$URL" >out.abtest
   grep "Server Hostname:" out.abtest
   grep "Document Path:" out.abtest
   grep "Requests per second" out.abtest

View file

@@ -1,3 +1,3 @@
 #!/bin/bash
-sudo docker exec -i docker-postgres-1 psql -Ulemmy -c "EXPLAIN (ANALYZE, COSTS, VERBOSE, BUFFERS, FORMAT JSON) SELECT post.id, post.name, post.url, post.body, post.creator_id, post.community_id, post.removed, post.locked, post.published, post.updated, post.deleted, post.nsfw, post.embed_title, post.embed_description, post.embed_video_url, post.thumbnail_url, post.ap_id, post.local, post.language_id, post.featured_community, post.featured_local, person.id, person.name, person.display_name, person.avatar, person.banned, person.published, person.updated, person.actor_id, person.bio, person.local, person.banner, person.deleted, person.inbox_url, person.shared_inbox_url, person.matrix_user_id, person.admin, person.bot_account, person.ban_expires, person.instance_id, community.id, community.name, community.title, community.description, community.removed, community.published, community.updated, community.deleted, community.nsfw, community.actor_id, community.local, community.icon, community.banner, community.hidden, community.posting_restricted_to_mods, community.instance_id, community_person_ban.id, community_person_ban.community_id, community_person_ban.person_id, community_person_ban.published, community_person_ban.expires, post_aggregates.id, post_aggregates.post_id, post_aggregates.comments, post_aggregates.score, post_aggregates.upvotes, post_aggregates.downvotes, post_aggregates.published, post_aggregates.newest_comment_time_necro, post_aggregates.newest_comment_time, post_aggregates.featured_community, post_aggregates.featured_local, community_follower.id, community_follower.community_id, community_follower.person_id, community_follower.published, community_follower.pending, post_saved.id, post_saved.post_id, post_saved.person_id, post_saved.published, post_read.id, post_read.post_id, post_read.person_id, post_read.published, person_block.id, person_block.person_id, person_block.target_id, person_block.published, post_like.score, coalesce((post_aggregates.comments - person_post_aggregates.read_comments), post_aggregates.comments) FROM ((((((((((((post INNER JOIN person ON (post.creator_id = person.id)) INNER JOIN community ON (post.community_id = community.id)) LEFT OUTER JOIN community_person_ban ON (((post.community_id = community_person_ban.community_id) AND (community_person_ban.person_id = post.creator_id)) AND ((community_person_ban.expires IS NULL) OR (community_person_ban.expires > CURRENT_TIMESTAMP)))) INNER JOIN post_aggregates ON (post_aggregates.post_id = post.id)) LEFT OUTER JOIN community_follower ON ((post.community_id = community_follower.community_id) AND (community_follower.person_id = '33517'))) LEFT OUTER JOIN post_saved ON ((post.id = post_saved.post_id) AND (post_saved.person_id = '33517'))) LEFT OUTER JOIN post_read ON ((post.id = post_read.post_id) AND (post_read.person_id = '33517'))) LEFT OUTER JOIN person_block ON ((post.creator_id = person_block.target_id) AND (person_block.person_id = '33517'))) LEFT OUTER JOIN community_block ON ((community.id = community_block.community_id) AND (community_block.person_id = '33517'))) LEFT OUTER JOIN post_like ON ((post.id = post_like.post_id) AND (post_like.person_id = '33517'))) LEFT OUTER JOIN person_post_aggregates ON ((post.id = person_post_aggregates.post_id) AND (person_post_aggregates.person_id = '33517'))) LEFT OUTER JOIN local_user_language ON ((post.language_id = local_user_language.language_id) AND (local_user_language.local_user_id = '11402'))) WHERE ((((((((((community_follower.person_id IS NOT NULL) AND (post.nsfw = 'f')) AND (community.nsfw = 'f')) AND (local_user_language.language_id IS NOT NULL)) AND (community_block.person_id IS NULL)) AND (person_block.person_id IS NULL)) AND (post.removed = 'f')) AND (post.deleted = 'f')) AND (community.removed = 'f')) AND (community.deleted = 'f')) ORDER BY post_aggregates.featured_local DESC , post_aggregates.hot_rank DESC LIMIT '40' OFFSET '0';" > query_results.json
+sudo docker exec -i docker-postgres-1 psql -Ulemmy -c "EXPLAIN (ANALYZE, COSTS, VERBOSE, BUFFERS, FORMAT JSON) SELECT post.id, post.name, post.url, post.body, post.creator_id, post.community_id, post.removed, post.locked, post.published, post.updated, post.deleted, post.nsfw, post.embed_title, post.embed_description, post.embed_video_url, post.thumbnail_url, post.ap_id, post.local, post.language_id, post.featured_community, post.featured_local, person.id, person.name, person.display_name, person.avatar, person.banned, person.published, person.updated, person.actor_id, person.bio, person.local, person.banner, person.deleted, person.inbox_url, person.shared_inbox_url, person.matrix_user_id, person.admin, person.bot_account, person.ban_expires, person.instance_id, community.id, community.name, community.title, community.description, community.removed, community.published, community.updated, community.deleted, community.nsfw, community.actor_id, community.local, community.icon, community.banner, community.hidden, community.posting_restricted_to_mods, community.instance_id, community_person_ban.id, community_person_ban.community_id, community_person_ban.person_id, community_person_ban.published, community_person_ban.expires, post_aggregates.id, post_aggregates.post_id, post_aggregates.comments, post_aggregates.score, post_aggregates.upvotes, post_aggregates.downvotes, post_aggregates.published, post_aggregates.newest_comment_time_necro, post_aggregates.newest_comment_time, post_aggregates.featured_community, post_aggregates.featured_local, community_follower.id, community_follower.community_id, community_follower.person_id, community_follower.published, community_follower.pending, post_saved.id, post_saved.post_id, post_saved.person_id, post_saved.published, post_read.id, post_read.post_id, post_read.person_id, post_read.published, person_block.id, person_block.person_id, person_block.target_id, person_block.published, post_like.score, coalesce((post_aggregates.comments - person_post_aggregates.read_comments), post_aggregates.comments) FROM ((((((((((((post INNER JOIN person ON (post.creator_id = person.id)) INNER JOIN community ON (post.community_id = community.id)) LEFT OUTER JOIN community_person_ban ON (((post.community_id = community_person_ban.community_id) AND (community_person_ban.person_id = post.creator_id)) AND ((community_person_ban.expires IS NULL) OR (community_person_ban.expires > CURRENT_TIMESTAMP)))) INNER JOIN post_aggregates ON (post_aggregates.post_id = post.id)) LEFT OUTER JOIN community_follower ON ((post.community_id = community_follower.community_id) AND (community_follower.person_id = '33517'))) LEFT OUTER JOIN post_saved ON ((post.id = post_saved.post_id) AND (post_saved.person_id = '33517'))) LEFT OUTER JOIN post_read ON ((post.id = post_read.post_id) AND (post_read.person_id = '33517'))) LEFT OUTER JOIN person_block ON ((post.creator_id = person_block.target_id) AND (person_block.person_id = '33517'))) LEFT OUTER JOIN community_block ON ((community.id = community_block.community_id) AND (community_block.person_id = '33517'))) LEFT OUTER JOIN post_like ON ((post.id = post_like.post_id) AND (post_like.person_id = '33517'))) LEFT OUTER JOIN person_post_aggregates ON ((post.id = person_post_aggregates.post_id) AND (person_post_aggregates.person_id = '33517'))) LEFT OUTER JOIN local_user_language ON ((post.language_id = local_user_language.language_id) AND (local_user_language.local_user_id = '11402'))) WHERE ((((((((((community_follower.person_id IS NOT NULL) AND (post.nsfw = 'f')) AND (community.nsfw = 'f')) AND (local_user_language.language_id IS NOT NULL)) AND (community_block.person_id IS NULL)) AND (person_block.person_id IS NULL)) AND (post.removed = 'f')) AND (post.deleted = 'f')) AND (community.removed = 'f')) AND (community.deleted = 'f')) ORDER BY post_aggregates.featured_local DESC , post_aggregates.hot_rank DESC LIMIT '40' OFFSET '0';" >query_results.json
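Because the plan is requested with FORMAT JSON and ANALYZE, the output carries a top-level "Execution Time" field that can be pulled out with jq (a sketch, assuming query_results.json holds just the JSON document and not psql's table decorations):

    jq '.[0]."Execution Time"' query_results.json # total execution time in ms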

View file

@@ -9,40 +9,40 @@ pushd reports
 PSQL_CMD="docker exec -i dev_postgres_1 psql -qAt -U lemmy"

-echo "explain (analyze, format json) select * from user_fast limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > user_fast.json
+echo "explain (analyze, format json) select * from user_fast limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >user_fast.json

-echo "explain (analyze, format json) select * from post_view where user_id is null order by hot_rank desc, published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_view.json
+echo "explain (analyze, format json) select * from post_view where user_id is null order by hot_rank desc, published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_view.json

-echo "explain (analyze, format json) select * from post_fast_view where user_id is null order by hot_rank desc, published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_fast_view.json
+echo "explain (analyze, format json) select * from post_fast_view where user_id is null order by hot_rank desc, published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_fast_view.json

-echo "explain (analyze, format json) select * from comment_view where user_id is null limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > comment_view.json
+echo "explain (analyze, format json) select * from comment_view where user_id is null limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >comment_view.json

-echo "explain (analyze, format json) select * from comment_fast_view where user_id is null limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > comment_fast_view.json
+echo "explain (analyze, format json) select * from comment_fast_view where user_id is null limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >comment_fast_view.json

-echo "explain (analyze, format json) select * from community_view where user_id is null order by hot_rank desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > community_view.json
+echo "explain (analyze, format json) select * from community_view where user_id is null order by hot_rank desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >community_view.json

-echo "explain (analyze, format json) select * from community_fast_view where user_id is null order by hot_rank desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > community_fast_view.json
+echo "explain (analyze, format json) select * from community_fast_view where user_id is null order by hot_rank desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >community_fast_view.json

-echo "explain (analyze, format json) select * from site_view limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > site_view.json
+echo "explain (analyze, format json) select * from site_view limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >site_view.json

-echo "explain (analyze, format json) select * from reply_fast_view where user_id = 34 and recipient_id = 34 limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > reply_fast_view.json
+echo "explain (analyze, format json) select * from reply_fast_view where user_id = 34 and recipient_id = 34 limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >reply_fast_view.json

-echo "explain (analyze, format json) select * from user_mention_view where user_id = 34 and recipient_id = 34 limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > user_mention_view.json
+echo "explain (analyze, format json) select * from user_mention_view where user_id = 34 and recipient_id = 34 limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >user_mention_view.json

-echo "explain (analyze, format json) select * from user_mention_fast_view where user_id = 34 and recipient_id = 34 limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > user_mention_fast_view.json
+echo "explain (analyze, format json) select * from user_mention_fast_view where user_id = 34 and recipient_id = 34 limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >user_mention_fast_view.json

-grep "Execution Time" *.json > ../timings-`date +%Y-%m-%d_%H-%M-%S`.out
+grep "Execution Time" *.json >../timings-$(date +%Y-%m-%d_%H-%M-%S).out

 rm explain.sql

View file

@@ -7,49 +7,49 @@ pushd reports
 PSQL_CMD="docker exec -i dev_postgres_1 psql -qAt -U lemmy"

-echo "explain (analyze, format json) select * from user_ limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > user_.json
+echo "explain (analyze, format json) select * from user_ limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >user_.json

-echo "explain (analyze, format json) select * from post p limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post.json
+echo "explain (analyze, format json) select * from post p limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by hot_rank(pa.score, pa.published) desc, pa.published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_rank.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by hot_rank(pa.score, pa.published) desc, pa.published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_rank.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, hot_rank(pa.score, pa.published) desc, pa.published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_stickied_then_rank.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, hot_rank(pa.score, pa.published) desc, pa.published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_stickied_then_rank.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.score desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_score.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.score desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_score.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, pa.score desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_stickied_then_score.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, pa.score desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_stickied_then_score.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_published.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_published.json

-echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, pa.published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > post_ordered_by_stickied_then_published.json
+echo "explain (analyze, format json) select * from post p, post_aggregates pa where p.id = pa.post_id order by pa.stickied desc, pa.published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >post_ordered_by_stickied_then_published.json

-echo "explain (analyze, format json) select * from comment limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > comment.json
+echo "explain (analyze, format json) select * from comment limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >comment.json

-echo "explain (analyze, format json) select * from community limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > community.json
+echo "explain (analyze, format json) select * from community limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >community.json

-echo "explain (analyze, format json) select * from community c, community_aggregates ca where c.id = ca.community_id order by hot_rank(ca.subscribers, ca.published) desc, ca.published desc limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > community_ordered_by_subscribers.json
+echo "explain (analyze, format json) select * from community c, community_aggregates ca where c.id = ca.community_id order by hot_rank(ca.subscribers, ca.published) desc, ca.published desc limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >community_ordered_by_subscribers.json

-echo "explain (analyze, format json) select * from site s" > explain.sql
-cat explain.sql | $PSQL_CMD > site.json
+echo "explain (analyze, format json) select * from site s" >explain.sql
+cat explain.sql | $PSQL_CMD >site.json

-echo "explain (analyze, format json) select * from user_mention limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > user_mention.json
+echo "explain (analyze, format json) select * from user_mention limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >user_mention.json

-echo "explain (analyze, format json) select * from private_message limit 100" > explain.sql
-cat explain.sql | $PSQL_CMD > private_message.json
+echo "explain (analyze, format json) select * from private_message limit 100" >explain.sql
+cat explain.sql | $PSQL_CMD >private_message.json

-grep "Execution Time" *.json > ../timings-`date +%Y-%m-%d_%H-%M-%S`.out
+grep "Execution Time" *.json >../timings-$(date +%Y-%m-%d_%H-%M-%S).out

 rm explain.sql
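Both timing scripts repeat the same write-explain.sql-then-pipe-through-psql pattern for every view; a tiny helper (again only a sketch, not part of this patch) would collapse each pair of lines into one:

    explain_json() { # usage: explain_json <query> <outfile>
      echo "explain (analyze, format json) $1" | $PSQL_CMD >"$2"
    }
    explain_json "select * from post p limit 100" post.json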

View file

@@ -12,13 +12,11 @@ export LEMMY_DATABASE_URL=$DATABASE_URL
 export PGDATABASE=lemmy

 # If cluster exists, stop the server and delete the cluster
-if [[ -d $PGDATA ]]
-then
+if [[ -d $PGDATA ]]; then
   # Only stop server if it is running
   pg_status_exit_code=0
-  (pg_ctl status > /dev/null) || pg_status_exit_code=$?
-  if [[ ${pg_status_exit_code} -ne 3 ]]
-  then
+  (pg_ctl status >/dev/null) || pg_status_exit_code=$?
+  if [[ ${pg_status_exit_code} -ne 3 ]]; then
     pg_ctl stop --silent
   fi

View file

@@ -3,4 +3,4 @@ set -e
 dest=${1-config/defaults.hjson}

-cargo run --manifest-path crates/utils/Cargo.toml --features full > "$dest"
+cargo run --manifest-path crates/utils/Cargo.toml --features full >"$dest"