Improve migration diff check and fix old migrations (#5204)

* Update schema.rs

* rename

* stuff

* finish new implementation of schema_setup::run (not including revert, test, etc.)

* fmt

* refactor

* fix sql

* migration run command

* use trigger on migrations table

* add Options with disable_migrations field for test

* rename to enable_forbid_diesel_cli_trigger

* fix

* fix merge

* diff_checker (partial)

* Revert "diff_checker (partial)"

This reverts commit 6709882e14.

* Revert "Revert "diff_checker (partial)""

This reverts commit d4bdda5d11.

* diff check

* improve schema diff

* timestamp replacement

* ignore column order

* remove fedi_name default

* stuff

* improve diff

* stuff

* attempt parallel pg_dump

* attempt 2

* Revert "attempt 2"

This reverts commit a909d2d643.

* Revert "attempt parallel pg_dump"

This reverts commit 592a127954.

* improve diff check

* finish fixing migrations

* stuff

* use advisory lock

* stuff

* Update lib.rs

* fmt

* fmt

* clippy

* Update diff_check.rs

* Update .woodpecker.yml

* Update lib.rs

* Update lib.rs

* Update lib.rs

* Update .woodpecker.yml

* Update .woodpecker.yml

* Update lib.rs

* re-run ci

* fmt

* fmt

* Update .woodpecker.yml

* Update .woodpecker.yml

* create separate database in ci

* Update .woodpecker.yml

* Update .woodpecker.yml

* Update .woodpecker.yml

* Update .woodpecker.yml

* try to fix env var

* Update diff_check.rs

* Remove condition that's not needed anymore

* clippy

* exclude views and fast tables

* revert some migration changes

* fix

* fmt

* re-attempt checking character after skipped trigger name, and make code less confusing

* fmt

* fix

* rerun ci

* rerun ci

* fix strip_prefix order

* fix weird big Cargo.lock change by running `git checkout upstream/main Cargo.lock` then letting it auto update again

* fix

* remove installation commands that were removed in main branch

* Revert "remove installation commands that were removed in main branch"

This reverts commit fd65234a76.

* move create_database_user woodpecker step to make diff less weird

* Fix migration duplication caused by merge

* optimize

* Merge remote-tracking branch 'upstream/main' into new-migration-diff-check

* fmt

* Revert "fmt"

This reverts commit 7d5652945f.

* Revert "Merge remote-tracking branch 'upstream/main' into new-migration-diff-check"

This reverts commit 28bb1c7497.

* fmt

* move diff dependency to db_schema_file

* stuff

* revert some migration changes

* stuff

* refactor pair selection

* fix all migrations

* clippy

* stuff

* remove thing from removed dbg

* remove use of BTreeSet::difference

* trim_matching_chunks_at_beginning_and_end

* use sorted_unstable itertools method

* display_diffs

* use aho-corasick for chunk filtering

* fsync=off

* remove some unimportant optimizations

* Revert "remove some unimportant optimizations"

This reverts commit f8d88ddcce.

* Revert "use aho-corasick for chunk filtering"

This reverts commit 6b1f3e4416.

* remove some unimportant optimizations

* refactor diff_check.rs

* clippers
This commit is contained in:
dullbananas 2025-06-16 08:35:41 -07:00 committed by GitHub
parent d1aed75956
commit bb63eaa794
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
34 changed files with 332 additions and 71 deletions

62
Cargo.lock generated
View file

@ -1378,6 +1378,18 @@ dependencies = [
"uuid",
]
[[package]]
name = "deprecate-until"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a3767f826efbbe5a5ae093920b58b43b01734202be697e1354914e862e8e704"
dependencies = [
"proc-macro2",
"quote",
"semver",
"syn 2.0.102",
]
[[package]]
name = "der"
version = "0.7.10"
@ -1611,19 +1623,6 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "diffutils"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8d7ce619b5c0e13f7543dc2c203a7e6fa37e0111d876339aada7ec9540a58d5"
dependencies = [
"chrono",
"diff",
"regex",
"same-file",
"unicode-width 0.1.14",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -2402,7 +2401,7 @@ dependencies = [
"html5ever 0.31.0",
"tendril",
"thiserror 2.0.12",
"unicode-width 0.2.1",
"unicode-width",
]
[[package]]
@ -2901,6 +2900,15 @@ dependencies = [
"generic-array",
]
[[package]]
name = "integer-sqrt"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770"
dependencies = [
"num-traits",
]
[[package]]
name = "invisible-characters"
version = "0.1.3"
@ -3377,8 +3385,10 @@ dependencies = [
"diesel-derive-enum",
"diesel_ltree",
"diesel_migrations",
"diffutils",
"diff",
"itertools 0.14.0",
"lemmy_utils",
"pathfinding",
"serde",
"serial_test",
"strum",
@ -4680,6 +4690,20 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pathfinding"
version = "4.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59ac35caa284c08f3721fb33c2741b5f763decaf42d080c8a6a722154347017e"
dependencies = [
"deprecate-until",
"indexmap 2.9.0",
"integer-sqrt",
"num-traits",
"rustc-hash 2.1.1",
"thiserror 2.0.12",
]
[[package]]
name = "pem"
version = "3.0.5"
@ -6939,12 +6963,6 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.1"
@ -7546,7 +7564,7 @@ dependencies = [
"bumpalo",
"leb128fmt",
"memchr",
"unicode-width 0.2.1",
"unicode-width",
"wasm-encoder 0.233.0",
]

View file

@ -45,4 +45,6 @@ tracing = { workspace = true, optional = true }
[dev-dependencies]
serial_test = { workspace = true }
diffutils = "0.4.2"
diff = "0.1.13"
itertools = { workspace = true }
pathfinding = "4.14.0"

View file

@ -1,11 +1,18 @@
#![cfg(test)]
#![expect(clippy::expect_used)]
use itertools::Itertools;
use lemmy_utils::settings::SETTINGS;
use std::process::{Command, Stdio};
use pathfinding::matrix::Matrix;
use std::{
borrow::Cow,
process::{Command, Stdio},
};
// It's not possible to call `export_snapshot()` for each dump and run the dumps in parallel with
// the `--snapshot` flag. Don't waste your time!!!!
/// Returns almost all things currently in the database, represented as SQL statements that would
/// recreate them.
pub fn get_dump() -> String {
let db_url = SETTINGS.get_database_url();
let output = Command::new("pg_dump")
@ -13,12 +20,22 @@ pub fn get_dump() -> String {
// Specify database URL
"--dbname",
&db_url,
// Disable some things
// Allow differences in row data and old fast tables
"--schema-only",
"--exclude-table=comment_aggregates_fast",
"--exclude-table=community_aggregates_fast",
"--exclude-table=post_aggregates_fast",
"--exclude-table=user_fast",
// Ignore some things to reduce the amount of queries done by pg_dump
"--no-owner",
"--no-privileges",
"--no-comments",
"--no-publications",
"--no-security-labels",
"--no-subscriptions",
"--no-table-access-method",
"--schema-only",
"--no-sync",
"--no-tablespaces",
"--no-large-objects",
])
.stderr(Stdio::inherit())
.output()
@ -30,12 +47,166 @@ pub fn get_dump() -> String {
String::from_utf8(output.stdout).expect("pg_dump output is not valid UTF-8 text")
}
pub fn check_dump_diff(before: String, after: String, label: &str) {
if before != after {
let diff_bytes =
diffutilslib::unified_diff(before.as_bytes(), after.as_bytes(), &Default::default());
let diff = String::from_utf8_lossy(&diff_bytes);
/// Checks dumps returned by [`get_dump`] and panics if they differ in a way that indicates a
/// mistake in whatever was run in between the dumps.
///
/// The panic message shows `label_of_change_from_0_to_1` and a diff from `dumps[0]` to `dumps[1]`.
/// For example, if something only exists in `dumps[1]`, then the diff represents the addition of
/// that thing.
///
/// `label_of_change_from_0_to_1` must say something about the change from `dumps[0]` to `dumps[1]`,
/// not `dumps[1]` to `dumps[0]`. This requires the two `dumps` elements being in an order that fits
/// with `label_of_change_from_0_to_1`. This does not necessarily match the order in which the dumps
/// were created.
pub fn check_dump_diff(dumps: [&str; 2], label_of_change_from_0_to_1: &str) {
let [sorted_statements_in_0, sorted_statements_in_1] = dumps.map(|dump| {
dump
.split("\n\n")
.map(str::trim_start)
.filter(|&chunk| !(is_ignored_trigger(chunk) || is_view(chunk) || is_comment(chunk)))
.map(remove_ignored_uniqueness_from_statement)
.sorted_unstable()
.collect::<Vec<_>>()
});
let mut statements_only_in_0 = Vec::new();
let mut statements_only_in_1 = Vec::new();
for diff in diff::slice(&sorted_statements_in_0, &sorted_statements_in_1) {
match diff {
diff::Result::Left(statement) => statements_only_in_0.push(&**statement),
diff::Result::Right(statement) => statements_only_in_1.push(&**statement),
diff::Result::Both(_, _) => {}
}
}
panic!("{label}\n\n{diff}");
if !(statements_only_in_0.is_empty() && statements_only_in_1.is_empty()) {
panic!(
"{label_of_change_from_0_to_1}\n\n{}",
select_pairs([&statements_only_in_0, &statements_only_in_1])
.flat_map(|pair| {
display_change(pair).chain(["\n"]) // Blank line after each chunk diff
})
.collect::<String>()
);
}
}
/// Returns true when `chunk` is a `CREATE FUNCTION` or `CREATE TRIGGER` statement for one of the
/// old "refresh_*" triggers, which are deliberately excluded from the dump diff.
fn is_ignored_trigger(chunk: &str) -> bool {
  const TRIGGER_NAMES: [&str; 9] = [
    "refresh_comment_like",
    "refresh_comment",
    "refresh_community_follower",
    "refresh_community_user_ban",
    "refresh_community",
    "refresh_post_like",
    "refresh_post",
    "refresh_private_message",
    "refresh_user",
  ];
  // Each pattern is (statement prefix, character expected right after the trigger name). The
  // trailing character check prevents a name like "refresh_comment" from matching
  // "refresh_comment_like"-style longer identifiers.
  const PATTERNS: [(&str, char); 2] = [("CREATE FUNCTION public.", '('), ("CREATE TRIGGER ", ' ')];
  TRIGGER_NAMES.iter().any(|trigger_name| {
    PATTERNS.iter().any(|&(before, after)| {
      let Some(rest) = chunk.strip_prefix(before) else {
        return false;
      };
      match rest.strip_prefix(trigger_name) {
        Some(tail) => tail.starts_with(after),
        None => false,
      }
    })
  })
}
/// Returns true when `chunk` is a statement that creates a (possibly materialized) view.
fn is_view(chunk: &str) -> bool {
  chunk.starts_with("CREATE VIEW ")
    || chunk.starts_with("CREATE OR REPLACE VIEW ")
    || chunk.starts_with("CREATE MATERIALIZED VIEW ")
}
/// Returns true when every line of `s` is an SQL line comment.
///
/// `lines()` yields nothing for an empty string, so an empty chunk also counts as a comment.
fn is_comment(s: &str) -> bool {
  !s.lines().any(|line| !line.starts_with("--"))
}
/// Normalizes one SQL statement so irrelevant differences don't show up in the dump diff.
///
/// For `CREATE TABLE` statements, the column lines are sorted so differences in column order are
/// ignored. Every other statement is returned unchanged (borrowed, to avoid allocating).
fn remove_ignored_uniqueness_from_statement(statement: &str) -> Cow<'_, str> {
  if !statement.starts_with("CREATE TABLE ") {
    return Cow::Borrowed(statement);
  }
  // Trailing commas are stripped so the last column line (which has none) compares consistently
  // with the others regardless of where sorting places it.
  let mut lines = statement
    .lines()
    .map(|line| line.strip_suffix(',').unwrap_or(line))
    .collect::<Vec<_>>();
  sort_within_sections(&mut lines, |line| {
    match line.chars().next() {
      // CREATE
      Some('C') => 0,
      // Indented column name
      Some(' ') => 1,
      // End of column list
      Some(')') => 2,
      _ => panic!("unrecognized part of `CREATE TABLE` statement: {line}"),
    }
  });
  Cow::Owned(lines.join("\n"))
}
/// Sorts `vec` so items are grouped by ascending `section` number, and ordered by `Ord` within
/// each section.
fn sort_within_sections<T: Ord + ?Sized>(vec: &mut [&T], mut section: impl FnMut(&T) -> u8) {
  vec.sort_unstable_by(|&x, &y| section(x).cmp(&section(y)).then_with(|| x.cmp(y)));
}
/// For each string in list 0, makes a guess of which string in list 1 is a variant of it (or vice
/// versa).
///
/// Pads the shorter list with `""` so both sides have equal length, then solves a minimum-cost
/// assignment over the two lists (via `kuhn_munkres_min`), where the cost of pairing two strings
/// is their character-level diff size from [`amount_of_difference_between`]. A string paired
/// with `""` has no plausible partner in the other list.
fn select_pairs<'a>([a, b]: [&'a [&'a str]; 2]) -> impl Iterator<Item = [&'a str; 2]> {
  let len = std::cmp::max(a.len(), b.len());
  // Out-of-bounds indexes (possible for the shorter list) map to an empty string.
  let get_candidate_pair_at =
    |(row, column)| [a.get(row), b.get(column)].map(|item| *item.unwrap_or(&""));
  // Cost matrix: entry (row, column) is the diff size between a[row] and b[column].
  let difference_amounts = Matrix::from_fn(len, len, |position| {
    amount_of_difference_between(get_candidate_pair_at(position))
  });
  // `.1` of the result is, for each row, the column chosen by the minimal assignment.
  pathfinding::kuhn_munkres::kuhn_munkres_min(&difference_amounts)
    .1
    .into_iter()
    .enumerate()
    .map(get_candidate_pair_at)
}
/// Computes string distance, using the already required [`diff`] crate to avoid adding another
/// dependency.
///
/// The distance is the number of characters that appear in only one of the two strings according
/// to a character-level diff; characters common to both cost nothing. `saturating_add` keeps the
/// count from overflowing `isize` on pathologically large inputs.
fn amount_of_difference_between([a, b]: [&str; 2]) -> isize {
  diff::chars(a, b)
    .into_iter()
    // Keep only insertions and deletions.
    .filter(|i| !matches!(i, diff::Result::Both(_, _)))
    .fold(0, |count, _| count.saturating_add(1))
}
/// Returns a string representation of the change from string 0 to string 1.
///
/// Emits unified-diff style output from a line-level diff: each diffed line becomes three
/// fragments — a marker prefix ("- " for removals, "+ " for additions, a space for unchanged
/// lines), the line text itself, and a newline.
fn display_change([before, after]: [&str; 2]) -> impl Iterator<Item = &str> {
  diff::lines(before, after)
    .into_iter()
    .flat_map(|line| match line {
      diff::Result::Left(s) => ["- ", s, "\n"],
      diff::Result::Right(s) => ["+ ", s, "\n"],
      diff::Result::Both(s, _) => [" ", s, "\n"],
    })
}
// `#[cfg(test)]` would be redundant here, since the whole file is already `#![cfg(test)]`
mod tests {
  #[test]
  fn test_select_pairs() {
    // Lists of unequal length are used on purpose: `z` has no counterpart, so `select_pairs`
    // must pair it with the empty-string padding.
    let x = "Cupcake";
    let x_variant = "Cupcaaaaake";
    let y = "eee";
    let y_variant = "ee";
    let z = "bruh";
    assert_eq!(
      super::select_pairs([&[x, y, z], &[y_variant, x_variant]]).collect::<Vec<_>>(),
      vec![[x, x_variant], [y, y_variant], [z, ""]]
    );
  }
}

View file

@ -51,7 +51,7 @@ const REPLACEABLE_SCHEMA_PATH: &str = "crates/db_schema/replaceable_schema";
struct MigrationHarnessWrapper<'a> {
conn: &'a mut PgConnection,
#[cfg(test)]
diff_checked_migration_name: Option<String>,
enable_diff_check: bool,
options: &'a Options,
}
@ -80,7 +80,7 @@ impl MigrationHarness<Pg> for MigrationHarnessWrapper<'_> {
migration: &dyn Migration<Pg>,
) -> diesel::migration::Result<MigrationVersion<'static>> {
#[cfg(test)]
if self.diff_checked_migration_name == Some(migration.name().to_string()) {
if self.enable_diff_check {
let before = diff_check::get_dump();
self.run_migration_inner(migration)?;
@ -89,8 +89,7 @@ impl MigrationHarness<Pg> for MigrationHarnessWrapper<'_> {
let after = diff_check::get_dump();
diff_check::check_dump_diff(
after,
before,
[&after, &before],
&format!(
"These changes need to be applied in migrations/{}/down.sql:",
migration.name()
@ -234,7 +233,7 @@ pub fn run(options: Options) -> LemmyResult<Branch> {
let after = diff_check::get_dump();
diff_check::check_dump_diff(before, after, "The code in crates/db_schema/replaceable_schema incorrectly created or modified things outside of the `r` schema, causing these changes to be left behind after dropping the schema:");
diff_check::check_dump_diff([&before, &after], "The code in crates/db_schema/replaceable_schema incorrectly created or modified things outside of the `r` schema, causing these changes to be left behind after dropping the schema:");
}
run_replaceable_schema(&mut conn)?;
@ -284,17 +283,7 @@ fn run_selected_migrations(
conn,
options,
#[cfg(test)]
diff_checked_migration_name: options
.enable_diff_check
.then(|| diesel::migration::MigrationSource::<Pg>::migrations(&migrations()))
.transpose()?
// Get the migration with the highest version
.and_then(|migrations| {
migrations
.into_iter()
.map(|migration| migration.name().to_string())
.max()
}),
enable_diff_check: options.enable_diff_check,
};
if options.revert {
@ -344,8 +333,7 @@ mod tests {
// Start with consistent state by dropping everything
conn.batch_execute("DROP OWNED BY CURRENT_USER;")?;
// Run all migrations, make sure the newest migration can be redone, and check the newest
// down.sql file
// Run all migrations, and make sure that changes can be correctly reverted
assert_eq!(run(o.run().enable_diff_check())?, ReplaceableSchemaRebuilt);
// Check for early return

View file

@ -5,6 +5,8 @@ ALTER TABLE user_
ADD COLUMN fedi_name varchar(40) NOT NULL DEFAULT 'http://fake.com';
ALTER TABLE user_
-- Default is only for existing rows
ALTER COLUMN fedi_name DROP DEFAULT,
ADD CONSTRAINT user__name_fedi_name_key UNIQUE (name, fedi_name);
-- Community

View file

@ -493,3 +493,5 @@ SELECT
FROM
post_aggregates_fast pav;
CREATE INDEX idx_post_aggregates_fast_hot_rank_published ON post_aggregates_fast (hot_rank DESC, published DESC);

View file

@ -1,5 +1,5 @@
ALTER TABLE activity
ADD COLUMN user_id integer;
ADD COLUMN user_id integer REFERENCES user_ ON UPDATE CASCADE ON DELETE CASCADE NOT NULL;
ALTER TABLE activity
DROP COLUMN sensitive;

View file

@ -34,3 +34,9 @@ INSERT INTO category (name)
ALTER TABLE community
ADD category_id int REFERENCES category ON UPDATE CASCADE ON DELETE CASCADE NOT NULL DEFAULT 1;
-- Default is only for existing rows
ALTER TABLE community
ALTER COLUMN category_id DROP DEFAULT;
CREATE INDEX idx_community_category ON community (category_id);

View file

@ -229,7 +229,7 @@ ALTER SEQUENCE person_id_seq
-- Add the columns back in
ALTER TABLE user_
ADD COLUMN password_encrypted text NOT NULL DEFAULT 'changeme',
ADD COLUMN email text,
ADD COLUMN email text UNIQUE,
ADD COLUMN admin boolean DEFAULT FALSE NOT NULL,
ADD COLUMN show_nsfw boolean DEFAULT FALSE NOT NULL,
ADD COLUMN theme character varying(20) DEFAULT 'darkly'::character varying NOT NULL,
@ -238,7 +238,11 @@ ALTER TABLE user_
ADD COLUMN lang character varying(20) DEFAULT 'browser'::character varying NOT NULL,
ADD COLUMN show_avatars boolean DEFAULT TRUE NOT NULL,
ADD COLUMN send_notifications_to_email boolean DEFAULT FALSE NOT NULL,
ADD COLUMN matrix_user_id text;
ADD COLUMN matrix_user_id text UNIQUE;
-- Default is only for existing rows
ALTER TABLE user_
ALTER COLUMN password_encrypted DROP DEFAULT;
-- Update the user_ table with the local_user data
UPDATE
@ -260,6 +264,8 @@ FROM
WHERE
lu.person_id = u.id;
CREATE UNIQUE INDEX idx_user_email_lower ON user_ (lower(email));
CREATE VIEW user_alias_1 AS
SELECT
*

View file

@ -1,2 +1,4 @@
DROP TABLE secret;
DROP EXTENSION pgcrypto;

View file

@ -1,6 +1,6 @@
ALTER TABLE post
DROP COLUMN embed_url;
DROP COLUMN embed_video_url;
ALTER TABLE post
ADD COLUMN embed_video_url text;
ADD COLUMN embed_html text;

View file

@ -116,6 +116,10 @@ FROM
WHERE
c.id = ct.id;
-- Without this, `DROP EXTENSION` in down.sql throws an object dependency error if up.sql and down.sql
-- are run in the same database connection
DROP TABLE comment_temp;
-- Update the child counts
UPDATE
comment_aggregates ca

View file

@ -6,7 +6,7 @@ ALTER TABLE site
ADD COLUMN community_creation_admin_only boolean DEFAULT FALSE NOT NULL,
ADD COLUMN require_email_verification boolean DEFAULT FALSE NOT NULL,
ADD COLUMN require_application boolean DEFAULT TRUE NOT NULL,
ADD COLUMN application_question text DEFAULT 'to verify that you are human, please explain why you want to create an account on this site'::text,
ADD COLUMN application_question text DEFAULT 'To verify that you are human, please explain why you want to create an account on this site'::text,
ADD COLUMN private_instance boolean DEFAULT FALSE NOT NULL,
ADD COLUMN default_theme text DEFAULT 'browser'::text NOT NULL,
ADD COLUMN default_post_listing_type text DEFAULT 'Local'::text NOT NULL,

View file

@ -65,3 +65,15 @@ CREATE TRIGGER post_aggregates_stickied
WHEN (OLD.stickied IS DISTINCT FROM NEW.stickied)
EXECUTE PROCEDURE post_aggregates_stickied ();
CREATE INDEX idx_post_aggregates_stickied_newest_comment_time ON post_aggregates (stickied DESC, newest_comment_time DESC);
CREATE INDEX idx_post_aggregates_stickied_comments ON post_aggregates (stickied DESC, comments DESC);
CREATE INDEX idx_post_aggregates_stickied_hot ON post_aggregates (stickied DESC, hot_rank (score, published) DESC, published DESC);
CREATE INDEX idx_post_aggregates_stickied_active ON post_aggregates (stickied DESC, hot_rank (score, newest_comment_time_necro) DESC, newest_comment_time_necro DESC);
CREATE INDEX idx_post_aggregates_stickied_score ON post_aggregates (stickied DESC, score DESC);
CREATE INDEX idx_post_aggregates_stickied_published ON post_aggregates (stickied DESC, published DESC);

View file

@ -8,7 +8,7 @@ CREATE INDEX idx_post_aggregates_comments ON post_aggregates (comments DESC);
CREATE INDEX idx_post_aggregates_hot ON post_aggregates (hot_rank (score, published) DESC, published DESC);
CREATE INDEX idx_post_aggregates_active ON post_aggregates (hot_rank (score, newest_comment_time) DESC, newest_comment_time DESC);
CREATE INDEX idx_post_aggregates_active ON post_aggregates (hot_rank (score, newest_comment_time_necro) DESC, newest_comment_time_necro DESC);
CREATE INDEX idx_post_aggregates_score ON post_aggregates (score DESC);

View file

@ -1,3 +1,3 @@
ALTER TABLE local_site
ADD COLUMN federation_debug int DEFAULT 0;
ADD COLUMN federation_debug boolean DEFAULT FALSE NOT NULL;

View file

@ -117,7 +117,7 @@ ALTER TABLE local_site
ALTER COLUMN default_post_listing_type TYPE text;
ALTER TABLE local_site
ALTER COLUMN default_post_listing_type SET DEFAULT 1;
ALTER COLUMN default_post_listing_type SET DEFAULT 'Local';
-- Drop the types
DROP TYPE listing_type_enum;

View file

@ -1,3 +1,6 @@
ALTER TABLE local_user
ALTER default_sort_type DROP DEFAULT;
-- update the default sort type
UPDATE
local_user
@ -29,6 +32,9 @@ ALTER TABLE local_user
ALTER COLUMN default_sort_type TYPE sort_type_enum
USING default_sort_type::text::sort_type_enum;
ALTER TABLE local_user
ALTER default_sort_type SET DEFAULT 'Active';
-- drop the old enum
DROP TYPE sort_type_enum__;

View file

@ -1,3 +1,6 @@
ALTER TABLE local_user
ALTER default_sort_type DROP DEFAULT;
-- update the default sort type
UPDATE
local_user
@ -32,6 +35,9 @@ ALTER TABLE local_user
ALTER COLUMN default_sort_type TYPE sort_type_enum
USING default_sort_type::text::sort_type_enum;
ALTER TABLE local_user
ALTER default_sort_type SET DEFAULT 'Active';
-- drop the old enum
DROP TYPE sort_type_enum__;

View file

@ -26,3 +26,5 @@ DROP TABLE sent_activity;
DROP TABLE received_activity;
CREATE UNIQUE INDEX idx_activity_ap_id ON activity (ap_id);

View file

@ -6,3 +6,5 @@ DROP INDEX idx_person_trigram;
DROP INDEX idx_community_trigram;
DROP EXTENSION pg_trgm;

View file

@ -14,3 +14,7 @@ WHERE
ALTER TABLE local_user
DROP COLUMN admin;
CREATE INDEX idx_person_admin ON person (admin)
WHERE
admin;

View file

@ -328,7 +328,9 @@ ALTER TABLE captcha_answer
ALTER COLUMN published TYPE timestamp
USING published;
CREATE OR REPLACE FUNCTION hot_rank (score numeric, published timestamp without time zone)
DROP FUNCTION hot_rank;
CREATE FUNCTION hot_rank (score numeric, published timestamp without time zone)
RETURNS integer
AS $$
DECLARE

View file

@ -85,3 +85,18 @@ ALTER TABLE local_user
-- drop the old enum
DROP TYPE sort_type_enum__;
-- Remove int to float conversions that were automatically added to index filters
DROP INDEX idx_comment_aggregates_nonzero_hotrank, idx_community_aggregates_nonzero_hotrank, idx_post_aggregates_nonzero_hotrank;
CREATE INDEX idx_community_aggregates_nonzero_hotrank ON community_aggregates (published)
WHERE
hot_rank != 0;
CREATE INDEX idx_comment_aggregates_nonzero_hotrank ON comment_aggregates (published)
WHERE
hot_rank != 0;
CREATE INDEX idx_post_aggregates_nonzero_hotrank ON post_aggregates (published DESC)
WHERE
hot_rank != 0 OR hot_rank_active != 0;

View file

@ -1,5 +1,5 @@
DROP TABLE login_token;
ALTER TABLE local_user
ADD COLUMN validator_time timestamp NOT NULL DEFAULT now();
ADD COLUMN validator_time timestamptz NOT NULL DEFAULT now();

View file

@ -1,3 +1,3 @@
ALTER TABLE mod_remove_community
ADD COLUMN expires timestamp;
ADD COLUMN expires timestamptz;

View file

@ -148,7 +148,7 @@ ALTER TABLE post_read
ALTER TABLE received_activity
ADD UNIQUE (ap_id),
DROP CONSTRAINT received_activity_pkey,
ADD COLUMN id serial PRIMARY KEY;
ADD COLUMN id bigserial PRIMARY KEY;
CREATE INDEX idx_post_saved_person_id ON post_saved (person_id);

View file

@ -945,6 +945,13 @@ CREATE TRIGGER site_aggregates_comment_insert
WHEN ((new.local = TRUE))
EXECUTE FUNCTION site_aggregates_comment_insert ();
CREATE TRIGGER site_aggregates_community_delete
AFTER DELETE OR UPDATE OF removed,
deleted ON community
FOR EACH ROW
WHEN (OLD.local = TRUE)
EXECUTE PROCEDURE site_aggregates_community_delete ();
CREATE TRIGGER site_aggregates_community_insert
AFTER INSERT OR UPDATE OF removed,
deleted ON community

View file

@ -1,5 +1,5 @@
ALTER TABLE custom_emoji
ADD COLUMN local_site_id int REFERENCES local_site (site_id) ON UPDATE CASCADE ON DELETE CASCADE;
ADD COLUMN local_site_id int REFERENCES local_site ON UPDATE CASCADE ON DELETE CASCADE;
UPDATE
custom_emoji
@ -15,7 +15,7 @@ ALTER TABLE custom_emoji
ALTER COLUMN local_site_id SET NOT NULL;
ALTER TABLE tagline
ADD COLUMN local_site_id int REFERENCES local_site (site_id) ON UPDATE CASCADE ON DELETE CASCADE;
ADD COLUMN local_site_id int REFERENCES local_site ON UPDATE CASCADE ON DELETE CASCADE;
UPDATE
tagline

View file

@ -1,5 +1,5 @@
ALTER TABLE local_site
ADD COLUMN enable_nsfw boolean NOT NULL DEFAULT FALSE;
ADD COLUMN enable_nsfw boolean NOT NULL DEFAULT TRUE;
UPDATE
local_site

View file

@ -1,5 +1,5 @@
ALTER TABLE comment_like
ADD COLUMN post_id int;
ADD COLUMN post_id int REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE NOT NULL;
UPDATE
comment_like
@ -13,3 +13,5 @@ WHERE
ALTER TABLE comment_like
ALTER COLUMN post_id SET NOT NULL;
CREATE INDEX idx_comment_like_post ON comment_like (post_id);

View file

@ -90,7 +90,7 @@ CREATE TABLE person_post_aggregates (
person_id int REFERENCES person ON UPDATE CASCADE ON DELETE CASCADE NOT NULL,
post_id int REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE NOT NULL,
read_comments bigint DEFAULT 0 NOT NULL,
published timestamptz NOT NULL,
published timestamptz NOT NULL DEFAULT now(),
PRIMARY KEY (person_id, post_id)
);

View file

@ -48,8 +48,7 @@ BEGIN
p.community_id
FROM
comment_like cl
INNER JOIN comment c ON cl.comment_id = comment.id
INNER JOIN post p ON comment.post_id = p.id
INNER JOIN post p ON cl.post_id = p.id
INNER JOIN person pe ON cl.person_id = pe.id
WHERE
cl.published > ('now'::timestamp - i::interval)

View file

@ -41,6 +41,9 @@ config_args=(
# Don't log parameter values
-c auto_explain.log_parameter_max_length=0
# Disable fsync, a feature that prevents corruption on crash (doesn't matter on a temporary test database) and slows things down, especially migration tests
-c fsync=off
)
# Create cluster