Fixing .drone.yml (#2677)

* Try to fix docker/drone plugin

* Trying to use one rust image.

* Fixing drone 1.

* Fixing drone 2.

* Add drone notify.

* Fixing drone 3.

* Fixing drone 4.

* Fix clippy.

* Remove uninlined format lint.

* Combine all cargo tasks

* Fixing drone 5.

* Fixing drone 6.

* Fixing drone 7.

* Fixing drone 8.

* Fixing drone 9.

* Fixing drone 10.

* Fixing drone 12.

* Fixing drone 13.

* Fixing drone 14.

* Fixing drone 15.

* Fixing drone 16.

* Fixing drone 17.

* Fixing drone 18.

* Fixing drone 19.

* Fixing drone 20.

* Fixing drone 21.

* Fixing drone 22.

* Fixing drone 23.

* Fixing drone 24.

* Fixing drone 25.

* Fixing drone 26.

* Fixing drone 27.

* Fixing drone 28.

* Fixing drone 29.

* Fixing drone 30.

* Fixing drone 31.

* Fixing drone 32.

* Fixing drone 33.

* Fixing drone 34.

* Fixing drone 35.

* Fixing drone 36.

* Fixing drone 37.

* Fixing drone 38.

* Fixing drone 39.

* Fixing drone 40.

* Fixing drone 41.

* Fixing drone 43.

* Fixing drone 44.

* Fixing drone 45.

* Last cleanup.

* Fixing drone 46.

* Separate ci steps (#2679)

* separate ci steps

* fix 1

* add comments

* dont add rustfmt explicitly

* Revert "dont add rustfmt explicitly"

This reverts commit 358ce3302a134b7ac88d90a854079356995e9725.

* dont use all features for tests

---------

Co-authored-by: Nutomic <me@nutomic.com>
Author: Dessalines, 2023-01-30 14:17:24 -05:00 (committed by GitHub)
parent a8232fe3d6
commit a610211557
19 changed files with 104 additions and 84 deletions

View file

@@ -10,27 +10,33 @@ steps:
   # use minimum supported rust version for most steps
   - name: prepare repo
-    image: clux/muslrust:1.64.0
+    image: alpine:3
     commands:
+      - apk add git
       - git fetch --tags
       - git submodule init
       - git submodule update --recursive --remote
+      - chown 1000:1000 . -R
-  - name: check formatting
-    image: rustdocker/rust:nightly
+  - name: cargo fmt
+    image: clux/muslrust:1.67.0
+    environment:
+      # store cargo data in repo folder so that it gets cached between steps
+      CARGO_HOME: .cargo
     commands:
-      - /root/.cargo/bin/cargo fmt -- --check
+      # need make existing toolchain available
+      - cp ~/.cargo . -r
+      - rustup toolchain install nightly
+      - rustup component add rustfmt --toolchain nightly
+      - cargo +nightly fmt -- --check
-  # latest rust for clippy to get extra checks
-  # when adding new clippy lints, make sure to also add them in scripts/fix-clippy.sh
   - name: cargo clippy
-    image: rust:1.65-buster
+    image: clux/muslrust:1.67.0
     environment:
       CARGO_HOME: .cargo
     commands:
-      - apt-get update
-      - apt-get install -y --no-install-recommends protobuf-compiler libprotobuf-dev
+      # latest rust for clippy to get extra checks
+      # when adding new clippy lints, make sure to also add them in scripts/fix-clippy.sh
       - rustup component add clippy
       - cargo clippy --workspace --tests --all-targets --all-features --
          -D warnings -D deprecated -D clippy::perf -D clippy::complexity
@@ -40,45 +46,56 @@ steps:
          -D clippy::wildcard_imports -D clippy::cast_lossless
          -D clippy::manual_string_new -D clippy::redundant_closure_for_method_calls
          -D clippy::unused_self
+         -A clippy::uninlined_format_args
       - cargo clippy --workspace --all-features -- -D clippy::unwrap_used
+  - name: cargo check
+    image: clux/muslrust:1.67.0
+    environment:
+      CARGO_HOME: .cargo
+    commands:
+      - cargo check --package lemmy_utils
+      - cargo check --package lemmy_db_schema
+      - cargo check --package lemmy_db_views
+      - cargo check --package lemmy_db_views_actor
+      - cargo check --package lemmy_db_views_moderator
+      - cargo check --package lemmy_api_common
+      - cargo check --package lemmy_api
+      - cargo check --package lemmy_api_crud
+      - cargo check --package lemmy_apub
+      - cargo check --package lemmy_routes
+      - cargo check --workspace --no-default-features
+      - cargo check --workspace --all-features
+  - name: lemmy_api_common doesnt depend on diesel
+    image: clux/muslrust:1.67.0
+    environment:
+      CARGO_HOME: .cargo
+    commands:
+      - "! cargo tree -p lemmy_api_common --no-default-features -i diesel"
+  - name: check defaults.hjson updated
+    image: clux/muslrust:1.67.0
+    environment:
+      CARGO_HOME: .cargo
+    commands:
+      - export LEMMY_CONFIG_LOCATION=./config/config.hjson
+      - ./scripts/update_config_defaults.sh config/defaults_current.hjson
+      - diff config/defaults.hjson config/defaults_current.hjson
   - name: cargo test
-    image: clux/muslrust:1.64.0
+    image: clux/muslrust:1.67.0
     environment:
       LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
-      LEMMY_CONFIG_LOCATION: ../../config/config.hjson
       RUST_BACKTRACE: 1
       RUST_TEST_THREADS: 1
       CARGO_HOME: .cargo
     commands:
-      - apt-get update
-      - apt-get -y install --no-install-recommends postgresql-client protobuf-compiler libprotobuf-dev
-      - cargo test --workspace --no-fail-fast --all-features
-  - name: check defaults.hjson updated
-    image: clux/muslrust:1.64.0
-    environment:
-      CARGO_HOME: .cargo
-    commands:
-      - ./scripts/update_config_defaults.sh config/defaults_current.hjson
-      - diff config/defaults.hjson config/defaults_current.hjson
-  - name: check with different features
-    image: clux/muslrust:1.64.0
-    environment:
-      CARGO_HOME: .cargo
-    commands:
-      - cargo install cargo-workspaces
-      - cargo workspaces exec cargo check --no-default-features
-      - cargo workspaces exec cargo check --all-features
-  - name: lemmy_api_common doesnt depend on diesel
-    image: rust:1.64-buster
-    commands:
-      - "! cargo tree -p lemmy_api_common --no-default-features -i diesel"
+      - export LEMMY_CONFIG_LOCATION=../../config/config.hjson
+      - cargo test --workspace --no-fail-fast
   - name: cargo build
-    image: clux/muslrust:1.64.0
+    image: clux/muslrust:1.67.0
     environment:
       CARGO_HOME: .cargo
     commands:
@@ -106,6 +123,7 @@ steps:
       password:
         from_secret: docker_password
       repo: dessalines/lemmy
+      add_host: github.com:140.82.112.3,static.crates.io:18.154.227.73,crates.io:108.138.64.68,dl-cdn.alpinelinux.org:146.75.30.133
       tags:
         - dev
     when:
@@ -121,6 +139,7 @@ steps:
       password:
         from_secret: docker_password
       repo: dessalines/lemmy
+      add_host: github.com:140.82.112.3,static.crates.io:18.154.227.73,crates.io:108.138.64.68,dl-cdn.alpinelinux.org:146.75.30.133
       auto_tag: true
       auto_tag_suffix: linux-amd64
     when:
@@ -163,7 +182,7 @@ steps:
   # using https://github.com/pksunkara/cargo-workspaces
   - name: publish to crates.io
-    image: rustlang/rust:nightly
+    image: clux/muslrust:1.67.0
     environment:
       CARGO_TOKEN:
         from_secret: cargo_api_token
@@ -176,9 +195,18 @@ steps:
     when:
       ref:
         - refs/tags/*
 
+  - name: Notify on failure
+    image: alpine:3
+    commands:
+      - apk add curl
+      - "curl -d'Drone build failed: ${DRONE_BUILD_LINK}' ntfy.sh/lemmy_drone_ci"
+    when:
+      status:
+        - failure
+
 services:
   - name: database
-    image: postgres:14-alpine
+    image: postgres:15-alpine
     environment:
       POSTGRES_USER: lemmy
       POSTGRES_PASSWORD: password
@@ -197,7 +225,6 @@ steps:
     image: rust:1.57-slim
     user: root
     commands:
-      - chown 1000:1000 . -R
       - apt update
      - apt install --no-install-recommends --yes git
      - git fetch --tags

View file

@@ -769,15 +769,15 @@ pub fn generate_local_apub_endpoint(
     EndpointType::PrivateMessage => "private_message",
   };
-  Ok(Url::parse(&format!("{}/{}/{}", domain, point, name))?.into())
+  Ok(Url::parse(&format!("{domain}/{point}/{name}"))?.into())
 }
 
 pub fn generate_followers_url(actor_id: &DbUrl) -> Result<DbUrl, ParseError> {
-  Ok(Url::parse(&format!("{}/followers", actor_id))?.into())
+  Ok(Url::parse(&format!("{actor_id}/followers"))?.into())
 }
 
 pub fn generate_inbox_url(actor_id: &DbUrl) -> Result<DbUrl, ParseError> {
-  Ok(Url::parse(&format!("{}/inbox", actor_id))?.into())
+  Ok(Url::parse(&format!("{actor_id}/inbox"))?.into())
 }
 
 pub fn generate_site_inbox_url(actor_id: &DbUrl) -> Result<DbUrl, ParseError> {
@@ -793,7 +793,7 @@ pub fn generate_shared_inbox_url(actor_id: &DbUrl) -> Result<DbUrl, LemmyError>
     &actor_id.scheme(),
     &actor_id.host_str().context(location_info!())?,
     if let Some(port) = actor_id.port() {
-      format!(":{}", port)
+      format!(":{port}")
     } else {
       String::new()
     },
@@ -802,9 +802,9 @@ pub fn generate_shared_inbox_url(actor_id: &DbUrl) -> Result<DbUrl, LemmyError>
 }
 
 pub fn generate_outbox_url(actor_id: &DbUrl) -> Result<DbUrl, ParseError> {
-  Ok(Url::parse(&format!("{}/outbox", actor_id))?.into())
+  Ok(Url::parse(&format!("{actor_id}/outbox"))?.into())
 }
 
 pub fn generate_moderators_url(community_id: &DbUrl) -> Result<DbUrl, LemmyError> {
-  Ok(Url::parse(&format!("{}/moderators", community_id))?.into())
+  Ok(Url::parse(&format!("{community_id}/moderators"))?.into())
 }
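
Nearly every Rust hunk in this commit applies the same mechanical change: arguments move out of the format! argument list and into the format string as inline (captured) arguments, the style that clippy's uninlined_format_args lint suggests. A minimal standalone sketch of the equivalence, not Lemmy code; the actor_id value is made up for illustration:

// Both calls build the same string; inline format arguments are stable since Rust 1.58.
fn main() {
  let actor_id = "https://example.org/u/alice";
  let positional = format!("{}/outbox", actor_id); // style removed by this commit
  let captured = format!("{actor_id}/outbox");     // style introduced by this commit
  assert_eq!(positional, captured);

  // Format specs carry over unchanged, as in the bytes_to_hex hunk below.
  let byte: u8 = 15;
  assert_eq!(format!("{:02x}", byte), format!("{byte:02x}")); // both yield "0f"
}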

View file

@@ -25,10 +25,7 @@ where
     .splitn(2, '@')
     .collect_tuple()
     .ok_or_else(|| LemmyError::from_message("Invalid webfinger query, missing domain"))?;
-  let fetch_url = format!(
-    "{}://{}/.well-known/webfinger?resource=acct:{}",
-    protocol, domain, identifier
-  );
+  let fetch_url = format!("{protocol}://{domain}/.well-known/webfinger?resource=acct:{identifier}");
   debug!("Fetching webfinger url: {}", &fetch_url);
   *request_counter += 1;

View file

@@ -105,11 +105,10 @@ update comment_aggregates ca set child_count = c.child_count
 from (
   select c.id, c.path, count(c2.id) as child_count from comment c
   join comment c2 on c2.path <@ c.path and c2.path != c.path
-  and c.path <@ '{}'
+  and c.path <@ '{top_parent}'
   group by c.id
 ) as c
-where ca.comment_id = c.id",
-  top_parent
+where ca.comment_id = c.id"
 );
 sql_query(update_child_count_stmt).execute(conn).await?;

View file

@@ -357,7 +357,7 @@ impl ApubActor for Community {
     let conn = &mut get_conn(pool).await?;
     community
       .filter(lower(name).eq(lower(community_name)))
-      .filter(actor_id.like(format!("{}%", protocol_domain)))
+      .filter(actor_id.like(format!("{protocol_domain}%")))
       .first::<Self>(conn)
       .await
   }

View file

@@ -79,7 +79,7 @@ impl PasswordResetRequest {
 fn bytes_to_hex(bytes: Vec<u8>) -> String {
   let mut str = String::new();
   for byte in bytes {
-    str = format!("{}{:02x}", str, byte);
+    str = format!("{str}{byte:02x}");
   }
   str
 }

View file

@@ -219,7 +219,7 @@ impl ApubActor for Person {
     let conn = &mut get_conn(pool).await?;
     person
       .filter(lower(name).eq(lower(person_name)))
-      .filter(actor_id.like(format!("{}%", protocol_domain)))
+      .filter(actor_id.like(format!("{protocol_domain}%")))
       .first::<Self>(conn)
       .await
   }

View file

@@ -46,7 +46,7 @@ pub fn get_database_url_from_env() -> Result<String, VarError> {
 pub fn fuzzy_search(q: &str) -> String {
   let replaced = q.replace('%', "\\%").replace('_', "\\_").replace(' ', "%");
-  format!("%{}%", replaced)
+  format!("%{replaced}%")
 }
 
 pub fn limit_and_offset(
@@ -67,7 +67,7 @@ pub fn limit_and_offset(
     Some(limit) => {
       if !(1..=FETCH_LIMIT_MAX).contains(&limit) {
         return Err(QueryBuilderError(
-          format!("Fetch limit is > {}", FETCH_LIMIT_MAX).into(),
+          format!("Fetch limit is > {FETCH_LIMIT_MAX}").into(),
         ));
       } else {
         limit
@@ -154,7 +154,7 @@ pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
 pub fn run_migrations(db_url: &str) {
   // Needs to be a sync connection
   let mut conn =
-    PgConnection::establish(db_url).unwrap_or_else(|_| panic!("Error connecting to {}", db_url));
+    PgConnection::establish(db_url).unwrap_or_else(|_| panic!("Error connecting to {db_url}"));
   info!("Running Database migrations (This may take a long time)...");
   let _ = &mut conn
     .run_pending_migrations(MIGRATIONS)
@@ -178,10 +178,7 @@ pub fn get_database_url(settings: Option<&Settings>) -> String {
     Ok(url) => url,
     Err(e) => match settings {
       Some(settings) => settings.get_database_url(),
-      None => panic!(
-        "Failed to read database URL from env var LEMMY_DATABASE_URL: {}",
-        e
-      ),
+      None => panic!("Failed to read database URL from env var LEMMY_DATABASE_URL: {e}"),
     },
   }
 }

View file

@@ -329,7 +329,7 @@ async fn get_feed_inbox(
   channel_builder
     .namespaces(RSS_NAMESPACE.clone())
     .title(&format!("{} - Inbox", site_view.site.name))
-    .link(format!("{}/inbox", protocol_and_hostname,))
+    .link(format!("{protocol_and_hostname}/inbox",))
     .items(items);
 
   if let Some(site_desc) = site_view.site.description {
@@ -392,11 +392,10 @@ fn build_item(
   protocol_and_hostname: &str,
 ) -> Result<Item, LemmyError> {
   let mut i = ItemBuilder::default();
-  i.title(format!("Reply from {}", creator_name));
-  let author_url = format!("{}/u/{}", protocol_and_hostname, creator_name);
+  i.title(format!("Reply from {creator_name}"));
+  let author_url = format!("{protocol_and_hostname}/u/{creator_name}");
   i.author(format!(
-    "/u/{} <a href=\"{}\">(link)</a>",
-    creator_name, author_url
+    "/u/{creator_name} <a href=\"{author_url}\">(link)</a>"
   ));
   let dt = DateTime::<Utc>::from_utc(*published, Utc);
   i.pub_date(dt.to_rfc2822());
@@ -451,7 +450,7 @@ fn create_post_items(
     // If its a url post, add it to the description
     if let Some(url) = p.post.url {
-      let link_html = format!("<br><a href=\"{url}\">{url}</a>", url = url);
+      let link_html = format!("<br><a href=\"{url}\">{url}</a>");
      description.push_str(&link_html);
    }

View file

@@ -158,7 +158,7 @@ async fn full_res(
     let mut url = format!("{}image/process.{}?src={}", pictrs_config.url, format, name,);
     if let Some(size) = params.thumbnail {
-      url = format!("{}&thumbnail={}", url, size,);
+      url = format!("{url}&thumbnail={size}",);
     }
     url
   };

View file

@@ -16,7 +16,7 @@ pub fn generate_actor_keypair() -> Result<Keypair, Error> {
     Ok(s) => Ok(s),
     Err(e) => Err(Error::new(
       ErrorKind::Other,
-      format!("Failed converting key to string: {}", e),
+      format!("Failed converting key to string: {e}"),
     )),
   };
   Ok(Keypair {

View file

@@ -86,7 +86,7 @@ impl Debug for LemmyError {
 impl Display for LemmyError {
   fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     if let Some(message) = &self.message {
-      write!(f, "{}: ", message)?;
+      write!(f, "{message}: ")?;
     }
     writeln!(f, "{}", self.inner)?;
     fmt::Display::fmt(&self.context, f)

View file

@@ -1,4 +1,4 @@
-ARG RUST_BUILDER_IMAGE=clux/muslrust:1.64.0
+ARG RUST_BUILDER_IMAGE=clux/muslrust:1.67.0
 
 FROM $RUST_BUILDER_IMAGE as chef
 USER root

View file

@@ -1,5 +1,5 @@
 # Build the project
-FROM clux/muslrust:1.64.0 as builder
+FROM clux/muslrust:1.67.0 as builder
 
 ARG CARGO_BUILD_TARGET=x86_64-unknown-linux-musl
 ARG RUSTRELEASEDIR="release"
@@ -17,7 +17,7 @@ RUN cp ./target/$CARGO_BUILD_TARGET/$RUSTRELEASEDIR/lemmy_server /app/lemmy_serv
 FROM alpine:3 as lemmy
 
 # Install libpq for postgres
-RUN apk add libpq
+RUN apk update && apk add libpq
 
 # Copy resources
 COPY --from=builder /app/lemmy_server /app/lemmy

View file

@@ -1,11 +1,12 @@
 #!/bin/bash
 set -e
 
-cargo clippy --workspace --fix --allow-staged --tests --all-targets --all-features -- \
+cargo clippy --workspace --fix --allow-staged --allow-dirty --tests --all-targets --all-features -- \
     -D warnings -D deprecated -D clippy::perf -D clippy::complexity \
     -D clippy::style -D clippy::correctness -D clippy::suspicious \
     -D clippy::dbg_macro -D clippy::inefficient_to_string \
     -D clippy::items-after-statements -D clippy::implicit_clone \
     -D clippy::wildcard_imports -D clippy::cast_lossless \
     -D clippy::manual_string_new -D clippy::redundant_closure_for_method_calls \
-    -D clippy::unused_self
+    -D clippy::unused_self \
+    -A clippy::uninlined_format_args
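
The added -A clippy::uninlined_format_args keeps that one lint switched off while the -D flags above continue to deny their groups; because the explicit -A for the named lint comes after the group-level -D flags, it wins for that lint only. The same opt-out could also live in source instead of on the command line; a minimal sketch, not a file from this repository:

// Crate-level allow as an alternative to the command-line -A flag.
#![allow(clippy::uninlined_format_args)]

fn main() {
  let name = "lemmy";
  // With the allow in place, clippy accepts this positional call as well as
  // the inlined `println!("{name}")` form.
  println!("{}", name);
}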

View file

@@ -17,7 +17,7 @@ if [ -n "$PACKAGE" ];
 then
   cargo test -p $PACKAGE --all-features --no-fail-fast
 else
-  cargo test --workspace --all-features --no-fail-fast
+  cargo test --workspace --no-fail-fast
 fi
 
 # Add this to do printlns: -- --nocapture

View file

@@ -248,7 +248,7 @@ async fn post_thumbnail_url_updates_2020_07_27(
   info!("Running post_thumbnail_url_updates_2020_07_27");
-  let domain_prefix = format!("{}/pictrs/image/", protocol_and_hostname,);
+  let domain_prefix = format!("{protocol_and_hostname}/pictrs/image/",);
   let incorrect_thumbnails = post.filter(thumbnail_url.not_like("http%"));

View file

@@ -65,8 +65,8 @@ fn handle_error(span: Span, status_code: StatusCode, response_error: &dyn Respon
   }
   // pre-formatting errors is a workaround for https://github.com/tokio-rs/tracing/issues/1565
-  let display_error = format!("{}", response_error);
-  let debug_error = format!("{:?}", response_error);
+  let display_error = format!("{response_error}");
+  let debug_error = format!("{response_error:?}");
   tracing::info_span!(
     parent: None,

View file

@@ -21,7 +21,7 @@ pub fn setup(db_url: String) -> Result<(), LemmyError> {
   reindex_aggregates_tables(&mut conn, true);
   scheduler.every(1.hour()).run(move || {
     let conn = &mut PgConnection::establish(&db_url)
-      .unwrap_or_else(|_| panic!("Error connecting to {}", db_url));
+      .unwrap_or_else(|_| panic!("Error connecting to {db_url}"));
     active_counts(conn);
     update_banned_when_expired(conn);
     reindex_aggregates_tables(conn, true);
@@ -56,7 +56,7 @@ fn reindex_aggregates_tables(conn: &mut PgConnection, concurrently: bool) {
 fn reindex_table(conn: &mut PgConnection, table_name: &str, concurrently: bool) {
   let concurrently_str = if concurrently { "concurrently" } else { "" };
   info!("Reindexing table {} {} ...", concurrently_str, table_name);
-  let query = format!("reindex table {} {}", concurrently_str, table_name);
+  let query = format!("reindex table {concurrently_str} {table_name}");
   sql_query(query).execute(conn).expect("reindex table");
   info!("Done.");
 }