Make Searcher::open_or_recreate() accept path and tokenizers

Author: Kitaiti Makoto (2021-01-06 23:40:56 +09:00)
parent 09d9164a1c
commit 996dc309f7
2 changed files with 12 additions and 10 deletions
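
At a glance, the signature change (copied from the diff below):

    // before
    pub fn open_or_recreate() -> Self
    // after
    pub fn open_or_recreate(path: &dyn AsRef<Path>, tokenizers: &SearchTokenizerConfig) -> Self

Searcher::open_or_recreate() no longer reads CONFIG.search_index and CONFIG.search_tokenizers itself; callers pass the index path and tokenizer configuration explicitly, and the call site in the second changed file is updated to pass the values from CONFIG.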

Changed file 1 of 2:

@@ -1,6 +1,6 @@
 use crate::{
     config::SearchTokenizerConfig, instance::Instance, posts::Post, schema::posts,
-    search::query::PlumeQuery, tags::Tag, Connection, Error, Result, CONFIG,
+    search::query::PlumeQuery, tags::Tag, Connection, Error, Result,
 };
 use chrono::{Datelike, Utc};
 use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
@@ -69,16 +69,15 @@ impl Searcher {
         schema_builder.build()
     }
 
-    pub fn open_or_recreate() -> Self {
-        let mut open_searcher = Self::open(&CONFIG.search_index, &CONFIG.search_tokenizers);
+    pub fn open_or_recreate(path: &dyn AsRef<Path>, tokenizers: &SearchTokenizerConfig) -> Self {
+        let mut open_searcher = Self::open(path, tokenizers);
         if let Err(Error::Search(SearcherError::InvalidIndexDataError)) = open_searcher {
-            if Self::create(&CONFIG.search_index, &CONFIG.search_tokenizers).is_err() {
-                let current_path = Path::new(&CONFIG.search_index);
-                let backup_path = format!("{}.{}", &current_path.display(), Utc::now().timestamp());
+            if Self::create(path, tokenizers).is_err() {
+                let backup_path = format!("{}.{}", path.as_ref().display(), Utc::now().timestamp());
                 let backup_path = Path::new(&backup_path);
-                fs::rename(current_path, backup_path)
+                fs::rename(path, backup_path)
                     .expect("main: error on backing up search index directory for recreating");
-                if Self::create(&CONFIG.search_index, &CONFIG.search_tokenizers).is_ok() {
+                if Self::create(path, tokenizers).is_ok() {
                     if fs::remove_dir_all(backup_path).is_err() {
                         warn!(
                             "error on removing backup directory: {}. it remains",
@@ -89,7 +88,7 @@ impl Searcher {
                     panic!("main: error on recreating search index in new index format. remove search index and run `plm search init` manually");
                 }
             }
-            open_searcher = Self::open(&CONFIG.search_index, &CONFIG.search_tokenizers);
+            open_searcher = Self::open(path, tokenizers);
         }
         #[allow(clippy::match_wild_err_arm)]
         let searcher = match open_searcher {

Changed file 2 of 2:

@@ -100,7 +100,10 @@ Then try to restart Plume.
     }
     let workpool = ScheduledThreadPool::with_name("worker {}", num_cpus::get());
     // we want a fast exit here, so
-    let searcher = Arc::new(UnmanagedSearcher::open_or_recreate());
+    let searcher = Arc::new(UnmanagedSearcher::open_or_recreate(
+        &CONFIG.search_index,
+        &CONFIG.search_tokenizers,
+    ));
     let commiter = searcher.clone();
     workpool.execute_with_fixed_delay(
         Duration::from_secs(5),
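
A minimal sketch (not part of this commit) of what the new parameters allow: a caller can now point the searcher at any index directory instead of the one configured in CONFIG. The module paths and the helper name below are assumptions; only Searcher::open_or_recreate, CONFIG.search_index, and CONFIG.search_tokenizers come from the diff above.

    // Sketch only: module paths are assumed, the helper is hypothetical.
    use std::path::PathBuf;

    use plume_models::{search::Searcher, CONFIG};

    // Open (or rebuild) a search index in a caller-chosen directory,
    // reusing the tokenizer settings from the global configuration.
    fn open_searcher_at(index_dir: PathBuf) -> Searcher {
        Searcher::open_or_recreate(&index_dir, &CONFIG.search_tokenizers)
    }

The production caller keeps passing &CONFIG.search_index and &CONFIG.search_tokenizers, as the last hunk shows; other callers (tests, or tools such as `plm search init`) are no longer forced through the global configuration.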