// TelosDB — src-tauri/src/utils/tokenizer.rs
use anyhow::Result;

/// Placeholder Japanese tokenizer.
///
/// Dummy implementation: splits on whitespace and common punctuation
/// instead of performing real morphological analysis.
pub struct JapaneseTokenizer;

impl JapaneseTokenizer {
    /// Constructs a tokenizer. Always succeeds; the `Result` return
    /// mirrors the interface of a real tokenizer whose setup may fail.
    pub fn new() -> Result<Self> {
        Ok(JapaneseTokenizer)
    }

    /// Dummy implementation: tokenizes `text` and joins the resulting
    /// tokens with single spaces.
    pub fn tokenize_to_string(&self, text: &str) -> Result<String> {
        let joined = self.tokenize_to_vec(text)?.join(" ");
        Ok(joined)
    }

    /// Dummy implementation: splits `text` on whitespace and on the
    /// punctuation characters in the delimiter set below, dropping
    /// empty fragments (so consecutive delimiters yield no tokens).
    pub fn tokenize_to_vec(&self, text: &str) -> Result<Vec<String>> {
        let is_delimiter = |c: char| c.is_whitespace() || "、。!?,.!?".contains(c);
        let mut tokens = Vec::new();
        for piece in text.split(is_delimiter) {
            if !piece.is_empty() {
                tokens.push(piece.to_string());
            }
        }
        Ok(tokens)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Dummy tokenizer keeps undelimited Japanese text as one token,
    /// so both substrings must appear in the joined output.
    #[test]
    fn test_japanese_tokenization() {
        let text = "すもももももももものうち";
        let result = JapaneseTokenizer::new()
            .unwrap()
            .tokenize_to_string(text)
            .unwrap();
        for needle in ["すもも", "もも"] {
            assert!(result.contains(needle));
        }
    }
}