From 242d99602b538608cbfa3a734fe5cee8c8e8a0be Mon Sep 17 00:00:00 2001
From: gvozdvmozgu
Date: Fri, 10 Jan 2025 23:05:26 -0800
Subject: [PATCH] refactor: simplify `trie_filter` closure in `Tokenizer`
 initialization

---
 sqlglotrs/src/tokenizer.rs | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/sqlglotrs/src/tokenizer.rs b/sqlglotrs/src/tokenizer.rs
index 3557a074bf..21b0862ec7 100644
--- a/sqlglotrs/src/tokenizer.rs
+++ b/sqlglotrs/src/tokenizer.rs
@@ -24,13 +24,10 @@ impl Tokenizer {
     #[new]
     pub fn new(settings: TokenizerSettings, token_types: TokenTypeSettings) -> Tokenizer {
         let mut keyword_trie = Trie::default();
-        let single_token_strs: Vec<String> = settings
-            .single_tokens
-            .keys()
-            .map(|s| s.to_string())
-            .collect();
-        let trie_filter =
-            |key: &&String| key.contains(" ") || single_token_strs.iter().any(|t| key.contains(t));
+
+        let trie_filter = |key: &&String| {
+            key.contains(" ") || settings.single_tokens.keys().any(|&t| key.contains(t))
+        };
 
         keyword_trie.add(settings.keywords.keys().filter(trie_filter));
         keyword_trie.add(settings.comments.keys().filter(trie_filter));
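
For context, a minimal standalone sketch of the pattern the diff adopts: the filter closure borrows the single-token map directly and checks containment on the fly, instead of first collecting its keys into an intermediate Vec<String>. The types below (HashMap<char, u16> for single tokens, HashMap<String, u16> for keywords) are simplified stand-ins for the fields on TokenizerSettings, not the actual sqlglotrs definitions.

use std::collections::HashMap;

fn main() {
    // Hypothetical stand-in for `settings.single_tokens`.
    let mut single_tokens: HashMap<char, u16> = HashMap::new();
    single_tokens.insert('(', 1);
    single_tokens.insert(';', 2);

    // Hypothetical stand-in for `settings.keywords`.
    let mut keywords: HashMap<String, u16> = HashMap::new();
    keywords.insert("GROUP BY".to_string(), 10);
    keywords.insert("COUNT(".to_string(), 11);
    keywords.insert("SELECT".to_string(), 12);

    // A key goes into the trie only if it contains a space or any single-token char.
    // No intermediate Vec<String>: the closure reads the map's keys each time.
    let trie_filter = |key: &&String| {
        key.contains(" ") || single_tokens.keys().any(|&t| key.contains(t))
    };

    let trie_keys: Vec<&String> = keywords.keys().filter(trie_filter).collect();
    // "GROUP BY" (space) and "COUNT(" ('(' is a single token) pass; "SELECT" does not.
    println!("{trie_keys:?}");
}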