Skip to content

Commit

Permalink
refactor: replace `std::mem::replace` with `std::mem::take` and `Vec::drain` (#4600)
Browse files Browse the repository at this point in the history

* refactor: replace `std::mem::replace` with `Vec::drain`

* refactor: replace `std::mem::replace` with `std::mem::take`
  • Loading branch information
gvozdvmozgu authored Jan 13, 2025
1 parent c0f7309 commit fb93219
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 5 deletions.
4 changes: 1 addition & 3 deletions sqlglotrs/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,11 @@ impl Token {
pub fn append_comments(&self, comments: &mut Vec<String>) {
Python::with_gil(|py| {
let pylist = self.comments.bind(py);
for comment in comments.iter() {
for comment in comments.drain(..) {
if let Err(_) = pylist.append(comment) {
panic!("Failed to append comments to the Python list");
}
}
});
// Simulate `Vec::append`.
let _ = std::mem::replace(comments, Vec::new());
}
}
4 changes: 2 additions & 2 deletions sqlglotrs/src/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ impl<'a> TokenizerState<'a> {

/// Scans the entire input and returns the accumulated tokens.
///
/// `std::mem::take` moves the token buffer out of `self`, leaving an
/// empty (default) `Vec` behind so the tokenizer state remains valid
/// without cloning.
///
/// # Errors
/// Propagates any `TokenizerError` raised while scanning.
fn tokenize(&mut self) -> Result<Vec<Token>, TokenizerError> {
    self.scan(None)?;
    Ok(std::mem::take(&mut self.tokens))
}

fn scan(&mut self, until_peek_char: Option<char>) -> Result<(), TokenizerError> {
Expand Down Expand Up @@ -234,7 +234,7 @@ impl<'a> TokenizerState<'a> {
self.column,
self.start,
self.current - 1,
std::mem::replace(&mut self.comments, Vec::new()),
std::mem::take(&mut self.comments),
));

// If we have either a semicolon or a begin token before the command's token, we'll parse
Expand Down

0 comments on commit fb93219

Please sign in to comment.