Skip to content

Commit fb93219

Browse files
authored
refactor: replace std::mem::replace with std::mem::take and Vec::drain (#4600)
* refactor: replace `std::mem::replace` with `Vec::drain`
* refactor: replace `std::mem::replace` with `std::mem::take`
1 parent c0f7309 commit fb93219

File tree

2 files changed

+3
-5
lines changed

2 files changed

+3
-5
lines changed

sqlglotrs/src/token.rs

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,13 +49,11 @@ impl Token {
4949
pub fn append_comments(&self, comments: &mut Vec<String>) {
5050
Python::with_gil(|py| {
5151
let pylist = self.comments.bind(py);
52-
for comment in comments.iter() {
52+
for comment in comments.drain(..) {
5353
if let Err(_) = pylist.append(comment) {
5454
panic!("Failed to append comments to the Python list");
5555
}
5656
}
5757
});
58-
// Simulate `Vec::append`.
59-
let _ = std::mem::replace(comments, Vec::new());
6058
}
6159
}

sqlglotrs/src/tokenizer.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ impl<'a> TokenizerState<'a> {
111111

112112
fn tokenize(&mut self) -> Result<Vec<Token>, TokenizerError> {
113113
self.scan(None)?;
114-
Ok(std::mem::replace(&mut self.tokens, Vec::new()))
114+
Ok(std::mem::take(&mut self.tokens))
115115
}
116116

117117
fn scan(&mut self, until_peek_char: Option<char>) -> Result<(), TokenizerError> {
@@ -234,7 +234,7 @@ impl<'a> TokenizerState<'a> {
234234
self.column,
235235
self.start,
236236
self.current - 1,
237-
std::mem::replace(&mut self.comments, Vec::new()),
237+
std::mem::take(&mut self.comments),
238238
));
239239

240240
// If we have either a semicolon or a begin token before the command's token, we'll parse

0 commit comments

Comments (0)