@@ -265,7 +265,7 @@ enum ParserState {
265265}
266266
267267pub struct Parser<'a> {
268- tokens: Vec<TokenWithLocation>,
268+ tokens: Vec<TokenWithSpan>,
269269 /// The index of the first unprocessed token in [`Parser::tokens`].
270270 index: usize,
271271 /// The current state of the parser.
@@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
359359 }
360360
361361 /// Reset this parser to parse the specified token stream
362- pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithLocation>) -> Self {
362+ pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
363363 self.tokens = tokens;
364364 self.index = 0;
365365 self
@@ -368,9 +368,9 @@ impl<'a> Parser<'a> {
368368 /// Reset this parser state to parse the specified tokens
369369 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
370370 // Put in dummy locations
371- let tokens_with_locations: Vec<TokenWithLocation> = tokens
371+ let tokens_with_locations: Vec<TokenWithSpan> = tokens
372372 .into_iter()
373- .map(|token| TokenWithLocation {
373+ .map(|token| TokenWithSpan {
374374 token,
375375 span: Span::empty(),
376376 })
@@ -1147,7 +1147,7 @@ impl<'a> Parser<'a> {
11471147 match self.peek_token().token {
11481148 Token::LParen | Token::Period => {
11491149 let mut id_parts: Vec<Ident> = vec![w.to_ident(w_span)];
1150- let mut ending_wildcard: Option<TokenWithLocation> = None;
1150+ let mut ending_wildcard: Option<TokenWithSpan> = None;
11511151 while self.consume_token(&Token::Period) {
11521152 let next_token = self.next_token();
11531153 match next_token.token {
@@ -3273,7 +3273,7 @@ impl<'a> Parser<'a> {
32733273
32743274 /// Return the first non-whitespace token that has not yet been processed
32753275 /// (or None if reached end-of-file)
3276- pub fn peek_token(&self) -> TokenWithLocation {
3276+ pub fn peek_token(&self) -> TokenWithSpan {
32773277 self.peek_nth_token(0)
32783278 }
32793279
@@ -3308,38 +3308,38 @@ impl<'a> Parser<'a> {
33083308 /// yet been processed.
33093309 ///
33103310 /// See [`Self::peek_token`] for an example.
3311- pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithLocation; N] {
3311+ pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
33123312 let mut index = self.index;
33133313 core::array::from_fn(|_| loop {
33143314 let token = self.tokens.get(index);
33153315 index += 1;
3316- if let Some(TokenWithLocation {
3316+ if let Some(TokenWithSpan {
33173317 token: Token::Whitespace(_),
33183318 span: _,
33193319 }) = token
33203320 {
33213321 continue;
33223322 }
3323- break token.cloned().unwrap_or(TokenWithLocation {
3323+ break token.cloned().unwrap_or(TokenWithSpan {
33243324 token: Token::EOF,
33253325 span: Span::empty(),
33263326 });
33273327 })
33283328 }
33293329
33303330 /// Return nth non-whitespace token that has not yet been processed
3331- pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation {
3331+ pub fn peek_nth_token(&self, mut n: usize) -> TokenWithSpan {
33323332 let mut index = self.index;
33333333 loop {
33343334 index += 1;
33353335 match self.tokens.get(index - 1) {
3336- Some(TokenWithLocation {
3336+ Some(TokenWithSpan {
33373337 token: Token::Whitespace(_),
33383338 span: _,
33393339 }) => continue,
33403340 non_whitespace => {
33413341 if n == 0 {
3342- return non_whitespace.cloned().unwrap_or(TokenWithLocation {
3342+ return non_whitespace.cloned().unwrap_or(TokenWithSpan {
33433343 token: Token::EOF,
33443344 span: Span::empty(),
33453345 });
@@ -3352,16 +3352,16 @@ impl<'a> Parser<'a> {
33523352
33533353 /// Return the first token, possibly whitespace, that has not yet been processed
33543354 /// (or None if reached end-of-file).
3355- pub fn peek_token_no_skip(&self) -> TokenWithLocation {
3355+ pub fn peek_token_no_skip(&self) -> TokenWithSpan {
33563356 self.peek_nth_token_no_skip(0)
33573357 }
33583358
33593359 /// Return nth token, possibly whitespace, that has not yet been processed.
3360- pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithLocation {
3360+ pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
33613361 self.tokens
33623362 .get(self.index + n)
33633363 .cloned()
3364- .unwrap_or(TokenWithLocation {
3364+ .unwrap_or(TokenWithSpan {
33653365 token: Token::EOF,
33663366 span: Span::empty(),
33673367 })
@@ -3378,25 +3378,25 @@ impl<'a> Parser<'a> {
33783378 /// Return the first non-whitespace token that has not yet been processed
33793379 /// (or None if reached end-of-file) and mark it as processed. OK to call
33803380 /// repeatedly after reaching EOF.
3381- pub fn next_token(&mut self) -> TokenWithLocation {
3381+ pub fn next_token(&mut self) -> TokenWithSpan {
33823382 loop {
33833383 self.index += 1;
33843384 match self.tokens.get(self.index - 1) {
3385- Some(TokenWithLocation {
3385+ Some(TokenWithSpan {
33863386 token: Token::Whitespace(_),
33873387 span: _,
33883388 }) => continue,
33893389 token => {
33903390 return token
33913391 .cloned()
3392- .unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF))
3392+ .unwrap_or_else(|| TokenWithSpan::wrap(Token::EOF))
33933393 }
33943394 }
33953395 }
33963396 }
33973397
33983398 /// Return the first unprocessed token, possibly whitespace.
3399- pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> {
3399+ pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
34003400 self.index += 1;
34013401 self.tokens.get(self.index - 1)
34023402 }
@@ -3408,7 +3408,7 @@ impl<'a> Parser<'a> {
34083408 loop {
34093409 assert!(self.index > 0);
34103410 self.index -= 1;
3411- if let Some(TokenWithLocation {
3411+ if let Some(TokenWithSpan {
34123412 token: Token::Whitespace(_),
34133413 span: _,
34143414 }) = self.tokens.get(self.index)
@@ -3420,7 +3420,7 @@ impl<'a> Parser<'a> {
34203420 }
34213421
34223422 /// Report `found` was encountered instead of `expected`
3423- pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> {
3423+ pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
34243424 parser_err!(
34253425 format!("Expected: {expected}, found: {found}"),
34263426 found.span.start
@@ -3435,7 +3435,7 @@ impl<'a> Parser<'a> {
34353435 }
34363436
34373437 #[must_use]
3438- pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithLocation> {
3438+ pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithSpan> {
34393439 match self.peek_token().token {
34403440 Token::Word(w) if expected == w.keyword => Some(self.next_token()),
34413441 _ => None,
@@ -3524,7 +3524,7 @@ impl<'a> Parser<'a> {
35243524
35253525 /// If the current token is the `expected` keyword, consume the token.
35263526 /// Otherwise, return an error.
3527- pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithLocation, ParserError> {
3527+ pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
35283528 if let Some(token) = self.parse_keyword_token(expected) {
35293529 Ok(token)
35303530 } else {
@@ -3568,7 +3568,7 @@ impl<'a> Parser<'a> {
35683568 }
35693569
35703570 /// Bail out if the current token is not an expected keyword, or consume it if it is
3571- pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithLocation, ParserError> {
3571+ pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
35723572 if self.peek_token() == *expected {
35733573 Ok(self.next_token())
35743574 } else {
@@ -4107,7 +4107,7 @@ impl<'a> Parser<'a> {
41074107 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
41084108 _ => self.expected(
41094109 "JAR, FILE or ARCHIVE, got {:?}",
4110- TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4110+ TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
41114111 ),
41124112 }
41134113 }
@@ -6832,7 +6832,7 @@ impl<'a> Parser<'a> {
68326832 if let Some(name) = name {
68336833 return self.expected(
68346834 "FULLTEXT or SPATIAL option without constraint name",
6835- TokenWithLocation {
6835+ TokenWithSpan {
68366836 token: Token::make_keyword(&name.to_string()),
68376837 span: next_token.span,
68386838 },
@@ -7808,15 +7808,15 @@ impl<'a> Parser<'a> {
78087808 Some('\'') => Ok(Value::SingleQuotedString(w.value)),
78097809 _ => self.expected(
78107810 "A value?",
7811- TokenWithLocation {
7811+ TokenWithSpan {
78127812 token: Token::Word(w),
78137813 span,
78147814 },
78157815 )?,
78167816 },
78177817 _ => self.expected(
78187818 "a concrete value",
7819- TokenWithLocation {
7819+ TokenWithSpan {
78207820 token: Token::Word(w),
78217821 span,
78227822 },
@@ -7878,7 +7878,7 @@ impl<'a> Parser<'a> {
78787878 }
78797879 unexpected => self.expected(
78807880 "a value",
7881- TokenWithLocation {
7881+ TokenWithSpan {
78827882 token: unexpected,
78837883 span,
78847884 },
@@ -7927,7 +7927,7 @@ impl<'a> Parser<'a> {
79277927 Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
79287928 unexpected => self.expected(
79297929 "a string value",
7930- TokenWithLocation {
7930+ TokenWithSpan {
79317931 token: unexpected,
79327932 span,
79337933 },
@@ -8618,7 +8618,7 @@ impl<'a> Parser<'a> {
86188618 let token = self
86198619 .next_token_no_skip()
86208620 .cloned()
8621- .unwrap_or(TokenWithLocation::wrap(Token::EOF));
8621+ .unwrap_or(TokenWithSpan::wrap(Token::EOF));
86228622 requires_whitespace = match token.token {
86238623 Token::Word(next_word) if next_word.quote_style.is_none() => {
86248624 ident.value.push_str(&next_word.value);
@@ -11683,7 +11683,7 @@ impl<'a> Parser<'a> {
1168311683 /// If it is not possible to parse it, will return an option.
1168411684 pub fn parse_wildcard_additional_options(
1168511685 &mut self,
11686- wildcard_token: TokenWithLocation,
11686+ wildcard_token: TokenWithSpan,
1168711687 ) -> Result<WildcardAdditionalOptions, ParserError> {
1168811688 let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
1168911689 self.parse_optional_select_item_ilike()?
@@ -12708,7 +12708,7 @@ impl<'a> Parser<'a> {
1270812708 }
1270912709
1271012710 /// Consume the parser and return its underlying token buffer
12711- pub fn into_tokens(self) -> Vec<TokenWithLocation> {
12711+ pub fn into_tokens(self) -> Vec<TokenWithSpan> {
1271212712 self.tokens
1271312713 }
1271412714
0 commit comments