@@ -970,15 +970,15 @@ impl<'a> Parser<'a> {
             t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                 if self.peek_token().token == Token::Period {
                     let mut id_parts: Vec<Ident> = vec![match t {
-                        Token::Word(w) => w.to_ident(next_token.span),
+                        Token::Word(w) => w.into_ident(next_token.span),
                         Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                         _ => unreachable!(), // We matched above
                     }];

                     while self.consume_token(&Token::Period) {
                         let next_token = self.next_token();
                         match next_token.token {
-                            Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
+                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                             Token::SingleQuotedString(s) => {
                                 // SQLite has single-quoted identifiers
                                 id_parts.push(Ident::with_quote('\'', s))
@@ -1108,7 +1108,7 @@ impl<'a> Parser<'a> {
                 if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
             {
                 Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    name: ObjectName(vec![w.clone().into_ident(w_span)]),
                     uses_odbc_syntax: false,
                     parameters: FunctionArguments::None,
                     args: FunctionArguments::None,
@@ -1123,7 +1123,7 @@ impl<'a> Parser<'a> {
             | Keyword::CURRENT_DATE
             | Keyword::LOCALTIME
             | Keyword::LOCALTIMESTAMP => {
-                Ok(Some(self.parse_time_functions(ObjectName(vec![w.to_ident(w_span)]))?))
+                Ok(Some(self.parse_time_functions(ObjectName(vec![w.clone().into_ident(w_span)]))?))
             }
             Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
             Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
@@ -1148,7 +1148,7 @@ impl<'a> Parser<'a> {
             Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
             Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
             Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
-                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
+                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
             }
             Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)),
             Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
@@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
                 let query = self.parse_query()?;
                 self.expect_token(&Token::RParen)?;
                 Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    name: ObjectName(vec![w.clone().into_ident(w_span)]),
                     uses_odbc_syntax: false,
                     parameters: FunctionArguments::None,
                     args: FunctionArguments::Subquery(query),
@@ -1203,11 +1203,12 @@ impl<'a> Parser<'a> {
         w_span: Span,
     ) -> Result<Expr, ParserError> {
         match self.peek_token().token {
-            Token::Period => {
-                self.parse_compound_field_access(Expr::Identifier(w.to_ident(w_span)), vec![])
-            }
+            Token::Period => self.parse_compound_field_access(
+                Expr::Identifier(w.clone().into_ident(w_span)),
+                vec![],
+            ),
             Token::LParen => {
-                let id_parts = vec![w.to_ident(w_span)];
+                let id_parts = vec![w.clone().into_ident(w_span)];
                 if let Some(expr) = self.parse_outer_join_expr(&id_parts) {
                     Ok(expr)
                 } else {
@@ -1220,7 +1221,7 @@ impl<'a> Parser<'a> {
             }
             Token::LBracket if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect | ClickHouseDialect | BigQueryDialect) =>
             {
-                let ident = Expr::Identifier(w.to_ident(w_span));
+                let ident = Expr::Identifier(w.clone().into_ident(w_span));
                 let mut fields = vec![];
                 self.parse_multi_dim_subscript(&mut fields)?;
                 self.parse_compound_field_access(ident, fields)
@@ -1250,11 +1251,11 @@ impl<'a> Parser<'a> {
             Token::Arrow if self.dialect.supports_lambda_functions() => {
                 self.expect_token(&Token::Arrow)?;
                 Ok(Expr::Lambda(LambdaFunction {
-                    params: OneOrManyWithParens::One(w.to_ident(w_span)),
+                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
                     body: Box::new(self.parse_expr()?),
                 }))
             }
-            _ => Ok(Expr::Identifier(w.to_ident(w_span))),
+            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
         }
     }

@@ -1438,7 +1439,7 @@ impl<'a> Parser<'a> {
         } else {
             let tok = self.next_token();
             let key = match tok.token {
-                Token::Word(word) => word.to_ident(tok.span),
+                Token::Word(word) => word.into_ident(tok.span),
                 _ => {
                     return parser_err!(
                         format!("Expected identifier, found: {tok}"),
@@ -1490,7 +1491,7 @@ impl<'a> Parser<'a> {
                 let next_token = self.next_token();
                 match next_token.token {
                     Token::Word(w) => {
-                        let expr = Expr::Identifier(w.to_ident(next_token.span));
+                        let expr = Expr::Identifier(w.into_ident(next_token.span));
                         chain.push(AccessExpr::Dot(expr));
                         if self.peek_token().token == Token::LBracket {
                             if self.dialect.supports_partiql() {
@@ -1670,7 +1671,7 @@ impl<'a> Parser<'a> {
             while p.consume_token(&Token::Period) {
                 let tok = p.next_token();
                 let name = match tok.token {
-                    Token::Word(word) => word.to_ident(tok.span),
+                    Token::Word(word) => word.into_ident(tok.span),
                     _ => return p.expected("identifier", tok),
                 };
                 let func = match p.parse_function(ObjectName(vec![name]))? {
@@ -8242,7 +8243,7 @@ impl<'a> Parser<'a> {
         // This because snowflake allows numbers as placeholders
         let next_token = self.next_token();
         let ident = match next_token.token {
-            Token::Word(w) => Ok(w.to_ident(next_token.span)),
+            Token::Word(w) => Ok(w.into_ident(next_token.span)),
             Token::Number(w, false) => Ok(Ident::new(w)),
             _ => self.expected("placeholder", next_token),
         }?;
@@ -8753,7 +8754,7 @@ impl<'a> Parser<'a> {
             // (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword,
             // not an alias.)
             Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
-                Ok(Some(w.to_ident(next_token.span)))
+                Ok(Some(w.into_ident(next_token.span)))
             }
             // MSSQL supports single-quoted strings as aliases for columns
             // We accept them as table aliases too, although MSSQL does not.
@@ -8920,7 +8921,7 @@ impl<'a> Parser<'a> {
         loop {
             match &self.peek_token_ref().token {
                 Token::Word(w) => {
-                    idents.push(w.to_ident(self.peek_token_ref().span));
+                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
                 }
                 Token::EOF | Token::Eq => break,
                 _ => {}
@@ -8975,7 +8976,7 @@ impl<'a> Parser<'a> {
         // expecting at least one word for identifier
         let next_token = self.next_token();
         match next_token.token {
-            Token::Word(w) => idents.push(w.to_ident(next_token.span)),
+            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
             Token::EOF => {
                 return Err(ParserError::ParserError(
                     "Empty input when parsing identifier".to_string(),
@@ -8995,7 +8996,7 @@ impl<'a> Parser<'a> {
             Token::Period => {
                 let next_token = self.next_token();
                 match next_token.token {
-                    Token::Word(w) => idents.push(w.to_ident(next_token.span)),
+                    Token::Word(w) => idents.push(w.into_ident(next_token.span)),
                     Token::EOF => {
                         return Err(ParserError::ParserError(
                             "Trailing period in identifier".to_string(),
@@ -9024,7 +9025,7 @@ impl<'a> Parser<'a> {
     pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
         let next_token = self.next_token();
         match next_token.token {
-            Token::Word(w) => Ok(w.to_ident(next_token.span)),
+            Token::Word(w) => Ok(w.into_ident(next_token.span)),
             Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
             Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
             _ => self.expected("identifier", next_token),
@@ -9044,9 +9045,10 @@ impl<'a> Parser<'a> {
     fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
         match self.peek_token().token {
             Token::Word(w) => {
+                let quote_style_is_none = w.quote_style.is_none();
                 let mut requires_whitespace = false;
-                let mut ident = w.to_ident(self.next_token().span);
-                if w.quote_style.is_none() {
+                let mut ident = w.into_ident(self.next_token().span);
+                if quote_style_is_none {
                     while matches!(self.peek_token_no_skip().token, Token::Minus) {
                         self.next_token();
                         ident.value.push('-');
@@ -13475,13 +13477,23 @@ impl<'a> Parser<'a> {
 }

 impl Word {
+    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
     pub fn to_ident(&self, span: Span) -> Ident {
         Ident {
             value: self.value.clone(),
             quote_style: self.quote_style,
             span,
         }
     }
+
+    /// Convert this word into an [`Ident`] identifier
+    pub fn into_ident(self, span: Span) -> Ident {
+        Ident {
+            value: self.value,
+            quote_style: self.quote_style,
+            span,
+        }
+    }
 }

 #[cfg(test)]
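
For context on the API change applied throughout the diff: `Word::to_ident` borrows the `Word` and clones its `value` string, while the new `Word::into_ident` consumes the `Word` and moves the string out, so call sites that own the token skip an allocation (sites that only peek still call `.clone()` first). Below is a minimal usage sketch, not part of the commit; it assumes sqlparser 0.54 with this change, `Span::empty()` from the tokenizer module, and builds a `Word` by hand purely for illustration.

```rust
use sqlparser::ast::Ident;
use sqlparser::keywords::Keyword;
use sqlparser::tokenizer::{Span, Word};

fn main() {
    // Hand-built token word; in the parser this comes from the tokenizer.
    let word = Word {
        value: "my_table".to_string(),
        quote_style: None,
        keyword: Keyword::NoKeyword,
    };

    // Deprecated path: borrows `word` and clones its value String.
    #[allow(deprecated)]
    let cloned: Ident = word.to_ident(Span::empty());

    // New path: consumes `word` and moves the value String, no clone.
    let moved: Ident = word.into_ident(Span::empty());

    assert_eq!(cloned.value, moved.value);
}
```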