diff --git a/src/ast/helpers/attached_token.rs b/src/ast/helpers/attached_token.rs index 6b930b513..8aded21d3 100644 --- a/src/ast/helpers/attached_token.rs +++ b/src/ast/helpers/attached_token.rs @@ -80,7 +80,7 @@ use sqlparser_derive::{Visit, VisitMut}; #[derive(Clone)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AttachedToken(pub TokenWithSpan); +pub struct AttachedToken(pub TokenWithSpan<'static>); impl AttachedToken { /// Return a new Empty AttachedToken @@ -123,13 +123,13 @@ impl Hash for AttachedToken { } } -impl From for AttachedToken { - fn from(value: TokenWithSpan) -> Self { +impl From> for AttachedToken { + fn from(value: TokenWithSpan<'static>) -> Self { AttachedToken(value) } } -impl From for TokenWithSpan { +impl From for TokenWithSpan<'static> { fn from(value: AttachedToken) -> Self { value.0 } diff --git a/src/ast/mod.rs b/src/ast/mod.rs index 482c38132..12f37a383 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -2790,7 +2790,7 @@ impl fmt::Display for Declare { } /// Sql options of a `CREATE TABLE` statement. 
-#[derive(Debug, Default, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum CreateTableOptions { diff --git a/src/ast/spans.rs b/src/ast/spans.rs index cfaaf8f09..2f96abcca 100644 --- a/src/ast/spans.rs +++ b/src/ast/spans.rs @@ -2407,7 +2407,7 @@ pub mod tests { #[test] fn test_join() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id", ); @@ -2432,7 +2432,7 @@ pub mod tests { #[test] pub fn test_union() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source", ); @@ -2449,7 +2449,7 @@ pub mod tests { #[test] pub fn test_subquery() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT a FROM (SELECT a FROM postgres.public.source) AS b", ); @@ -2474,7 +2474,7 @@ pub mod tests { #[test] pub fn test_cte() { let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + let test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); let query = test.0.parse_query().unwrap(); @@ -2486,7 +2486,7 @@ pub mod tests { #[test] pub fn test_snowflake_lateral_flatten() { let dialect = &SnowflakeDialect; - let mut test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS 
FLATTENED"); + let test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); let query = test.0.parse_select().unwrap(); @@ -2498,7 +2498,7 @@ pub mod tests { #[test] pub fn test_wildcard_from_cte() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "WITH cte AS (SELECT a FROM postgres.public.source) SELECT cte.* FROM cte", ); @@ -2524,7 +2524,7 @@ pub mod tests { #[test] fn test_case_expr_span() { let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END"); + let test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END"); let expr = test.0.parse_expr().unwrap(); let expr_span = expr.span(); assert_eq!( diff --git a/src/ast/visitor.rs b/src/ast/visitor.rs index 328f925f7..d91426ed8 100644 --- a/src/ast/visitor.rs +++ b/src/ast/visitor.rs @@ -17,6 +17,11 @@ //! Recursive visitors for ast Nodes. See [`Visitor`] for more details. +#[cfg(not(feature = "std"))] +use alloc::borrow::Cow; +#[cfg(feature = "std")] +use std::borrow::Cow; + use crate::ast::{Expr, ObjectName, Query, Statement, TableFactor, Value}; use core::ops::ControlFlow; @@ -118,6 +123,19 @@ visit_noop!(u8, u16, u32, u64, i8, i16, i32, i64, char, bool, String); #[cfg(feature = "bigdecimal")] visit_noop!(bigdecimal::BigDecimal); +// Implement Visit and VisitMut for Cow to support the lifetime parameter in BorrowedToken +impl<'a> Visit for Cow<'a, str> { + fn visit(&self, _visitor: &mut V) -> ControlFlow { + ControlFlow::Continue(()) + } +} + +impl<'a> VisitMut for Cow<'a, str> { + fn visit(&mut self, _visitor: &mut V) -> ControlFlow { + ControlFlow::Continue(()) + } +} + /// A visitor that can be used to walk an AST tree. 
/// /// `pre_visit_` methods are invoked before visiting all children of the @@ -751,7 +769,7 @@ mod tests { fn do_visit>(sql: &str, visitor: &mut V) -> Statement { let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let tokens = Tokenizer::new(&dialect, sql).tokenized_owned().unwrap(); let s = Parser::new(&dialect) .with_tokens(tokens) .parse_statement() @@ -942,7 +960,9 @@ mod tests { let sql = format!("SELECT x where {cond}"); let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap(); + let tokens = Tokenizer::new(&dialect, sql.as_str()) + .tokenized_owned() + .unwrap(); let s = Parser::new(&dialect) .with_tokens(tokens) .parse_statement() @@ -983,7 +1003,7 @@ mod visit_mut_tests { fn do_visit_mut>(sql: &str, visitor: &mut V) -> Statement { let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let tokens = Tokenizer::new(&dialect, sql).tokenized_owned().unwrap(); let mut s = Parser::new(&dialect) .with_tokens(tokens) .parse_statement() diff --git a/src/dialect/bigquery.rs b/src/dialect/bigquery.rs index 27fd3cca3..d1a61553e 100644 --- a/src/dialect/bigquery.rs +++ b/src/dialect/bigquery.rs @@ -46,7 +46,7 @@ const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ pub struct BigQueryDialect; impl Dialect for BigQueryDialect { - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::BEGIN) { if parser.peek_keyword(Keyword::TRANSACTION) || parser.peek_token_ref().token == Token::SemiColon @@ -145,7 +145,7 @@ impl Dialect for BigQueryDialect { true } - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !RESERVED_FOR_COLUMN_ALIAS.contains(kw) } diff --git a/src/dialect/mod.rs b/src/dialect/mod.rs index ef4e1cdde..d4678f545 100644 --- 
a/src/dialect/mod.rs +++ b/src/dialect/mod.rs @@ -53,7 +53,7 @@ use crate::ast::{ColumnOption, Expr, GranteesType, Ident, ObjectNamePart, Statem pub use crate::keywords; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Token; +use crate::tokenizer::BorrowedToken; #[cfg(not(feature = "std"))] use alloc::boxed::Box; @@ -466,7 +466,7 @@ pub trait Dialect: Debug + Any { } /// Dialect-specific prefix parser override - fn parse_prefix(&self, _parser: &mut Parser) -> Option> { + fn parse_prefix(&self, _parser: &Parser) -> Option> { // return None to fall back to the default behavior None } @@ -615,7 +615,7 @@ pub trait Dialect: Debug + Any { /// If `None` is returned, falls back to the default behavior. fn parse_infix( &self, - _parser: &mut Parser, + _parser: &Parser, _expr: &Expr, _precedence: u8, ) -> Option> { @@ -655,16 +655,16 @@ pub trait Dialect: Debug + Any { let token = parser.peek_token(); debug!("get_next_precedence_full() {token:?}"); match token.token { - Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)), - Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)), - Token::Word(w) if w.keyword == Keyword::XOR => Ok(p!(Xor)), + BorrowedToken::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)), + BorrowedToken::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)), + BorrowedToken::Word(w) if w.keyword == Keyword::XOR => Ok(p!(Xor)), - Token::Word(w) if w.keyword == Keyword::AT => { + BorrowedToken::Word(w) if w.keyword == Keyword::AT => { match ( parser.peek_nth_token(1).token, parser.peek_nth_token(2).token, ) { - (Token::Word(w), Token::Word(w2)) + (BorrowedToken::Word(w), BorrowedToken::Word(w2)) if w.keyword == Keyword::TIME && w2.keyword == Keyword::ZONE => { Ok(p!(AtTz)) @@ -673,102 +673,112 @@ pub trait Dialect: Debug + Any { } } - Token::Word(w) if w.keyword == Keyword::NOT => match parser.peek_nth_token(1).token { - // The precedence of NOT varies depending on keyword that - // follows it. 
If it is followed by IN, BETWEEN, or LIKE, - // it takes on the precedence of those tokens. Otherwise, it - // is not an infix operator, and therefore has zero - // precedence. - Token::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::LIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), - Token::Word(w) - if w.keyword == Keyword::NULL && !parser.in_column_definition_state() => - { - Ok(p!(Is)) + BorrowedToken::Word(w) if w.keyword == Keyword::NOT => { + match parser.peek_nth_token(1).token { + // The precedence of NOT varies depending on keyword that + // follows it. If it is followed by IN, BETWEEN, or LIKE, + // it takes on the precedence of those tokens. Otherwise, it + // is not an infix operator, and therefore has zero + // precedence. 
+ BorrowedToken::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), + BorrowedToken::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), + BorrowedToken::Word(w) if w.keyword == Keyword::LIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), + BorrowedToken::Word(w) + if w.keyword == Keyword::NULL && !parser.in_column_definition_state() => + { + Ok(p!(Is)) + } + _ => Ok(self.prec_unknown()), } - _ => Ok(self.prec_unknown()), - }, - Token::Word(w) if w.keyword == Keyword::NOTNULL && self.supports_notnull_operator() => { - Ok(p!(Is)) - } - Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)), - Token::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::OVERLAPS => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::LIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)), - Token::Period => Ok(p!(Period)), - Token::Assignment - | Token::Eq - | Token::Lt - | Token::LtEq - | Token::Neq - | Token::Gt - | Token::GtEq - | Token::DoubleEq - | 
Token::Tilde - | Token::TildeAsterisk - | Token::ExclamationMarkTilde - | Token::ExclamationMarkTildeAsterisk - | Token::DoubleTilde - | Token::DoubleTildeAsterisk - | Token::ExclamationMarkDoubleTilde - | Token::ExclamationMarkDoubleTildeAsterisk - | Token::Spaceship => Ok(p!(Eq)), - Token::Pipe - | Token::QuestionMarkDash - | Token::DoubleSharp - | Token::Overlap - | Token::AmpersandLeftAngleBracket - | Token::AmpersandRightAngleBracket - | Token::QuestionMarkDashVerticalBar - | Token::AmpersandLeftAngleBracketVerticalBar - | Token::VerticalBarAmpersandRightAngleBracket - | Token::TwoWayArrow - | Token::LeftAngleBracketCaret - | Token::RightAngleBracketCaret - | Token::QuestionMarkSharp - | Token::QuestionMarkDoubleVerticalBar - | Token::QuestionPipe - | Token::TildeEqual - | Token::AtSign - | Token::ShiftLeftVerticalBar - | Token::VerticalBarShiftRight => Ok(p!(Pipe)), - Token::Caret | Token::Sharp | Token::ShiftRight | Token::ShiftLeft => Ok(p!(Caret)), - Token::Ampersand => Ok(p!(Ampersand)), - Token::Plus | Token::Minus => Ok(p!(PlusMinus)), - Token::Mul | Token::Div | Token::DuckIntDiv | Token::Mod | Token::StringConcat => { - Ok(p!(MulDivModOp)) } - Token::DoubleColon | Token::ExclamationMark | Token::LBracket | Token::CaretAt => { - Ok(p!(DoubleColon)) + BorrowedToken::Word(w) + if w.keyword == Keyword::NOTNULL && self.supports_notnull_operator() => + { + Ok(p!(Is)) } - Token::Arrow - | Token::LongArrow - | Token::HashArrow - | Token::HashLongArrow - | Token::AtArrow - | Token::ArrowAt - | Token::HashMinus - | Token::AtQuestion - | Token::AtAt - | Token::Question - | Token::QuestionAnd - | Token::CustomBinaryOperator(_) => Ok(p!(PgOther)), + BorrowedToken::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)), + BorrowedToken::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), + BorrowedToken::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), + BorrowedToken::Word(w) if w.keyword == Keyword::OVERLAPS => Ok(p!(Between)), + BorrowedToken::Word(w) 
if w.keyword == Keyword::LIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), + BorrowedToken::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)), + BorrowedToken::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)), + BorrowedToken::Period => Ok(p!(Period)), + BorrowedToken::Assignment + | BorrowedToken::Eq + | BorrowedToken::Lt + | BorrowedToken::LtEq + | BorrowedToken::Neq + | BorrowedToken::Gt + | BorrowedToken::GtEq + | BorrowedToken::DoubleEq + | BorrowedToken::Tilde + | BorrowedToken::TildeAsterisk + | BorrowedToken::ExclamationMarkTilde + | BorrowedToken::ExclamationMarkTildeAsterisk + | BorrowedToken::DoubleTilde + | BorrowedToken::DoubleTildeAsterisk + | BorrowedToken::ExclamationMarkDoubleTilde + | BorrowedToken::ExclamationMarkDoubleTildeAsterisk + | BorrowedToken::Spaceship => Ok(p!(Eq)), + BorrowedToken::Pipe + | BorrowedToken::QuestionMarkDash + | BorrowedToken::DoubleSharp + | BorrowedToken::Overlap + | BorrowedToken::AmpersandLeftAngleBracket + | BorrowedToken::AmpersandRightAngleBracket + | BorrowedToken::QuestionMarkDashVerticalBar + | BorrowedToken::AmpersandLeftAngleBracketVerticalBar + | BorrowedToken::VerticalBarAmpersandRightAngleBracket + | BorrowedToken::TwoWayArrow + | BorrowedToken::LeftAngleBracketCaret + | BorrowedToken::RightAngleBracketCaret + | BorrowedToken::QuestionMarkSharp + | BorrowedToken::QuestionMarkDoubleVerticalBar + | BorrowedToken::QuestionPipe + | BorrowedToken::TildeEqual + | BorrowedToken::AtSign + | BorrowedToken::ShiftLeftVerticalBar + | BorrowedToken::VerticalBarShiftRight => Ok(p!(Pipe)), + 
BorrowedToken::Caret + | BorrowedToken::Sharp + | BorrowedToken::ShiftRight + | BorrowedToken::ShiftLeft => Ok(p!(Caret)), + BorrowedToken::Ampersand => Ok(p!(Ampersand)), + BorrowedToken::Plus | BorrowedToken::Minus => Ok(p!(PlusMinus)), + BorrowedToken::Mul + | BorrowedToken::Div + | BorrowedToken::DuckIntDiv + | BorrowedToken::Mod + | BorrowedToken::StringConcat => Ok(p!(MulDivModOp)), + BorrowedToken::DoubleColon + | BorrowedToken::ExclamationMark + | BorrowedToken::LBracket + | BorrowedToken::CaretAt => Ok(p!(DoubleColon)), + BorrowedToken::Arrow + | BorrowedToken::LongArrow + | BorrowedToken::HashArrow + | BorrowedToken::HashLongArrow + | BorrowedToken::AtArrow + | BorrowedToken::ArrowAt + | BorrowedToken::HashMinus + | BorrowedToken::AtQuestion + | BorrowedToken::AtAt + | BorrowedToken::Question + | BorrowedToken::QuestionAnd + | BorrowedToken::CustomBinaryOperator(_) => Ok(p!(PgOther)), _ => Ok(self.prec_unknown()), } } @@ -778,7 +788,7 @@ pub trait Dialect: Debug + Any { /// This method is called to parse the next statement. /// /// If `None` is returned, falls back to the default behavior. - fn parse_statement(&self, _parser: &mut Parser) -> Option> { + fn parse_statement(&self, _parser: &Parser) -> Option> { // return None to fall back to the default behavior None } @@ -790,7 +800,7 @@ pub trait Dialect: Debug + Any { /// If `None` is returned, falls back to the default behavior. fn parse_column_option( &self, - _parser: &mut Parser, + _parser: &Parser, ) -> Result, ParserError>>, ParserError> { // return None to fall back to the default behavior Ok(None) @@ -1021,33 +1031,33 @@ pub trait Dialect: Debug + Any { /// Returns true if the specified keyword should be parsed as a column identifier. 
/// See [keywords::RESERVED_FOR_COLUMN_ALIAS] - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) } /// Returns true if the specified keyword should be parsed as a select item alias. /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided /// to enable looking ahead if needed. - fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &Parser) -> bool { explicit || self.is_column_alias(kw, parser) } /// Returns true if the specified keyword should be parsed as a table factor identifier. /// See [keywords::RESERVED_FOR_TABLE_FACTOR] - fn is_table_factor(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_table_factor(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_TABLE_FACTOR.contains(kw) } /// Returns true if the specified keyword should be parsed as a table factor alias. /// See [keywords::RESERVED_FOR_TABLE_ALIAS] - fn is_table_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_table_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) } /// Returns true if the specified keyword should be parsed as a table factor alias. /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided /// to enable looking ahead if needed. 
- fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &Parser) -> bool { explicit || self.is_table_alias(kw, parser) } @@ -1400,14 +1410,14 @@ mod tests { fn parse_prefix( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, ) -> Option> { self.0.parse_prefix(parser) } fn parse_infix( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, expr: &Expr, precedence: u8, ) -> Option> { @@ -1423,7 +1433,7 @@ mod tests { fn parse_statement( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, ) -> Option> { self.0.parse_statement(parser) } diff --git a/src/dialect/mssql.rs b/src/dialect/mssql.rs index e1902b389..f5a0bdd81 100644 --- a/src/dialect/mssql.rs +++ b/src/dialect/mssql.rs @@ -23,7 +23,7 @@ use crate::ast::{ use crate::dialect::Dialect; use crate::keywords::{self, Keyword}; use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Token; +use crate::tokenizer::BorrowedToken; #[cfg(not(feature = "std"))] use alloc::{vec, vec::Vec}; @@ -128,11 +128,11 @@ impl Dialect for MsSqlDialect { &[GranteesType::Public] } - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw) } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.peek_keyword(Keyword::IF) { Some(self.parse_if_stmt(parser)) } else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) { @@ -157,7 +157,7 @@ impl MsSqlDialect { /// [ ELSE /// { sql_statement | statement_block } ] /// ``` - fn parse_if_stmt(&self, parser: &mut Parser) -> Result { + fn parse_if_stmt(&self, parser: &Parser) -> Result { let if_token = 
parser.expect_keyword(Keyword::IF)?; let condition = parser.parse_expr()?; @@ -167,19 +167,19 @@ impl MsSqlDialect { let statements = self.parse_statement_list(parser, Some(Keyword::END))?; let end_token = parser.expect_keyword(Keyword::END)?; ConditionalStatementBlock { - start_token: AttachedToken(if_token), + start_token: AttachedToken(if_token.to_static()), condition: Some(condition), then_token: None, conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), + begin_token: AttachedToken(begin_token.to_static()), statements, - end_token: AttachedToken(end_token), + end_token: AttachedToken(end_token.to_static()), }), } } else { let stmt = parser.parse_statement()?; ConditionalStatementBlock { - start_token: AttachedToken(if_token), + start_token: AttachedToken(if_token.to_static()), condition: Some(condition), then_token: None, conditional_statements: ConditionalStatements::Sequence { @@ -189,7 +189,8 @@ impl MsSqlDialect { }; let mut prior_statement_ended_with_semi_colon = false; - while let Token::SemiColon = parser.peek_token_ref().token { + + while let BorrowedToken::SemiColon = parser.peek_token_ref().token { parser.advance_token(); prior_statement_ended_with_semi_colon = true; } @@ -202,19 +203,19 @@ impl MsSqlDialect { let statements = self.parse_statement_list(parser, Some(Keyword::END))?; let end_token = parser.expect_keyword(Keyword::END)?; else_block = Some(ConditionalStatementBlock { - start_token: AttachedToken(else_token), + start_token: AttachedToken(else_token.to_static()), condition: None, then_token: None, conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), + begin_token: AttachedToken(begin_token.to_static()), statements, - end_token: AttachedToken(end_token), + end_token: AttachedToken(end_token.to_static()), }), }); } else { let stmt = parser.parse_statement()?; else_block = Some(ConditionalStatementBlock { - 
start_token: AttachedToken(else_token), + start_token: AttachedToken(else_token.to_static()), condition: None, then_token: None, conditional_statements: ConditionalStatements::Sequence { @@ -240,7 +241,7 @@ impl MsSqlDialect { /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql fn parse_create_trigger( &self, - parser: &mut Parser, + parser: &Parser, or_alter: bool, ) -> Result { let name = parser.parse_object_name(false)?; @@ -279,12 +280,12 @@ impl MsSqlDialect { /// Stops parsing when reaching EOF or the given keyword. fn parse_statement_list( &self, - parser: &mut Parser, + parser: &Parser, terminal_keyword: Option, ) -> Result, ParserError> { let mut stmts = Vec::new(); loop { - if let Token::EOF = parser.peek_token_ref().token { + if let BorrowedToken::EOF = parser.peek_token_ref().token { break; } if let Some(term) = terminal_keyword { @@ -293,7 +294,7 @@ impl MsSqlDialect { } } stmts.push(parser.parse_statement()?); - while let Token::SemiColon = parser.peek_token_ref().token { + while let BorrowedToken::SemiColon = parser.peek_token_ref().token { parser.advance_token(); } } diff --git a/src/dialect/mysql.rs b/src/dialect/mysql.rs index 8d2a5ad4b..f8c96d127 100644 --- a/src/dialect/mysql.rs +++ b/src/dialect/mysql.rs @@ -86,7 +86,7 @@ impl Dialect for MySqlDialect { fn parse_infix( &self, - parser: &mut crate::parser::Parser, + parser: &crate::parser::Parser, expr: &crate::ast::Expr, _precedence: u8, ) -> Option> { @@ -102,7 +102,7 @@ impl Dialect for MySqlDialect { } } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keywords(&[Keyword::LOCK, Keyword::TABLES]) { Some(parse_lock_tables(parser)) } else if parser.parse_keywords(&[Keyword::UNLOCK, Keyword::TABLES]) { @@ -134,7 +134,7 @@ impl Dialect for MySqlDialect { true } - fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + fn 
is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &Parser) -> bool { explicit || (!keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) && !RESERVED_FOR_TABLE_ALIAS_MYSQL.contains(kw)) @@ -171,13 +171,13 @@ impl Dialect for MySqlDialect { /// `LOCK TABLES` /// -fn parse_lock_tables(parser: &mut Parser) -> Result { +fn parse_lock_tables(parser: &Parser) -> Result { let tables = parser.parse_comma_separated(parse_lock_table)?; Ok(Statement::LockTables { tables }) } // tbl_name [[AS] alias] lock_type -fn parse_lock_table(parser: &mut Parser) -> Result { +fn parse_lock_table(parser: &Parser) -> Result { let table = parser.parse_identifier()?; let alias = parser.parse_optional_alias(&[Keyword::READ, Keyword::WRITE, Keyword::LOW_PRIORITY])?; @@ -191,7 +191,7 @@ fn parse_lock_table(parser: &mut Parser) -> Result { } // READ [LOCAL] | [LOW_PRIORITY] WRITE -fn parse_lock_tables_type(parser: &mut Parser) -> Result { +fn parse_lock_tables_type(parser: &Parser) -> Result { if parser.parse_keyword(Keyword::READ) { if parser.parse_keyword(Keyword::LOCAL) { Ok(LockTableType::Read { local: true }) @@ -211,6 +211,6 @@ fn parse_lock_tables_type(parser: &mut Parser) -> Result -fn parse_unlock_tables(_parser: &mut Parser) -> Result { +fn parse_unlock_tables(_parser: &Parser) -> Result { Ok(Statement::UnlockTables) } diff --git a/src/dialect/postgresql.rs b/src/dialect/postgresql.rs index e861cc515..9da545fdb 100644 --- a/src/dialect/postgresql.rs +++ b/src/dialect/postgresql.rs @@ -31,7 +31,7 @@ use log::debug; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Token; +use crate::tokenizer::BorrowedToken; /// A [`Dialect`] for [PostgreSQL](https://www.postgresql.org/) #[derive(Debug)] @@ -110,32 +110,32 @@ impl Dialect for PostgreSqlDialect { // we only return some custom value here when the behaviour (not merely the numeric value) differs // from the default implementation match 
token.token { - Token::Word(w) + BorrowedToken::Word(w) if w.keyword == Keyword::COLLATE && !parser.in_column_definition_state() => { Some(Ok(COLLATE_PREC)) } - Token::LBracket => Some(Ok(BRACKET_PREC)), - Token::Arrow - | Token::LongArrow - | Token::HashArrow - | Token::HashLongArrow - | Token::AtArrow - | Token::ArrowAt - | Token::HashMinus - | Token::AtQuestion - | Token::AtAt - | Token::Question - | Token::QuestionAnd - | Token::QuestionPipe - | Token::ExclamationMark - | Token::Overlap - | Token::CaretAt - | Token::StringConcat - | Token::Sharp - | Token::ShiftRight - | Token::ShiftLeft - | Token::CustomBinaryOperator(_) => Some(Ok(PG_OTHER_PREC)), + BorrowedToken::LBracket => Some(Ok(BRACKET_PREC)), + BorrowedToken::Arrow + | BorrowedToken::LongArrow + | BorrowedToken::HashArrow + | BorrowedToken::HashLongArrow + | BorrowedToken::AtArrow + | BorrowedToken::ArrowAt + | BorrowedToken::HashMinus + | BorrowedToken::AtQuestion + | BorrowedToken::AtAt + | BorrowedToken::Question + | BorrowedToken::QuestionAnd + | BorrowedToken::QuestionPipe + | BorrowedToken::ExclamationMark + | BorrowedToken::Overlap + | BorrowedToken::CaretAt + | BorrowedToken::StringConcat + | BorrowedToken::Sharp + | BorrowedToken::ShiftRight + | BorrowedToken::ShiftLeft + | BorrowedToken::CustomBinaryOperator(_) => Some(Ok(PG_OTHER_PREC)), _ => None, } } diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs index bb0d4f16b..a696c101b 100644 --- a/src/dialect/snowflake.rs +++ b/src/dialect/snowflake.rs @@ -37,7 +37,7 @@ use crate::ast::{ use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; use crate::parser::{IsOptional, Parser, ParserError}; -use crate::tokenizer::Token; +use crate::tokenizer::BorrowedToken; #[cfg(not(feature = "std"))] use alloc::boxed::Box; #[cfg(not(feature = "std"))] @@ -211,7 +211,7 @@ impl Dialect for SnowflakeDialect { true } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> 
Option> { if parser.parse_keyword(Keyword::BEGIN) { return Some(parser.parse_begin_exception_end()); } @@ -318,7 +318,7 @@ impl Dialect for SnowflakeDialect { fn parse_column_option( &self, - parser: &mut Parser, + parser: &Parser, ) -> Result, ParserError>>, ParserError> { parser.maybe_parse(|parser| { let with = parser.parse_keyword(Keyword::WITH); @@ -350,7 +350,7 @@ impl Dialect for SnowflakeDialect { let token = parser.peek_token(); // Snowflake supports the `:` cast operator unlike other dialects match token.token { - Token::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))), + BorrowedToken::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))), _ => None, } } @@ -391,14 +391,14 @@ impl Dialect for SnowflakeDialect { true } - fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { // The following keywords can be considered an alias as long as // they are not followed by other tokens that may change their meaning // e.g. `SELECT * EXCEPT (col1) FROM tbl` Keyword::EXCEPT // e.g. 
`INSERT INTO t SELECT 1 RETURNING *` - | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) => + | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, BorrowedToken::Comma | BorrowedToken::EOF) => { false } @@ -437,7 +437,7 @@ impl Dialect for SnowflakeDialect { } } - fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_table_alias(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { // The following keywords can be considered an alias as long as // they are not followed by other tokens that may change their meaning @@ -448,7 +448,10 @@ impl Dialect for SnowflakeDialect { | Keyword::UNPIVOT | Keyword::EXCEPT | Keyword::MATCH_RECOGNIZE - if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) => + if !matches!( + parser.peek_token_ref().token, + BorrowedToken::SemiColon | BorrowedToken::EOF + ) => { false } @@ -521,11 +524,13 @@ impl Dialect for SnowflakeDialect { } } - fn is_table_factor(&self, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_table_factor(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { Keyword::LIMIT if peek_for_limit_options(parser) => false, // Table function - Keyword::TABLE if matches!(parser.peek_token_ref().token, Token::LParen) => true, + Keyword::TABLE if matches!(parser.peek_token_ref().token, BorrowedToken::LParen) => { + true + } _ => !RESERVED_KEYWORDS_FOR_TABLE_FACTOR.contains(kw), } } @@ -583,18 +588,20 @@ impl Dialect for SnowflakeDialect { // a LIMIT/FETCH keyword. fn peek_for_limit_options(parser: &Parser) -> bool { match &parser.peek_token_ref().token { - Token::Number(_, _) | Token::Placeholder(_) => true, - Token::SingleQuotedString(val) if val.is_empty() => true, - Token::DollarQuotedString(DollarQuotedString { value, .. 
}) if value.is_empty() => true, - Token::Word(w) if w.keyword == Keyword::NULL => true, + BorrowedToken::Number(_, _) | BorrowedToken::Placeholder(_) => true, + BorrowedToken::SingleQuotedString(val) if val.is_empty() => true, + BorrowedToken::DollarQuotedString(DollarQuotedString { value, .. }) if value.is_empty() => { + true + } + BorrowedToken::Word(w) if w.keyword == Keyword::NULL => true, _ => false, } } -fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result { +fn parse_file_staging_command(kw: Keyword, parser: &Parser) -> Result { let stage = parse_snowflake_stage_name(parser)?; let pattern = if parser.parse_keyword(Keyword::PATTERN) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; Some(parser.parse_literal_string()?) } else { None @@ -613,7 +620,7 @@ fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result -fn parse_alter_dynamic_table(parser: &mut Parser) -> Result { +fn parse_alter_dynamic_table(parser: &Parser) -> Result { // Use parse_object_name(true) to support IDENTIFIER() function let table_name = parser.parse_object_name(true)?; @@ -631,7 +638,7 @@ fn parse_alter_dynamic_table(parser: &mut Parser) -> Result Result -fn parse_alter_session(parser: &mut Parser, set: bool) -> Result { +fn parse_alter_session(parser: &Parser, set: bool) -> Result { let session_options = parse_session_options(parser, set)?; Ok(Statement::AlterSession { set, @@ -674,7 +681,7 @@ pub fn parse_create_table( transient: bool, iceberg: bool, dynamic: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = parser.parse_object_name(false)?; @@ -701,7 +708,7 @@ pub fn parse_create_table( loop { let next_token = parser.next_token(); match &next_token.token { - Token::Word(word) => match word.keyword { + BorrowedToken::Word(word) => match word.keyword { Keyword::COPY => { 
parser.expect_keyword_is(Keyword::GRANTS)?; builder = builder.copy_grants(true); @@ -732,36 +739,36 @@ pub fn parse_create_table( } Keyword::CLUSTER => { parser.expect_keyword_is(Keyword::BY)?; - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let cluster_by = Some(WrappedCollection::Parentheses( parser.parse_comma_separated(|p| p.parse_expr())?, )); - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; builder = builder.cluster_by(cluster_by) } Keyword::ENABLE_SCHEMA_EVOLUTION => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.enable_schema_evolution(Some(parser.parse_boolean_string()?)); } Keyword::CHANGE_TRACKING => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.change_tracking(Some(parser.parse_boolean_string()?)); } Keyword::DATA_RETENTION_TIME_IN_DAYS => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let data_retention_time_in_days = parser.parse_literal_uint()?; builder = builder.data_retention_time_in_days(Some(data_retention_time_in_days)); } Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let max_data_extension_time_in_days = parser.parse_literal_uint()?; builder = builder .max_data_extension_time_in_days(Some(max_data_extension_time_in_days)); } Keyword::DEFAULT_DDL_COLLATION => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let default_ddl_collation = parser.parse_literal_string()?; builder = builder.default_ddl_collation(Some(default_ddl_collation)); } @@ -784,17 +791,17 @@ pub fn parse_create_table( parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?; let policy = parser.parse_object_name(false)?; parser.expect_keyword_is(Keyword::ON)?; - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; 
let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; builder = builder.with_row_access_policy(Some(RowAccessPolicy::new(policy, columns))) } Keyword::TAG => { - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let tags = parser.parse_comma_separated(Parser::parse_tag)?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; builder = builder.with_tags(Some(tags)); } Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => { @@ -802,23 +809,23 @@ pub fn parse_create_table( builder = builder.on_commit(on_commit); } Keyword::EXTERNAL_VOLUME => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder.external_volume = Some(parser.parse_literal_string()?); } Keyword::CATALOG => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder.catalog = Some(parser.parse_literal_string()?); } Keyword::BASE_LOCATION => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder.base_location = Some(parser.parse_literal_string()?); } Keyword::CATALOG_SYNC => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder.catalog_sync = Some(parser.parse_literal_string()?); } Keyword::STORAGE_SERIALIZATION_POLICY => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder.storage_serialization_policy = Some(parse_storage_serialization_policy(parser)?); @@ -827,12 +834,12 @@ pub fn parse_create_table( builder = builder.if_not_exists(true); } Keyword::TARGET_LAG => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let target_lag = parser.parse_literal_string()?; builder = builder.target_lag(Some(target_lag)); } Keyword::WAREHOUSE => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let warehouse = 
parser.parse_identifier()?; builder = builder.warehouse(Some(warehouse)); } @@ -842,7 +849,7 @@ pub fn parse_create_table( builder = builder.version(version); } Keyword::REFRESH_MODE => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let refresh_mode = match parser.parse_one_of_keywords(&[ Keyword::AUTO, Keyword::FULL, @@ -856,7 +863,7 @@ pub fn parse_create_table( builder = builder.refresh_mode(refresh_mode); } Keyword::INITIALIZE => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let initialize = match parser .parse_one_of_keywords(&[Keyword::ON_CREATE, Keyword::ON_SCHEDULE]) { @@ -873,15 +880,15 @@ pub fn parse_create_table( return parser.expected("end of statement", next_token); } }, - Token::LParen => { + BorrowedToken::LParen => { parser.prev_token(); let (columns, constraints) = parser.parse_columns()?; builder = builder.columns(columns).constraints(constraints); } - Token::EOF => { + BorrowedToken::EOF => { break; } - Token::SemiColon => { + BorrowedToken::SemiColon => { parser.prev_token(); break; } @@ -912,7 +919,7 @@ pub fn parse_create_table( pub fn parse_create_database( or_replace: bool, transient: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = parser.parse_object_name(false)?; @@ -925,58 +932,58 @@ pub fn parse_create_database( loop { let next_token = parser.next_token(); match &next_token.token { - Token::Word(word) => match word.keyword { + BorrowedToken::Word(word) => match word.keyword { Keyword::CLONE => { builder = builder.clone_clause(Some(parser.parse_object_name(false)?)); } Keyword::DATA_RETENTION_TIME_IN_DAYS => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.data_retention_time_in_days(Some(parser.parse_literal_uint()?)); } Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => { - parser.expect_token(&Token::Eq)?; + 
parser.expect_token(&BorrowedToken::Eq)?; builder = builder.max_data_extension_time_in_days(Some(parser.parse_literal_uint()?)); } Keyword::EXTERNAL_VOLUME => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.external_volume(Some(parser.parse_literal_string()?)); } Keyword::CATALOG => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.catalog(Some(parser.parse_literal_string()?)); } Keyword::REPLACE_INVALID_CHARACTERS => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.replace_invalid_characters(Some(parser.parse_boolean_string()?)); } Keyword::DEFAULT_DDL_COLLATION => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.default_ddl_collation(Some(parser.parse_literal_string()?)); } Keyword::STORAGE_SERIALIZATION_POLICY => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let policy = parse_storage_serialization_policy(parser)?; builder = builder.storage_serialization_policy(Some(policy)); } Keyword::COMMENT => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.comment(Some(parser.parse_literal_string()?)); } Keyword::CATALOG_SYNC => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.catalog_sync(Some(parser.parse_literal_string()?)); } Keyword::CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; builder = builder.catalog_sync_namespace_flatten_delimiter(Some( parser.parse_literal_string()?, )); } Keyword::CATALOG_SYNC_NAMESPACE_MODE => { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let mode = match parser.parse_one_of_keywords(&[Keyword::NEST, Keyword::FLATTEN]) { Some(Keyword::NEST) => CatalogSyncNamespaceMode::Nest, @@ -989,19 +996,19 
@@ pub fn parse_create_database( } Keyword::WITH => { if parser.parse_keyword(Keyword::TAG) { - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let tags = parser.parse_comma_separated(Parser::parse_tag)?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; builder = builder.with_tags(Some(tags)); } else if parser.parse_keyword(Keyword::CONTACT) { - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let contacts = parser.parse_comma_separated(|p| { let purpose = p.parse_identifier()?.value; - p.expect_token(&Token::Eq)?; + p.expect_token(&BorrowedToken::Eq)?; let contact = p.parse_identifier()?.value; Ok(ContactEntry { purpose, contact }) })?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; builder = builder.with_contacts(Some(contacts)); } else { return parser.expected("TAG or CONTACT", next_token); @@ -1009,7 +1016,7 @@ pub fn parse_create_database( } _ => return parser.expected("end of statement", next_token), }, - Token::SemiColon | Token::EOF => break, + BorrowedToken::SemiColon | BorrowedToken::EOF => break, _ => return parser.expected("end of statement", next_token), } } @@ -1017,11 +1024,11 @@ pub fn parse_create_database( } pub fn parse_storage_serialization_policy( - parser: &mut Parser, + parser: &Parser, ) -> Result { let next_token = parser.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible), Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized), _ => parser.expected("storage_serialization_policy", next_token), @@ -1033,7 +1040,7 @@ pub fn parse_storage_serialization_policy( pub fn parse_create_stage( or_replace: bool, temporary: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { //[ IF NOT EXISTS ] let if_not_exists = 
parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -1048,25 +1055,25 @@ pub fn parse_create_stage( // [ directoryTableParams ] if parser.parse_keyword(Keyword::DIRECTORY) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; directory_table_params = parser.parse_key_value_options(true, &[])?.options; } // [ file_format] if parser.parse_keyword(Keyword::FILE_FORMAT) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; file_format = parser.parse_key_value_options(true, &[])?.options; } // [ copy_options ] if parser.parse_keyword(Keyword::COPY_OPTIONS) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; copy_options = parser.parse_key_value_options(true, &[])?.options; } // [ comment ] if parser.parse_keyword(Keyword::COMMENT) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; comment = Some(parser.parse_comment_value()?); } @@ -1092,41 +1099,41 @@ pub fn parse_create_stage( }) } -pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result { +pub fn parse_stage_name_identifier(parser: &Parser) -> Result { let mut ident = String::new(); while let Some(next_token) = parser.next_token_no_skip() { match &next_token.token { - Token::Whitespace(_) | Token::SemiColon => break, - Token::Period => { + BorrowedToken::Whitespace(_) | BorrowedToken::SemiColon => break, + BorrowedToken::Period => { parser.prev_token(); break; } - Token::RParen => { + BorrowedToken::RParen => { parser.prev_token(); break; } - Token::AtSign => ident.push('@'), - Token::Tilde => ident.push('~'), - Token::Mod => ident.push('%'), - Token::Div => ident.push('/'), - Token::Plus => ident.push('+'), - Token::Minus => ident.push('-'), - Token::Number(n, _) => ident.push_str(n), - Token::Word(w) => ident.push_str(&w.to_string()), + BorrowedToken::AtSign => ident.push('@'), + BorrowedToken::Tilde => ident.push('~'), + BorrowedToken::Mod => ident.push('%'), + 
BorrowedToken::Div => ident.push('/'), + BorrowedToken::Plus => ident.push('+'), + BorrowedToken::Minus => ident.push('-'), + BorrowedToken::Number(n, _) => ident.push_str(n), + BorrowedToken::Word(w) => ident.push_str(&w.to_string()), _ => return parser.expected("stage name identifier", parser.peek_token()), } } Ok(Ident::new(ident)) } -pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result { +pub fn parse_snowflake_stage_name(parser: &Parser) -> Result { match parser.next_token().token { - Token::AtSign => { + BorrowedToken::AtSign => { parser.prev_token(); let mut idents = vec![]; loop { idents.push(parse_stage_name_identifier(parser)?); - if !parser.consume_token(&Token::Period) { + if !parser.consume_token(&BorrowedToken::Period) { break; } } @@ -1141,12 +1148,14 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result` /// and `COPY INTO ` which have different syntax. -pub fn parse_copy_into(parser: &mut Parser) -> Result { +pub fn parse_copy_into(parser: &Parser) -> Result { let kind = match parser.peek_token().token { // Indicates an internal stage - Token::AtSign => CopyIntoSnowflakeKind::Location, + BorrowedToken::AtSign => CopyIntoSnowflakeKind::Location, // Indicates an external stage, i.e. s3://, gcs:// or azure:// - Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location, + BorrowedToken::SingleQuotedString(s) if s.contains("://") => { + CopyIntoSnowflakeKind::Location + } _ => CopyIntoSnowflakeKind::Table, }; @@ -1180,13 +1189,15 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { } let into_columns = match &parser.peek_token().token { - Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?), + BorrowedToken::LParen => { + Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?) 
+ } _ => None, }; parser.expect_keyword_is(Keyword::FROM)?; match parser.next_token().token { - Token::LParen if kind == CopyIntoSnowflakeKind::Table => { + BorrowedToken::LParen if kind == CopyIntoSnowflakeKind::Table => { // Data load with transformations parser.expect_keyword_is(Keyword::SELECT)?; from_transformations = parse_select_items_for_data_load(parser)?; @@ -1199,12 +1210,12 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { from_stage_alias = parser .maybe_parse_table_alias()? .map(|table_alias| table_alias.name); - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; } - Token::LParen if kind == CopyIntoSnowflakeKind::Location => { + BorrowedToken::LParen if kind == CopyIntoSnowflakeKind::Location => { // Data unload with a query from_query = Some(parser.parse_query()?); - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; } _ => { parser.prev_token(); @@ -1214,7 +1225,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { // as from_stage_alias = if parser.parse_keyword(Keyword::AS) { Some(match parser.next_token().token { - Token::Word(w) => Ok(Ident::new(w.value)), + BorrowedToken::Word(w) => Ok(Ident::new(w.value)), _ => parser.expected("stage alias", parser.peek_token()), }?) 
} else { @@ -1226,53 +1237,53 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { loop { // FILE_FORMAT if parser.parse_keyword(Keyword::FILE_FORMAT) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; file_format = parser.parse_key_value_options(true, &[])?.options; // PARTITION BY } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { partition = Some(Box::new(parser.parse_expr()?)) // FILES } else if parser.parse_keyword(Keyword::FILES) { - parser.expect_token(&Token::Eq)?; - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::Eq)?; + parser.expect_token(&BorrowedToken::LParen)?; let mut continue_loop = true; while continue_loop { continue_loop = false; let next_token = parser.next_token(); match next_token.token { - Token::SingleQuotedString(s) => files.push(s), + BorrowedToken::SingleQuotedString(s) => files.push(s), _ => parser.expected("file token", next_token)?, }; - if parser.next_token().token.eq(&Token::Comma) { + if parser.next_token().token.eq(&BorrowedToken::Comma) { continue_loop = true; } else { parser.prev_token(); // not a comma, need to go back } } - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; // PATTERN } else if parser.parse_keyword(Keyword::PATTERN) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; let next_token = parser.next_token(); pattern = Some(match next_token.token { - Token::SingleQuotedString(s) => s, + BorrowedToken::SingleQuotedString(s) => s, _ => parser.expected("pattern", next_token)?, }); // VALIDATION MODE } else if parser.parse_keyword(Keyword::VALIDATION_MODE) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; validation_mode = Some(parser.next_token().token.to_string()); // COPY OPTIONS } else if parser.parse_keyword(Keyword::COPY_OPTIONS) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; copy_options = 
parser.parse_key_value_options(true, &[])?.options; } else { match parser.next_token().token { - Token::SemiColon | Token::EOF => break, - Token::Comma => continue, + BorrowedToken::SemiColon | BorrowedToken::EOF => break, + BorrowedToken::Comma => continue, // In `COPY INTO ` the copy options do not have a shared key // like in `COPY INTO ` - Token::Word(key) => copy_options.push(parser.parse_key_value_option(&key)?), + BorrowedToken::Word(key) => copy_options.push(parser.parse_key_value_option(&key)?), _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()), } } @@ -1303,7 +1314,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { } fn parse_select_items_for_data_load( - parser: &mut Parser, + parser: &Parser, ) -> Result>, ParserError> { let mut select_items: Vec = vec![]; loop { @@ -1315,7 +1326,7 @@ fn parse_select_items_for_data_load( parser.parse_select_item()?, )), } - if matches!(parser.peek_token_ref().token, Token::Comma) { + if matches!(parser.peek_token_ref().token, BorrowedToken::Comma) { parser.advance_token(); } else { break; @@ -1324,9 +1335,7 @@ fn parse_select_items_for_data_load( Ok(Some(select_items)) } -fn parse_select_item_for_data_load( - parser: &mut Parser, -) -> Result { +fn parse_select_item_for_data_load(parser: &Parser) -> Result { let mut alias: Option = None; let mut file_col_num: i32 = 0; let mut element: Option = None; @@ -1334,13 +1343,13 @@ fn parse_select_item_for_data_load( let next_token = parser.next_token(); match next_token.token { - Token::Placeholder(w) => { + BorrowedToken::Placeholder(w) => { file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) })?; Ok(()) } - Token::Word(w) => { + BorrowedToken::Word(w) => { alias = Some(Ident::new(w.value)); Ok(()) } @@ -1348,11 +1357,11 @@ fn parse_select_item_for_data_load( }?; if alias.is_some() { - parser.expect_token(&Token::Period)?; + 
parser.expect_token(&BorrowedToken::Period)?; // now we get col_num token let col_num_token = parser.next_token(); match col_num_token.token { - Token::Placeholder(w) => { + BorrowedToken::Placeholder(w) => { file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) })?; @@ -1364,10 +1373,10 @@ fn parse_select_item_for_data_load( // try extracting optional element match parser.next_token().token { - Token::Colon => { + BorrowedToken::Colon => { // parse element element = Some(Ident::new(match parser.next_token().token { - Token::Word(w) => Ok(w.value), + BorrowedToken::Word(w) => Ok(w.value), _ => parser.expected("file_col_num", parser.peek_token()), }?)); } @@ -1380,7 +1389,7 @@ fn parse_select_item_for_data_load( // as if parser.parse_keyword(Keyword::AS) { item_as = Some(match parser.next_token().token { - Token::Word(w) => Ok(Ident::new(w.value)), + BorrowedToken::Word(w) => Ok(Ident::new(w.value)), _ => parser.expected("column item alias", parser.peek_token()), }?); } @@ -1393,7 +1402,7 @@ fn parse_select_item_for_data_load( }) } -fn parse_stage_params(parser: &mut Parser) -> Result { +fn parse_stage_params(parser: &Parser) -> Result { let (mut url, mut storage_integration, mut endpoint) = (None, None, None); let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![], @@ -1406,31 +1415,31 @@ fn parse_stage_params(parser: &mut Parser) -> Result Ok(word), + BorrowedToken::SingleQuotedString(word) => Ok(word), _ => parser.expected("a URL statement", parser.peek_token()), }?) 
} // STORAGE INTEGRATION if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; storage_integration = Some(parser.next_token().token.to_string()); } // ENDPOINT if parser.parse_keyword(Keyword::ENDPOINT) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; endpoint = Some(match parser.next_token().token { - Token::SingleQuotedString(word) => Ok(word), + BorrowedToken::SingleQuotedString(word) => Ok(word), _ => parser.expected("an endpoint statement", parser.peek_token()), }?) } // CREDENTIALS if parser.parse_keyword(Keyword::CREDENTIALS) { - parser.expect_token(&Token::Eq)?; + parser.expect_token(&BorrowedToken::Eq)?; credentials = KeyValueOptions { options: parser.parse_key_value_options(true, &[])?.options, delimiter: KeyValueOptionsDelimiter::Space, @@ -1439,7 +1448,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result Result Result, ParserError> { +fn parse_session_options(parser: &Parser, set: bool) -> Result, ParserError> { let mut options: Vec = Vec::new(); let empty = String::new; loop { let next_token = parser.peek_token(); match next_token.token { - Token::SemiColon | Token::EOF => break, - Token::Comma => { + BorrowedToken::SemiColon | BorrowedToken::EOF => break, + BorrowedToken::Comma => { parser.advance_token(); continue; } - Token::Word(key) => { + BorrowedToken::Word(key) => { parser.advance_token(); if set { let option = parser.parse_key_value_option(&key)?; @@ -1505,12 +1511,12 @@ fn parse_session_options( /// [ (seed , increment) | START num INCREMENT num ] [ ORDER | NOORDER ] /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table -fn parse_identity_property(parser: &mut Parser) -> Result { - let parameters = if parser.consume_token(&Token::LParen) { +fn parse_identity_property(parser: &Parser) -> Result { + let parameters = if parser.consume_token(&BorrowedToken::LParen) { let seed = 
parser.parse_number()?; - parser.expect_token(&Token::Comma)?; + parser.expect_token(&BorrowedToken::Comma)?; let increment = parser.parse_number()?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { seed, increment }, @@ -1541,14 +1547,14 @@ fn parse_identity_property(parser: &mut Parser) -> Result Result { let policy_name = parser.parse_object_name(false)?; let using_columns = if parser.parse_keyword(Keyword::USING) { - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; Some(columns) } else { None @@ -1567,17 +1573,17 @@ fn parse_column_policy_property( /// ( = '' [ , = '' , ... ] ) /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table -fn parse_column_tags(parser: &mut Parser, with: bool) -> Result { - parser.expect_token(&Token::LParen)?; +fn parse_column_tags(parser: &Parser, with: bool) -> Result { + parser.expect_token(&BorrowedToken::LParen)?; let tags = parser.parse_comma_separated(Parser::parse_tag)?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; Ok(TagsColumnOption { with, tags }) } /// Parse snowflake show objects. 
/// -fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result { +fn parse_show_objects(terse: bool, parser: &Parser) -> Result { let show_options = parser.parse_show_stmt_options()?; Ok(Statement::ShowObjects(ShowObjects { terse, diff --git a/src/dialect/sqlite.rs b/src/dialect/sqlite.rs index ba4cb6173..4edaa67c7 100644 --- a/src/dialect/sqlite.rs +++ b/src/dialect/sqlite.rs @@ -65,7 +65,7 @@ impl Dialect for SQLiteDialect { self.is_identifier_start(ch) || ch.is_ascii_digit() } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::REPLACE) { parser.prev_token(); Some(parser.parse_insert(parser.get_current_token().clone())) @@ -76,7 +76,7 @@ impl Dialect for SQLiteDialect { fn parse_infix( &self, - parser: &mut crate::parser::Parser, + parser: &crate::parser::Parser, expr: &crate::ast::Expr, _precedence: u8, ) -> Option> { diff --git a/src/parser/alter.rs b/src/parser/alter.rs index b3e3c99e6..767276d29 100644 --- a/src/parser/alter.rs +++ b/src/parser/alter.rs @@ -26,11 +26,11 @@ use crate::{ }, dialect::{MsSqlDialect, PostgreSqlDialect}, keywords::Keyword, - tokenizer::Token, + tokenizer::BorrowedToken, }; impl Parser<'_> { - pub fn parse_alter_role(&mut self) -> Result { + pub fn parse_alter_role(&self) -> Result { if dialect_of!(self is PostgreSqlDialect) { return self.parse_pg_alter_role(); } else if dialect_of!(self is MsSqlDialect) { @@ -53,7 +53,7 @@ impl Parser<'_> { /// ``` /// /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-alterpolicy.html) - pub fn parse_alter_policy(&mut self) -> Result { + pub fn parse_alter_policy(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; @@ -74,18 +74,18 @@ impl Parser<'_> { }; let using = if self.parse_keyword(Keyword::USING) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; 
let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(expr) } else { None }; let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(expr) } else { None @@ -110,7 +110,7 @@ impl Parser<'_> { /// /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; /// ``` - pub fn parse_alter_connector(&mut self) -> Result { + pub fn parse_alter_connector(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::SET)?; @@ -147,7 +147,7 @@ impl Parser<'_> { /// ```sql /// ALTER USER [ IF EXISTS ] [ ] [ OPTIONS ] /// ``` - pub fn parse_alter_user(&mut self) -> Result { + pub fn parse_alter_user(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; let rename_to = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { @@ -216,7 +216,7 @@ impl Parser<'_> { let add_mfa_method_otp = if self.parse_keywords(&[Keyword::ADD, Keyword::MFA, Keyword::METHOD, Keyword::OTP]) { let count = if self.parse_keyword(Keyword::COUNT) { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; Some(self.parse_value()?.into()) } else { None @@ -314,7 +314,7 @@ impl Parser<'_> { })) } - fn parse_mfa_method(&mut self) -> Result { + fn parse_mfa_method(&self) -> Result { if self.parse_keyword(Keyword::PASSKEY) { Ok(MfaMethodKind::PassKey) } else if self.parse_keyword(Keyword::TOTP) { @@ -326,7 +326,7 @@ impl Parser<'_> { } } - fn parse_mssql_alter_role(&mut self) -> Result { + fn parse_mssql_alter_role(&self) -> Result { let role_name = self.parse_identifier()?; let operation = if self.parse_keywords(&[Keyword::ADD, Keyword::MEMBER]) { @@ -336,7 +336,7 @@ impl Parser<'_> { let 
member_name = self.parse_identifier()?; AlterRoleOperation::DropMember { member_name } } else if self.parse_keywords(&[Keyword::WITH, Keyword::NAME]) { - if self.consume_token(&Token::Eq) { + if self.consume_token(&BorrowedToken::Eq) { let role_name = self.parse_identifier()?; AlterRoleOperation::RenameRole { role_name } } else { @@ -352,7 +352,7 @@ impl Parser<'_> { }) } - fn parse_pg_alter_role(&mut self) -> Result { + fn parse_pg_alter_role(&self) -> Result { let role_name = self.parse_identifier()?; // [ IN DATABASE _`database_name`_ ] @@ -380,7 +380,7 @@ impl Parser<'_> { in_database, } // { TO | = } { value | DEFAULT } - } else if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + } else if self.consume_token(&BorrowedToken::Eq) || self.parse_keyword(Keyword::TO) { if self.parse_keyword(Keyword::DEFAULT) { AlterRoleOperation::Set { config_name, @@ -436,7 +436,7 @@ impl Parser<'_> { }) } - fn parse_pg_role_option(&mut self) -> Result { + fn parse_pg_role_option(&self) -> Result { let option = match self.parse_one_of_keywords(&[ Keyword::BYPASSRLS, Keyword::NOBYPASSRLS, diff --git a/src/parser/mod.rs b/src/parser/mod.rs index f835f5417..0d1285d60 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -43,6 +43,7 @@ use crate::ast::*; use crate::dialect::*; use crate::keywords::{Keyword, ALL_KEYWORDS}; use crate::tokenizer::*; +use core::cell::Cell; use sqlparser::parser::ParserState::ColumnDefinition; mod alter; @@ -194,7 +195,7 @@ const DEFAULT_REMAINING_DEPTH: usize = 50; // A constant EOF token that can be referenced. const EOF_TOKEN: TokenWithSpan = TokenWithSpan { - token: Token::EOF, + token: BorrowedToken::EOF, span: Span { start: Location { line: 0, column: 0 }, end: Location { line: 0, column: 0 }, @@ -310,7 +311,7 @@ enum ParserState { /// * The "previous" token is the token at `index - 2` /// /// If `index` is equal to the length of the token stream, the 'next' token is -/// [`Token::EOF`]. +/// [`BorrowedToken::EOF`]. 
/// /// For example, the SQL string "SELECT * FROM foo" will be tokenized into /// following tokens: @@ -329,11 +330,11 @@ enum ParserState { /// pub struct Parser<'a> { /// The tokens - tokens: Vec, + tokens: Vec>, /// The index of the first unprocessed token in [`Parser::tokens`]. - index: usize, + index: Cell, /// The current state of the parser. - state: ParserState, + state: Cell, /// The SQL dialect to use. dialect: &'a dyn Dialect, /// Additional options that allow you to mix & match behavior @@ -363,8 +364,8 @@ impl<'a> Parser<'a> { pub fn new(dialect: &'a dyn Dialect) -> Self { Self { tokens: vec![], - index: 0, - state: ParserState::Normal, + index: Cell::new(0), + state: Cell::new(ParserState::Normal), dialect, recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH), options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()), @@ -426,9 +427,9 @@ impl<'a> Parser<'a> { } /// Reset this parser to parse the specified token stream - pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { + pub fn with_tokens_with_locations(mut self, tokens: Vec>) -> Self { self.tokens = tokens; - self.index = 0; + self.index = Cell::new(0); self } @@ -451,7 +452,7 @@ impl<'a> Parser<'a> { /// Returns an error if there was an error tokenizing the SQL string. 
/// /// See example on [`Parser::new()`] for an example - pub fn try_with_sql(self, sql: &str) -> Result { + pub fn try_with_sql(self, sql: &'a str) -> Result { debug!("Parsing sql '{sql}'..."); let tokens = Tokenizer::new(self.dialect, sql) .with_unescape(self.options.unescape) @@ -474,12 +475,12 @@ impl<'a> Parser<'a> { /// # Ok(()) /// # } /// ``` - pub fn parse_statements(&mut self) -> Result, ParserError> { + pub fn parse_statements(&self) -> Result, ParserError> { let mut stmts = Vec::new(); let mut expecting_statement_delimiter = false; loop { // ignore empty statements (between successive statement delimiters) - while self.consume_token(&Token::SemiColon) { + while self.consume_token(&BorrowedToken::SemiColon) { expecting_statement_delimiter = false; } @@ -488,10 +489,10 @@ impl<'a> Parser<'a> { } match self.peek_token().token { - Token::EOF => break, + BorrowedToken::EOF => break, // end of statement - Token::Word(word) => { + BorrowedToken::Word(word) => { if expecting_statement_delimiter && word.keyword == Keyword::END { break; } @@ -531,7 +532,7 @@ impl<'a> Parser<'a> { /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.), /// stopping before the statement separator, if any. 
- pub fn parse_statement(&mut self) -> Result { + pub fn parse_statement(&self) -> Result { let _guard = self.recursion_counter.try_decrease()?; // allow the dialect to override statement parsing @@ -541,7 +542,7 @@ impl<'a> Parser<'a> { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::KILL => self.parse_kill(), Keyword::FLUSH => self.parse_flush(), Keyword::DESC => self.parse_explain(DescribeAlias::Desc), @@ -659,7 +660,7 @@ impl<'a> Parser<'a> { Keyword::RESET => self.parse_reset(), _ => self.expected("an SQL statement", next_token), }, - Token::LParen => { + BorrowedToken::LParen => { self.prev_token(); self.parse_query().map(Statement::Query) } @@ -670,7 +671,7 @@ impl<'a> Parser<'a> { /// Parse a `CASE` statement. /// /// See [Statement::Case] - pub fn parse_case_stmt(&mut self) -> Result { + pub fn parse_case_stmt(&self) -> Result { let case_token = self.expect_keyword(Keyword::CASE)?; let match_expr = if self.peek_keyword(Keyword::WHEN) { @@ -696,18 +697,18 @@ impl<'a> Parser<'a> { } Ok(Statement::Case(CaseStatement { - case_token: AttachedToken(case_token), + case_token: AttachedToken(case_token.to_static()), match_expr, when_blocks, else_block, - end_case_token: AttachedToken(end_case_token), + end_case_token: AttachedToken(end_case_token.to_static()), })) } /// Parse an `IF` statement. /// /// See [Statement::If] - pub fn parse_if_stmt(&mut self) -> Result { + pub fn parse_if_stmt(&self) -> Result { self.expect_keyword_is(Keyword::IF)?; let if_block = self.parse_conditional_statement_block(&[ Keyword::ELSE, @@ -740,14 +741,14 @@ impl<'a> Parser<'a> { if_block, elseif_blocks, else_block, - end_token: Some(AttachedToken(end_token)), + end_token: Some(AttachedToken(end_token.to_static())), })) } /// Parse a `WHILE` statement. 
/// /// See [Statement::While] - fn parse_while(&mut self) -> Result { + fn parse_while(&self) -> Result { self.expect_keyword_is(Keyword::WHILE)?; let while_block = self.parse_conditional_statement_block(&[Keyword::END])?; @@ -762,21 +763,23 @@ impl<'a> Parser<'a> { /// IF condition THEN statement1; statement2; /// ``` fn parse_conditional_statement_block( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result { let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?; let mut then_token = None; let condition = match &start_token.token { - Token::Word(w) if w.keyword == Keyword::ELSE => None, - Token::Word(w) if w.keyword == Keyword::WHILE => { + BorrowedToken::Word(w) if w.keyword == Keyword::ELSE => None, + BorrowedToken::Word(w) if w.keyword == Keyword::WHILE => { let expr = self.parse_expr()?; Some(expr) } _ => { let expr = self.parse_expr()?; - then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?)); + then_token = Some(AttachedToken( + self.expect_keyword(Keyword::THEN)?.to_static(), + )); Some(expr) } }; @@ -784,7 +787,7 @@ impl<'a> Parser<'a> { let conditional_statements = self.parse_conditional_statements(terminal_keywords)?; Ok(ConditionalStatementBlock { - start_token: AttachedToken(start_token), + start_token: AttachedToken(start_token.to_static()), condition, then_token, conditional_statements, @@ -794,7 +797,7 @@ impl<'a> Parser<'a> { /// Parse a BEGIN/END block or a sequence of statements /// This could be inside of a conditional (IF, CASE, WHILE etc.) or an object body defined optionally BEGIN/END and one or more statements. 
pub(crate) fn parse_conditional_statements( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result { let conditional_statements = if self.peek_keyword(Keyword::BEGIN) { @@ -803,9 +806,9 @@ impl<'a> Parser<'a> { let end_token = self.expect_keyword(Keyword::END)?; ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), + begin_token: AttachedToken(begin_token.to_static()), statements, - end_token: AttachedToken(end_token), + end_token: AttachedToken(end_token.to_static()), }) } else { ConditionalStatements::Sequence { @@ -818,11 +821,11 @@ impl<'a> Parser<'a> { /// Parse a `RAISE` statement. /// /// See [Statement::Raise] - pub fn parse_raise_stmt(&mut self) -> Result { + pub fn parse_raise_stmt(&self) -> Result { self.expect_keyword_is(Keyword::RAISE)?; let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; Some(RaiseStatementValue::UsingMessage(self.parse_expr()?)) } else { self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))? @@ -831,32 +834,32 @@ impl<'a> Parser<'a> { Ok(Statement::Raise(RaiseStatement { value })) } - pub fn parse_comment(&mut self) -> Result { + pub fn parse_comment(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); self.expect_keyword_is(Keyword::ON)?; let token = self.next_token(); let (object_type, object_name) = match token.token { - Token::Word(w) if w.keyword == Keyword::COLUMN => { + BorrowedToken::Word(w) if w.keyword == Keyword::COLUMN => { (CommentObject::Column, self.parse_object_name(false)?) } - Token::Word(w) if w.keyword == Keyword::TABLE => { + BorrowedToken::Word(w) if w.keyword == Keyword::TABLE => { (CommentObject::Table, self.parse_object_name(false)?) 
} - Token::Word(w) if w.keyword == Keyword::EXTENSION => { + BorrowedToken::Word(w) if w.keyword == Keyword::EXTENSION => { (CommentObject::Extension, self.parse_object_name(false)?) } - Token::Word(w) if w.keyword == Keyword::SCHEMA => { + BorrowedToken::Word(w) if w.keyword == Keyword::SCHEMA => { (CommentObject::Schema, self.parse_object_name(false)?) } - Token::Word(w) if w.keyword == Keyword::DATABASE => { + BorrowedToken::Word(w) if w.keyword == Keyword::DATABASE => { (CommentObject::Database, self.parse_object_name(false)?) } - Token::Word(w) if w.keyword == Keyword::USER => { + BorrowedToken::Word(w) if w.keyword == Keyword::USER => { (CommentObject::User, self.parse_object_name(false)?) } - Token::Word(w) if w.keyword == Keyword::ROLE => { + BorrowedToken::Word(w) if w.keyword == Keyword::ROLE => { (CommentObject::Role, self.parse_object_name(false)?) } _ => self.expected("comment object_type", token)?, @@ -876,7 +879,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_flush(&mut self) -> Result { + pub fn parse_flush(&self) -> Result { let mut channel = None; let mut tables: Vec = vec![]; let mut read_lock = false; @@ -925,7 +928,7 @@ impl<'a> Parser<'a> { loop { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::WITH => { read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]); } @@ -963,7 +966,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_msck(&mut self) -> Result { + pub fn parse_msck(&self) -> Result { let repair = self.parse_keyword(Keyword::REPAIR); self.expect_keyword_is(Keyword::TABLE)?; let table_name = self.parse_object_name(false)?; @@ -991,7 +994,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_truncate(&mut self) -> Result { + pub fn parse_truncate(&self) -> Result { let table = self.parse_keyword(Keyword::TABLE); let table_names = self @@ -1004,9 +1007,9 @@ impl<'a> Parser<'a> { let mut partitions = None; if 
self.parse_keyword(Keyword::PARTITION) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; partitions = Some(self.parse_comma_separated(Parser::parse_expr)?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } let mut identity = None; @@ -1037,7 +1040,7 @@ impl<'a> Parser<'a> { .into()) } - fn parse_cascade_option(&mut self) -> Option { + fn parse_cascade_option(&self) -> Option { if self.parse_keyword(Keyword::CASCADE) { Some(CascadeOption::Cascade) } else if self.parse_keyword(Keyword::RESTRICT) { @@ -1048,9 +1051,9 @@ impl<'a> Parser<'a> { } pub fn parse_attach_duckdb_database_options( - &mut self, + &self, ) -> Result, ParserError> { - if !self.consume_token(&Token::LParen) { + if !self.consume_token(&BorrowedToken::LParen) { return Ok(vec![]); } @@ -1072,9 +1075,9 @@ impl<'a> Parser<'a> { return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token()); }; - if self.consume_token(&Token::RParen) { + if self.consume_token(&BorrowedToken::RParen) { return Ok(options); - } else if self.consume_token(&Token::Comma) { + } else if self.consume_token(&BorrowedToken::Comma) { continue; } else { return self.expected("expected one of: ')', ','", self.peek_token()); @@ -1082,7 +1085,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_attach_duckdb_database(&mut self) -> Result { + pub fn parse_attach_duckdb_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let database_path = self.parse_identifier()?; @@ -1102,7 +1105,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_detach_duckdb_database(&mut self) -> Result { + pub fn parse_detach_duckdb_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let database_alias = self.parse_identifier()?; @@ -1113,7 +1116,7 @@ impl<'a> Parser<'a> { 
}) } - pub fn parse_attach_database(&mut self) -> Result { + pub fn parse_attach_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let database_file_name = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; @@ -1125,7 +1128,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_analyze(&mut self) -> Result { + pub fn parse_analyze(&self) -> Result { let has_table_keyword = self.parse_keyword(Keyword::TABLE); let table_name = self.parse_object_name(false)?; let mut for_columns = false; @@ -1143,9 +1146,9 @@ impl<'a> Parser<'a> { Keyword::COMPUTE, ]) { Some(Keyword::PARTITION) => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; partitions = Some(self.parse_comma_separated(Parser::parse_expr)?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } Some(Keyword::NOSCAN) => noscan = true, Some(Keyword::FOR) => { @@ -1184,31 +1187,31 @@ impl<'a> Parser<'a> { } /// Parse a new expression including wildcard & qualified wildcard. 
- pub fn parse_wildcard_expr(&mut self) -> Result { - let index = self.index; + pub fn parse_wildcard_expr(&self) -> Result { + let index = self.index.get(); let next_token = self.next_token(); match next_token.token { - t @ (Token::Word(_) | Token::SingleQuotedString(_)) => { - if self.peek_token().token == Token::Period { + t @ (BorrowedToken::Word(_) | BorrowedToken::SingleQuotedString(_)) => { + if self.peek_token().token == BorrowedToken::Period { let mut id_parts: Vec = vec![match t { - Token::Word(w) => w.into_ident(next_token.span), - Token::SingleQuotedString(s) => Ident::with_quote('\'', s), + BorrowedToken::Word(w) => w.into_ident(next_token.span), + BorrowedToken::SingleQuotedString(s) => Ident::with_quote('\'', s), _ => unreachable!(), // We matched above }]; - while self.consume_token(&Token::Period) { + while self.consume_token(&BorrowedToken::Period) { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => id_parts.push(w.into_ident(next_token.span)), - Token::SingleQuotedString(s) => { + BorrowedToken::Word(w) => id_parts.push(w.into_ident(next_token.span)), + BorrowedToken::SingleQuotedString(s) => { // SQLite has single-quoted identifiers id_parts.push(Ident::with_quote('\'', s)) } - Token::Mul => { + BorrowedToken::Mul => { return Ok(Expr::QualifiedWildcard( ObjectName::from(id_parts), - AttachedToken(next_token), + AttachedToken(next_token.to_static()), )); } _ => { @@ -1219,27 +1222,27 @@ impl<'a> Parser<'a> { } } } - Token::Mul => { - return Ok(Expr::Wildcard(AttachedToken(next_token))); + BorrowedToken::Mul => { + return Ok(Expr::Wildcard(AttachedToken(next_token.to_static()))); } _ => (), }; - self.index = index; + self.index.set(index); self.parse_expr() } /// Parse a new expression. 
- pub fn parse_expr(&mut self) -> Result { + pub fn parse_expr(&self) -> Result { self.parse_subexpr(self.dialect.prec_unknown()) } pub fn parse_expr_with_alias_and_order_by( - &mut self, + &self, ) -> Result { let expr = self.parse_expr()?; - fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + fn validator(explicit: bool, kw: &Keyword, _parser: &Parser) -> bool { explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw) } let alias = self.parse_optional_alias_inner(None, validator)?; @@ -1254,7 +1257,7 @@ impl<'a> Parser<'a> { } /// Parse tokens until the precedence changes. - pub fn parse_subexpr(&mut self, precedence: u8) -> Result { + pub fn parse_subexpr(&self, precedence: u8) -> Result { let _guard = self.recursion_counter.try_decrease()?; debug!("parsing expr"); let mut expr = self.parse_prefix()?; @@ -1272,7 +1275,7 @@ impl<'a> Parser<'a> { // The period operator is handled exclusively by the // compound field access parsing. - if Token::Period == self.peek_token_ref().token { + if BorrowedToken::Period == self.peek_token_ref().token { break; } @@ -1281,7 +1284,7 @@ impl<'a> Parser<'a> { Ok(expr) } - pub fn parse_assert(&mut self) -> Result { + pub fn parse_assert(&self) -> Result { let condition = self.parse_expr()?; let message = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) 
@@ -1292,25 +1295,25 @@ impl<'a> Parser<'a> { Ok(Statement::Assert { condition, message }) } - pub fn parse_savepoint(&mut self) -> Result { + pub fn parse_savepoint(&self) -> Result { let name = self.parse_identifier()?; Ok(Statement::Savepoint { name }) } - pub fn parse_release(&mut self) -> Result { + pub fn parse_release(&self) -> Result { let _ = self.parse_keyword(Keyword::SAVEPOINT); let name = self.parse_identifier()?; Ok(Statement::ReleaseSavepoint { name }) } - pub fn parse_listen(&mut self) -> Result { + pub fn parse_listen(&self) -> Result { let channel = self.parse_identifier()?; Ok(Statement::LISTEN { channel }) } - pub fn parse_unlisten(&mut self) -> Result { - let channel = if self.consume_token(&Token::Mul) { + pub fn parse_unlisten(&self) -> Result { + let channel = if self.consume_token(&BorrowedToken::Mul) { Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string()) } else { match self.parse_identifier() { @@ -1324,9 +1327,9 @@ impl<'a> Parser<'a> { Ok(Statement::UNLISTEN { channel }) } - pub fn parse_notify(&mut self) -> Result { + pub fn parse_notify(&self) -> Result { let channel = self.parse_identifier()?; - let payload = if self.consume_token(&Token::Comma) { + let payload = if self.consume_token(&BorrowedToken::Comma) { Some(self.parse_literal_string()?) } else { None @@ -1335,7 +1338,7 @@ impl<'a> Parser<'a> { } /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable] - pub fn parse_rename(&mut self) -> Result { + pub fn parse_rename(&self) -> Result { if self.peek_keyword(Keyword::TABLE) { self.expect_keyword(Keyword::TABLE)?; let rename_tables = self.parse_comma_separated(|parser| { @@ -1354,7 +1357,7 @@ impl<'a> Parser<'a> { /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect. /// Returns `None if no match is found. 
fn parse_expr_prefix_by_reserved_word( - &mut self, + &self, w: &Word, w_span: Span, ) -> Result, ParserError> { @@ -1402,7 +1405,7 @@ impl<'a> Parser<'a> { if !dialect_of!(self is DatabricksDialect) || matches!( self.peek_nth_token_ref(1).token, - Token::Word(Word { + BorrowedToken::Word(Word { keyword: Keyword::SELECT | Keyword::WITH, .. }) @@ -1413,7 +1416,7 @@ impl<'a> Parser<'a> { Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)), Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)), Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)), - Keyword::POSITION if self.peek_token_ref().token == Token::LParen => { + Keyword::POSITION if self.peek_token_ref().token == BorrowedToken::LParen => { Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?)) } Keyword::SUBSTR | Keyword::SUBSTRING => { @@ -1424,17 +1427,17 @@ impl<'a> Parser<'a> { Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)), Keyword::INTERVAL => Ok(Some(self.parse_interval()?)), // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call - Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => { - self.expect_token(&Token::LBracket)?; + Keyword::ARRAY if *self.peek_token_ref() == BorrowedToken::LBracket => { + self.expect_token(&BorrowedToken::LBracket)?; Ok(Some(self.parse_array_expr(true)?)) } Keyword::ARRAY - if self.peek_token() == Token::LParen + if self.peek_token() == BorrowedToken::LParen && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let query = self.parse_query()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some(Expr::Function(Function { name: ObjectName::from(vec![w.clone().into_ident(w_span)]), uses_odbc_syntax: false, @@ -1454,11 +1457,11 @@ impl<'a> Parser<'a> { let struct_expr = self.parse_struct_literal()?; Ok(Some(struct_expr)) } - Keyword::PRIOR if matches!(self.state, 
ParserState::ConnectBy) => { + Keyword::PRIOR if matches!(self.state.get(), ParserState::ConnectBy) => { let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; Ok(Some(Expr::Prior(Box::new(expr)))) } - Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => { + Keyword::MAP if *self.peek_token_ref() == BorrowedToken::LBrace && self.dialect.support_map_literal_syntax() => { Ok(Some(self.parse_duckdb_map_literal()?)) } _ if self.dialect.supports_geometric_types() => match w.keyword { @@ -1477,19 +1480,19 @@ impl<'a> Parser<'a> { /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect. fn parse_expr_prefix_by_unreserved_word( - &mut self, + &self, w: &Word, w_span: Span, ) -> Result { match self.peek_token().token { - Token::LParen if !self.peek_outer_join_operator() => { + BorrowedToken::LParen if !self.peek_outer_join_operator() => { let id_parts = vec![w.clone().into_ident(w_span)]; self.parse_function(ObjectName::from(id_parts)) } // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html - Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::HexStringLiteral(_) + BorrowedToken::SingleQuotedString(_) + | BorrowedToken::DoubleQuotedString(_) + | BorrowedToken::HexStringLiteral(_) if w.value.starts_with('_') => { Ok(Expr::Prefixed { @@ -1498,9 +1501,9 @@ impl<'a> Parser<'a> { }) } // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html - Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::HexStringLiteral(_) + BorrowedToken::SingleQuotedString(_) + | BorrowedToken::DoubleQuotedString(_) + | BorrowedToken::HexStringLiteral(_) if w.value.starts_with('_') => { Ok(Expr::Prefixed { @@ -1508,8 +1511,8 @@ impl<'a> Parser<'a> { value: self.parse_introduced_string_expr()?.into(), }) } - Token::Arrow if self.dialect.supports_lambda_functions() => { - 
self.expect_token(&Token::Arrow)?; + BorrowedToken::Arrow if self.dialect.supports_lambda_functions() => { + self.expect_token(&BorrowedToken::Arrow)?; Ok(Expr::Lambda(LambdaFunction { params: OneOrManyWithParens::One(w.clone().into_ident(w_span)), body: Box::new(self.parse_expr()?), @@ -1520,7 +1523,7 @@ impl<'a> Parser<'a> { } /// Parse an expression prefix. - pub fn parse_prefix(&mut self) -> Result { + pub fn parse_prefix(&self) -> Result { // allow the dialect to override prefix parsing if let Some(prefix) = self.dialect.parse_prefix(self) { return prefix; @@ -1576,7 +1579,7 @@ impl<'a> Parser<'a> { let next_token = self.get_current_token(); let span = next_token.span; let expr = match &next_token.token { - Token::Word(w) => { + BorrowedToken::Word(w) => { // The word we consumed may fall into one of two cases: it has a special meaning, or not. // For example, in Snowflake, the word `interval` may have two meanings depending on the context: // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;` @@ -1610,11 +1613,11 @@ impl<'a> Parser<'a> { return Err(e); } } - } // End of Token::Word + } // End of BorrowedToken::Word // array `[1, 2, 3]` - Token::LBracket => self.parse_array_expr(false), - tok @ Token::Minus | tok @ Token::Plus => { - let op = if *tok == Token::Plus { + BorrowedToken::LBracket => self.parse_array_expr(false), + tok @ BorrowedToken::Minus | tok @ BorrowedToken::Plus => { + let op = if *tok == BorrowedToken::Plus { UnaryOperator::Plus } else { UnaryOperator::Minus @@ -1626,21 +1629,25 @@ impl<'a> Parser<'a> { ), }) } - Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp { - op: UnaryOperator::BangNot, - expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?), - }), - tok @ Token::DoubleExclamationMark - | tok @ Token::PGSquareRoot - | tok @ Token::PGCubeRoot - | tok @ Token::AtSign + BorrowedToken::ExclamationMark if dialect.supports_bang_not_operator() => { + 
Ok(Expr::UnaryOp { + op: UnaryOperator::BangNot, + expr: Box::new( + self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?, + ), + }) + } + tok @ BorrowedToken::DoubleExclamationMark + | tok @ BorrowedToken::PGSquareRoot + | tok @ BorrowedToken::PGCubeRoot + | tok @ BorrowedToken::AtSign if dialect_is!(dialect is PostgreSqlDialect) => { let op = match tok { - Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial, - Token::PGSquareRoot => UnaryOperator::PGSquareRoot, - Token::PGCubeRoot => UnaryOperator::PGCubeRoot, - Token::AtSign => UnaryOperator::PGAbs, + BorrowedToken::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial, + BorrowedToken::PGSquareRoot => UnaryOperator::PGSquareRoot, + BorrowedToken::PGCubeRoot => UnaryOperator::PGCubeRoot, + BorrowedToken::AtSign => UnaryOperator::PGAbs, _ => unreachable!(), }; Ok(Expr::UnaryOp { @@ -1650,23 +1657,23 @@ impl<'a> Parser<'a> { ), }) } - Token::Tilde => Ok(Expr::UnaryOp { + BorrowedToken::Tilde => Ok(Expr::UnaryOp { op: UnaryOperator::BitwiseNot, expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?), }), - tok @ Token::Sharp - | tok @ Token::AtDashAt - | tok @ Token::AtAt - | tok @ Token::QuestionMarkDash - | tok @ Token::QuestionPipe + tok @ BorrowedToken::Sharp + | tok @ BorrowedToken::AtDashAt + | tok @ BorrowedToken::AtAt + | tok @ BorrowedToken::QuestionMarkDash + | tok @ BorrowedToken::QuestionPipe if self.dialect.supports_geometric_types() => { let op = match tok { - Token::Sharp => UnaryOperator::Hash, - Token::AtDashAt => UnaryOperator::AtDashAt, - Token::AtAt => UnaryOperator::DoubleAt, - Token::QuestionMarkDash => UnaryOperator::QuestionDash, - Token::QuestionPipe => UnaryOperator::QuestionPipe, + BorrowedToken::Sharp => UnaryOperator::Hash, + BorrowedToken::AtDashAt => UnaryOperator::AtDashAt, + BorrowedToken::AtAt => UnaryOperator::DoubleAt, + BorrowedToken::QuestionMarkDash => UnaryOperator::QuestionDash, + BorrowedToken::QuestionPipe => 
UnaryOperator::QuestionPipe, _ => { return Err(ParserError::ParserError(format!( "Unexpected token in unary operator parsing: {tok:?}" @@ -1680,35 +1687,35 @@ impl<'a> Parser<'a> { ), }) } - Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => + BorrowedToken::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::UnicodeStringLiteral(_) => { + BorrowedToken::UnicodeStringLiteral(_) => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::Number(_, _) - | Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::TripleSingleQuotedString(_) - | Token::TripleDoubleQuotedString(_) - | Token::DollarQuotedString(_) - | Token::SingleQuotedByteStringLiteral(_) - | Token::DoubleQuotedByteStringLiteral(_) - | Token::TripleSingleQuotedByteStringLiteral(_) - | Token::TripleDoubleQuotedByteStringLiteral(_) - | Token::SingleQuotedRawStringLiteral(_) - | Token::DoubleQuotedRawStringLiteral(_) - | Token::TripleSingleQuotedRawStringLiteral(_) - | Token::TripleDoubleQuotedRawStringLiteral(_) - | Token::NationalStringLiteral(_) - | Token::HexStringLiteral(_) => { + BorrowedToken::Number(_, _) + | BorrowedToken::SingleQuotedString(_) + | BorrowedToken::DoubleQuotedString(_) + | BorrowedToken::TripleSingleQuotedString(_) + | BorrowedToken::TripleDoubleQuotedString(_) + | BorrowedToken::DollarQuotedString(_) + | BorrowedToken::SingleQuotedByteStringLiteral(_) + | BorrowedToken::DoubleQuotedByteStringLiteral(_) + | BorrowedToken::TripleSingleQuotedByteStringLiteral(_) + | BorrowedToken::TripleDoubleQuotedByteStringLiteral(_) + | BorrowedToken::SingleQuotedRawStringLiteral(_) + | BorrowedToken::DoubleQuotedRawStringLiteral(_) + | BorrowedToken::TripleSingleQuotedRawStringLiteral(_) + | BorrowedToken::TripleDoubleQuotedRawStringLiteral(_) + | BorrowedToken::NationalStringLiteral(_) + | 
BorrowedToken::HexStringLiteral(_) => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::LParen => { + BorrowedToken::LParen => { let expr = if let Some(expr) = self.try_parse_expr_sub_query()? { expr } else if let Some(lambda) = self.try_parse_lambda()? { @@ -1721,14 +1728,14 @@ impl<'a> Parser<'a> { _ => Expr::Tuple(exprs), } }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(expr) } - Token::Placeholder(_) | Token::Colon | Token::AtSign => { + BorrowedToken::Placeholder(_) | BorrowedToken::Colon | BorrowedToken::AtSign => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::LBrace => { + BorrowedToken::LBrace => { self.prev_token(); self.parse_lbrace_expr() } @@ -1745,7 +1752,7 @@ impl<'a> Parser<'a> { } } - fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result { + fn parse_geometric_type(&self, kind: GeometricTypeKind) -> Result { Ok(Expr::TypedString(TypedString { data_type: DataType::GeometricType(kind), value: self.parse_value()?, @@ -1760,16 +1767,16 @@ impl<'a> Parser<'a> { /// or bracket notation. /// For example: `a.b.c`, `a.b[1]`. 
pub fn parse_compound_expr( - &mut self, + &self, root: Expr, mut chain: Vec, ) -> Result { let mut ending_wildcard: Option = None; loop { - if self.consume_token(&Token::Period) { + if self.consume_token(&BorrowedToken::Period) { let next_token = self.peek_token_ref(); match &next_token.token { - Token::Mul => { + BorrowedToken::Mul => { // Postgres explicitly allows funcnm(tablenm.*) and the // function array_agg traverses this control flow if dialect_of!(self is PostgreSqlDialect) { @@ -1785,7 +1792,7 @@ impl<'a> Parser<'a> { break; } - Token::SingleQuotedString(s) => { + BorrowedToken::SingleQuotedString(s) => { let expr = Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s)); chain.push(AccessExpr::Dot(expr)); @@ -1813,7 +1820,7 @@ impl<'a> Parser<'a> { }, } } else if !self.dialect.supports_partiql() - && self.peek_token_ref().token == Token::LBracket + && self.peek_token_ref().token == BorrowedToken::LBracket { self.parse_multi_dim_subscript(&mut chain)?; } else { @@ -1828,7 +1835,7 @@ impl<'a> Parser<'a> { }; Ok(Expr::QualifiedWildcard( ObjectName::from(Self::exprs_to_idents(root, chain)?), - AttachedToken(wildcard_token), + AttachedToken(wildcard_token.to_static()), )) } else if self.maybe_parse_outer_join_operator() { if !Self::is_all_ident(&root, &chain) { @@ -1982,37 +1989,41 @@ impl<'a> Parser<'a> { } /// Returns true if the next tokens indicate the outer join operator `(+)`. 
- fn peek_outer_join_operator(&mut self) -> bool { + fn peek_outer_join_operator(&self) -> bool { if !self.dialect.supports_outer_join_operator() { return false; } let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref(); - Token::LParen == maybe_lparen.token - && Token::Plus == maybe_plus.token - && Token::RParen == maybe_rparen.token + BorrowedToken::LParen == maybe_lparen.token + && BorrowedToken::Plus == maybe_plus.token + && BorrowedToken::RParen == maybe_rparen.token } /// If the next tokens indicates the outer join operator `(+)`, consume /// the tokens and return true. - fn maybe_parse_outer_join_operator(&mut self) -> bool { + fn maybe_parse_outer_join_operator(&self) -> bool { self.dialect.supports_outer_join_operator() - && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen]) + && self.consume_tokens(&[ + BorrowedToken::LParen, + BorrowedToken::Plus, + BorrowedToken::RParen, + ]) } - pub fn parse_utility_options(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; + pub fn parse_utility_options(&self) -> Result, ParserError> { + self.expect_token(&BorrowedToken::LParen)?; let options = self.parse_comma_separated(Self::parse_utility_option)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(options) } - fn parse_utility_option(&mut self) -> Result { + fn parse_utility_option(&self) -> Result { let name = self.parse_identifier()?; let next_token = self.peek_token(); - if next_token == Token::Comma || next_token == Token::RParen { + if next_token == BorrowedToken::Comma || next_token == BorrowedToken::RParen { return Ok(UtilityOption { name, arg: None }); } let arg = self.parse_expr()?; @@ -2023,7 +2034,7 @@ impl<'a> Parser<'a> { }) } - fn try_parse_expr_sub_query(&mut self) -> Result, ParserError> { + fn try_parse_expr_sub_query(&self) -> Result, ParserError> { if !self.peek_sub_query() { return Ok(None); } @@ -2031,14 +2042,14 @@ impl<'a> Parser<'a> { 
Ok(Some(Expr::Subquery(self.parse_query()?))) } - fn try_parse_lambda(&mut self) -> Result, ParserError> { + fn try_parse_lambda(&self) -> Result, ParserError> { if !self.dialect.supports_lambda_functions() { return Ok(None); } self.maybe_parse(|p| { let params = p.parse_comma_separated(|p| p.parse_identifier())?; - p.expect_token(&Token::RParen)?; - p.expect_token(&Token::Arrow)?; + p.expect_token(&BorrowedToken::RParen)?; + p.expect_token(&BorrowedToken::Arrow)?; let expr = p.parse_expr()?; Ok(Expr::Lambda(LambdaFunction { params: OneOrManyWithParens::Many(params), @@ -2053,7 +2064,7 @@ impl<'a> Parser<'a> { /// Scalar Function Calls /// Date, Time, and Timestamp Literals /// See - fn maybe_parse_odbc_body(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_body(&self) -> Result, ParserError> { // Attempt 1: Try to parse it as a function. if let Some(expr) = self.maybe_parse_odbc_fn_body()? { return Ok(Some(expr)); @@ -2072,7 +2083,7 @@ impl<'a> Parser<'a> { /// /// [ODBC Date, Time, and Timestamp Literals]: /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017 - fn maybe_parse_odbc_body_datetime(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_body_datetime(&self) -> Result, ParserError> { self.maybe_parse(|p| { let token = p.next_token().clone(); let word_string = token.token.to_string(); @@ -2099,7 +2110,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 - fn maybe_parse_odbc_fn_body(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_fn_body(&self) -> Result, ParserError> { self.maybe_parse(|p| { p.expect_keyword(Keyword::FN)?; let fn_name = p.parse_object_name(false)?; @@ -2109,18 +2120,18 @@ impl<'a> Parser<'a> { }) } - pub fn parse_function(&mut self, name: ObjectName) -> Result { + pub fn parse_function(&self, name: ObjectName) -> Result { 
self.parse_function_call(name).map(Expr::Function) } - fn parse_function_call(&mut self, name: ObjectName) -> Result { - self.expect_token(&Token::LParen)?; + fn parse_function_call(&self, name: ObjectName) -> Result { + self.expect_token(&BorrowedToken::LParen)?; // Snowflake permits a subquery to be passed as an argument without // an enclosing set of parens if it's the only argument. if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() { let subquery = self.parse_query()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; return Ok(Function { name, uses_odbc_syntax: false, @@ -2138,17 +2149,17 @@ impl<'a> Parser<'a> { // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)` // which (0.5, 0.6) is a parameter to the function. if dialect_of!(self is ClickHouseDialect | GenericDialect) - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) { parameters = FunctionArguments::List(args); args = self.parse_function_argument_list()?; } let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?; let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; order_by } else { vec![] @@ -2156,11 +2167,11 @@ impl<'a> Parser<'a> { let filter = if self.dialect.supports_filter_during_aggregation() && self.parse_keyword(Keyword::FILTER) - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) && self.parse_keyword(Keyword::WHERE) { let filter = Some(Box::new(self.parse_expr()?)); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; filter } else { None @@ -2179,7 +2190,7 @@ impl<'a> Parser<'a> { }; let over = if self.parse_keyword(Keyword::OVER) { - if 
self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let window_spec = self.parse_window_spec()?; Some(WindowType::WindowSpec(window_spec)) } else { @@ -2202,7 +2213,7 @@ impl<'a> Parser<'a> { } /// Optionally parses a null treatment clause. - fn parse_null_treatment(&mut self) -> Result, ParserError> { + fn parse_null_treatment(&self) -> Result, ParserError> { match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) { Some(keyword) => { self.expect_keyword_is(Keyword::NULLS)?; @@ -2217,8 +2228,8 @@ impl<'a> Parser<'a> { } } - pub fn parse_time_functions(&mut self, name: ObjectName) -> Result { - let args = if self.consume_token(&Token::LParen) { + pub fn parse_time_functions(&self, name: ObjectName) -> Result { + let args = if self.consume_token(&BorrowedToken::LParen) { FunctionArguments::List(self.parse_function_argument_list()?) } else { FunctionArguments::None @@ -2235,10 +2246,10 @@ impl<'a> Parser<'a> { })) } - pub fn parse_window_frame_units(&mut self) -> Result { + pub fn parse_window_frame_units(&self) -> Result { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::ROWS => Ok(WindowFrameUnits::Rows), Keyword::RANGE => Ok(WindowFrameUnits::Range), Keyword::GROUPS => Ok(WindowFrameUnits::Groups), @@ -2248,7 +2259,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_window_frame(&mut self) -> Result { + pub fn parse_window_frame(&self) -> Result { let units = self.parse_window_frame_units()?; let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) { let start_bound = self.parse_window_frame_bound()?; @@ -2266,7 +2277,7 @@ impl<'a> Parser<'a> { } /// Parse `CURRENT ROW` or `{ | UNBOUNDED } { PRECEDING | FOLLOWING }` - pub fn parse_window_frame_bound(&mut self) -> Result { + pub fn parse_window_frame_bound(&self) -> Result { if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) { 
Ok(WindowFrameBound::CurrentRow) } else { @@ -2274,7 +2285,7 @@ impl<'a> Parser<'a> { None } else { Some(Box::new(match self.peek_token().token { - Token::SingleQuotedString(_) => self.parse_interval()?, + BorrowedToken::SingleQuotedString(_) => self.parse_interval()?, _ => self.parse_expr()?, })) }; @@ -2289,24 +2300,24 @@ impl<'a> Parser<'a> { } /// Parse a group by expr. Group by expr can be one of group sets, roll up, cube, or simple expr. - fn parse_group_by_expr(&mut self) -> Result { + fn parse_group_by_expr(&self) -> Result { if self.dialect.supports_group_by_expr() { if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::GroupingSets(result)) } else if self.parse_keyword(Keyword::CUBE) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Cube(result)) } else if self.parse_keyword(Keyword::ROLLUP) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Rollup(result)) - } else if self.consume_tokens(&[Token::LParen, Token::RParen]) { + } else if self.consume_tokens(&[BorrowedToken::LParen, BorrowedToken::RParen]) { // PostgreSQL allow to use empty tuple as a group by expression, // e.g. `GROUP BY (), name`. 
Please refer to GROUP BY Clause section in // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html) @@ -2324,17 +2335,17 @@ impl<'a> Parser<'a> { /// If `lift_singleton` is true, then a singleton tuple is lifted to a tuple of length 1, otherwise it will fail. /// If `allow_empty` is true, then an empty tuple is allowed. fn parse_tuple( - &mut self, + &self, lift_singleton: bool, allow_empty: bool, ) -> Result, ParserError> { if lift_singleton { - if self.consume_token(&Token::LParen) { - let result = if allow_empty && self.consume_token(&Token::RParen) { + if self.consume_token(&BorrowedToken::LParen) { + let result = if allow_empty && self.consume_token(&BorrowedToken::RParen) { vec![] } else { let result = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; result }; Ok(result) @@ -2342,20 +2353,21 @@ impl<'a> Parser<'a> { Ok(vec![self.parse_expr()?]) } } else { - self.expect_token(&Token::LParen)?; - let result = if allow_empty && self.consume_token(&Token::RParen) { + self.expect_token(&BorrowedToken::LParen)?; + let result = if allow_empty && self.consume_token(&BorrowedToken::RParen) { vec![] } else { let result = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; result }; Ok(result) } } - pub fn parse_case_expr(&mut self) -> Result { - let case_token = AttachedToken(self.get_current_token().clone()); + pub fn parse_case_expr(&self) -> Result { + let current_token = self.get_current_token().clone(); + let case_token = AttachedToken(current_token.to_static()); let mut operand = None; if !self.parse_keyword(Keyword::WHEN) { operand = Some(Box::new(self.parse_expr()?)); @@ -2376,7 +2388,7 @@ impl<'a> Parser<'a> { } else { None }; - let end_token = AttachedToken(self.expect_keyword(Keyword::END)?); + let end_token = AttachedToken(self.expect_keyword(Keyword::END)?.to_static()); Ok(Expr::Case 
{ case_token, end_token, @@ -2386,7 +2398,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_optional_cast_format(&mut self) -> Result, ParserError> { + pub fn parse_optional_cast_format(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::FORMAT) { let value = self.parse_value()?.value; match self.parse_optional_time_zone()? { @@ -2398,7 +2410,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_time_zone(&mut self) -> Result, ParserError> { + pub fn parse_optional_time_zone(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) { self.parse_value().map(|v| Some(v.value)) } else { @@ -2407,17 +2419,17 @@ impl<'a> Parser<'a> { } /// mssql-like convert function - fn parse_mssql_convert(&mut self, is_try: bool) -> Result { - self.expect_token(&Token::LParen)?; + fn parse_mssql_convert(&self, is_try: bool) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let data_type = self.parse_data_type()?; - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let expr = self.parse_expr()?; - let styles = if self.consume_token(&Token::Comma) { + let styles = if self.consume_token(&BorrowedToken::Comma) { self.parse_comma_separated(Parser::parse_expr)? 
} else { Default::default() }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Convert { is_try, expr: Box::new(expr), @@ -2432,15 +2444,15 @@ impl<'a> Parser<'a> { /// - `CONVERT('héhé' USING utf8mb4)` (MySQL) /// - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL) /// - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first - pub fn parse_convert_expr(&mut self, is_try: bool) -> Result { + pub fn parse_convert_expr(&self, is_try: bool) -> Result { if self.dialect.convert_type_before_value() { return self.parse_mssql_convert(is_try); } - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; if self.parse_keyword(Keyword::USING) { let charset = self.parse_object_name(false)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; return Ok(Expr::Convert { is_try, expr: Box::new(expr), @@ -2450,14 +2462,14 @@ impl<'a> Parser<'a> { styles: vec![], }); } - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let data_type = self.parse_data_type()?; let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { Some(self.parse_object_name(false)?) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Convert { is_try, expr: Box::new(expr), @@ -2469,13 +2481,13 @@ impl<'a> Parser<'a> { } /// Parse a SQL CAST function e.g. 
`CAST(expr AS FLOAT)` - pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_cast_expr(&self, kind: CastKind) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; let data_type = self.parse_data_type()?; let format = self.parse_optional_cast_format()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Cast { kind, expr: Box::new(expr), @@ -2485,23 +2497,23 @@ impl<'a> Parser<'a> { } /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`. - pub fn parse_exists_expr(&mut self, negated: bool) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_exists_expr(&self, negated: bool) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let exists_node = Expr::Exists { negated, subquery: self.parse_query()?, }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(exists_node) } - pub fn parse_extract_expr(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_extract_expr(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let field = self.parse_date_time_field()?; let syntax = if self.parse_keyword(Keyword::FROM) { ExtractSyntax::From - } else if self.consume_token(&Token::Comma) + } else if self.consume_token(&BorrowedToken::Comma) && dialect_of!(self is SnowflakeDialect | GenericDialect) { ExtractSyntax::Comma @@ -2512,7 +2524,7 @@ impl<'a> Parser<'a> { }; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Extract { field, expr: Box::new(expr), @@ -2520,14 +2532,14 @@ impl<'a> Parser<'a> { }) } - pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_ceil_floor_expr(&self, is_ceil: bool) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let expr = 
self.parse_expr()?; // Parse `CEIL/FLOOR(expr)` let field = if self.parse_keyword(Keyword::TO) { // Parse `CEIL/FLOOR(expr TO DateTimeField)` CeilFloorKind::DateTimeField(self.parse_date_time_field()?) - } else if self.consume_token(&Token::Comma) { + } else if self.consume_token(&BorrowedToken::Comma) { // Parse `CEIL/FLOOR(expr, scale)` match self.parse_value()?.value { Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)), @@ -2540,7 +2552,7 @@ impl<'a> Parser<'a> { } else { CeilFloorKind::DateTimeField(DateTimeField::NoDateTime) }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; if is_ceil { Ok(Expr::Ceil { expr: Box::new(expr), @@ -2554,17 +2566,17 @@ impl<'a> Parser<'a> { } } - pub fn parse_position_expr(&mut self, ident: Ident) -> Result { + pub fn parse_position_expr(&self, ident: Ident) -> Result { let between_prec = self.dialect.prec_value(Precedence::Between); let position_expr = self.maybe_parse(|p| { // PARSE SELECT POSITION('@' in field) - p.expect_token(&Token::LParen)?; + p.expect_token(&BorrowedToken::LParen)?; // Parse the subexpr till the IN keyword let expr = p.parse_subexpr(between_prec)?; p.expect_keyword_is(Keyword::IN)?; let from = p.parse_expr()?; - p.expect_token(&Token::RParen)?; + p.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Position { expr: Box::new(expr), r#in: Box::new(from), @@ -2579,7 +2591,7 @@ impl<'a> Parser<'a> { } // { SUBSTRING | SUBSTR } ( [FROM 1] [FOR 3]) - pub fn parse_substring(&mut self) -> Result { + pub fn parse_substring(&self) -> Result { let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? 
{ Keyword::SUBSTR => true, Keyword::SUBSTRING => false, @@ -2588,19 +2600,19 @@ impl<'a> Parser<'a> { return self.expected("SUBSTR or SUBSTRING", self.peek_token()); } }; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; let mut from_expr = None; - let special = self.consume_token(&Token::Comma); + let special = self.consume_token(&BorrowedToken::Comma); if special || self.parse_keyword(Keyword::FROM) { from_expr = Some(self.parse_expr()?); } let mut to_expr = None; - if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) { + if self.parse_keyword(Keyword::FOR) || self.consume_token(&BorrowedToken::Comma) { to_expr = Some(self.parse_expr()?); } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Substring { expr: Box::new(expr), @@ -2611,9 +2623,9 @@ impl<'a> Parser<'a> { }) } - pub fn parse_overlay_expr(&mut self) -> Result { + pub fn parse_overlay_expr(&self) -> Result { // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3]) - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; self.expect_keyword_is(Keyword::PLACING)?; let what_expr = self.parse_expr()?; @@ -2623,7 +2635,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::FOR) { for_expr = Some(self.parse_expr()?); } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Overlay { expr: Box::new(expr), @@ -2638,10 +2650,10 @@ impl<'a> Parser<'a> { /// TRIM ('text') /// TRIM(, [, characters]) -- only Snowflake or BigQuery /// ``` - pub fn parse_trim_expr(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_trim_expr(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let mut trim_where = None; - if let Token::Word(word) = self.peek_token().token { + if let BorrowedToken::Word(word) = self.peek_token().token { if [Keyword::BOTH, Keyword::LEADING, 
Keyword::TRAILING].contains(&word.keyword) { trim_where = Some(self.parse_trim_where()?); } @@ -2650,18 +2662,18 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::FROM) { let trim_what = Box::new(expr); let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Trim { expr: Box::new(expr), trim_where, trim_what: Some(trim_what), trim_characters: None, }) - } else if self.consume_token(&Token::Comma) + } else if self.consume_token(&BorrowedToken::Comma) && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect) { let characters = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Trim { expr: Box::new(expr), trim_where: None, @@ -2669,7 +2681,7 @@ impl<'a> Parser<'a> { trim_characters: Some(characters), }) } else { - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Trim { expr: Box::new(expr), trim_where, @@ -2679,10 +2691,10 @@ impl<'a> Parser<'a> { } } - pub fn parse_trim_where(&mut self) -> Result { + pub fn parse_trim_where(&self) -> Result { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::BOTH => Ok(TrimWhereField::Both), Keyword::LEADING => Ok(TrimWhereField::Leading), Keyword::TRAILING => Ok(TrimWhereField::Trailing), @@ -2694,29 +2706,29 @@ impl<'a> Parser<'a> { /// Parses an array expression `[ex1, ex2, ..]` /// if `named` is `true`, came from an expression like `ARRAY[ex1, ex2]` - pub fn parse_array_expr(&mut self, named: bool) -> Result { - let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?; - self.expect_token(&Token::RBracket)?; + pub fn parse_array_expr(&self, named: bool) -> Result { + let exprs = self.parse_comma_separated0(Parser::parse_expr, BorrowedToken::RBracket)?; + 
self.expect_token(&BorrowedToken::RBracket)?; Ok(Expr::Array(Array { elem: exprs, named })) } - pub fn parse_listagg_on_overflow(&mut self) -> Result, ParserError> { + pub fn parse_listagg_on_overflow(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) { if self.parse_keyword(Keyword::ERROR) { Ok(Some(ListAggOnOverflow::Error)) } else { self.expect_keyword_is(Keyword::TRUNCATE)?; let filler = match self.peek_token().token { - Token::Word(w) + BorrowedToken::Word(w) if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT => { None } - Token::SingleQuotedString(_) - | Token::EscapedStringLiteral(_) - | Token::UnicodeStringLiteral(_) - | Token::NationalStringLiteral(_) - | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)), + BorrowedToken::SingleQuotedString(_) + | BorrowedToken::EscapedStringLiteral(_) + | BorrowedToken::UnicodeStringLiteral(_) + | BorrowedToken::NationalStringLiteral(_) + | BorrowedToken::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)), _ => self.expected( "either filler, WITH, or WITHOUT in LISTAGG", self.peek_token(), @@ -2738,20 +2750,20 @@ impl<'a> Parser<'a> { // operator, interval qualifiers, and the ceil/floor operations. // EXTRACT supports a wider set of date/time fields than interval qualifiers, // so this function may need to be split in two. 
- pub fn parse_date_time_field(&mut self) -> Result { + pub fn parse_date_time_field(&self) -> Result { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::YEAR => Ok(DateTimeField::Year), Keyword::YEARS => Ok(DateTimeField::Years), Keyword::MONTH => Ok(DateTimeField::Month), Keyword::MONTHS => Ok(DateTimeField::Months), Keyword::WEEK => { let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect) - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) { let week_day = self.parse_identifier()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(week_day) } else { None @@ -2802,7 +2814,7 @@ impl<'a> Parser<'a> { } _ => self.expected("date/time field", next_token), }, - Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => { + BorrowedToken::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => { self.prev_token(); let custom = self.parse_identifier()?; Ok(DateTimeField::Custom(custom)) @@ -2811,9 +2823,9 @@ impl<'a> Parser<'a> { } } - pub fn parse_not(&mut self) -> Result { + pub fn parse_not(&self) -> Result { match self.peek_token().token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::EXISTS => { let negated = true; let _ = self.parse_keyword(Keyword::EXISTS); @@ -2842,11 +2854,11 @@ impl<'a> Parser<'a> { /// -- Function call using the ODBC syntax. /// { fn CONCAT('foo', 'bar') } /// ``` - fn parse_lbrace_expr(&mut self) -> Result { - let token = self.expect_token(&Token::LBrace)?; + fn parse_lbrace_expr(&self) -> Result { + let token = self.expect_token(&BorrowedToken::LBrace)?; if let Some(fn_expr) = self.maybe_parse_odbc_body()? 
{ - self.expect_token(&Token::RBrace)?; + self.expect_token(&BorrowedToken::RBrace)?; return Ok(fn_expr); } @@ -2863,12 +2875,12 @@ impl<'a> Parser<'a> { /// # Errors /// This method will raise an error if the column list is empty or with invalid identifiers, /// the match expression is not a literal string, or if the search modifier is not valid. - pub fn parse_match_against(&mut self) -> Result { + pub fn parse_match_against(&self) -> Result { let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; self.expect_keyword_is(Keyword::AGAINST)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; // MySQL is too permissive about the value, IMO we can't validate it perfectly on syntax level. let match_value = self.parse_value()?.value; @@ -2898,7 +2910,7 @@ impl<'a> Parser<'a> { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::MatchAgainst { columns, @@ -2922,7 +2934,7 @@ impl<'a> Parser<'a> { /// ``` /// /// Note that we do not currently attempt to parse the quoted value. - pub fn parse_interval(&mut self) -> Result { + pub fn parse_interval(&self) -> Result { // The SQL standard allows an optional sign before the value string, but // it is not clear if any implementations support that syntax, so we // don't currently try to parse it. (The sign can instead be included @@ -2992,8 +3004,8 @@ impl<'a> Parser<'a> { /// Peek at the next token and determine if it is a temporal unit /// like `second`. - pub fn next_token_is_temporal_unit(&mut self) -> bool { - if let Token::Word(word) = self.peek_token().token { + pub fn next_token_is_temporal_unit(&self) -> bool { + if let BorrowedToken::Word(word) = self.peek_token().token { matches!( word.keyword, Keyword::YEAR @@ -3043,7 +3055,7 @@ impl<'a> Parser<'a> { /// -- typeless /// STRUCT( expr1 [AS field_name] [, ... 
]) /// ``` - fn parse_struct_literal(&mut self) -> Result { + fn parse_struct_literal(&self) -> Result { // Parse the fields definition if exist `<[field_name] field_type, ...>` self.prev_token(); let (fields, trailing_bracket) = @@ -3056,10 +3068,10 @@ impl<'a> Parser<'a> { } // Parse the struct values `(expr1 [, ... ])` - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let values = self .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::Struct { values, fields }) } @@ -3077,7 +3089,7 @@ impl<'a> Parser<'a> { /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax - fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result { + fn parse_struct_field_expr(&self, typed_syntax: bool) -> Result { let expr = self.parse_expr()?; if self.parse_keyword(Keyword::AS) { if typed_syntax { @@ -3109,16 +3121,16 @@ impl<'a> Parser<'a> { /// STRUCT<[field_name] field_type> /// ``` fn parse_struct_type_def( - &mut self, + &self, mut elem_parser: F, ) -> Result<(Vec, MatchedTrailingBracket), ParserError> where - F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, + F: FnMut(&Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, { self.expect_keyword_is(Keyword::STRUCT)?; // Nothing to do if we have no type information. 
- if Token::Lt != self.peek_token() { + if BorrowedToken::Lt != self.peek_token() { return Ok((Default::default(), false.into())); } self.next_token(); @@ -3128,7 +3140,7 @@ impl<'a> Parser<'a> { let (def, trailing_bracket) = elem_parser(self)?; field_defs.push(def); // The struct field definition is finished if it occurs `>>` or comma. - if trailing_bracket.0 || !self.consume_token(&Token::Comma) { + if trailing_bracket.0 || !self.consume_token(&BorrowedToken::Comma) { break trailing_bracket; } }; @@ -3140,9 +3152,9 @@ impl<'a> Parser<'a> { } /// Duckdb Struct Data Type - fn parse_duckdb_struct_type_def(&mut self) -> Result, ParserError> { + fn parse_duckdb_struct_type_def(&self) -> Result, ParserError> { self.expect_keyword_is(Keyword::STRUCT)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let struct_body = self.parse_comma_separated(|parser| { let field_name = parser.parse_identifier()?; let field_type = parser.parse_data_type()?; @@ -3153,7 +3165,7 @@ impl<'a> Parser<'a> { options: None, }) }); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; struct_body } @@ -3166,14 +3178,12 @@ impl<'a> Parser<'a> { /// /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple - fn parse_struct_field_def( - &mut self, - ) -> Result<(StructField, MatchedTrailingBracket), ParserError> { + fn parse_struct_field_def(&self) -> Result<(StructField, MatchedTrailingBracket), ParserError> { // Look beyond the next item to infer whether both field name // and type are specified. 
let is_anonymous_field = !matches!( (self.peek_nth_token(0).token, self.peek_nth_token(1).token), - (Token::Word(_), Token::Word(_)) + (BorrowedToken::Word(_), BorrowedToken::Word(_)) ); let field_name = if is_anonymous_field { @@ -3204,10 +3214,10 @@ impl<'a> Parser<'a> { /// ``` /// /// [1]: https://duckdb.org/docs/sql/data_types/union.html - fn parse_union_type_def(&mut self) -> Result, ParserError> { + fn parse_union_type_def(&self) -> Result, ParserError> { self.expect_keyword_is(Keyword::UNION)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let fields = self.parse_comma_separated(|p| { Ok(UnionField { @@ -3216,7 +3226,7 @@ impl<'a> Parser<'a> { }) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(fields) } @@ -3231,12 +3241,13 @@ impl<'a> Parser<'a> { /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary(&mut self) -> Result { - self.expect_token(&Token::LBrace)?; + fn parse_dictionary(&self) -> Result { + self.expect_token(&BorrowedToken::LBrace)?; - let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?; + let fields = + self.parse_comma_separated0(Self::parse_dictionary_field, BorrowedToken::RBrace)?; - self.expect_token(&Token::RBrace)?; + self.expect_token(&BorrowedToken::RBrace)?; Ok(Expr::Dictionary(fields)) } @@ -3251,10 +3262,10 @@ impl<'a> Parser<'a> { /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary_field(&mut self) -> Result { + fn parse_dictionary_field(&self) -> Result { let key = self.parse_identifier()?; - self.expect_token(&Token::Colon)?; + self.expect_token(&BorrowedToken::Colon)?; let expr = self.parse_expr()?; @@ -3273,10 +3284,11 @@ 
impl<'a> Parser<'a> { /// ``` /// /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps - fn parse_duckdb_map_literal(&mut self) -> Result { - self.expect_token(&Token::LBrace)?; - let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?; - self.expect_token(&Token::RBrace)?; + fn parse_duckdb_map_literal(&self) -> Result { + self.expect_token(&BorrowedToken::LBrace)?; + let fields = + self.parse_comma_separated0(Self::parse_duckdb_map_field, BorrowedToken::RBrace)?; + self.expect_token(&BorrowedToken::RBrace)?; Ok(Expr::Map(Map { entries: fields })) } @@ -3289,10 +3301,10 @@ impl<'a> Parser<'a> { /// ``` /// /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps - fn parse_duckdb_map_field(&mut self) -> Result { + fn parse_duckdb_map_field(&self) -> Result { let key = self.parse_expr()?; - self.expect_token(&Token::Colon)?; + self.expect_token(&BorrowedToken::Colon)?; let value = self.parse_expr()?; @@ -3311,13 +3323,13 @@ impl<'a> Parser<'a> { /// ``` /// /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map - fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> { + fn parse_click_house_map_def(&self) -> Result<(DataType, DataType), ParserError> { self.expect_keyword_is(Keyword::MAP)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let key_data_type = self.parse_data_type()?; - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let value_data_type = self.parse_data_type()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok((key_data_type, value_data_type)) } @@ -3331,18 +3343,18 @@ impl<'a> Parser<'a> { /// ``` /// /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple - fn parse_click_house_tuple_def(&mut self) -> Result, ParserError> { + fn parse_click_house_tuple_def(&self) -> Result, ParserError> { 
self.expect_keyword_is(Keyword::TUPLE)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let mut field_defs = vec![]; loop { let (def, _) = self.parse_struct_field_def()?; field_defs.push(def); - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { break; } } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(field_defs) } @@ -3352,16 +3364,16 @@ impl<'a> Parser<'a> { /// matched `trailing_bracket` argument). It returns whether there is a trailing /// left to be matched - (i.e. if '>>' was matched). fn expect_closing_angle_bracket( - &mut self, + &self, trailing_bracket: MatchedTrailingBracket, ) -> Result { let trailing_bracket = if !trailing_bracket.0 { match self.peek_token().token { - Token::Gt => { + BorrowedToken::Gt => { self.next_token(); false.into() } - Token::ShiftRight => { + BorrowedToken::ShiftRight => { self.next_token(); true.into() } @@ -3375,7 +3387,7 @@ impl<'a> Parser<'a> { } /// Parse an operator following an expression - pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result { + pub fn parse_infix(&self, expr: Expr, precedence: u8) -> Result { // allow the dialect to override infix parsing if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) { return infix; @@ -3389,22 +3401,22 @@ impl<'a> Parser<'a> { let tok_index = self.get_current_index(); let span = tok.span; let regular_binary_operator = match &tok.token { - Token::Spaceship => Some(BinaryOperator::Spaceship), - Token::DoubleEq => Some(BinaryOperator::Eq), - Token::Assignment => Some(BinaryOperator::Assignment), - Token::Eq => Some(BinaryOperator::Eq), - Token::Neq => Some(BinaryOperator::NotEq), - Token::Gt => Some(BinaryOperator::Gt), - Token::GtEq => Some(BinaryOperator::GtEq), - Token::Lt => Some(BinaryOperator::Lt), - Token::LtEq => Some(BinaryOperator::LtEq), - Token::Plus => Some(BinaryOperator::Plus), - Token::Minus => 
Some(BinaryOperator::Minus), - Token::Mul => Some(BinaryOperator::Multiply), - Token::Mod => Some(BinaryOperator::Modulo), - Token::StringConcat => Some(BinaryOperator::StringConcat), - Token::Pipe => Some(BinaryOperator::BitwiseOr), - Token::Caret => { + BorrowedToken::Spaceship => Some(BinaryOperator::Spaceship), + BorrowedToken::DoubleEq => Some(BinaryOperator::Eq), + BorrowedToken::Assignment => Some(BinaryOperator::Assignment), + BorrowedToken::Eq => Some(BinaryOperator::Eq), + BorrowedToken::Neq => Some(BinaryOperator::NotEq), + BorrowedToken::Gt => Some(BinaryOperator::Gt), + BorrowedToken::GtEq => Some(BinaryOperator::GtEq), + BorrowedToken::Lt => Some(BinaryOperator::Lt), + BorrowedToken::LtEq => Some(BinaryOperator::LtEq), + BorrowedToken::Plus => Some(BinaryOperator::Plus), + BorrowedToken::Minus => Some(BinaryOperator::Minus), + BorrowedToken::Mul => Some(BinaryOperator::Multiply), + BorrowedToken::Mod => Some(BinaryOperator::Modulo), + BorrowedToken::StringConcat => Some(BinaryOperator::StringConcat), + BorrowedToken::Pipe => Some(BinaryOperator::BitwiseOr), + BorrowedToken::Caret => { // In PostgreSQL, ^ stands for the exponentiation operation, // and # stands for XOR. 
See https://www.postgresql.org/docs/current/functions-math.html if dialect_is!(dialect is PostgreSqlDialect) { @@ -3413,109 +3425,119 @@ impl<'a> Parser<'a> { Some(BinaryOperator::BitwiseXor) } } - Token::Ampersand => Some(BinaryOperator::BitwiseAnd), - Token::Div => Some(BinaryOperator::Divide), - Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { + BorrowedToken::Ampersand => Some(BinaryOperator::BitwiseAnd), + BorrowedToken::Div => Some(BinaryOperator::Divide), + BorrowedToken::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { Some(BinaryOperator::DuckIntegerDivide) } - Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { + BorrowedToken::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseShiftLeft) } - Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { + BorrowedToken::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseShiftRight) } - Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { + BorrowedToken::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseXor) } - Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { + BorrowedToken::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGOverlap) } - Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { + BorrowedToken::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { Some(BinaryOperator::PGOverlap) } - Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { + BorrowedToken::CaretAt if 
dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { Some(BinaryOperator::PGStartsWith) } - Token::Tilde => Some(BinaryOperator::PGRegexMatch), - Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch), - Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch), - Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch), - Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch), - Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch), - Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch), - Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch), - Token::Arrow => Some(BinaryOperator::Arrow), - Token::LongArrow => Some(BinaryOperator::LongArrow), - Token::HashArrow => Some(BinaryOperator::HashArrow), - Token::HashLongArrow => Some(BinaryOperator::HashLongArrow), - Token::AtArrow => Some(BinaryOperator::AtArrow), - Token::ArrowAt => Some(BinaryOperator::ArrowAt), - Token::HashMinus => Some(BinaryOperator::HashMinus), - Token::AtQuestion => Some(BinaryOperator::AtQuestion), - Token::AtAt => Some(BinaryOperator::AtAt), - Token::Question => Some(BinaryOperator::Question), - Token::QuestionAnd => Some(BinaryOperator::QuestionAnd), - Token::QuestionPipe => Some(BinaryOperator::QuestionPipe), - Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())), - Token::DoubleSharp if self.dialect.supports_geometric_types() => { + BorrowedToken::Tilde => Some(BinaryOperator::PGRegexMatch), + BorrowedToken::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch), + BorrowedToken::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch), + BorrowedToken::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch), + BorrowedToken::DoubleTilde => Some(BinaryOperator::PGLikeMatch), + BorrowedToken::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch), + BorrowedToken::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch), + 
BorrowedToken::ExclamationMarkDoubleTildeAsterisk => { + Some(BinaryOperator::PGNotILikeMatch) + } + BorrowedToken::Arrow => Some(BinaryOperator::Arrow), + BorrowedToken::LongArrow => Some(BinaryOperator::LongArrow), + BorrowedToken::HashArrow => Some(BinaryOperator::HashArrow), + BorrowedToken::HashLongArrow => Some(BinaryOperator::HashLongArrow), + BorrowedToken::AtArrow => Some(BinaryOperator::AtArrow), + BorrowedToken::ArrowAt => Some(BinaryOperator::ArrowAt), + BorrowedToken::HashMinus => Some(BinaryOperator::HashMinus), + BorrowedToken::AtQuestion => Some(BinaryOperator::AtQuestion), + BorrowedToken::AtAt => Some(BinaryOperator::AtAt), + BorrowedToken::Question => Some(BinaryOperator::Question), + BorrowedToken::QuestionAnd => Some(BinaryOperator::QuestionAnd), + BorrowedToken::QuestionPipe => Some(BinaryOperator::QuestionPipe), + BorrowedToken::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())), + BorrowedToken::DoubleSharp if self.dialect.supports_geometric_types() => { Some(BinaryOperator::DoubleHash) } - Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => { + BorrowedToken::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => { Some(BinaryOperator::AndLt) } - Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => { + BorrowedToken::AmpersandRightAngleBracket + if self.dialect.supports_geometric_types() => + { Some(BinaryOperator::AndGt) } - Token::QuestionMarkDash if self.dialect.supports_geometric_types() => { + BorrowedToken::QuestionMarkDash if self.dialect.supports_geometric_types() => { Some(BinaryOperator::QuestionDash) } - Token::AmpersandLeftAngleBracketVerticalBar + BorrowedToken::AmpersandLeftAngleBracketVerticalBar if self.dialect.supports_geometric_types() => { Some(BinaryOperator::AndLtPipe) } - Token::VerticalBarAmpersandRightAngleBracket + BorrowedToken::VerticalBarAmpersandRightAngleBracket if self.dialect.supports_geometric_types() => { 
Some(BinaryOperator::PipeAndGt) } - Token::TwoWayArrow if self.dialect.supports_geometric_types() => { + BorrowedToken::TwoWayArrow if self.dialect.supports_geometric_types() => { Some(BinaryOperator::LtDashGt) } - Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => { + BorrowedToken::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => { Some(BinaryOperator::LtCaret) } - Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => { + BorrowedToken::RightAngleBracketCaret if self.dialect.supports_geometric_types() => { Some(BinaryOperator::GtCaret) } - Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => { + BorrowedToken::QuestionMarkSharp if self.dialect.supports_geometric_types() => { Some(BinaryOperator::QuestionHash) } - Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => { + BorrowedToken::QuestionMarkDoubleVerticalBar + if self.dialect.supports_geometric_types() => + { Some(BinaryOperator::QuestionDoublePipe) } - Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => { + BorrowedToken::QuestionMarkDashVerticalBar + if self.dialect.supports_geometric_types() => + { Some(BinaryOperator::QuestionDashPipe) } - Token::TildeEqual if self.dialect.supports_geometric_types() => { + BorrowedToken::TildeEqual if self.dialect.supports_geometric_types() => { Some(BinaryOperator::TildeEq) } - Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => { + BorrowedToken::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => { Some(BinaryOperator::LtLtPipe) } - Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => { + BorrowedToken::VerticalBarShiftRight if self.dialect.supports_geometric_types() => { Some(BinaryOperator::PipeGtGt) } - Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At), + BorrowedToken::AtSign if self.dialect.supports_geometric_types() => { + 
Some(BinaryOperator::At) + } - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::AND => Some(BinaryOperator::And), Keyword::OR => Some(BinaryOperator::Or), Keyword::XOR => Some(BinaryOperator::Xor), Keyword::OVERLAPS => Some(BinaryOperator::Overlaps), Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; // there are special rules for operator names in // postgres so we can not use 'parse_object' // or similar. @@ -3524,11 +3546,11 @@ impl<'a> Parser<'a> { loop { self.advance_token(); idents.push(self.get_current_token().to_string()); - if !self.consume_token(&Token::Period) { + if !self.consume_token(&BorrowedToken::Period) { break; } } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(BinaryOperator::PGCustomBinaryOperator(idents)) } _ => None, @@ -3541,7 +3563,7 @@ impl<'a> Parser<'a> { if let Some(keyword) = self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let right = if self.peek_sub_query() { // We have a subquery ahead (SELECT\WITH ...) need to rewind and // use the parenthesis for parsing the subquery as an expression. 
@@ -3550,7 +3572,7 @@ impl<'a> Parser<'a> { } else { // Non-subquery expression let right = self.parse_subexpr(precedence)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; right }; @@ -3600,7 +3622,7 @@ impl<'a> Parser<'a> { right: Box::new(self.parse_subexpr(precedence)?), }) } - } else if let Token::Word(w) = &tok.token { + } else if let BorrowedToken::Word(w) = &tok.token { match w.keyword { Keyword::IS => { if self.parse_keyword(Keyword::NULL) { @@ -3712,9 +3734,9 @@ impl<'a> Parser<'a> { } Keyword::MEMBER => { if self.parse_keyword(Keyword::OF) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let array = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Expr::MemberOf(MemberOf { value: Box::new(expr), array: Box::new(array), @@ -3729,20 +3751,23 @@ impl<'a> Parser<'a> { tok.span.start ), } - } else if Token::DoubleColon == *tok { + } else if BorrowedToken::DoubleColon == *tok { Ok(Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(expr), data_type: self.parse_data_type()?, format: None, }) - } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() { + } else if BorrowedToken::ExclamationMark == *tok + && self.dialect.supports_factorial_operator() + { Ok(Expr::UnaryOp { op: UnaryOperator::PGPostfixFactorial, expr: Box::new(expr), }) - } else if Token::LBracket == *tok && self.dialect.supports_partiql() - || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok) + } else if BorrowedToken::LBracket == *tok && self.dialect.supports_partiql() + || (dialect_of!(self is SnowflakeDialect | GenericDialect) + && BorrowedToken::Colon == *tok) { self.prev_token(); self.parse_json_access(expr) @@ -3756,7 +3781,7 @@ impl<'a> Parser<'a> { } /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO` - pub fn parse_escape_char(&mut self) -> Result, ParserError> { + pub fn 
parse_escape_char(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::ESCAPE) { Ok(Some(self.parse_value()?.into())) } else { @@ -3773,16 +3798,16 @@ impl<'a> Parser<'a> { /// * `[l:u:s]` /// /// Parser is right after `[` - fn parse_subscript_inner(&mut self) -> Result { + fn parse_subscript_inner(&self) -> Result { // at either `:(rest)` or `:(rest)]` - let lower_bound = if self.consume_token(&Token::Colon) { + let lower_bound = if self.consume_token(&BorrowedToken::Colon) { None } else { Some(self.parse_expr()?) }; // check for end - if self.consume_token(&Token::RBracket) { + if self.consume_token(&BorrowedToken::RBracket) { if let Some(lower_bound) = lower_bound { return Ok(Subscript::Index { index: lower_bound }); }; @@ -3795,11 +3820,11 @@ impl<'a> Parser<'a> { // consume the `:` if lower_bound.is_some() { - self.expect_token(&Token::Colon)?; + self.expect_token(&BorrowedToken::Colon)?; } // we are now at either `]`, `(rest)]` - let upper_bound = if self.consume_token(&Token::RBracket) { + let upper_bound = if self.consume_token(&BorrowedToken::RBracket) { return Ok(Subscript::Slice { lower_bound, upper_bound: None, @@ -3810,7 +3835,7 @@ impl<'a> Parser<'a> { }; // check for end - if self.consume_token(&Token::RBracket) { + if self.consume_token(&BorrowedToken::RBracket) { return Ok(Subscript::Slice { lower_bound, upper_bound, @@ -3819,15 +3844,15 @@ impl<'a> Parser<'a> { } // we are now at `:]` or `:stride]` - self.expect_token(&Token::Colon)?; - let stride = if self.consume_token(&Token::RBracket) { + self.expect_token(&BorrowedToken::Colon)?; + let stride = if self.consume_token(&BorrowedToken::RBracket) { None } else { Some(self.parse_expr()?) 
}; if stride.is_some() { - self.expect_token(&Token::RBracket)?; + self.expect_token(&BorrowedToken::RBracket)?; } Ok(Subscript::Slice { @@ -3839,10 +3864,10 @@ impl<'a> Parser<'a> { /// Parse a multi-dimension array accessing like `[1:3][1][1]` pub fn parse_multi_dim_subscript( - &mut self, + &self, chain: &mut Vec, ) -> Result<(), ParserError> { - while self.consume_token(&Token::LBracket) { + while self.consume_token(&BorrowedToken::LBracket) { self.parse_subscript(chain)?; } Ok(()) @@ -3851,16 +3876,16 @@ impl<'a> Parser<'a> { /// Parses an array subscript like `[1:3]` /// /// Parser is right after `[` - fn parse_subscript(&mut self, chain: &mut Vec) -> Result<(), ParserError> { + fn parse_subscript(&self, chain: &mut Vec) -> Result<(), ParserError> { let subscript = self.parse_subscript_inner()?; chain.push(AccessExpr::Subscript(subscript)); Ok(()) } - fn parse_json_path_object_key(&mut self) -> Result { + fn parse_json_path_object_key(&self) -> Result { let token = self.next_token(); match token.token { - Token::Word(Word { + BorrowedToken::Word(Word { value, // path segments in SF dot notation can be unquoted or double-quoted quote_style: quote_style @ (Some('"') | None), @@ -3875,13 +3900,13 @@ impl<'a> Parser<'a> { // This token should never be generated on snowflake or generic // dialects, but we handle it just in case this is used on future // dialects. 
- Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }), + BorrowedToken::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }), _ => self.expected("variant object key name", token), } } - fn parse_json_access(&mut self, expr: Expr) -> Result { + fn parse_json_access(&self, expr: Expr) -> Result { let path = self.parse_json_path()?; Ok(Expr::JsonAccess { value: Box::new(expr), @@ -3889,19 +3914,19 @@ impl<'a> Parser<'a> { }) } - fn parse_json_path(&mut self) -> Result { + fn parse_json_path(&self) -> Result { let mut path = Vec::new(); loop { match self.next_token().token { - Token::Colon if path.is_empty() => { + BorrowedToken::Colon if path.is_empty() => { path.push(self.parse_json_path_object_key()?); } - Token::Period if !path.is_empty() => { + BorrowedToken::Period if !path.is_empty() => { path.push(self.parse_json_path_object_key()?); } - Token::LBracket => { + BorrowedToken::LBracket => { let key = self.parse_expr()?; - self.expect_token(&Token::RBracket)?; + self.expect_token(&BorrowedToken::RBracket)?; path.push(JsonPathElem::Bracket { key }); } @@ -3917,20 +3942,20 @@ impl<'a> Parser<'a> { } /// Parses the parens following the `[ NOT ] IN` operator. - pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result { + pub fn parse_in(&self, expr: Expr, negated: bool) -> Result { // BigQuery allows `IN UNNEST(array_expression)` // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators if self.parse_keyword(Keyword::UNNEST) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let array_expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; return Ok(Expr::InUnnest { expr: Box::new(expr), array_expr: Box::new(array_expr), negated, }); } - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let in_op = match self.maybe_parse(|p| p.parse_query())? 
{ Some(subquery) => Expr::InSubquery { expr: Box::new(expr), @@ -3940,19 +3965,19 @@ impl<'a> Parser<'a> { None => Expr::InList { expr: Box::new(expr), list: if self.dialect.supports_in_empty_list() { - self.parse_comma_separated0(Parser::parse_expr, Token::RParen)? + self.parse_comma_separated0(Parser::parse_expr, BorrowedToken::RParen)? } else { self.parse_comma_separated(Parser::parse_expr)? }, negated, }, }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(in_op) } /// Parses `BETWEEN AND `, assuming the `BETWEEN` keyword was already consumed. - pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result { + pub fn parse_between(&self, expr: Expr, negated: bool) -> Result { // Stop parsing subexpressions for and on tokens with // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc. let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; @@ -3967,7 +3992,7 @@ impl<'a> Parser<'a> { } /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`. - pub fn parse_pg_cast(&mut self, expr: Expr) -> Result { + pub fn parse_pg_cast(&self, expr: Expr) -> Result { Ok(Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(expr), @@ -3983,21 +4008,21 @@ impl<'a> Parser<'a> { /// Return the token at the given location, or EOF if the index is beyond /// the length of the current set of tokens. - pub fn token_at(&self, index: usize) -> &TokenWithSpan { + pub fn token_at(&self, index: usize) -> &TokenWithSpan<'_> { self.tokens.get(index).unwrap_or(&EOF_TOKEN) } /// Return the first non-whitespace token that has not yet been processed - /// or Token::EOF + /// or BorrowedToken::EOF /// /// See [`Self::peek_token_ref`] to avoid the copy. 
- pub fn peek_token(&self) -> TokenWithSpan { + pub fn peek_token(&self) -> TokenWithSpan<'_> { self.peek_nth_token(0) } /// Return a reference to the first non-whitespace token that has not yet - /// been processed or Token::EOF - pub fn peek_token_ref(&self) -> &TokenWithSpan { + /// been processed or BorrowedToken::EOF + pub fn peek_token_ref(&self) -> &TokenWithSpan<'a> { self.peek_nth_token_ref(0) } @@ -4009,7 +4034,7 @@ impl<'a> Parser<'a> { /// # use sqlparser::dialect::GenericDialect; /// # use sqlparser::parser::Parser; /// # use sqlparser::keywords::Keyword; - /// # use sqlparser::tokenizer::{Token, Word}; + /// # use sqlparser::tokenizer::{BorrowedToken, Word}; /// let dialect = GenericDialect {}; /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap(); /// @@ -4018,12 +4043,12 @@ impl<'a> Parser<'a> { /// assert!(matches!( /// parser.peek_tokens(), /// [ - /// Token::Word(Word { keyword: Keyword::ORDER, .. }), - /// Token::Word(Word { keyword: Keyword::BY, .. }), + /// BorrowedToken::Word(Word { keyword: Keyword::ORDER, .. }), + /// BorrowedToken::Word(Word { keyword: Keyword::BY, .. }), /// ] /// )); /// ``` - pub fn peek_tokens(&self) -> [Token; N] { + pub fn peek_tokens(&'a self) -> [BorrowedToken<'a>; N] { self.peek_tokens_with_location() .map(|with_loc| with_loc.token) } @@ -4032,20 +4057,20 @@ impl<'a> Parser<'a> { /// yet been processed. /// /// See [`Self::peek_token`] for an example. 
- pub fn peek_tokens_with_location(&self) -> [TokenWithSpan; N] { - let mut index = self.index; + pub fn peek_tokens_with_location(&self) -> [TokenWithSpan<'_>; N] { + let mut index = self.index.get(); core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; if let Some(TokenWithSpan { - token: Token::Whitespace(_), + token: BorrowedToken::Whitespace(_), span: _, }) = token { continue; } break token.cloned().unwrap_or(TokenWithSpan { - token: Token::EOF, + token: BorrowedToken::EOF, span: Span::empty(), }); }) @@ -4055,13 +4080,13 @@ impl<'a> Parser<'a> { /// that have not yet been processed. /// /// See [`Self::peek_tokens`] for an example. - pub fn peek_tokens_ref(&self) -> [&TokenWithSpan; N] { - let mut index = self.index; + pub fn peek_tokens_ref(&self) -> [&TokenWithSpan<'_>; N] { + let mut index = self.index.get(); core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; if let Some(TokenWithSpan { - token: Token::Whitespace(_), + token: BorrowedToken::Whitespace(_), span: _, }) = token { @@ -4072,18 +4097,18 @@ impl<'a> Parser<'a> { } /// Return nth non-whitespace token that has not yet been processed - pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan { + pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan<'_> { self.peek_nth_token_ref(n).clone() } /// Return nth non-whitespace token that has not yet been processed - pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan { - let mut index = self.index; + pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan<'a> { + let mut index = self.index.get(); loop { index += 1; match self.tokens.get(index - 1) { Some(TokenWithSpan { - token: Token::Whitespace(_), + token: BorrowedToken::Whitespace(_), span: _, }) => continue, non_whitespace => { @@ -4098,17 +4123,17 @@ impl<'a> Parser<'a> { /// Return the first token, possibly whitespace, that has not yet been processed /// (or None if reached end-of-file). 
- pub fn peek_token_no_skip(&self) -> TokenWithSpan { + pub fn peek_token_no_skip(&self) -> TokenWithSpan<'_> { self.peek_nth_token_no_skip(0) } /// Return nth token, possibly whitespace, that has not yet been processed. - pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan { + pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan<'_> { self.tokens - .get(self.index + n) + .get(self.index.get() + n) .cloned() .unwrap_or(TokenWithSpan { - token: Token::EOF, + token: BorrowedToken::EOF, span: Span::empty(), }) } @@ -4116,10 +4141,10 @@ impl<'a> Parser<'a> { /// Return true if the next tokens exactly `expected` /// /// Does not advance the current token. - fn peek_keywords(&mut self, expected: &[Keyword]) -> bool { - let index = self.index; + fn peek_keywords(&self, expected: &[Keyword]) -> bool { + let index = self.index.get(); let matched = self.parse_keywords(expected); - self.index = index; + self.index.set(index); matched } @@ -4127,7 +4152,7 @@ impl<'a> Parser<'a> { /// /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to /// avoid the copy. - pub fn next_token(&mut self) -> TokenWithSpan { + pub fn next_token(&self) -> TokenWithSpan<'_> { self.advance_token(); self.get_current_token().clone() } @@ -4137,24 +4162,24 @@ impl<'a> Parser<'a> { /// This can be used with APIs that expect an index, such as /// [`Self::token_at`] pub fn get_current_index(&self) -> usize { - self.index.saturating_sub(1) + self.index.get().saturating_sub(1) } /// Return the next unprocessed token, possibly whitespace. 
- pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> { - self.index += 1; - self.tokens.get(self.index - 1) + pub fn next_token_no_skip(&self) -> Option<&TokenWithSpan<'_>> { + self.index.set(self.index.get() + 1); + self.tokens.get(self.index.get() - 1) } /// Advances the current token to the next non-whitespace token /// /// See [`Self::get_current_token`] to get the current token after advancing - pub fn advance_token(&mut self) { + pub fn advance_token(&self) { loop { - self.index += 1; - match self.tokens.get(self.index - 1) { + self.index.set(self.index.get() + 1); + match self.tokens.get(self.index.get() - 1) { Some(TokenWithSpan { - token: Token::Whitespace(_), + token: BorrowedToken::Whitespace(_), span: _, }) => continue, _ => break, @@ -4165,22 +4190,22 @@ impl<'a> Parser<'a> { /// Returns a reference to the current token /// /// Does not advance the current token. - pub fn get_current_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(1)) + pub fn get_current_token(&self) -> &TokenWithSpan<'_> { + self.token_at(self.index.get().saturating_sub(1)) } /// Returns a reference to the previous token /// /// Does not advance the current token. - pub fn get_previous_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(2)) + pub fn get_previous_token(&self) -> &TokenWithSpan<'_> { + self.token_at(self.index.get().saturating_sub(2)) } /// Returns a reference to the next token /// /// Does not advance the current token. - pub fn get_next_token(&self) -> &TokenWithSpan { - self.token_at(self.index) + pub fn get_next_token(&self) -> &TokenWithSpan<'_> { + self.token_at(self.index.get()) } /// Seek back the last one non-whitespace token. @@ -4189,14 +4214,14 @@ impl<'a> Parser<'a> { /// after `next_token()` indicates an EOF. /// // TODO rename to backup_token and deprecate prev_token? 
- pub fn prev_token(&mut self) { + pub fn prev_token(&self) { loop { - assert!(self.index > 0); - self.index -= 1; + assert!(self.index.get() > 0); + self.index.set(self.index.get() - 1); if let Some(TokenWithSpan { - token: Token::Whitespace(_), + token: BorrowedToken::Whitespace(_), span: _, - }) = self.tokens.get(self.index) + }) = self.tokens.get(self.index.get()) { continue; } @@ -4232,7 +4257,7 @@ impl<'a> Parser<'a> { /// If the current token is the `expected` keyword, consume it and returns /// true. Otherwise, no tokens are consumed and returns false. #[must_use] - pub fn parse_keyword(&mut self, expected: Keyword) -> bool { + pub fn parse_keyword(&self, expected: Keyword) -> bool { if self.peek_keyword(expected) { self.advance_token(); true @@ -4243,7 +4268,7 @@ impl<'a> Parser<'a> { #[must_use] pub fn peek_keyword(&self, expected: Keyword) -> bool { - matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword) + matches!(&self.peek_token_ref().token, BorrowedToken::Word(w) if expected == w.keyword) } /// If the current token is the `expected` keyword followed by @@ -4253,7 +4278,7 @@ impl<'a> Parser<'a> { /// Note that if the length of `tokens` is too long, this function will /// not be efficient as it does a loop on the tokens with `peek_nth_token` /// each time. - pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { + pub fn parse_keyword_with_tokens(&self, expected: Keyword, tokens: &[Token]) -> bool { self.keyword_with_tokens(expected, tokens, true) } @@ -4261,13 +4286,13 @@ impl<'a> Parser<'a> { /// without consuming them. /// /// See [Self::parse_keyword_with_tokens] for details. 
- pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { + pub(crate) fn peek_keyword_with_tokens(&self, expected: Keyword, tokens: &[Token]) -> bool { self.keyword_with_tokens(expected, tokens, false) } - fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool { + fn keyword_with_tokens(&self, expected: Keyword, tokens: &[Token], consume: bool) -> bool { match &self.peek_token_ref().token { - Token::Word(w) if expected == w.keyword => { + BorrowedToken::Word(w) if expected == w.keyword => { for (idx, token) in tokens.iter().enumerate() { if self.peek_nth_token_ref(idx + 1).token != *token { return false; @@ -4290,13 +4315,13 @@ impl<'a> Parser<'a> { /// sequence, consume them and returns true. Otherwise, no tokens are /// consumed and returns false #[must_use] - pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool { - let index = self.index; + pub fn parse_keywords(&self, keywords: &[Keyword]) -> bool { + let index = self.index.get(); for &keyword in keywords { if !self.parse_keyword(keyword) { // println!("parse_keywords aborting .. did not find {:?}", keyword); // reset index and return immediately - self.index = index; + self.index.set(index); return false; } } @@ -4319,9 +4344,9 @@ impl<'a> Parser<'a> { /// and return the keyword that matches. Otherwise, no tokens are consumed /// and returns [`None`]. #[must_use] - pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> { + pub fn parse_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> { match &self.peek_token_ref().token { - Token::Word(w) => { + BorrowedToken::Word(w) => { keywords .iter() .find(|keyword| **keyword == w.keyword) @@ -4336,7 +4361,7 @@ impl<'a> Parser<'a> { /// If the current token is one of the expected keywords, consume the token /// and return the keyword that matches. Otherwise, return an error. 
- pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> { + pub fn expect_one_of_keywords(&self, keywords: &[Keyword]) -> Result<Keyword, ParserError> { if let Some(keyword) = self.parse_one_of_keywords(keywords) { Ok(keyword) } else { @@ -4352,7 +4377,7 @@ impl<'a> Parser<'a> { /// Otherwise, return an error. /// // todo deprecate in favor of expected_keyword_is - pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> { + pub fn expect_keyword(&self, expected: Keyword) -> Result<TokenWithSpan<'_>, ParserError> { if self.parse_keyword(expected) { Ok(self.get_current_token().clone()) } else { @@ -4365,7 +4390,7 @@ impl<'a> Parser<'a> { /// /// This differs from expect_keyword only in that the matched keyword /// token is not returned. - pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> { + pub fn expect_keyword_is(&self, expected: Keyword) -> Result<(), ParserError> { if self.parse_keyword(expected) { Ok(()) } else { @@ -4375,7 +4400,7 @@ impl<'a> Parser<'a> { /// If the current and subsequent tokens exactly match the `keywords` /// sequence, consume them and returns Ok. Otherwise, return an Error. - pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> { + pub fn expect_keywords(&self, expected: &[Keyword]) -> Result<(), ParserError> { for &kw in expected { self.expect_keyword_is(kw)?; } @@ -4386,7 +4411,7 @@ impl<'a> Parser<'a> { /// /// See [Self::advance_token] to consume the token unconditionally #[must_use] - pub fn consume_token(&mut self, expected: &Token) -> bool { + pub fn consume_token(&self, expected: &Token) -> bool { if self.peek_token_ref() == expected { self.advance_token(); true @@ -4399,11 +4424,11 @@ impl<'a> Parser<'a> { /// If the current and subsequent tokens exactly match the `tokens` /// sequence, consume them and returns true. 
Otherwise, no tokens are /// consumed and returns false #[must_use] - pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool { - let index = self.index; + pub fn consume_tokens(&self, tokens: &[Token]) -> bool { + let index = self.index.get(); for token in tokens { if !self.consume_token(token) { - self.index = index; + self.index.set(index); return false; } } @@ -4411,7 +4436,10 @@ impl<'a> Parser<'a> { } /// Bail out if the current token is not an expected keyword, or consume it if it is - pub fn expect_token(&mut self, expected: &Token) -> Result { + pub fn expect_token<'b>( + &self, + expected: &'b BorrowedToken<'b>, + ) -> Result, ParserError> { if self.peek_token_ref() == expected { Ok(self.next_token()) } else { @@ -4432,7 +4460,7 @@ impl<'a> Parser<'a> { } /// Parse a comma-separated list of 1+ SelectItem - pub fn parse_projection(&mut self) -> Result, ParserError> { + pub fn parse_projection(&self) -> Result, ParserError> { // BigQuery and Snowflake allow trailing commas, but only in project lists // e.g. 
`SELECT 1, 2, FROM t` // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas @@ -4448,22 +4476,22 @@ impl<'a> Parser<'a> { ) } - pub fn parse_actions_list(&mut self) -> Result, ParserError> { + pub fn parse_actions_list(&self) -> Result, ParserError> { let mut values = vec![]; loop { values.push(self.parse_grant_permission()?); - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { break; } else if self.options.trailing_commas { match self.peek_token().token { - Token::Word(kw) if kw.keyword == Keyword::ON => { + BorrowedToken::Word(kw) if kw.keyword == Keyword::ON => { break; } - Token::RParen - | Token::SemiColon - | Token::EOF - | Token::RBracket - | Token::RBrace => break, + BorrowedToken::RParen + | BorrowedToken::SemiColon + | BorrowedToken::EOF + | BorrowedToken::RBracket + | BorrowedToken::RBrace => break, _ => continue, } } @@ -4472,7 +4500,7 @@ impl<'a> Parser<'a> { } /// Parse a list of [TableWithJoins] - fn parse_table_with_joins(&mut self) -> Result, ParserError> { + fn parse_table_with_joins(&self) -> Result, ParserError> { let trailing_commas = self.dialect.supports_from_trailing_commas(); self.parse_comma_separated_with_trailing_commas( @@ -4489,22 +4517,24 @@ impl<'a> Parser<'a> { /// /// Returns true if there is a next element fn is_parse_comma_separated_end_with_trailing_commas( - &mut self, + &self, trailing_commas: bool, is_reserved_keyword: &R, ) -> bool where - R: Fn(&Keyword, &mut Parser) -> bool, + R: Fn(&Keyword, &Parser) -> bool, { - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { true } else if trailing_commas { let token = self.next_token().token; let is_end = match token { - Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true, - Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => { - true - } + BorrowedToken::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true, + 
BorrowedToken::RParen + | BorrowedToken::SemiColon + | BorrowedToken::EOF + | BorrowedToken::RBracket + | BorrowedToken::RBrace => true, _ => false, }; self.prev_token(); @@ -4517,7 +4547,7 @@ impl<'a> Parser<'a> { /// Parse the comma of a comma-separated syntax element. /// Returns true if there is a next element - fn is_parse_comma_separated_end(&mut self) -> bool { + fn is_parse_comma_separated_end(&self) -> bool { self.is_parse_comma_separated_end_with_trailing_commas( self.options.trailing_commas, &Self::is_reserved_for_column_alias, @@ -4525,9 +4555,9 @@ impl<'a> Parser<'a> { } /// Parse a comma-separated list of 1+ items accepted by `F` - pub fn parse_comma_separated(&mut self, f: F) -> Result, ParserError> + pub fn parse_comma_separated(&self, f: F) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { self.parse_comma_separated_with_trailing_commas( f, @@ -4541,14 +4571,14 @@ impl<'a> Parser<'a> { /// keyword is a reserved keyword. /// Allows for control over trailing commas. 
fn parse_comma_separated_with_trailing_commas( - &mut self, + &self, mut f: F, trailing_commas: bool, is_reserved_keyword: R, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, - R: Fn(&Keyword, &mut Parser) -> bool, + F: FnMut(&Parser<'a>) -> Result, + R: Fn(&Keyword, &Parser) -> bool, { let mut values = vec![]; loop { @@ -4564,14 +4594,14 @@ impl<'a> Parser<'a> { } /// Parse a period-separated list of 1+ items accepted by `F` - fn parse_period_separated(&mut self, mut f: F) -> Result, ParserError> + fn parse_period_separated(&self, mut f: F) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { let mut values = vec![]; loop { values.push(f(self)?); - if !self.consume_token(&Token::Period) { + if !self.consume_token(&BorrowedToken::Period) { break; } } @@ -4580,12 +4610,12 @@ impl<'a> Parser<'a> { /// Parse a keyword-separated list of 1+ items accepted by `F` pub fn parse_keyword_separated( - &mut self, + &self, keyword: Keyword, mut f: F, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { let mut values = vec![]; loop { @@ -4597,32 +4627,32 @@ impl<'a> Parser<'a> { Ok(values) } - pub fn parse_parenthesized(&mut self, mut f: F) -> Result + pub fn parse_parenthesized(&self, mut f: F) -> Result where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let res = f(self)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(res) } /// Parse a comma-separated list of 0+ items accepted by `F` - /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...) + /// * `end_token` - expected end token for the closure (e.g. [BorrowedToken::RParen], [BorrowedToken::RBrace] ...) 
pub fn parse_comma_separated0( - &mut self, + &self, f: F, end_token: Token, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { if self.peek_token().token == end_token { return Ok(vec![]); } - if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] { - let _ = self.consume_token(&Token::Comma); + if self.options.trailing_commas && self.peek_tokens() == [BorrowedToken::Comma, end_token] { + let _ = self.consume_token(&BorrowedToken::Comma); return Ok(vec![]); } @@ -4633,14 +4663,14 @@ impl<'a> Parser<'a> { /// If the next token is any of `terminal_keywords` then no more /// statements will be parsed. pub(crate) fn parse_statement_list( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result, ParserError> { let mut values = vec![]; loop { match &self.peek_nth_token_ref(0).token { - Token::EOF => break, - Token::Word(w) => { + BorrowedToken::EOF => break, + BorrowedToken::Word(w) => { if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) { break; } @@ -4649,7 +4679,7 @@ impl<'a> Parser<'a> { } values.push(self.parse_statement()?); - self.expect_token(&Token::SemiColon)?; + self.expect_token(&BorrowedToken::SemiColon)?; } Ok(values) } @@ -4657,16 +4687,16 @@ impl<'a> Parser<'a> { /// Default implementation of a predicate that returns true if /// the specified keyword is reserved for column alias. /// See [Dialect::is_column_alias] - fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool { + fn is_reserved_for_column_alias(kw: &Keyword, parser: &Parser) -> bool { !parser.dialect.is_column_alias(kw, parser) } /// Run a parser method `f`, reverting back to the current position if unsuccessful. /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`. /// Returns `Ok(None)` if `f` returns any other error. 
- pub fn maybe_parse(&mut self, f: F) -> Result, ParserError> + pub fn maybe_parse(&self, f: F) -> Result, ParserError> where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { match self.try_parse(f) { Ok(t) => Ok(Some(t)), @@ -4676,16 +4706,16 @@ impl<'a> Parser<'a> { } /// Run a parser method `f`, reverting back to the current position if unsuccessful. - pub fn try_parse(&mut self, mut f: F) -> Result + pub fn try_parse(&self, mut f: F) -> Result where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { - let index = self.index; + let index = self.index.get(); match f(self) { Ok(t) => Ok(t), Err(e) => { // Unwind stack if limit exceeded - self.index = index; + self.index.set(index); Err(e) } } @@ -4693,7 +4723,7 @@ impl<'a> Parser<'a> { /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found. - pub fn parse_all_or_distinct(&mut self) -> Result, ParserError> { + pub fn parse_all_or_distinct(&self) -> Result, ParserError> { let loc = self.peek_token().span.start; let all = self.parse_keyword(Keyword::ALL); let distinct = self.parse_keyword(Keyword::DISTINCT); @@ -4708,19 +4738,19 @@ impl<'a> Parser<'a> { return Ok(Some(Distinct::Distinct)); } - self.expect_token(&Token::LParen)?; - let col_names = if self.consume_token(&Token::RParen) { + self.expect_token(&BorrowedToken::LParen)?; + let col_names = if self.consume_token(&BorrowedToken::RParen) { self.prev_token(); Vec::new() } else { self.parse_comma_separated(Parser::parse_expr)? 
}; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some(Distinct::On(col_names))) } /// Parse a SQL CREATE statement - pub fn parse_create(&mut self) -> Result { + pub fn parse_create(&self) -> Result { let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]); let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]); let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some(); @@ -4808,7 +4838,7 @@ impl<'a> Parser<'a> { } } - fn parse_create_user(&mut self, or_replace: bool) -> Result { + fn parse_create_user(&self, or_replace: bool) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; let options = self @@ -4838,7 +4868,7 @@ impl<'a> Parser<'a> { /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details. pub fn parse_create_secret( - &mut self, + &self, or_replace: bool, temporary: bool, persistent: bool, @@ -4847,7 +4877,7 @@ impl<'a> Parser<'a> { let mut storage_specifier = None; let mut name = None; - if self.peek_token() != Token::LParen { + if self.peek_token() != BorrowedToken::LParen { if self.parse_keyword(Keyword::IN) { storage_specifier = self.parse_identifier().ok() } else { @@ -4856,26 +4886,26 @@ impl<'a> Parser<'a> { // Storage specifier may follow the name if storage_specifier.is_none() - && self.peek_token() != Token::LParen + && self.peek_token() != BorrowedToken::LParen && self.parse_keyword(Keyword::IN) { storage_specifier = self.parse_identifier().ok(); } } - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; self.expect_keyword_is(Keyword::TYPE)?; let secret_type = self.parse_identifier()?; let mut options = Vec::new(); - if self.consume_token(&Token::Comma) { - options.append(&mut self.parse_comma_separated(|p| { + if self.consume_token(&BorrowedToken::Comma) { + options.extend(self.parse_comma_separated(|p| { let key = 
p.parse_identifier()?; let value = p.parse_identifier()?; Ok(SecretOption { key, value }) })?); } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let temp = match (temporary, persistent) { (true, false) => Some(true), @@ -4896,18 +4926,18 @@ impl<'a> Parser<'a> { } /// Parse a CACHE TABLE statement - pub fn parse_cache_table(&mut self) -> Result { + pub fn parse_cache_table(&self) -> Result { let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None); if self.parse_keyword(Keyword::TABLE) { let table_name = self.parse_object_name(false)?; - if self.peek_token().token != Token::EOF { - if let Token::Word(word) = self.peek_token().token { + if self.peek_token().token != BorrowedToken::EOF { + if let BorrowedToken::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { options = self.parse_options(Keyword::OPTIONS)? } }; - if self.peek_token().token != Token::EOF { + if self.peek_token().token != BorrowedToken::EOF { let (a, q) = self.parse_as_query()?; has_as = a; query = Some(q); @@ -4933,14 +4963,14 @@ impl<'a> Parser<'a> { table_flag = Some(self.parse_object_name(false)?); if self.parse_keyword(Keyword::TABLE) { let table_name = self.parse_object_name(false)?; - if self.peek_token() != Token::EOF { - if let Token::Word(word) = self.peek_token().token { + if self.peek_token() != BorrowedToken::EOF { + if let BorrowedToken::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { options = self.parse_options(Keyword::OPTIONS)? 
} }; - if self.peek_token() != Token::EOF { + if self.peek_token() != BorrowedToken::EOF { let (a, q) = self.parse_as_query()?; has_as = a; query = Some(q); @@ -4963,7 +4993,7 @@ impl<'a> Parser<'a> { }) } } else { - if self.peek_token() == Token::EOF { + if self.peek_token() == BorrowedToken::EOF { self.prev_token(); } self.expected("a `TABLE` keyword", self.peek_token()) @@ -4972,9 +5002,9 @@ impl<'a> Parser<'a> { } /// Parse 'AS' before as query,such as `WITH XXX AS SELECT XXX` oer `CACHE TABLE AS SELECT XXX` - pub fn parse_as_query(&mut self) -> Result<(bool, Box), ParserError> { + pub fn parse_as_query(&self) -> Result<(bool, Box), ParserError> { match self.peek_token().token { - Token::Word(word) => match word.keyword { + BorrowedToken::Word(word) => match word.keyword { Keyword::AS => { self.next_token(); Ok((true, self.parse_query()?)) @@ -4986,7 +5016,7 @@ impl<'a> Parser<'a> { } /// Parse a UNCACHE TABLE statement - pub fn parse_uncache_table(&mut self) -> Result { + pub fn parse_uncache_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -4997,7 +5027,7 @@ impl<'a> Parser<'a> { } /// SQLite-specific `CREATE VIRTUAL TABLE` - pub fn parse_create_virtual_table(&mut self) -> Result { + pub fn parse_create_virtual_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -5016,7 +5046,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_create_schema(&mut self) -> Result { + pub fn parse_create_schema(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let schema_name = self.parse_schema_name()?; @@ -5055,7 +5085,7 @@ impl<'a> Parser<'a> { }) } - fn parse_schema_name(&mut self) -> Result { + fn parse_schema_name(&self) -> Result { 
if self.parse_keyword(Keyword::AUTHORIZATION) { Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?)) } else { @@ -5072,7 +5102,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_create_database(&mut self) -> Result { + pub fn parse_create_database(&self) -> Result { let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let db_name = self.parse_object_name(false)?; let mut location = None; @@ -5117,7 +5147,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_create_function_using( - &mut self, + &self, ) -> Result, ParserError> { if !self.parse_keyword(Keyword::USING) { return Ok(None); @@ -5133,13 +5163,13 @@ impl<'a> Parser<'a> { Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))), _ => self.expected( "JAR, FILE or ARCHIVE, got {:?}", - TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())), + TokenWithSpan::wrap(BorrowedToken::make_keyword(format!("{keyword:?}").as_str())), ), } } pub fn parse_create_function( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5164,19 +5194,19 @@ impl<'a> Parser<'a> { /// /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html fn parse_postgres_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { let name = self.parse_object_name(false)?; - self.expect_token(&Token::LParen)?; - let args = if Token::RParen != self.peek_token_ref().token { + self.expect_token(&BorrowedToken::LParen)?; + let args = if BorrowedToken::RParen != self.peek_token_ref().token { self.parse_comma_separated(Parser::parse_function_arg)? } else { vec![] }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let return_type = if self.parse_keyword(Keyword::RETURNS) { Some(self.parse_data_type()?) 
@@ -5291,7 +5321,7 @@ impl<'a> Parser<'a> { /// /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction fn parse_hive_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5325,7 +5355,7 @@ impl<'a> Parser<'a> { /// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement fn parse_bigquery_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5400,7 +5430,7 @@ impl<'a> Parser<'a> { /// /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql fn parse_mssql_create_function( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5445,12 +5475,12 @@ impl<'a> Parser<'a> { let end_token = self.expect_keyword(Keyword::END)?; Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), + begin_token: AttachedToken(begin_token.to_static()), statements, - end_token: AttachedToken(end_token), + end_token: AttachedToken(end_token.to_static()), })) } else if self.parse_keyword(Keyword::RETURN) { - if self.peek_token() == Token::LParen { + if self.peek_token() == BorrowedToken::LParen { Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?)) } else if self.peek_keyword(Keyword::SELECT) { let select = self.parse_select()?; @@ -5486,33 +5516,32 @@ impl<'a> Parser<'a> { } fn parse_create_function_name_and_params( - &mut self, + &self, ) -> Result<(ObjectName, Vec), ParserError> { let name = self.parse_object_name(false)?; - let parse_function_param = - |parser: &mut Parser| -> Result { - let name = parser.parse_identifier()?; - let data_type = parser.parse_data_type()?; - let default_expr = if parser.consume_token(&Token::Eq) { - Some(parser.parse_expr()?) 
- } else { - None - }; - - Ok(OperateFunctionArg { - mode: None, - name: Some(name), - data_type, - default_expr, - }) + let parse_function_param = |parser: &Parser| -> Result { + let name = parser.parse_identifier()?; + let data_type = parser.parse_data_type()?; + let default_expr = if parser.consume_token(&BorrowedToken::Eq) { + Some(parser.parse_expr()?) + } else { + None }; - self.expect_token(&Token::LParen)?; - let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; - self.expect_token(&Token::RParen)?; + + Ok(OperateFunctionArg { + mode: None, + name: Some(name), + data_type, + default_expr, + }) + }; + self.expect_token(&BorrowedToken::LParen)?; + let args = self.parse_comma_separated0(parse_function_param, BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok((name, args)) } - fn parse_function_arg(&mut self) -> Result { + fn parse_function_arg(&self) -> Result { let mode = if self.parse_keyword(Keyword::IN) { Some(ArgMode::In) } else if self.parse_keyword(Keyword::OUT) { @@ -5533,7 +5562,7 @@ impl<'a> Parser<'a> { let data_type_idx = self.get_current_index(); // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context - fn parse_data_type_no_default(parser: &mut Parser) -> Result { + fn parse_data_type_no_default(parser: &Parser) -> Result { if parser.peek_keyword(Keyword::DEFAULT) { // This dummy error is ignored in `maybe_parse` parser_err!( @@ -5549,7 +5578,7 @@ impl<'a> Parser<'a> { let token = self.token_at(data_type_idx); // We ensure that the token is a `Word` token, and not other special tokens. - if !matches!(token.token, Token::Word(_)) { + if !matches!(token.token, BorrowedToken::Word(_)) { return self.expected("a name or type", token.clone()); } @@ -5557,12 +5586,12 @@ impl<'a> Parser<'a> { data_type = next_data_type; } - let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq) - { - Some(self.parse_expr()?) 
- } else { - None - }; + let default_expr = + if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&BorrowedToken::Eq) { + Some(self.parse_expr()?) + } else { + None + }; Ok(OperateFunctionArg { mode, name, @@ -5576,7 +5605,7 @@ impl<'a> Parser<'a> { /// ```sql /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ] /// ``` - pub fn parse_drop_trigger(&mut self) -> Result { + pub fn parse_drop_trigger(&self) -> Result { if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect) { self.prev_token(); @@ -5605,7 +5634,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_trigger( - &mut self, + &self, temporary: bool, or_alter: bool, or_replace: bool, @@ -5694,7 +5723,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_trigger_period(&mut self) -> Result { + pub fn parse_trigger_period(&self) -> Result { Ok( match self.expect_one_of_keywords(&[ Keyword::FOR, @@ -5713,7 +5742,7 @@ impl<'a> Parser<'a> { ) } - pub fn parse_trigger_event(&mut self) -> Result { + pub fn parse_trigger_event(&self) -> Result { Ok( match self.expect_one_of_keywords(&[ Keyword::INSERT, @@ -5737,7 +5766,7 @@ impl<'a> Parser<'a> { ) } - pub fn parse_trigger_referencing(&mut self) -> Result, ParserError> { + pub fn parse_trigger_referencing(&self) -> Result, ParserError> { let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) { Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => { TriggerReferencingType::OldTable @@ -5759,7 +5788,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_trigger_exec_body(&mut self) -> Result { + pub fn parse_trigger_exec_body(&self) -> Result { Ok(TriggerExecBody { exec_type: match self .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])? 
@@ -5773,21 +5802,21 @@ impl<'a> Parser<'a> { } pub fn parse_create_macro( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { if dialect_of!(self is DuckDbDialect | GenericDialect) { let name = self.parse_object_name(false)?; - self.expect_token(&Token::LParen)?; - let args = if self.consume_token(&Token::RParen) { + self.expect_token(&BorrowedToken::LParen)?; + let args = if self.consume_token(&BorrowedToken::RParen) { self.prev_token(); None } else { Some(self.parse_comma_separated(Parser::parse_macro_arg)?) }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; self.expect_keyword_is(Keyword::AS)?; Ok(Statement::CreateMacro { @@ -5807,22 +5836,20 @@ impl<'a> Parser<'a> { } } - fn parse_macro_arg(&mut self) -> Result { + fn parse_macro_arg(&self) -> Result { let name = self.parse_identifier()?; - let default_expr = - if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) { - Some(self.parse_expr()?) - } else { - None - }; + let default_expr = if self.consume_token(&BorrowedToken::Assignment) + || self.consume_token(&BorrowedToken::RArrow) + { + Some(self.parse_expr()?) 
+ } else { + None + }; Ok(MacroArg { name, default_expr }) } - pub fn parse_create_external_table( - &mut self, - or_replace: bool, - ) -> Result { + pub fn parse_create_external_table(&self, or_replace: bool) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -5860,10 +5887,10 @@ impl<'a> Parser<'a> { .build()) } - pub fn parse_file_format(&mut self) -> Result { + pub fn parse_file_format(&self) -> Result { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::AVRO => Ok(FileFormat::AVRO), Keyword::JSONFILE => Ok(FileFormat::JSONFILE), Keyword::ORC => Ok(FileFormat::ORC), @@ -5877,18 +5904,18 @@ impl<'a> Parser<'a> { } } - fn parse_analyze_format_kind(&mut self) -> Result { - if self.consume_token(&Token::Eq) { + fn parse_analyze_format_kind(&self) -> Result { + if self.consume_token(&BorrowedToken::Eq) { Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?)) } else { Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?)) } } - pub fn parse_analyze_format(&mut self) -> Result { + pub fn parse_analyze_format(&self) -> Result { let next_token = self.next_token(); match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::TEXT => Ok(AnalyzeFormat::TEXT), Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ), Keyword::JSON => Ok(AnalyzeFormat::JSON), @@ -5899,7 +5926,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_view( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5952,7 +5979,7 @@ impl<'a> Parser<'a> { let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect) && self.parse_keyword(Keyword::COMMENT) { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; Some(self.parse_comment_value()?) 
} else { None @@ -5994,9 +6021,9 @@ impl<'a> Parser<'a> { /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL]. /// /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html - fn parse_create_view_params(&mut self) -> Result, ParserError> { + fn parse_create_view_params(&self) -> Result, ParserError> { let algorithm = if self.parse_keyword(Keyword::ALGORITHM) { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; Some( match self.expect_one_of_keywords(&[ Keyword::UNDEFINED, @@ -6018,7 +6045,7 @@ impl<'a> Parser<'a> { None }; let definer = if self.parse_keyword(Keyword::DEFINER) { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; Some(self.parse_grantee_name()?) } else { None @@ -6049,7 +6076,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_create_role(&mut self) -> Result { + pub fn parse_create_role(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; @@ -6107,7 +6134,7 @@ impl<'a> Parser<'a> { while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) { let loc = self .tokens - .get(self.index - 1) + .get(self.index.get() - 1) .map_or(Location { line: 0, column: 0 }, |t| t.span.start); match keyword { Keyword::AUTHORIZATION => { @@ -6274,7 +6301,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_owner(&mut self) -> Result { + pub fn parse_owner(&self) -> Result { let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) { Some(Keyword::CURRENT_USER) => Owner::CurrentUser, Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole, @@ -6293,7 +6320,7 @@ impl<'a> Parser<'a> { } /// Parses a [Statement::CreateDomain] statement. 
- fn parse_create_domain(&mut self) -> Result { + fn parse_create_domain(&self) -> Result { let name = self.parse_object_name(false)?; self.expect_keyword_is(Keyword::AS)?; let data_type = self.parse_data_type()?; @@ -6330,7 +6357,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html) - pub fn parse_create_policy(&mut self) -> Result { + pub fn parse_create_policy(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; @@ -6374,18 +6401,18 @@ impl<'a> Parser<'a> { }; let using = if self.parse_keyword(Keyword::USING) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(expr) } else { None }; let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(expr) } else { None @@ -6411,7 +6438,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) - pub fn parse_create_connector(&mut self) -> Result { + pub fn parse_create_connector(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; @@ -6450,7 +6477,7 @@ impl<'a> Parser<'a> { /// This is used for PostgreSQL CREATE OPERATOR statements. 
/// /// Examples: `+`, `myschema.+`, `pg_catalog.<=` - fn parse_operator_name(&mut self) -> Result { + fn parse_operator_name(&self) -> Result { let mut parts = vec![]; loop { parts.push(ObjectNamePart::Identifier(Ident::new( @@ -6466,7 +6493,7 @@ impl<'a> Parser<'a> { /// Parse a [Statement::CreateOperator] /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html) - pub fn parse_create_operator(&mut self) -> Result { + pub fn parse_create_operator(&self) -> Result { let name = self.parse_operator_name()?; self.expect_token(&Token::LParen)?; @@ -6582,7 +6609,7 @@ impl<'a> Parser<'a> { /// Parse a [Statement::CreateOperatorFamily] /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html) - pub fn parse_create_operator_family(&mut self) -> Result { + pub fn parse_create_operator_family(&self) -> Result { let name = self.parse_object_name(false)?; self.expect_keyword(Keyword::USING)?; let using = self.parse_identifier()?; @@ -6596,7 +6623,7 @@ impl<'a> Parser<'a> { /// Parse a [Statement::CreateOperatorClass] /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html) - pub fn parse_create_operator_class(&mut self) -> Result { + pub fn parse_create_operator_class(&self) -> Result { let name = self.parse_object_name(false)?; let default = self.parse_keyword(Keyword::DEFAULT); self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?; @@ -6720,7 +6747,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_drop(&mut self) -> Result { + pub fn parse_drop(&self) -> Result { // MySQL dialect supports `TEMPORARY` let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect) && self.parse_keyword(Keyword::TEMPORARY); @@ -6808,7 +6835,7 @@ impl<'a> Parser<'a> { }) } - fn parse_optional_drop_behavior(&mut self) -> Option { + fn parse_optional_drop_behavior(&self) -> Option { match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) 
{ Some(Keyword::CASCADE) => Some(DropBehavior::Cascade), Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict), @@ -6820,7 +6847,7 @@ impl<'a> Parser<'a> { /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] /// [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_function(&mut self) -> Result { + fn parse_drop_function(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6836,7 +6863,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html) - fn parse_drop_policy(&mut self) -> Result { + fn parse_drop_policy(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; @@ -6854,7 +6881,7 @@ impl<'a> Parser<'a> { /// ``` /// /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) - fn parse_drop_connector(&mut self) -> Result { + fn parse_drop_connector(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; Ok(Statement::DropConnector { if_exists, name }) @@ -6863,7 +6890,7 @@ impl<'a> Parser<'a> { /// ```sql /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_domain(&mut self) -> Result { + fn parse_drop_domain(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_object_name(false)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6878,7 +6905,7 @@ impl<'a> Parser<'a> { /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] 
/// [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_procedure(&mut self) -> Result { + fn parse_drop_procedure(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6889,15 +6916,15 @@ impl<'a> Parser<'a> { }) } - fn parse_function_desc(&mut self) -> Result { + fn parse_function_desc(&self) -> Result { let name = self.parse_object_name(false)?; - let args = if self.consume_token(&Token::LParen) { - if self.consume_token(&Token::RParen) { + let args = if self.consume_token(&BorrowedToken::LParen) { + if self.consume_token(&BorrowedToken::RParen) { Some(vec![]) } else { let args = self.parse_comma_separated(Parser::parse_function_arg)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(args) } } else { @@ -6909,7 +6936,7 @@ impl<'a> Parser<'a> { /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details. fn parse_drop_secret( - &mut self, + &self, temporary: bool, persistent: bool, ) -> Result { @@ -6944,7 +6971,7 @@ impl<'a> Parser<'a> { /// /// The syntax can vary significantly between warehouses. See the grammar /// on the warehouse specific function in such cases. - pub fn parse_declare(&mut self) -> Result { + pub fn parse_declare(&self) -> Result { if dialect_of!(self is BigQueryDialect) { return self.parse_big_query_declare(); } @@ -7015,11 +7042,11 @@ impl<'a> Parser<'a> { /// DECLARE variable_name[, ...] 
[{ | }]; /// ``` /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare - pub fn parse_big_query_declare(&mut self) -> Result { + pub fn parse_big_query_declare(&self) -> Result { let names = self.parse_comma_separated(Parser::parse_identifier)?; let data_type = match self.peek_token().token { - Token::Word(w) if w.keyword == Keyword::DEFAULT => None, + BorrowedToken::Word(w) if w.keyword == Keyword::DEFAULT => None, _ => Some(self.parse_data_type()?), }; @@ -7075,7 +7102,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare - pub fn parse_snowflake_declare(&mut self) -> Result { + pub fn parse_snowflake_declare(&self) -> Result { let mut stmts = vec![]; loop { let name = self.parse_identifier()?; @@ -7083,7 +7110,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::CURSOR) { self.expect_keyword_is(Keyword::FOR)?; match self.peek_token().token { - Token::Word(w) if w.keyword == Keyword::SELECT => ( + BorrowedToken::Word(w) if w.keyword == Keyword::SELECT => ( Some(DeclareType::Cursor), Some(self.parse_query()?), None, @@ -7097,7 +7124,7 @@ impl<'a> Parser<'a> { ), } } else if self.parse_keyword(Keyword::RESULTSET) { - let assigned_expr = if self.peek_token().token != Token::SemiColon { + let assigned_expr = if self.peek_token().token != BorrowedToken::SemiColon { self.parse_snowflake_variable_declaration_expression()? } else { // Nothing more to do. The statement has no further parameters. @@ -7106,7 +7133,7 @@ impl<'a> Parser<'a> { (Some(DeclareType::ResultSet), None, assigned_expr, None) } else if self.parse_keyword(Keyword::EXCEPTION) { - let assigned_expr = if self.peek_token().token == Token::LParen { + let assigned_expr = if self.peek_token().token == BorrowedToken::LParen { Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?))) } else { // Nothing more to do. The statement has no further parameters. 
@@ -7120,7 +7147,7 @@ impl<'a> Parser<'a> { self.parse_snowflake_variable_declaration_expression()? { (Some(assigned_expr), None) - } else if let Token::Word(_) = self.peek_token().token { + } else if let BorrowedToken::Word(_) = self.peek_token().token { let data_type = self.parse_data_type()?; ( self.parse_snowflake_variable_declaration_expression()?, @@ -7144,9 +7171,9 @@ impl<'a> Parser<'a> { }; stmts.push(stmt); - if self.consume_token(&Token::SemiColon) { + if self.consume_token(&BorrowedToken::SemiColon) { match self.peek_token().token { - Token::Word(w) + BorrowedToken::Word(w) if ALL_KEYWORDS .binary_search(&w.value.to_uppercase().as_str()) .is_err() => @@ -7178,7 +7205,7 @@ impl<'a> Parser<'a> { // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 - pub fn parse_mssql_declare(&mut self) -> Result { + pub fn parse_mssql_declare(&self) -> Result { let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?; Ok(Statement::Declare { stmts }) @@ -7194,13 +7221,13 @@ impl<'a> Parser<'a> { // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 - pub fn parse_mssql_declare_stmt(&mut self) -> Result { + pub fn parse_mssql_declare_stmt(&self) -> Result { let name = { let ident = self.parse_identifier()?; if !ident.value.starts_with('@') && !matches!( self.peek_token().token, - Token::Word(w) if w.keyword == Keyword::CURSOR + BorrowedToken::Word(w) if w.keyword == Keyword::CURSOR ) { Err(ParserError::TokenizerError( @@ -7212,7 +7239,7 @@ impl<'a> Parser<'a> { }?; let (declare_type, data_type) = match self.peek_token().token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::CURSOR => { self.next_token(); (Some(DeclareType::Cursor), None) @@ -7256,14 +7283,14 @@ impl<'a> Parser<'a> { /// ``` /// pub fn 
parse_snowflake_variable_declaration_expression( - &mut self, + &self, ) -> Result, ParserError> { Ok(match self.peek_token().token { - Token::Word(w) if w.keyword == Keyword::DEFAULT => { + BorrowedToken::Word(w) if w.keyword == Keyword::DEFAULT => { self.next_token(); // Skip `DEFAULT` Some(DeclareAssignment::Default(Box::new(self.parse_expr()?))) } - Token::Assignment => { + BorrowedToken::Assignment => { self.next_token(); // Skip `:=` Some(DeclareAssignment::DuckAssignment(Box::new( self.parse_expr()?, @@ -7280,10 +7307,10 @@ impl<'a> Parser<'a> { /// [ = ] /// ``` pub fn parse_mssql_variable_declaration_expression( - &mut self, + &self, ) -> Result, ParserError> { Ok(match self.peek_token().token { - Token::Eq => { + BorrowedToken::Eq => { self.next_token(); // Skip `=` Some(DeclareAssignment::MsSqlAssignment(Box::new( self.parse_expr()?, @@ -7294,7 +7321,7 @@ impl<'a> Parser<'a> { } // FETCH [ direction { FROM | IN } ] cursor INTO target; - pub fn parse_fetch_statement(&mut self) -> Result { + pub fn parse_fetch_statement(&self) -> Result { let direction = if self.parse_keyword(Keyword::NEXT) { FetchDirection::Next } else if self.parse_keyword(Keyword::PRIOR) { @@ -7363,7 +7390,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_discard(&mut self) -> Result { + pub fn parse_discard(&self) -> Result { let object_type = if self.parse_keyword(Keyword::ALL) { DiscardObject::ALL } else if self.parse_keyword(Keyword::PLANS) { @@ -7381,7 +7408,7 @@ impl<'a> Parser<'a> { Ok(Statement::Discard { object_type }) } - pub fn parse_create_index(&mut self, unique: bool) -> Result { + pub fn parse_create_index(&self, unique: bool) -> Result { let concurrently = self.parse_keyword(Keyword::CONCURRENTLY); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -7406,9 +7433,9 @@ impl<'a> Parser<'a> { let columns = self.parse_parenthesized_index_column_list()?; let include = if self.parse_keyword(Keyword::INCLUDE) { - 
self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(|p| p.parse_identifier())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; columns } else { vec![] @@ -7425,9 +7452,9 @@ impl<'a> Parser<'a> { let with = if self.dialect.supports_create_index_with_clause() && self.parse_keyword(Keyword::WITH) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let with_params = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; with_params } else { Vec::new() @@ -7472,7 +7499,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_create_extension(&mut self) -> Result { + pub fn parse_create_extension(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; @@ -7507,7 +7534,7 @@ impl<'a> Parser<'a> { } /// Parse a PostgreSQL-specific [Statement::DropExtension] statement. 
- pub fn parse_drop_extension(&mut self) -> Result { + pub fn parse_drop_extension(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_identifier())?; let cascade_or_restrict = @@ -7526,18 +7553,18 @@ impl<'a> Parser<'a> { } //TODO: Implement parsing for Skewed - pub fn parse_hive_distribution(&mut self) -> Result { + pub fn parse_hive_distribution(&self) -> Result { if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(Parser::parse_column_def)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(HiveDistributionStyle::PARTITIONED { columns }) } else { Ok(HiveDistributionStyle::NONE) } } - pub fn parse_hive_formats(&mut self) -> Result { + pub fn parse_hive_formats(&self) -> Result { let mut hive_format = HiveFormat::default(); loop { match self.parse_one_of_keywords(&[ @@ -7585,7 +7612,7 @@ impl<'a> Parser<'a> { Ok(hive_format) } - pub fn parse_row_format(&mut self) -> Result { + pub fn parse_row_format(&self) -> Result { self.expect_keyword_is(Keyword::FORMAT)?; match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) { Some(Keyword::SERDE) => { @@ -7681,7 +7708,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_on_cluster(&mut self) -> Result, ParserError> { + fn parse_optional_on_cluster(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) { Ok(Some(self.parse_identifier()?)) } else { @@ -7690,7 +7717,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_table( - &mut self, + &self, or_replace: bool, temporary: bool, global: Option, @@ -7717,7 +7744,7 @@ impl<'a> Parser<'a> { if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { let next_token = self.next_token(); match next_token.token { - Token::SingleQuotedString(str) => 
Some(CommentDef::WithoutEq(str)), + BorrowedToken::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)), _ => self.expected("comment", next_token)?, } } else { @@ -7744,13 +7771,13 @@ impl<'a> Parser<'a> { }; let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { - if self.consume_token(&Token::LParen) { - let columns = if self.peek_token() != Token::RParen { + if self.consume_token(&BorrowedToken::LParen) { + let columns = if self.peek_token() != BorrowedToken::RParen { self.parse_comma_separated(|p| p.parse_expr())? } else { vec![] }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(OneOrManyWithParens::Many(columns)) } else { Some(OneOrManyWithParens::One(self.parse_expr()?)) @@ -7808,11 +7835,11 @@ impl<'a> Parser<'a> { } fn maybe_parse_create_table_like( - &mut self, + &self, allow_unquoted_hyphen: bool, ) -> Result, ParserError> { let like = if self.dialect.supports_create_table_like_parenthesized() - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) { if self.parse_keyword(Keyword::LIKE) { let name = self.parse_object_name(allow_unquoted_hyphen)?; @@ -7823,7 +7850,7 @@ impl<'a> Parser<'a> { } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(CreateTableLikeKind::Parenthesized(CreateTableLike { name, defaults, @@ -7845,7 +7872,7 @@ impl<'a> Parser<'a> { Ok(like) } - pub(crate) fn parse_create_table_on_commit(&mut self) -> Result { + pub(crate) fn parse_create_table_on_commit(&self) -> Result { if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) { Ok(OnCommit::DeleteRows) } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) { @@ -7865,9 +7892,7 @@ impl<'a> Parser<'a> { /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2) /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html) /// 
[MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html) - fn parse_optional_create_table_config( - &mut self, - ) -> Result { + fn parse_optional_create_table_config(&self) -> Result { let mut table_options = CreateTableOptions::None; let inherits = if self.parse_keyword(Keyword::INHERITS) { @@ -7902,7 +7927,7 @@ impl<'a> Parser<'a> { )); }; - if let Token::Word(word) = self.peek_token().token { + if let BorrowedToken::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { table_options = CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?) @@ -7925,7 +7950,7 @@ impl<'a> Parser<'a> { }) } - fn parse_plain_option(&mut self) -> Result, ParserError> { + fn parse_plain_option(&self) -> Result, ParserError> { // Single parameter option // if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) { @@ -7935,19 +7960,20 @@ impl<'a> Parser<'a> { // Custom option // if self.parse_keywords(&[Keyword::COMMENT]) { - let has_eq = self.consume_token(&Token::Eq); + let has_eq = self.consume_token(&BorrowedToken::Eq); let value = self.next_token(); let comment = match (has_eq, value.token) { - (true, Token::SingleQuotedString(s)) => { + (true, BorrowedToken::SingleQuotedString(s)) => { Ok(Some(SqlOption::Comment(CommentDef::WithEq(s)))) } - (false, Token::SingleQuotedString(s)) => { + (false, BorrowedToken::SingleQuotedString(s)) => { Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s)))) } - (_, token) => { - self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token)) - } + (_, token) => self.expected( + "BorrowedToken::SingleQuotedString", + TokenWithSpan::wrap(token), + ), }; return comment; } @@ -7955,12 +7981,12 @@ impl<'a> Parser<'a> { // // if self.parse_keywords(&[Keyword::ENGINE]) { - let _ = self.consume_token(&Token::Eq); + let _ = self.consume_token(&BorrowedToken::Eq); let value = self.next_token(); let engine = match value.token { - Token::Word(w) => { - let parameters = if self.peek_token() == 
Token::LParen { + BorrowedToken::Word(w) => { + let parameters = if self.peek_token() == BorrowedToken::LParen { self.parse_parenthesized_identifiers()? } else { vec![] @@ -7975,7 +8001,7 @@ impl<'a> Parser<'a> { ))) } _ => { - return self.expected("Token::Word", value)?; + return self.expected("BorrowedToken::Word", value)?; } }; @@ -7984,23 +8010,24 @@ impl<'a> Parser<'a> { // if self.parse_keywords(&[Keyword::TABLESPACE]) { - let _ = self.consume_token(&Token::Eq); + let _ = self.consume_token(&BorrowedToken::Eq); let value = self.next_token(); let tablespace = match value.token { - Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => { + BorrowedToken::Word(Word { value: name, .. }) + | BorrowedToken::SingleQuotedString(name) => { let storage = match self.parse_keyword(Keyword::STORAGE) { true => { - let _ = self.consume_token(&Token::Eq); + let _ = self.consume_token(&BorrowedToken::Eq); let storage_token = self.next_token(); match &storage_token.token { - Token::Word(w) => match w.value.to_uppercase().as_str() { + BorrowedToken::Word(w) => match w.value.to_uppercase().as_str() { "DISK" => Some(StorageType::Disk), "MEMORY" => Some(StorageType::Memory), _ => self .expected("Storage type (DISK or MEMORY)", storage_token)?, }, - _ => self.expected("Token::Word", storage_token)?, + _ => self.expected("BorrowedToken::Word", storage_token)?, } } false => None, @@ -8012,7 +8039,7 @@ impl<'a> Parser<'a> { }))) } _ => { - return self.expected("Token::Word", value)?; + return self.expected("BorrowedToken::Word", value)?; } }; @@ -8021,14 +8048,14 @@ impl<'a> Parser<'a> { // if self.parse_keyword(Keyword::UNION) { - let _ = self.consume_token(&Token::Eq); + let _ = self.consume_token(&BorrowedToken::Eq); let value = self.next_token(); match value.token { - Token::LParen => { - let tables: Vec = - self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?; - self.expect_token(&Token::RParen)?; + BorrowedToken::LParen => { + let tables: Vec 
= self + .parse_comma_separated0(Parser::parse_identifier, BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; return Ok(Some(SqlOption::NamedParenthesizedList( NamedParenthesizedList { @@ -8039,7 +8066,7 @@ impl<'a> Parser<'a> { ))); } _ => { - return self.expected("Token::LParen", value)?; + return self.expected("BorrowedToken::LParen", value)?; } } } @@ -8105,7 +8132,7 @@ impl<'a> Parser<'a> { return Ok(None); }; - let _ = self.consume_token(&Token::Eq); + let _ = self.consume_token(&BorrowedToken::Eq); let value = match self .maybe_parse(|parser| parser.parse_value())? @@ -8118,22 +8145,22 @@ impl<'a> Parser<'a> { Ok(Some(SqlOption::KeyValue { key, value })) } - pub fn parse_plain_options(&mut self) -> Result, ParserError> { + pub fn parse_plain_options(&self) -> Result, ParserError> { let mut options = Vec::new(); while let Some(option) = self.parse_plain_option()? { options.push(option); // Some dialects support comma-separated options; it shouldn't introduce ambiguity to // consume it for all dialects. 
- let _ = self.consume_token(&Token::Comma); + let _ = self.consume_token(&BorrowedToken::Comma); } Ok(options) } - pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { + pub fn parse_optional_inline_comment(&self) -> Result, ParserError> { let comment = if self.parse_keyword(Keyword::COMMENT) { - let has_eq = self.consume_token(&Token::Eq); + let has_eq = self.consume_token(&BorrowedToken::Eq); let comment = self.parse_comment_value()?; Some(if has_eq { CommentDef::WithEq(comment) @@ -8146,29 +8173,30 @@ impl<'a> Parser<'a> { Ok(comment) } - pub fn parse_comment_value(&mut self) -> Result { + pub fn parse_comment_value(&self) -> Result { let next_token = self.next_token(); let value = match next_token.token { - Token::SingleQuotedString(str) => str, - Token::DollarQuotedString(str) => str.value, + BorrowedToken::SingleQuotedString(str) => str, + BorrowedToken::DollarQuotedString(str) => str.value, _ => self.expected("string literal", next_token)?, }; Ok(value) } pub fn parse_optional_procedure_parameters( - &mut self, + &self, ) -> Result>, ParserError> { let mut params = vec![]; - if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { + if !self.consume_token(&BorrowedToken::LParen) || self.consume_token(&BorrowedToken::RParen) + { return Ok(Some(params)); } loop { - if let Token::Word(_) = self.peek_token().token { + if let BorrowedToken::Word(_) = self.peek_token().token { params.push(self.parse_procedure_param()?) 
} - let comma = self.consume_token(&Token::Comma); - if self.consume_token(&Token::RParen) { + let comma = self.consume_token(&BorrowedToken::Comma); + if self.consume_token(&BorrowedToken::RParen) { // allow a trailing comma, even though it's not in standard break; } else if !comma { @@ -8178,24 +8206,25 @@ impl<'a> Parser<'a> { Ok(Some(params)) } - pub fn parse_columns(&mut self) -> Result<(Vec, Vec), ParserError> { + pub fn parse_columns(&self) -> Result<(Vec, Vec), ParserError> { let mut columns = vec![]; let mut constraints = vec![]; - if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { + if !self.consume_token(&BorrowedToken::LParen) || self.consume_token(&BorrowedToken::RParen) + { return Ok((columns, constraints)); } loop { if let Some(constraint) = self.parse_optional_table_constraint()? { constraints.push(constraint); - } else if let Token::Word(_) = self.peek_token().token { + } else if let BorrowedToken::Word(_) = self.peek_token().token { columns.push(self.parse_column_def()?); } else { return self.expected("column name or constraint definition", self.peek_token()); } - let comma = self.consume_token(&Token::Comma); - let rparen = self.peek_token().token == Token::RParen; + let comma = self.consume_token(&BorrowedToken::Comma); + let rparen = self.peek_token().token == BorrowedToken::RParen; if !comma && !rparen { return self.expected("',' or ')' after column definition", self.peek_token()); @@ -8206,7 +8235,7 @@ impl<'a> Parser<'a> { || self.dialect.supports_column_definition_trailing_commas() || self.options.trailing_commas) { - let _ = self.consume_token(&Token::RParen); + let _ = self.consume_token(&BorrowedToken::RParen); break; } } @@ -8214,7 +8243,7 @@ impl<'a> Parser<'a> { Ok((columns, constraints)) } - pub fn parse_procedure_param(&mut self) -> Result { + pub fn parse_procedure_param(&self) -> Result { let mode = if self.parse_keyword(Keyword::IN) { Some(ArgMode::In) } else if self.parse_keyword(Keyword::OUT) { @@ 
-8226,7 +8255,7 @@ impl<'a> Parser<'a> { }; let name = self.parse_identifier()?; let data_type = self.parse_data_type()?; - let default = if self.consume_token(&Token::Eq) { + let default = if self.consume_token(&BorrowedToken::Eq) { Some(self.parse_expr()?) } else { None @@ -8240,7 +8269,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_column_def(&mut self) -> Result { + pub fn parse_column_def(&self) -> Result { let col_name = self.parse_identifier()?; let data_type = if self.is_column_type_sqlite_unspecified() { DataType::Unspecified @@ -8272,10 +8301,10 @@ impl<'a> Parser<'a> { }) } - fn is_column_type_sqlite_unspecified(&mut self) -> bool { + fn is_column_type_sqlite_unspecified(&self) -> bool { if dialect_of!(self is SQLiteDialect) { match self.peek_token().token { - Token::Word(word) => matches!( + BorrowedToken::Word(word) => matches!( word.keyword, Keyword::CONSTRAINT | Keyword::PRIMARY @@ -8295,7 +8324,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_column_option(&mut self) -> Result, ParserError> { + pub fn parse_optional_column_option(&self) -> Result, ParserError> { if let Some(option) = self.dialect.parse_column_option(self)? { return option; } @@ -8308,7 +8337,7 @@ impl<'a> Parser<'a> { ) } - fn parse_optional_column_option_inner(&mut self) -> Result, ParserError> { + fn parse_optional_column_option_inner(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { Ok(Some(ColumnOption::CharacterSet( self.parse_object_name(false)?, @@ -8342,7 +8371,10 @@ impl<'a> Parser<'a> { { // The expression is optional for the EPHEMERAL syntax, so we need to check // if the column definition has remaining tokens before parsing the expression. 
- if matches!(self.peek_token().token, Token::Comma | Token::RParen) { + if matches!( + self.peek_token().token, + BorrowedToken::Comma | BorrowedToken::RParen + ) { Ok(Some(ColumnOption::Ephemeral(None))) } else { Ok(Some(ColumnOption::Ephemeral(Some( @@ -8417,10 +8449,10 @@ impl<'a> Parser<'a> { .into(), )) } else if self.parse_keyword(Keyword::CHECK) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some( CheckConstraint { name: None, // Column-level check constraints don't have names @@ -8434,28 +8466,28 @@ impl<'a> Parser<'a> { { // Support AUTO_INCREMENT for MySQL Ok(Some(ColumnOption::DialectSpecific(vec![ - Token::make_keyword("AUTO_INCREMENT"), + BorrowedToken::make_keyword("AUTO_INCREMENT"), ]))) } else if self.parse_keyword(Keyword::AUTOINCREMENT) && dialect_of!(self is SQLiteDialect | GenericDialect) { // Support AUTOINCREMENT for SQLite Ok(Some(ColumnOption::DialectSpecific(vec![ - Token::make_keyword("AUTOINCREMENT"), + BorrowedToken::make_keyword("AUTOINCREMENT"), ]))) } else if self.parse_keyword(Keyword::ASC) && self.dialect.supports_asc_desc_in_column_definition() { // Support ASC for SQLite Ok(Some(ColumnOption::DialectSpecific(vec![ - Token::make_keyword("ASC"), + BorrowedToken::make_keyword("ASC"), ]))) } else if self.parse_keyword(Keyword::DESC) && self.dialect.supports_asc_desc_in_column_definition() { // Support DESC for SQLite Ok(Some(ColumnOption::DialectSpecific(vec![ - Token::make_keyword("DESC"), + BorrowedToken::make_keyword("DESC"), ]))) } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE]) && dialect_of!(self is MySqlDialect | GenericDialect) @@ -8484,11 +8516,11 @@ impl<'a> Parser<'a> { } else if self.parse_keyword(Keyword::IDENTITY) 
&& dialect_of!(self is MsSqlDialect | GenericDialect) { - let parameters = if self.consume_token(&Token::LParen) { + let parameters = if self.consume_token(&BorrowedToken::LParen) { let seed = self.parse_number()?; - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let increment = self.parse_number()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { seed, increment }, @@ -8538,8 +8570,8 @@ impl<'a> Parser<'a> { /// In the first we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull], /// whereas is both statements that trailing `NOT NULL` should only be parsed as a /// [ColumnOption::NotNull]. - fn parse_column_option_expr(&mut self) -> Result { - if self.peek_token_ref().token == Token::LParen { + fn parse_column_option_expr(&self) -> Result { + if self.peek_token_ref().token == BorrowedToken::LParen { let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?; Ok(expr) } else { @@ -8547,22 +8579,20 @@ impl<'a> Parser<'a> { } } - pub(crate) fn parse_tag(&mut self) -> Result { + pub(crate) fn parse_tag(&self) -> Result { let name = self.parse_object_name(false)?; - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let value = self.parse_literal_string()?; Ok(Tag::new(name, value)) } - fn parse_optional_column_option_generated( - &mut self, - ) -> Result, ParserError> { + fn parse_optional_column_option_generated(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) { let mut sequence_options = vec![]; - if self.expect_token(&Token::LParen).is_ok() { + if self.expect_token(&BorrowedToken::LParen).is_ok() { sequence_options = self.parse_create_sequence_options()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } Ok(Some(ColumnOption::Generated { generated_as: GeneratedAs::Always, @@ 
-8578,9 +8608,9 @@ impl<'a> Parser<'a> { Keyword::IDENTITY, ]) { let mut sequence_options = vec![]; - if self.expect_token(&Token::LParen).is_ok() { + if self.expect_token(&BorrowedToken::LParen).is_ok() { sequence_options = self.parse_create_sequence_options()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } Ok(Some(ColumnOption::Generated { generated_as: GeneratedAs::ByDefault, @@ -8590,9 +8620,9 @@ impl<'a> Parser<'a> { generated_keyword: true, })) } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) { - if self.expect_token(&Token::LParen).is_ok() { + if self.expect_token(&BorrowedToken::LParen).is_ok() { let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) { Ok(( GeneratedAs::ExpStored, @@ -8622,11 +8652,11 @@ impl<'a> Parser<'a> { } } - fn parse_optional_column_option_as(&mut self) -> Result, ParserError> { + fn parse_optional_column_option_as(&self) -> Result, ParserError> { // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) { ( @@ -8648,16 +8678,16 @@ impl<'a> Parser<'a> { })) } - pub fn parse_optional_clustered_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_clustered_by(&self) -> Result, ParserError> { let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect) && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY]) { let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) { - self.expect_token(&Token::LParen)?; + 
self.expect_token(&BorrowedToken::LParen)?; let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(sorted_by_columns) } else { None @@ -8677,7 +8707,7 @@ impl<'a> Parser<'a> { Ok(clustered_by) } - pub fn parse_referential_action(&mut self) -> Result { + pub fn parse_referential_action(&self) -> Result { if self.parse_keyword(Keyword::RESTRICT) { Ok(ReferentialAction::Restrict) } else if self.parse_keyword(Keyword::CASCADE) { @@ -8696,7 +8726,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_match_kind(&mut self) -> Result { + pub fn parse_match_kind(&self) -> Result { if self.parse_keyword(Keyword::FULL) { Ok(ConstraintReferenceMatchKind::Full) } else if self.parse_keyword(Keyword::PARTIAL) { @@ -8709,7 +8739,7 @@ impl<'a> Parser<'a> { } pub fn parse_constraint_characteristics( - &mut self, + &self, ) -> Result, ParserError> { let mut cc = ConstraintCharacteristics::default(); @@ -8745,9 +8775,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_table_constraint( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_table_constraint(&self) -> Result, ParserError> { let name = if self.parse_keyword(Keyword::CONSTRAINT) { Some(self.parse_identifier()?) 
} else { @@ -8756,7 +8784,7 @@ impl<'a> Parser<'a> { let next_token = self.next_token(); match next_token.token { - Token::Word(w) if w.keyword == Keyword::UNIQUE => { + BorrowedToken::Word(w) if w.keyword == Keyword::UNIQUE => { let index_type_display = self.parse_index_type_display(); if !dialect_of!(self is GenericDialect | MySqlDialect) && !index_type_display.is_none() @@ -8788,7 +8816,7 @@ impl<'a> Parser<'a> { .into(), )) } - Token::Word(w) if w.keyword == Keyword::PRIMARY => { + BorrowedToken::Word(w) if w.keyword == Keyword::PRIMARY => { // after `PRIMARY` always stay `KEY` self.expect_keyword_is(Keyword::KEY)?; @@ -8811,7 +8839,7 @@ impl<'a> Parser<'a> { .into(), )) } - Token::Word(w) if w.keyword == Keyword::FOREIGN => { + BorrowedToken::Word(w) if w.keyword == Keyword::FOREIGN => { self.expect_keyword_is(Keyword::KEY)?; let index_name = self.parse_optional_ident()?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?; @@ -8854,10 +8882,10 @@ impl<'a> Parser<'a> { .into(), )) } - Token::Word(w) if w.keyword == Keyword::CHECK => { - self.expect_token(&Token::LParen)?; + BorrowedToken::Word(w) if w.keyword == Keyword::CHECK => { + self.expect_token(&BorrowedToken::LParen)?; let expr = Box::new(self.parse_expr()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let enforced = if self.parse_keyword(Keyword::ENFORCED) { Some(true) @@ -8876,7 +8904,7 @@ impl<'a> Parser<'a> { .into(), )) } - Token::Word(w) + BorrowedToken::Word(w) if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY) && dialect_of!(self is GenericDialect | MySqlDialect) && name.is_none() => @@ -8884,7 +8912,7 @@ impl<'a> Parser<'a> { let display_as_key = w.keyword == Keyword::KEY; let name = match self.peek_token().token { - Token::Word(word) if word.keyword == Keyword::USING => None, + BorrowedToken::Word(word) if word.keyword == Keyword::USING => None, _ => self.parse_optional_ident()?, }; @@ -8903,7 +8931,7 @@ impl<'a> Parser<'a> 
{ .into(), )) } - Token::Word(w) + BorrowedToken::Word(w) if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL) && dialect_of!(self is GenericDialect | MySqlDialect) => { @@ -8911,7 +8939,7 @@ impl<'a> Parser<'a> { return self.expected( "FULLTEXT or SPATIAL option without constraint name", TokenWithSpan { - token: Token::make_keyword(&name.to_string()), + token: BorrowedToken::make_keyword(&name.to_string()), span: next_token.span, }, ); @@ -8946,7 +8974,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_nulls_distinct(&mut self) -> Result { + fn parse_optional_nulls_distinct(&self) -> Result { Ok(if self.parse_keyword(Keyword::NULLS) { let not = self.parse_keyword(Keyword::NOT); self.expect_keyword_is(Keyword::DISTINCT)?; @@ -8961,10 +8989,10 @@ impl<'a> Parser<'a> { } pub fn maybe_parse_options( - &mut self, + &self, keyword: Keyword, ) -> Result>, ParserError> { - if let Token::Word(word) = self.peek_token().token { + if let BorrowedToken::Word(word) = self.peek_token().token { if word.keyword == keyword { return Ok(Some(self.parse_options(keyword)?)); } @@ -8972,11 +9000,12 @@ impl<'a> Parser<'a> { Ok(None) } - pub fn parse_options(&mut self, keyword: Keyword) -> Result, ParserError> { + pub fn parse_options(&self, keyword: Keyword) -> Result, ParserError> { if self.parse_keyword(keyword) { - self.expect_token(&Token::LParen)?; - let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::LParen)?; + let options = + self.parse_comma_separated0(Parser::parse_sql_option, BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(options) } else { Ok(vec![]) @@ -8984,20 +9013,20 @@ impl<'a> Parser<'a> { } pub fn parse_options_with_keywords( - &mut self, + &self, keywords: &[Keyword], ) -> Result, ParserError> { if self.parse_keywords(keywords) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let 
options = self.parse_comma_separated(Parser::parse_sql_option)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(options) } else { Ok(vec![]) } } - pub fn parse_index_type(&mut self) -> Result { + pub fn parse_index_type(&self) -> Result { Ok(if self.parse_keyword(Keyword::BTREE) { IndexType::BTree } else if self.parse_keyword(Keyword::HASH) { @@ -9022,9 +9051,7 @@ impl<'a> Parser<'a> { /// ```sql //// USING BTREE (name, age DESC) /// ``` - pub fn parse_optional_using_then_index_type( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_using_then_index_type(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::USING) { Ok(Some(self.parse_index_type()?)) } else { @@ -9034,12 +9061,12 @@ impl<'a> Parser<'a> { /// Parse `[ident]`, mostly `ident` is name, like: /// `window_name`, `index_name`, ... - pub fn parse_optional_ident(&mut self) -> Result, ParserError> { + pub fn parse_optional_ident(&self) -> Result, ParserError> { self.maybe_parse(|parser| parser.parse_identifier()) } #[must_use] - pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay { + pub fn parse_index_type_display(&self) -> KeyOrIndexDisplay { if self.parse_keyword(Keyword::KEY) { KeyOrIndexDisplay::Key } else if self.parse_keyword(Keyword::INDEX) { @@ -9049,7 +9076,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_index_option(&mut self) -> Result, ParserError> { + pub fn parse_optional_index_option(&self) -> Result, ParserError> { if let Some(index_type) = self.parse_optional_using_then_index_type()? 
{ Ok(Some(IndexOption::Using(index_type))) } else if self.parse_keyword(Keyword::COMMENT) { @@ -9060,7 +9087,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_index_options(&mut self) -> Result, ParserError> { + pub fn parse_index_options(&self) -> Result, ParserError> { let mut options = Vec::new(); loop { @@ -9071,22 +9098,22 @@ impl<'a> Parser<'a> { } } - pub fn parse_sql_option(&mut self) -> Result { + pub fn parse_sql_option(&self) -> Result { let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect); match self.peek_token().token { - Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => { + BorrowedToken::Word(w) if w.keyword == Keyword::HEAP && is_mssql => { Ok(SqlOption::Ident(self.parse_identifier()?)) } - Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => { + BorrowedToken::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => { self.parse_option_partition() } - Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => { + BorrowedToken::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => { self.parse_option_clustered() } _ => { let name = self.parse_identifier()?; - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let value = self.parse_expr()?; Ok(SqlOption::KeyValue { key: name, value }) @@ -9094,7 +9121,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_option_clustered(&mut self) -> Result { + pub fn parse_option_clustered(&self) -> Result { if self.parse_keywords(&[ Keyword::CLUSTERED, Keyword::COLUMNSTORE, @@ -9111,7 +9138,7 @@ impl<'a> Parser<'a> { TableOptionsClustered::ColumnstoreIndex, )) } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(|p| { let name = p.parse_identifier()?; @@ -9120,7 +9147,7 @@ impl<'a> Parser<'a> { Ok(ClusteredIndex { name, asc }) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; 
Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns))) } else { @@ -9130,9 +9157,9 @@ impl<'a> Parser<'a> { } } - pub fn parse_option_partition(&mut self) -> Result { + pub fn parse_option_partition(&self) -> Result { self.expect_keyword_is(Keyword::PARTITION)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let column_name = self.parse_identifier()?; self.expect_keyword_is(Keyword::RANGE)?; @@ -9145,12 +9172,12 @@ impl<'a> Parser<'a> { }; self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let for_values = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(SqlOption::Partition { column_name, @@ -9159,27 +9186,27 @@ impl<'a> Parser<'a> { }) } - pub fn parse_partition(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_partition(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let partitions = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Partition::Partitions(partitions)) } - pub fn parse_projection_select(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_projection_select(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; self.expect_keyword_is(Keyword::SELECT)?; let projection = self.parse_projection()?; let group_by = self.parse_optional_group_by()?; let order_by = self.parse_optional_order_by()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(ProjectionSelect { projection, group_by, order_by, }) } - pub fn parse_alter_table_add_projection(&mut self) -> Result { + pub fn parse_alter_table_add_projection(&self) -> Result { let if_not_exists = 
self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; let query = self.parse_projection_select()?; @@ -9190,7 +9217,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_alter_table_operation(&mut self) -> Result { + pub fn parse_alter_table_operation(&self) -> Result { let operation = if self.parse_keyword(Keyword::ADD) { if let Some(constraint) = self.parse_optional_table_constraint()? { let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]); @@ -9341,17 +9368,17 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::DROP) { if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let partitions = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; AlterTableOperation::DropPartitions { partitions, if_exists: true, } } else if self.parse_keyword(Keyword::PARTITION) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let partitions = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; AlterTableOperation::DropPartitions { partitions, if_exists: false, @@ -9403,14 +9430,14 @@ impl<'a> Parser<'a> { } } } else if self.parse_keyword(Keyword::PARTITION) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let before = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; self.expect_keyword_is(Keyword::RENAME)?; self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let renames = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; 
AlterTableOperation::RenamePartitions { old_partitions: before, new_partitions: renames, @@ -9487,10 +9514,10 @@ impl<'a> Parser<'a> { let mut sequence_options: Option> = None; - if self.peek_token().token == Token::LParen { - self.expect_token(&Token::LParen)?; + if self.peek_token().token == BorrowedToken::LParen { + self.expect_token(&BorrowedToken::LParen)?; sequence_options = Some(self.parse_create_sequence_options()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } AlterColumnOperation::AddGenerated { @@ -9557,16 +9584,16 @@ impl<'a> Parser<'a> { with_name, } } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; AlterTableOperation::ClusterBy { exprs } } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) { AlterTableOperation::SuspendRecluster } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) { AlterTableOperation::ResumeRecluster } else if self.parse_keyword(Keyword::LOCK) { - let equals = self.consume_token(&Token::Eq); + let equals = self.consume_token(&BorrowedToken::Eq); let lock = match self.parse_one_of_keywords(&[ Keyword::DEFAULT, Keyword::EXCLUSIVE, @@ -9584,7 +9611,7 @@ impl<'a> Parser<'a> { }; AlterTableOperation::Lock { equals, lock } } else if self.parse_keyword(Keyword::ALGORITHM) { - let equals = self.consume_token(&Token::Eq); + let equals = self.consume_token(&BorrowedToken::Eq); let algorithm = match self.parse_one_of_keywords(&[ Keyword::DEFAULT, Keyword::INSTANT, @@ -9602,7 +9629,7 @@ impl<'a> Parser<'a> { }; AlterTableOperation::Algorithm { equals, algorithm } } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { - let equals = self.consume_token(&Token::Eq); + let equals = self.consume_token(&BorrowedToken::Eq); 
let value = self.parse_number_value()?; AlterTableOperation::AutoIncrement { equals, value } } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) { @@ -9647,7 +9674,7 @@ impl<'a> Parser<'a> { Ok(operation) } - fn parse_set_data_type(&mut self, had_set: bool) -> Result { + fn parse_set_data_type(&self, had_set: bool) -> Result { let data_type = self.parse_data_type()?; let using = if self.dialect.supports_alter_column_type_using() && self.parse_keyword(Keyword::USING) @@ -9663,7 +9690,7 @@ impl<'a> Parser<'a> { }) } - fn parse_part_or_partition(&mut self) -> Result { + fn parse_part_or_partition(&self) -> Result { let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?; match keyword { Keyword::PART => Ok(Partition::Part(self.parse_expr()?)), @@ -9673,7 +9700,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_alter(&mut self) -> Result { + pub fn parse_alter(&self) -> Result { let object_type = self.expect_one_of_keywords(&[ Keyword::VIEW, Keyword::TYPE, @@ -9727,7 +9754,7 @@ impl<'a> Parser<'a> { } /// Parse a [Statement::AlterTable] - pub fn parse_alter_table(&mut self, iceberg: bool) -> Result { + pub fn parse_alter_table(&self, iceberg: bool) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ] let table_name = self.parse_object_name(false)?; @@ -9747,7 +9774,7 @@ impl<'a> Parser<'a> { }); } - let end_token = if self.peek_token_ref().token == Token::SemiColon { + let end_token = if self.peek_token_ref().token == BorrowedToken::SemiColon { self.peek_token_ref().clone() } else { self.get_current_token().clone() @@ -9765,12 +9792,12 @@ impl<'a> Parser<'a> { } else { None }, - end_token: AttachedToken(end_token), + end_token: AttachedToken(end_token.to_static()), } .into()) } - pub fn parse_alter_view(&mut self) -> Result { + pub fn parse_alter_view(&self) -> Result { let name = self.parse_object_name(false)?; let columns = 
self.parse_parenthesized_column_list(Optional, false)?; @@ -9788,7 +9815,7 @@ impl<'a> Parser<'a> { } /// Parse a [Statement::AlterType] - pub fn parse_alter_type(&mut self) -> Result { + pub fn parse_alter_type(&self) -> Result { let name = self.parse_object_name(false)?; if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { @@ -9838,7 +9865,7 @@ impl<'a> Parser<'a> { // Parse a [Statement::AlterSchema] // ALTER SCHEMA [ IF EXISTS ] schema_name - pub fn parse_alter_schema(&mut self) -> Result { + pub fn parse_alter_schema(&self) -> Result { self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_object_name(false)?; @@ -9878,9 +9905,9 @@ impl<'a> Parser<'a> { /// Parse a `CALL procedure_name(arg1, arg2, ...)` /// or `CALL procedure_name` statement - pub fn parse_call(&mut self) -> Result { + pub fn parse_call(&self) -> Result { let object_name = self.parse_object_name(false)?; - if self.peek_token().token == Token::LParen { + if self.peek_token().token == BorrowedToken::LParen { match self.parse_function(object_name)? 
{ Expr::Function(f) => Ok(Statement::Call(f)), other => parser_err!( @@ -9903,11 +9930,11 @@ impl<'a> Parser<'a> { } /// Parse a copy statement - pub fn parse_copy(&mut self) -> Result { + pub fn parse_copy(&self) -> Result { let source; - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { source = CopySource::Query(self.parse_query()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } else { let table_name = self.parse_object_name(false)?; let columns = self.parse_parenthesized_column_list(Optional, false)?; @@ -9945,16 +9972,16 @@ impl<'a> Parser<'a> { }; let _ = self.parse_keyword(Keyword::WITH); // [ WITH ] let mut options = vec![]; - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { options = self.parse_comma_separated(Parser::parse_copy_option)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } let mut legacy_options = vec![]; while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? 
{ legacy_options.push(opt); } let values = if let CopyTarget::Stdin = target { - self.expect_token(&Token::SemiColon)?; + self.expect_token(&BorrowedToken::SemiColon)?; self.parse_tsv() } else { vec![] @@ -9970,14 +9997,14 @@ impl<'a> Parser<'a> { } /// Parse [Statement::Open] - fn parse_open(&mut self) -> Result { + fn parse_open(&self) -> Result { self.expect_keyword(Keyword::OPEN)?; Ok(Statement::Open(OpenStatement { cursor_name: self.parse_identifier()?, })) } - pub fn parse_close(&mut self) -> Result { + pub fn parse_close(&self) -> Result { let cursor = if self.parse_keyword(Keyword::ALL) { CloseCursor::All } else { @@ -9989,7 +10016,7 @@ impl<'a> Parser<'a> { Ok(Statement::Close { cursor }) } - fn parse_copy_option(&mut self) -> Result { + fn parse_copy_option(&self) -> Result { let ret = match self.parse_one_of_keywords(&[ Keyword::FORMAT, Keyword::FREEZE, @@ -10031,7 +10058,7 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_copy_legacy_option(&mut self) -> Result { + fn parse_copy_legacy_option(&self) -> Result { // FORMAT \[ AS \] is optional if self.parse_keyword(Keyword::FORMAT) { let _ = self.parse_keyword(Keyword::AS); @@ -10077,7 +10104,10 @@ impl<'a> Parser<'a> { Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate, Some(Keyword::ACCEPTINVCHARS) => { let _ = self.parse_keyword(Keyword::AS); // [ AS ] - let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) { + let ch = if matches!( + self.peek_token().token, + BorrowedToken::SingleQuotedString(_) + ) { Some(self.parse_literal_string()?) } else { None @@ -10115,7 +10145,10 @@ impl<'a> Parser<'a> { }), Some(Keyword::DATEFORMAT) => { let _ = self.parse_keyword(Keyword::AS); - let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) { + let fmt = if matches!( + self.peek_token().token, + BorrowedToken::SingleQuotedString(_) + ) { Some(self.parse_literal_string()?) 
} else { None @@ -10213,7 +10246,10 @@ impl<'a> Parser<'a> { } Some(Keyword::TIMEFORMAT) => { let _ = self.parse_keyword(Keyword::AS); - let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) { + let fmt = if matches!( + self.peek_token().token, + BorrowedToken::SingleQuotedString(_) + ) { Some(self.parse_literal_string()?) } else { None @@ -10227,13 +10263,13 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_file_size(&mut self) -> Result { + fn parse_file_size(&self) -> Result { let size = self.parse_number_value()?.value; let unit = self.maybe_parse_file_size_unit(); Ok(FileSize { size, unit }) } - fn maybe_parse_file_size_unit(&mut self) -> Option { + fn maybe_parse_file_size_unit(&self) -> Option { match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) { Some(Keyword::MB) => Some(FileSizeUnit::MB), Some(Keyword::GB) => Some(FileSizeUnit::GB), @@ -10241,7 +10277,7 @@ impl<'a> Parser<'a> { } } - fn parse_iam_role_kind(&mut self) -> Result { + fn parse_iam_role_kind(&self) -> Result { if self.parse_keyword(Keyword::DEFAULT) { Ok(IamRoleKind::Default) } else { @@ -10250,7 +10286,7 @@ impl<'a> Parser<'a> { } } - fn parse_copy_legacy_csv_option(&mut self) -> Result { + fn parse_copy_legacy_csv_option(&self) -> Result { let ret = match self.parse_one_of_keywords(&[ Keyword::HEADER, Keyword::QUOTE, @@ -10281,12 +10317,12 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_literal_char(&mut self) -> Result { + fn parse_literal_char(&self) -> Result { let s = self.parse_literal_string()?; if s.len() != 1 { let loc = self .tokens - .get(self.index - 1) + .get(self.index.get() - 1) .map_or(Location { line: 0, column: 0 }, |t| t.span.start); return parser_err!(format!("Expect a char, found {s:?}"), loc); } @@ -10295,28 +10331,28 @@ impl<'a> Parser<'a> { /// Parse a tab separated values in /// COPY payload - pub fn parse_tsv(&mut self) -> Vec> { + pub fn parse_tsv(&self) -> Vec> { self.parse_tab_value() } - pub fn parse_tab_value(&mut self) -> Vec> { + 
pub fn parse_tab_value(&self) -> Vec> { let mut values = vec![]; let mut content = String::from(""); while let Some(t) = self.next_token_no_skip().map(|t| &t.token) { match t { - Token::Whitespace(Whitespace::Tab) => { + BorrowedToken::Whitespace(Whitespace::Tab) => { values.push(Some(content.to_string())); content.clear(); } - Token::Whitespace(Whitespace::Newline) => { + BorrowedToken::Whitespace(Whitespace::Newline) => { values.push(Some(content.to_string())); content.clear(); } - Token::Backslash => { - if self.consume_token(&Token::Period) { + BorrowedToken::Backslash => { + if self.consume_token(&BorrowedToken::Period) { return values; } - if let Token::Word(w) = self.next_token().token { + if let BorrowedToken::Word(w) = self.next_token().token { if w.value == "N" { values.push(None); } @@ -10331,12 +10367,12 @@ impl<'a> Parser<'a> { } /// Parse a literal value (numbers, strings, date/time, booleans) - pub fn parse_value(&mut self) -> Result { + pub fn parse_value(&self) -> Result { let next_token = self.next_token(); let span = next_token.span; let ok_value = |value: Value| Ok(value.with_span(span)); match next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::TRUE if self.dialect.supports_boolean_literals() => { ok_value(Value::Boolean(true)) } @@ -10350,7 +10386,7 @@ impl<'a> Parser<'a> { _ => self.expected( "A value?", TokenWithSpan { - token: Token::Word(w), + token: BorrowedToken::Word(w), span, }, )?, @@ -10358,7 +10394,7 @@ impl<'a> Parser<'a> { _ => self.expected( "a concrete value", TokenWithSpan { - token: Token::Word(w), + token: BorrowedToken::Word(w), span, }, ), @@ -10366,56 +10402,60 @@ impl<'a> Parser<'a> { // The call to n.parse() returns a bigdecimal when the // bigdecimal feature is enabled, and is otherwise a no-op // (i.e., it returns the input string). 
- Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)), - Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString( + BorrowedToken::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)), + BorrowedToken::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString( self.maybe_concat_string_literal(s.to_string()), )), - Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString( + BorrowedToken::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString( self.maybe_concat_string_literal(s.to_string()), )), - Token::TripleSingleQuotedString(ref s) => { + BorrowedToken::TripleSingleQuotedString(ref s) => { ok_value(Value::TripleSingleQuotedString(s.to_string())) } - Token::TripleDoubleQuotedString(ref s) => { + BorrowedToken::TripleDoubleQuotedString(ref s) => { ok_value(Value::TripleDoubleQuotedString(s.to_string())) } - Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())), - Token::SingleQuotedByteStringLiteral(ref s) => { + BorrowedToken::DollarQuotedString(ref s) => { + ok_value(Value::DollarQuotedString(s.clone())) + } + BorrowedToken::SingleQuotedByteStringLiteral(ref s) => { ok_value(Value::SingleQuotedByteStringLiteral(s.clone())) } - Token::DoubleQuotedByteStringLiteral(ref s) => { + BorrowedToken::DoubleQuotedByteStringLiteral(ref s) => { ok_value(Value::DoubleQuotedByteStringLiteral(s.clone())) } - Token::TripleSingleQuotedByteStringLiteral(ref s) => { + BorrowedToken::TripleSingleQuotedByteStringLiteral(ref s) => { ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone())) } - Token::TripleDoubleQuotedByteStringLiteral(ref s) => { + BorrowedToken::TripleDoubleQuotedByteStringLiteral(ref s) => { ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone())) } - Token::SingleQuotedRawStringLiteral(ref s) => { + BorrowedToken::SingleQuotedRawStringLiteral(ref s) => { ok_value(Value::SingleQuotedRawStringLiteral(s.clone())) } - 
Token::DoubleQuotedRawStringLiteral(ref s) => { + BorrowedToken::DoubleQuotedRawStringLiteral(ref s) => { ok_value(Value::DoubleQuotedRawStringLiteral(s.clone())) } - Token::TripleSingleQuotedRawStringLiteral(ref s) => { + BorrowedToken::TripleSingleQuotedRawStringLiteral(ref s) => { ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone())) } - Token::TripleDoubleQuotedRawStringLiteral(ref s) => { + BorrowedToken::TripleDoubleQuotedRawStringLiteral(ref s) => { ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone())) } - Token::NationalStringLiteral(ref s) => { + BorrowedToken::NationalStringLiteral(ref s) => { ok_value(Value::NationalStringLiteral(s.to_string())) } - Token::EscapedStringLiteral(ref s) => { + BorrowedToken::EscapedStringLiteral(ref s) => { ok_value(Value::EscapedStringLiteral(s.to_string())) } - Token::UnicodeStringLiteral(ref s) => { + BorrowedToken::UnicodeStringLiteral(ref s) => { ok_value(Value::UnicodeStringLiteral(s.to_string())) } - Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())), - Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())), - tok @ Token::Colon | tok @ Token::AtSign => { + BorrowedToken::HexStringLiteral(ref s) => { + ok_value(Value::HexStringLiteral(s.to_string())) + } + BorrowedToken::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())), + tok @ BorrowedToken::Colon | tok @ BorrowedToken::AtSign => { // 1. Not calling self.parse_identifier(false)? // because only in placeholder we want to check // numbers as idfentifies. 
This because snowflake @@ -10425,8 +10465,8 @@ impl<'a> Parser<'a> { // without any whitespace in between let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone(); let ident = match next_token.token { - Token::Word(w) => Ok(w.into_ident(next_token.span)), - Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)), + BorrowedToken::Word(w) => Ok(w.into_ident(next_token.span)), + BorrowedToken::Number(w, false) => Ok(Ident::with_span(next_token.span, w)), _ => self.expected("placeholder", next_token), }?; Ok(Value::Placeholder(tok.to_string() + &ident.value) @@ -10442,10 +10482,10 @@ impl<'a> Parser<'a> { } } - fn maybe_concat_string_literal(&mut self, mut str: String) -> String { + fn maybe_concat_string_literal(&self, mut str: String) -> String { if self.dialect.supports_string_literal_concatenation() { - while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) = - self.peek_token_ref().token + while let BorrowedToken::SingleQuotedString(ref s) + | BorrowedToken::DoubleQuotedString(ref s) = self.peek_token_ref().token { str.push_str(s.clone().as_str()); self.advance_token(); @@ -10455,7 +10495,7 @@ impl<'a> Parser<'a> { } /// Parse an unsigned numeric literal - pub fn parse_number_value(&mut self) -> Result { + pub fn parse_number_value(&self) -> Result { let value_wrapper = self.parse_value()?; match &value_wrapper.value { Value::Number(_, _) => Ok(value_wrapper), @@ -10469,14 +10509,14 @@ impl<'a> Parser<'a> { /// Parse a numeric literal as an expression. 
Returns a [`Expr::UnaryOp`] if the number is signed, /// otherwise returns a [`Expr::Value`] - pub fn parse_number(&mut self) -> Result { + pub fn parse_number(&self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Plus => Ok(Expr::UnaryOp { + BorrowedToken::Plus => Ok(Expr::UnaryOp { op: UnaryOperator::Plus, expr: Box::new(Expr::Value(self.parse_number_value()?)), }), - Token::Minus => Ok(Expr::UnaryOp { + BorrowedToken::Minus => Ok(Expr::UnaryOp { op: UnaryOperator::Minus, expr: Box::new(Expr::Value(self.parse_number_value()?)), }), @@ -10487,17 +10527,17 @@ impl<'a> Parser<'a> { } } - fn parse_introduced_string_expr(&mut self) -> Result { + fn parse_introduced_string_expr(&self) -> Result { let next_token = self.next_token(); let span = next_token.span; match next_token.token { - Token::SingleQuotedString(ref s) => Ok(Expr::Value( + BorrowedToken::SingleQuotedString(ref s) => Ok(Expr::Value( Value::SingleQuotedString(s.to_string()).with_span(span), )), - Token::DoubleQuotedString(ref s) => Ok(Expr::Value( + BorrowedToken::DoubleQuotedString(ref s) => Ok(Expr::Value( Value::DoubleQuotedString(s.to_string()).with_span(span), )), - Token::HexStringLiteral(ref s) => Ok(Expr::Value( + BorrowedToken::HexStringLiteral(ref s) => Ok(Expr::Value( Value::HexStringLiteral(s.to_string()).with_span(span), )), unexpected => self.expected( @@ -10511,21 +10551,21 @@ impl<'a> Parser<'a> { } /// Parse an unsigned literal integer/long - pub fn parse_literal_uint(&mut self) -> Result { + pub fn parse_literal_uint(&self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.span.start), + BorrowedToken::Number(s, _) => Self::parse::(s, next_token.span.start), _ => self.expected("literal int", next_token), } } /// Parse the body of a `CREATE FUNCTION` specified as a string. /// e.g. `CREATE FUNCTION ... AS $$ body $$`. 
- fn parse_create_function_body_string(&mut self) -> Result { + fn parse_create_function_body_string(&self) -> Result { let peek_token = self.peek_token(); let span = peek_token.span; match peek_token.token { - Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => + BorrowedToken::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { self.next_token(); Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span))) @@ -10537,26 +10577,26 @@ impl<'a> Parser<'a> { } /// Parse a literal string - pub fn parse_literal_string(&mut self) -> Result { + pub fn parse_literal_string(&self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Word(Word { + BorrowedToken::Word(Word { value, keyword: Keyword::NoKeyword, .. }) => Ok(value), - Token::SingleQuotedString(s) => Ok(s), - Token::DoubleQuotedString(s) => Ok(s), - Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { + BorrowedToken::SingleQuotedString(s) => Ok(s), + BorrowedToken::DoubleQuotedString(s) => Ok(s), + BorrowedToken::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { Ok(s) } - Token::UnicodeStringLiteral(s) => Ok(s), + BorrowedToken::UnicodeStringLiteral(s) => Ok(s), _ => self.expected("literal string", next_token), } } /// Parse a boolean string - pub(crate) fn parse_boolean_string(&mut self) -> Result { + pub(crate) fn parse_boolean_string(&self) -> Result { match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) { Some(Keyword::TRUE) => Ok(true), Some(Keyword::FALSE) => Ok(false), @@ -10565,7 +10605,7 @@ impl<'a> Parser<'a> { } /// Parse a literal unicode normalization clause - pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result { + pub fn parse_unicode_is_normalized(&self, expr: Expr) -> Result { let neg = self.parse_keyword(Keyword::NOT); let normalized_form = self.maybe_parse(|parser| { match 
parser.parse_one_of_keywords(&[ @@ -10591,11 +10631,11 @@ impl<'a> Parser<'a> { self.expected("unicode normalization form", self.peek_token()) } - pub fn parse_enum_values(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; + pub fn parse_enum_values(&self) -> Result, ParserError> { + self.expect_token(&BorrowedToken::LParen)?; let values = self.parse_comma_separated(|parser| { let name = parser.parse_literal_string()?; - let e = if parser.consume_token(&Token::Eq) { + let e = if parser.consume_token(&BorrowedToken::Eq) { let value = parser.parse_number()?; EnumMember::NamedValue(name, value) } else { @@ -10603,13 +10643,13 @@ impl<'a> Parser<'a> { }; Ok(e) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(values) } /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example) - pub fn parse_data_type(&mut self) -> Result { + pub fn parse_data_type(&self) -> Result { let (ty, trailing_bracket) = self.parse_data_type_helper()?; if trailing_bracket.0 { return parser_err!( @@ -10621,9 +10661,7 @@ impl<'a> Parser<'a> { Ok(ty) } - fn parse_data_type_helper( - &mut self, - ) -> Result<(DataType, MatchedTrailingBracket), ParserError> { + fn parse_data_type_helper(&self) -> Result<(DataType, MatchedTrailingBracket), ParserError> { let dialect = self.dialect; self.advance_token(); let next_token = self.get_current_token(); @@ -10631,7 +10669,7 @@ impl<'a> Parser<'a> { let mut trailing_bracket: MatchedTrailingBracket = false.into(); let mut data = match &next_token.token { - Token::Word(w) => match w.keyword { + BorrowedToken::Word(w) => match w.keyword { Keyword::BOOLEAN => Ok(DataType::Boolean), Keyword::BOOL => Ok(DataType::Bool), Keyword::FLOAT => { @@ -10884,9 +10922,9 @@ impl<'a> Parser<'a> { Keyword::REGCLASS => Ok(DataType::Regclass), Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)), Keyword::FIXEDSTRING => { - self.expect_token(&Token::LParen)?; + 
self.expect_token(&BorrowedToken::LParen)?; let character_length = self.parse_literal_uint()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(DataType::FixedString(character_length)) } Keyword::TEXT => Ok(DataType::Text), @@ -10933,7 +10971,7 @@ impl<'a> Parser<'a> { DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type)) })?) } else { - self.expect_token(&Token::Lt)?; + self.expect_token(&BorrowedToken::Lt)?; let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?; trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?; Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( @@ -10976,9 +11014,9 @@ impl<'a> Parser<'a> { )) } Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let field_defs = self.parse_comma_separated(Parser::parse_column_def)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(DataType::Nested(field_defs)) } Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { @@ -10994,7 +11032,7 @@ impl<'a> Parser<'a> { Keyword::TABLE => { // an LParen after the TABLE keyword indicates that table columns are being defined // whereas no LParen indicates an anonymous table expression will be returned - if self.peek_token() == Token::LParen { + if self.peek_token() == BorrowedToken::LParen { let columns = self.parse_returns_table_columns()?; Ok(DataType::Table(Some(columns))) } else { @@ -11035,40 +11073,40 @@ impl<'a> Parser<'a> { }?; if self.dialect.supports_array_typedef_with_brackets() { - while self.consume_token(&Token::LBracket) { + while self.consume_token(&BorrowedToken::LBracket) { // Parse optional array data type size let size = self.maybe_parse(|p| p.parse_literal_uint())?; - self.expect_token(&Token::RBracket)?; + self.expect_token(&BorrowedToken::RBracket)?; data = 
DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size)) } } Ok((data, trailing_bracket)) } - fn parse_returns_table_column(&mut self) -> Result { + fn parse_returns_table_column(&self) -> Result { self.parse_column_def() } - fn parse_returns_table_columns(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; + fn parse_returns_table_columns(&self) -> Result, ParserError> { + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(columns) } - pub fn parse_string_values(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; + pub fn parse_string_values(&self) -> Result, ParserError> { + self.expect_token(&BorrowedToken::LParen)?; let mut values = Vec::new(); loop { let next_token = self.next_token(); match next_token.token { - Token::SingleQuotedString(value) => values.push(value), + BorrowedToken::SingleQuotedString(value) => values.push(value), _ => self.expected("a string", next_token)?, } let next_token = self.next_token(); match next_token.token { - Token::Comma => (), - Token::RParen => break, + BorrowedToken::Comma => (), + BorrowedToken::RParen => break, _ => self.expected(", or }", next_token)?, } } @@ -11076,7 +11114,7 @@ impl<'a> Parser<'a> { } /// Strictly parse `identifier AS identifier` - pub fn parse_identifier_with_alias(&mut self) -> Result { + pub fn parse_identifier_with_alias(&self) -> Result { let ident = self.parse_identifier()?; self.expect_keyword_is(Keyword::AS)?; let alias = self.parse_identifier()?; @@ -11084,7 +11122,7 @@ impl<'a> Parser<'a> { } /// Parse `identifier [AS] identifier` where the AS keyword is optional - fn parse_identifier_with_optional_alias(&mut self) -> Result { + fn parse_identifier_with_optional_alias(&self) -> Result { let ident = self.parse_identifier()?; let _after_as = self.parse_keyword(Keyword::AS); let 
alias = self.parse_identifier()?; @@ -11092,18 +11130,18 @@ impl<'a> Parser<'a> { } /// Parse comma-separated list of parenthesized queries for pipe operators - fn parse_pipe_operator_queries(&mut self) -> Result, ParserError> { + fn parse_pipe_operator_queries(&self) -> Result, ParserError> { self.parse_comma_separated(|parser| { - parser.expect_token(&Token::LParen)?; + parser.expect_token(&BorrowedToken::LParen)?; let query = parser.parse_query()?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; Ok(*query) }) } /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT fn parse_distinct_required_set_quantifier( - &mut self, + &self, operator_name: &str, ) -> Result { let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect)); @@ -11116,7 +11154,7 @@ impl<'a> Parser<'a> { } /// Parse optional identifier alias (with or without AS keyword) - fn parse_identifier_optional_alias(&mut self) -> Result, ParserError> { + fn parse_identifier_optional_alias(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::AS) { Ok(Some(self.parse_identifier()?)) } else { @@ -11126,8 +11164,8 @@ impl<'a> Parser<'a> { } /// Optionally parses an alias for a select list item - fn maybe_parse_select_item_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn maybe_parse_select_item_alias(&self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &Parser) -> bool { parser.dialect.is_select_item_alias(explicit, kw, parser) } self.parse_optional_alias_inner(None, validator) @@ -11136,8 +11174,8 @@ impl<'a> Parser<'a> { /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`. /// In this case, the alias is allowed to optionally name the columns in the table, in /// addition to the table itself. 
- pub fn maybe_parse_table_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + pub fn maybe_parse_table_alias(&self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &Parser) -> bool { parser.dialect.is_table_factor_alias(explicit, kw, parser) } match self.parse_optional_alias_inner(None, validator)? { @@ -11149,7 +11187,7 @@ impl<'a> Parser<'a> { } } - fn parse_table_index_hints(&mut self) -> Result, ParserError> { + fn parse_table_index_hints(&self) -> Result, ParserError> { let mut hints = vec![]; while let Some(hint_type) = self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE]) @@ -11190,13 +11228,13 @@ impl<'a> Parser<'a> { None }; - self.expect_token(&Token::LParen)?; - let index_names = if self.peek_token().token != Token::RParen { + self.expect_token(&BorrowedToken::LParen)?; + let index_names = if self.peek_token().token != BorrowedToken::RParen { self.parse_comma_separated(Parser::parse_identifier)? } else { vec![] }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; hints.push(TableIndexHints { hint_type, index_type, @@ -11211,10 +11249,10 @@ impl<'a> Parser<'a> { /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias` /// and `maybe_parse_table_alias`. pub fn parse_optional_alias( - &mut self, + &self, reserved_kwds: &[Keyword], ) -> Result, ParserError> { - fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool { + fn validator(_explicit: bool, _kw: &Keyword, _parser: &Parser) -> bool { false } self.parse_optional_alias_inner(Some(reserved_kwds), validator) @@ -11227,12 +11265,12 @@ impl<'a> Parser<'a> { /// to call to validate if a keyword should be parsed as an alias, to allow /// callers to customize the parsing logic based on their context. 
fn parse_optional_alias_inner( - &mut self, + &self, reserved_kwds: Option<&[Keyword]>, validator: F, ) -> Result, ParserError> where - F: Fn(bool, &Keyword, &mut Parser) -> bool, + F: Fn(bool, &Keyword, &Parser) -> bool, { let after_as = self.parse_keyword(Keyword::AS); @@ -11240,7 +11278,7 @@ impl<'a> Parser<'a> { match next_token.token { // By default, if a word is located after the `AS` keyword we consider it an alias // as long as it's not reserved. - Token::Word(w) + BorrowedToken::Word(w) if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) => { Ok(Some(w.into_ident(next_token.span))) @@ -11248,12 +11286,12 @@ impl<'a> Parser<'a> { // This pattern allows for customizing the acceptance of words as aliases based on the caller's // context, such as to what SQL element this word is a potential alias of (select item alias, table name // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords. - Token::Word(w) if validator(after_as, &w.keyword, self) => { + BorrowedToken::Word(w) if validator(after_as, &w.keyword, self) => { Ok(Some(w.into_ident(next_token.span))) } // For backwards-compatibility, we accept quoted strings as aliases regardless of the context. 
- Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))), - Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))), + BorrowedToken::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))), + BorrowedToken::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))), _ => { if after_as { return self.expected("an identifier after AS", next_token); @@ -11264,7 +11302,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_group_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_group_by(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { let expressions = if self.parse_keyword(Keyword::ALL) { None @@ -11297,15 +11335,15 @@ impl<'a> Parser<'a> { } } if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let result = self.parse_comma_separated(|p| { - if p.peek_token_ref().token == Token::LParen { + if p.peek_token_ref().token == BorrowedToken::LParen { p.parse_tuple(true, true) } else { Ok(vec![p.parse_expr()?]) } })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets( result, ))); @@ -11320,7 +11358,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_order_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_order_by(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { let order_by = if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) { @@ -11347,7 +11385,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_limit_clause(&mut self) -> Result, ParserError> { + fn parse_optional_limit_clause(&self) -> Result, ParserError> { let mut offset = if self.parse_keyword(Keyword::OFFSET) { Some(self.parse_offset()?) 
} else { @@ -11360,7 +11398,7 @@ impl<'a> Parser<'a> { if self.dialect.supports_limit_comma() && offset.is_none() && expr.is_some() // ALL not supported with comma - && self.consume_token(&Token::Comma) + && self.consume_token(&BorrowedToken::Comma) { let offset = expr.ok_or_else(|| { ParserError::ParserError( @@ -11403,7 +11441,7 @@ impl<'a> Parser<'a> { /// Parse a table object for insertion /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)` - pub fn parse_table_object(&mut self) -> Result { + pub fn parse_table_object(&self) -> Result { if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) { let fn_name = self.parse_object_name(false)?; self.parse_function_call(fn_name) @@ -11419,7 +11457,7 @@ impl<'a> Parser<'a> { /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN, /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers /// in this context on BigQuery. - pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result { + pub fn parse_object_name(&self, in_table_clause: bool) -> Result { self.parse_object_name_inner(in_table_clause, false) } @@ -11433,7 +11471,7 @@ impl<'a> Parser<'a> { /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name /// e.g. 
*, *.*, `foo`.*, or "foo"."bar" fn parse_object_name_inner( - &mut self, + &self, in_table_clause: bool, allow_wildcards: bool, ) -> Result { @@ -11442,28 +11480,28 @@ impl<'a> Parser<'a> { loop { let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?; parts.push(ObjectNamePart::Identifier(ident)); - if !self.consume_token(&Token::Period) && !end_with_period { + if !self.consume_token(&BorrowedToken::Period) && !end_with_period { break; } } } else { loop { - if allow_wildcards && self.peek_token().token == Token::Mul { + if allow_wildcards && self.peek_token().token == BorrowedToken::Mul { let span = self.next_token().span; parts.push(ObjectNamePart::Identifier(Ident { - value: Token::Mul.to_string(), + value: BorrowedToken::Mul.to_string(), quote_style: None, span, })); } else if dialect_of!(self is BigQueryDialect) && in_table_clause { let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?; parts.push(ObjectNamePart::Identifier(ident)); - if !self.consume_token(&Token::Period) && !end_with_period { + if !self.consume_token(&BorrowedToken::Period) && !end_with_period { break; } } else if self.dialect.supports_object_name_double_dot_notation() && parts.len() == 1 - && matches!(self.peek_token().token, Token::Period) + && matches!(self.peek_token().token, BorrowedToken::Period) { // Empty string here means default schema parts.push(ObjectNamePart::Identifier(Ident::new(""))); @@ -11473,10 +11511,12 @@ impl<'a> Parser<'a> { .dialect .is_identifier_generating_function_name(&ident, &parts) { - self.expect_token(&Token::LParen)?; - let args: Vec = - self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::LParen)?; + let args: Vec = self.parse_comma_separated0( + Self::parse_function_args, + BorrowedToken::RParen, + )?; + self.expect_token(&BorrowedToken::RParen)?; ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args }) } else 
{ ObjectNamePart::Identifier(ident) @@ -11484,7 +11524,7 @@ impl<'a> Parser<'a> { parts.push(part); } - if !self.consume_token(&Token::Period) { + if !self.consume_token(&BorrowedToken::Period) { break; } } @@ -11521,14 +11561,14 @@ impl<'a> Parser<'a> { } /// Parse identifiers - pub fn parse_identifiers(&mut self) -> Result, ParserError> { + pub fn parse_identifiers(&self) -> Result, ParserError> { let mut idents = vec![]; loop { match &self.peek_token_ref().token { - Token::Word(w) => { + BorrowedToken::Word(w) => { idents.push(w.clone().into_ident(self.peek_token_ref().span)); } - Token::EOF | Token::Eq => break, + BorrowedToken::EOF | BorrowedToken::Eq => break, _ => {} } self.advance_token(); @@ -11575,14 +11615,14 @@ impl<'a> Parser<'a> { /// ``` /// /// [parse_identifiers]: Parser::parse_identifiers - pub fn parse_multipart_identifier(&mut self) -> Result, ParserError> { + pub fn parse_multipart_identifier(&self) -> Result, ParserError> { let mut idents = vec![]; // expecting at least one word for identifier let next_token = self.next_token(); match next_token.token { - Token::Word(w) => idents.push(w.into_ident(next_token.span)), - Token::EOF => { + BorrowedToken::Word(w) => idents.push(w.into_ident(next_token.span)), + BorrowedToken::EOF => { return Err(ParserError::ParserError( "Empty input when parsing identifier".to_string(), ))? @@ -11598,11 +11638,11 @@ impl<'a> Parser<'a> { loop { match self.next_token().token { // ensure that optional period is succeeded by another identifier - Token::Period => { + BorrowedToken::Period => { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => idents.push(w.into_ident(next_token.span)), - Token::EOF => { + BorrowedToken::Word(w) => idents.push(w.into_ident(next_token.span)), + BorrowedToken::EOF => { return Err(ParserError::ParserError( "Trailing period in identifier".to_string(), ))? 
@@ -11614,7 +11654,7 @@ impl<'a> Parser<'a> { } } } - Token::EOF => break, + BorrowedToken::EOF => break, token => { return Err(ParserError::ParserError(format!( "Unexpected token in identifier: {token}" @@ -11627,12 +11667,12 @@ impl<'a> Parser<'a> { } /// Parse a simple one-word identifier (possibly quoted, possibly a keyword) - pub fn parse_identifier(&mut self) -> Result { + pub fn parse_identifier(&self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => Ok(w.into_ident(next_token.span)), - Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), - Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), + BorrowedToken::Word(w) => Ok(w.into_ident(next_token.span)), + BorrowedToken::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), + BorrowedToken::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), _ => self.expected("identifier", next_token), } } @@ -11647,27 +11687,27 @@ impl<'a> Parser<'a> { /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical) /// /// Return a tuple of the identifier and a boolean indicating it ends with a period. 
- fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> { + fn parse_unquoted_hyphenated_identifier(&self) -> Result<(Ident, bool), ParserError> { match self.peek_token().token { - Token::Word(w) => { + BorrowedToken::Word(w) => { let quote_style_is_none = w.quote_style.is_none(); let mut requires_whitespace = false; let mut ident = w.into_ident(self.next_token().span); if quote_style_is_none { - while matches!(self.peek_token_no_skip().token, Token::Minus) { + while matches!(self.peek_token_no_skip().token, BorrowedToken::Minus) { self.next_token(); ident.value.push('-'); let token = self .next_token_no_skip() .cloned() - .unwrap_or(TokenWithSpan::wrap(Token::EOF)); + .unwrap_or(TokenWithSpan::wrap(BorrowedToken::EOF)); requires_whitespace = match token.token { - Token::Word(next_word) if next_word.quote_style.is_none() => { + BorrowedToken::Word(next_word) if next_word.quote_style.is_none() => { ident.value.push_str(&next_word.value); false } - Token::Number(s, false) => { + BorrowedToken::Number(s, false) => { // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`. // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`. // @@ -11680,7 +11720,10 @@ impl<'a> Parser<'a> { }) else { return self.expected( "continuation of hyphenated identifier", - TokenWithSpan::new(Token::Number(s, false), token.span), + TokenWithSpan::new( + BorrowedToken::Number(s, false), + token.span, + ), ); }; ident.value.push_str(s); @@ -11690,7 +11733,7 @@ impl<'a> Parser<'a> { } // If next token is period, then it is part of an ObjectName and we don't expect whitespace // after the number. - !matches!(self.peek_token().token, Token::Period) + !matches!(self.peek_token().token, BorrowedToken::Period) } _ => { return self @@ -11703,7 +11746,10 @@ impl<'a> Parser<'a> { // otherwise foo-123a will be parsed as `foo-123` with the alias `a`. 
if requires_whitespace { let token = self.next_token(); - if !matches!(token.token, Token::EOF | Token::Whitespace(_)) { + if !matches!( + token.token, + BorrowedToken::EOF | BorrowedToken::Whitespace(_) + ) { return self .expected("whitespace following hyphenated identifier", token); } @@ -11716,9 +11762,9 @@ impl<'a> Parser<'a> { } /// Parses a parenthesized, comma-separated list of column definitions within a view. - fn parse_view_columns(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { - if self.peek_token().token == Token::RParen { + fn parse_view_columns(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { + if self.peek_token().token == BorrowedToken::RParen { self.next_token(); Ok(vec![]) } else { @@ -11727,7 +11773,7 @@ impl<'a> Parser<'a> { self.dialect.supports_column_definition_trailing_commas(), Self::is_reserved_for_column_alias, )?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(cols) } } else { @@ -11736,7 +11782,7 @@ impl<'a> Parser<'a> { } /// Parses a column definition within a view. - fn parse_view_column(&mut self) -> Result { + fn parse_view_column(&self) -> Result { let name = self.parse_identifier()?; let options = self.parse_view_column_options()?; let data_type = if dialect_of!(self is ClickHouseDialect) { @@ -11751,7 +11797,7 @@ impl<'a> Parser<'a> { }) } - fn parse_view_column_options(&mut self) -> Result, ParserError> { + fn parse_view_column_options(&self) -> Result, ParserError> { let mut options = Vec::new(); loop { let option = self.parse_optional_column_option()?; @@ -11773,7 +11819,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers. 
/// For example: `(col1, "col 2", ...)` pub fn parse_parenthesized_column_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11781,7 +11827,7 @@ impl<'a> Parser<'a> { } pub fn parse_parenthesized_compound_identifier_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11794,7 +11840,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary /// expressions with ordering information (and an opclass in some dialects). - fn parse_parenthesized_index_column_list(&mut self) -> Result, ParserError> { + fn parse_parenthesized_index_column_list(&self) -> Result, ParserError> { self.parse_parenthesized_column_list_inner(Mandatory, false, |p| { p.parse_create_index_expr() }) @@ -11803,7 +11849,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers. /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)` pub fn parse_parenthesized_qualified_column_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11815,21 +11861,21 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of columns using /// the provided function to parse each element. 
fn parse_parenthesized_column_list_inner( - &mut self, + &self, optional: IsOptional, allow_empty: bool, mut f: F, ) -> Result, ParserError> where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { - if self.consume_token(&Token::LParen) { - if allow_empty && self.peek_token().token == Token::RParen { + if self.consume_token(&BorrowedToken::LParen) { + if allow_empty && self.peek_token().token == BorrowedToken::RParen { self.next_token(); Ok(vec![]) } else { let cols = self.parse_comma_separated(|p| f(p))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(cols) } } else if optional == Optional { @@ -11840,40 +11886,38 @@ impl<'a> Parser<'a> { } /// Parses a parenthesized comma-separated list of table alias column definitions. - fn parse_table_alias_column_defs(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { + fn parse_table_alias_column_defs(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { let cols = self.parse_comma_separated(|p| { let name = p.parse_identifier()?; let data_type = p.maybe_parse(|p| p.parse_data_type())?; Ok(TableAliasColumnDef { name, data_type }) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(cols) } else { Ok(vec![]) } } - pub fn parse_precision(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_precision(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let n = self.parse_literal_uint()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(n) } - pub fn parse_optional_precision(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { + pub fn parse_optional_precision(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { let n = self.parse_literal_uint()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some(n)) } else { 
Ok(None) } } - fn maybe_parse_optional_interval_fields( - &mut self, - ) -> Result, ParserError> { + fn maybe_parse_optional_interval_fields(&self) -> Result, ParserError> { match self.parse_one_of_keywords(&[ // Can be followed by `TO` option Keyword::YEAR, @@ -11957,42 +12001,40 @@ impl<'a> Parser<'a> { /// ``` /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 - pub fn parse_datetime_64(&mut self) -> Result<(u64, Option), ParserError> { + pub fn parse_datetime_64(&self) -> Result<(u64, Option), ParserError> { self.expect_keyword_is(Keyword::DATETIME64)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let precision = self.parse_literal_uint()?; - let time_zone = if self.consume_token(&Token::Comma) { + let time_zone = if self.consume_token(&BorrowedToken::Comma) { Some(self.parse_literal_string()?) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok((precision, time_zone)) } - pub fn parse_optional_character_length( - &mut self, - ) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { + pub fn parse_optional_character_length(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { let character_length = self.parse_character_length()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some(character_length)) } else { Ok(None) } } - pub fn parse_optional_binary_length(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { + pub fn parse_optional_binary_length(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { let binary_length = self.parse_binary_length()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Some(binary_length)) } else { Ok(None) } } - pub fn parse_character_length(&mut self) -> Result { + pub fn parse_character_length(&self) -> Result { if 
self.parse_keyword(Keyword::MAX) { return Ok(CharacterLength::Max); } @@ -12007,7 +12049,7 @@ impl<'a> Parser<'a> { Ok(CharacterLength::IntegerLength { length, unit }) } - pub fn parse_binary_length(&mut self) -> Result { + pub fn parse_binary_length(&self) -> Result { if self.parse_keyword(Keyword::MAX) { return Ok(BinaryLength::Max); } @@ -12016,16 +12058,16 @@ impl<'a> Parser<'a> { } pub fn parse_optional_precision_scale( - &mut self, + &self, ) -> Result<(Option, Option), ParserError> { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let n = self.parse_literal_uint()?; - let scale = if self.consume_token(&Token::Comma) { + let scale = if self.consume_token(&BorrowedToken::Comma) { Some(self.parse_literal_uint()?) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok((Some(n), scale)) } else { Ok((None, None)) @@ -12033,17 +12075,17 @@ impl<'a> Parser<'a> { } pub fn parse_exact_number_optional_precision_scale( - &mut self, + &self, ) -> Result { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let precision = self.parse_literal_uint()?; - let scale = if self.consume_token(&Token::Comma) { + let scale = if self.consume_token(&BorrowedToken::Comma) { Some(self.parse_signed_integer()?) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; match scale { None => Ok(ExactNumberInfo::Precision(precision)), @@ -12055,16 +12097,16 @@ impl<'a> Parser<'a> { } /// Parse an optionally signed integer literal. 
- fn parse_signed_integer(&mut self) -> Result { - let is_negative = self.consume_token(&Token::Minus); + fn parse_signed_integer(&self) -> Result { + let is_negative = self.consume_token(&BorrowedToken::Minus); if !is_negative { - let _ = self.consume_token(&Token::Plus); + let _ = self.consume_token(&BorrowedToken::Plus); } let current_token = self.peek_token_ref(); match ¤t_token.token { - Token::Number(s, _) => { + BorrowedToken::Number(s, _) => { let s = s.clone(); let span_start = current_token.span.start; self.advance_token(); @@ -12075,20 +12117,20 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_type_modifiers(&mut self) -> Result>, ParserError> { - if self.consume_token(&Token::LParen) { + pub fn parse_optional_type_modifiers(&self) -> Result>, ParserError> { + if self.consume_token(&BorrowedToken::LParen) { let mut modifiers = Vec::new(); loop { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => modifiers.push(w.to_string()), - Token::Number(n, _) => modifiers.push(n), - Token::SingleQuotedString(s) => modifiers.push(s), + BorrowedToken::Word(w) => modifiers.push(w.to_string()), + BorrowedToken::Number(n, _) => modifiers.push(n), + BorrowedToken::SingleQuotedString(s) => modifiers.push(s), - Token::Comma => { + BorrowedToken::Comma => { continue; } - Token::RParen => { + BorrowedToken::RParen => { break; } _ => self.expected("type modifiers", next_token)?, @@ -12102,13 +12144,13 @@ impl<'a> Parser<'a> { } /// Parse a parenthesized sub data type - fn parse_sub_type(&mut self, parent_type: F) -> Result + fn parse_sub_type(&self, parent_type: F) -> Result where F: FnOnce(Box) -> DataType, { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let inside_type = self.parse_data_type()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(parent_type(inside_type.into())) } @@ -12116,7 +12158,7 @@ impl<'a> Parser<'a> { /// /// This is used to reduce the size of 
the stack frames in debug builds fn parse_delete_setexpr_boxed( - &mut self, + &self, delete_token: TokenWithSpan, ) -> Result, ParserError> { Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?))) @@ -12125,11 +12167,11 @@ impl<'a> Parser<'a> { /// Parse a MERGE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds - fn parse_merge_setexpr_boxed(&mut self) -> Result, ParserError> { + fn parse_merge_setexpr_boxed(&self) -> Result, ParserError> { Ok(Box::new(SetExpr::Merge(self.parse_merge()?))) } - pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result { + pub fn parse_delete(&self, delete_token: TokenWithSpan) -> Result { let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) { // `FROM` keyword is optional in BigQuery SQL. // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement @@ -12172,7 +12214,7 @@ impl<'a> Parser<'a> { }; Ok(Statement::Delete(Delete { - delete_token: delete_token.into(), + delete_token: delete_token.to_static().into(), tables, from: if with_from_keyword { FromTable::WithFromKeyword(from) @@ -12188,7 +12230,7 @@ impl<'a> Parser<'a> { } // KILL [CONNECTION | QUERY | MUTATION] processlist_id - pub fn parse_kill(&mut self) -> Result { + pub fn parse_kill(&self) -> Result { let modifier_keyword = self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]); @@ -12213,10 +12255,7 @@ impl<'a> Parser<'a> { Ok(Statement::Kill { modifier, id }) } - pub fn parse_explain( - &mut self, - describe_alias: DescribeAlias, - ) -> Result { + pub fn parse_explain(&self, describe_alias: DescribeAlias) -> Result { let mut analyze = false; let mut verbose = false; let mut query_plan = false; @@ -12228,7 +12267,7 @@ impl<'a> Parser<'a> { // although not all features may be implemented. 
if describe_alias == DescribeAlias::Explain && self.dialect.supports_explain_with_utility_options() - && self.peek_token().token == Token::LParen + && self.peek_token().token == BorrowedToken::LParen { options = Some(self.parse_utility_options()?) } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) { @@ -12287,12 +12326,12 @@ impl<'a> Parser<'a> { /// preceded with some `WITH` CTE declarations and optionally followed /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't /// expect the initial keyword to be already consumed - pub fn parse_query(&mut self) -> Result, ParserError> { + pub fn parse_query(&self) -> Result, ParserError> { let _guard = self.recursion_counter.try_decrease()?; let with = if self.parse_keyword(Keyword::WITH) { - let with_token = self.get_current_token(); + let with_token = self.get_current_token().clone(); Some(With { - with_token: with_token.clone().into(), + with_token: with_token.to_static().into(), recursive: self.parse_keyword(Keyword::RECURSIVE), cte_tables: self.parse_comma_separated(Parser::parse_cte)?, }) @@ -12415,10 +12454,10 @@ impl<'a> Parser<'a> { } } - fn parse_pipe_operators(&mut self) -> Result, ParserError> { + fn parse_pipe_operators(&self) -> Result, ParserError> { let mut pipe_operators = Vec::new(); - while self.consume_token(&Token::VerticalBarRightAngleBracket) { + while self.consume_token(&BorrowedToken::VerticalBarRightAngleBracket) { let kw = self.expect_one_of_keywords(&[ Keyword::SELECT, Keyword::EXTEND, @@ -12552,14 +12591,14 @@ impl<'a> Parser<'a> { } } Keyword::PIVOT => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?; self.expect_keyword_is(Keyword::FOR)?; let value_column = self.parse_period_separated(|p| p.parse_identifier())?; self.expect_keyword_is(Keyword::IN)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let 
value_source = if self.parse_keyword(Keyword::ANY) { let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { self.parse_comma_separated(Parser::parse_order_by_expr)? @@ -12574,8 +12613,8 @@ impl<'a> Parser<'a> { self.parse_comma_separated(Self::parse_expr_with_alias)?, ) }; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.parse_identifier_optional_alias()?; @@ -12587,17 +12626,17 @@ impl<'a> Parser<'a> { }); } Keyword::UNPIVOT => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let value_column = self.parse_identifier()?; self.expect_keyword(Keyword::FOR)?; let name_column = self.parse_identifier()?; self.expect_keyword(Keyword::IN)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.parse_identifier_optional_alias()?; @@ -12634,13 +12673,13 @@ impl<'a> Parser<'a> { Ok(pipe_operators) } - fn parse_settings(&mut self) -> Result>, ParserError> { + fn parse_settings(&self) -> Result>, ParserError> { let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect) && self.parse_keyword(Keyword::SETTINGS) { let key_values = self.parse_comma_separated(|p| { let key = p.parse_identifier()?; - p.expect_token(&Token::Eq)?; + p.expect_token(&BorrowedToken::Eq)?; let value = p.parse_expr()?; Ok(Setting { key, value }) })?; @@ -12652,7 +12691,7 @@ impl<'a> Parser<'a> { } /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause - pub fn parse_for_clause(&mut self) -> Result, ParserError> { + pub fn parse_for_clause(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::XML) { 
Ok(Some(self.parse_for_xml()?)) } else if self.parse_keyword(Keyword::JSON) { @@ -12665,13 +12704,13 @@ impl<'a> Parser<'a> { } /// Parse a mssql `FOR XML` clause - pub fn parse_for_xml(&mut self) -> Result { + pub fn parse_for_xml(&self) -> Result { let for_xml = if self.parse_keyword(Keyword::RAW) { let mut element_name = None; - if self.peek_token().token == Token::LParen { - self.expect_token(&Token::LParen)?; + if self.peek_token().token == BorrowedToken::LParen { + self.expect_token(&BorrowedToken::LParen)?; element_name = Some(self.parse_literal_string()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } ForXml::Raw(element_name) } else if self.parse_keyword(Keyword::AUTO) { @@ -12680,10 +12719,10 @@ impl<'a> Parser<'a> { ForXml::Explicit } else if self.parse_keyword(Keyword::PATH) { let mut element_name = None; - if self.peek_token().token == Token::LParen { - self.expect_token(&Token::LParen)?; + if self.peek_token().token == BorrowedToken::LParen { + self.expect_token(&BorrowedToken::LParen)?; element_name = Some(self.parse_literal_string()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } ForXml::Path(element_name) } else { @@ -12695,7 +12734,7 @@ impl<'a> Parser<'a> { let mut binary_base64 = false; let mut root = None; let mut r#type = false; - while self.peek_token().token == Token::Comma { + while self.peek_token().token == BorrowedToken::Comma { self.next_token(); if self.parse_keyword(Keyword::ELEMENTS) { elements = true; @@ -12703,9 +12742,9 @@ impl<'a> Parser<'a> { self.expect_keyword_is(Keyword::BASE64)?; binary_base64 = true; } else if self.parse_keyword(Keyword::ROOT) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; root = Some(self.parse_literal_string()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } else if self.parse_keyword(Keyword::TYPE) { r#type = true; } @@ -12720,7 +12759,7 @@ 
impl<'a> Parser<'a> { } /// Parse a mssql `FOR JSON` clause - pub fn parse_for_json(&mut self) -> Result { + pub fn parse_for_json(&self) -> Result { let for_json = if self.parse_keyword(Keyword::AUTO) { ForJson::Auto } else if self.parse_keyword(Keyword::PATH) { @@ -12733,12 +12772,12 @@ impl<'a> Parser<'a> { let mut root = None; let mut include_null_values = false; let mut without_array_wrapper = false; - while self.peek_token().token == Token::Comma { + while self.peek_token().token == BorrowedToken::Comma { self.next_token(); if self.parse_keyword(Keyword::ROOT) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; root = Some(self.parse_literal_string()?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) { include_null_values = true; } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) { @@ -12754,7 +12793,7 @@ impl<'a> Parser<'a> { } /// Parse a CTE (`alias [( col1, col2, ... 
)] AS (subquery)`) - pub fn parse_cte(&mut self) -> Result { + pub fn parse_cte(&self) -> Result { let name = self.parse_identifier()?; let mut cte = if self.parse_keyword(Keyword::AS) { @@ -12766,10 +12805,10 @@ impl<'a> Parser<'a> { is_materialized = Some(CteAsMaterialized::NotMaterialized); } } - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let query = self.parse_query()?; - let closing_paren_token = self.expect_token(&Token::RParen)?; + let closing_paren_token = self.expect_token(&BorrowedToken::RParen)?; let alias = TableAlias { name, @@ -12780,7 +12819,7 @@ impl<'a> Parser<'a> { query, from: None, materialized: is_materialized, - closing_paren_token: closing_paren_token.into(), + closing_paren_token: closing_paren_token.to_static().into(), } } else { let columns = self.parse_table_alias_column_defs()?; @@ -12793,10 +12832,10 @@ impl<'a> Parser<'a> { is_materialized = Some(CteAsMaterialized::NotMaterialized); } } - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let query = self.parse_query()?; - let closing_paren_token = self.expect_token(&Token::RParen)?; + let closing_paren_token = self.expect_token(&BorrowedToken::RParen)?; let alias = TableAlias { name, columns }; Cte { @@ -12804,7 +12843,7 @@ impl<'a> Parser<'a> { query, from: None, materialized: is_materialized, - closing_paren_token: closing_paren_token.into(), + closing_paren_token: closing_paren_token.to_static().into(), } }; if self.parse_keyword(Keyword::FROM) { @@ -12821,17 +12860,17 @@ impl<'a> Parser<'a> { /// subquery ::= query_body [ order_by_limit ] /// set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body /// ``` - pub fn parse_query_body(&mut self, precedence: u8) -> Result, ParserError> { + pub fn parse_query_body(&self, precedence: u8) -> Result, ParserError> { // We parse the expression using a Pratt parser, as in `parse_expr()`. 
// Start by parsing a restricted SELECT or a `(subquery)`: let expr = if self.peek_keyword(Keyword::SELECT) || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select()) { SetExpr::Select(self.parse_select().map(Box::new)?) - } else if self.consume_token(&Token::LParen) { + } else if self.consume_token(&BorrowedToken::LParen) { // CTEs are not allowed here, but the parser currently accepts them let subquery = self.parse_query()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; SetExpr::Query(subquery) } else if self.parse_keyword(Keyword::VALUES) { let is_mysql = dialect_of!(self is MySqlDialect); @@ -12855,7 +12894,7 @@ impl<'a> Parser<'a> { /// /// (this is its own function to reduce required stack size in debug builds) fn parse_remaining_set_exprs( - &mut self, + &self, mut expr: SetExpr, precedence: u8, ) -> Result, ParserError> { @@ -12888,17 +12927,19 @@ impl<'a> Parser<'a> { Ok(expr.into()) } - pub fn parse_set_operator(&mut self, token: &Token) -> Option { + pub fn parse_set_operator<'b>(&self, token: &BorrowedToken<'b>) -> Option { match token { - Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union), - Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except), - Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect), - Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus), + BorrowedToken::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union), + BorrowedToken::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except), + BorrowedToken::Word(w) if w.keyword == Keyword::INTERSECT => { + Some(SetOperator::Intersect) + } + BorrowedToken::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus), _ => None, } } - pub fn parse_set_quantifier(&mut self, op: &Option) -> SetQuantifier { + pub fn parse_set_quantifier(&self, op: &Option) -> SetQuantifier { match op { Some( SetOperator::Except @@ 
-12927,7 +12968,7 @@ impl<'a> Parser<'a> { } /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`) - pub fn parse_select(&mut self) -> Result { + pub fn parse_select(&self) -> Result { let mut from_first = None; if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) { @@ -12935,7 +12976,7 @@ impl<'a> Parser<'a> { let from = self.parse_table_with_joins()?; if !self.peek_keyword(Keyword::SELECT) { return Ok(Select { - select_token: AttachedToken(from_token), + select_token: AttachedToken(from_token.to_static()), distinct: None, top: None, top_before_distinct: false, @@ -13117,7 +13158,7 @@ impl<'a> Parser<'a> { }; Ok(Select { - select_token: AttachedToken(select_token), + select_token: AttachedToken(select_token.to_static()), distinct, top, top_before_distinct, @@ -13146,7 +13187,7 @@ impl<'a> Parser<'a> { }) } - fn parse_value_table_mode(&mut self) -> Result, ParserError> { + fn parse_value_table_mode(&self) -> Result, ParserError> { if !dialect_of!(self is BigQueryDialect) { return Ok(None); } @@ -13175,18 +13216,18 @@ impl<'a> Parser<'a> { /// Invoke `f` after first setting the parser's `ParserState` to `state`. /// /// Upon return, restores the parser's state to what it started at. 
- fn with_state(&mut self, state: ParserState, mut f: F) -> Result + fn with_state(&self, state: ParserState, mut f: F) -> Result where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { - let current_state = self.state; - self.state = state; + let current_state = self.state.get(); + self.state.set(state); let res = f(self); - self.state = current_state; + self.state.set(current_state); res } - pub fn parse_connect_by(&mut self) -> Result { + pub fn parse_connect_by(&self) -> Result { let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) { let relationships = self.with_state(ParserState::ConnectBy, |parser| { parser.parse_comma_separated(Parser::parse_expr) @@ -13210,16 +13251,16 @@ impl<'a> Parser<'a> { } /// Parse `CREATE TABLE x AS TABLE y` - pub fn parse_as_table(&mut self) -> Result { + pub fn parse_as_table(&self) -> Result { let token1 = self.next_token(); let token2 = self.next_token(); let token3 = self.next_token(); let table_name; let schema_name; - if token2 == Token::Period { + if token2 == BorrowedToken::Period { match token1.token { - Token::Word(w) => { + BorrowedToken::Word(w) => { schema_name = w.value; } _ => { @@ -13227,7 +13268,7 @@ impl<'a> Parser<'a> { } } match token3.token { - Token::Word(w) => { + BorrowedToken::Word(w) => { table_name = w.value; } _ => { @@ -13240,7 +13281,7 @@ impl<'a> Parser<'a> { }) } else { match token1.token { - Token::Word(w) => { + BorrowedToken::Word(w) => { table_name = w.value; } _ => { @@ -13255,10 +13296,7 @@ impl<'a> Parser<'a> { } /// Parse a `SET ROLE` statement. Expects SET to be consumed already. 
- fn parse_set_role( - &mut self, - modifier: Option, - ) -> Result { + fn parse_set_role(&self, modifier: Option) -> Result { self.expect_keyword_is(Keyword::ROLE)?; let role_name = if self.parse_keyword(Keyword::NONE) { @@ -13272,14 +13310,11 @@ impl<'a> Parser<'a> { })) } - fn parse_set_values( - &mut self, - parenthesized_assignment: bool, - ) -> Result, ParserError> { + fn parse_set_values(&self, parenthesized_assignment: bool) -> Result, ParserError> { let mut values = vec![]; if parenthesized_assignment { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; } loop { @@ -13292,18 +13327,18 @@ impl<'a> Parser<'a> { }; values.push(value); - if self.consume_token(&Token::Comma) { + if self.consume_token(&BorrowedToken::Comma) { continue; } if parenthesized_assignment { - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } return Ok(values); } } - fn parse_context_modifier(&mut self) -> Option { + fn parse_context_modifier(&self) -> Option { let modifier = self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?; @@ -13311,11 +13346,11 @@ impl<'a> Parser<'a> { } /// Parse a single SET statement assignment `var = expr`. - fn parse_set_assignment(&mut self) -> Result { + fn parse_set_assignment(&self) -> Result { let scope = self.parse_context_modifier(); let name = if self.dialect.supports_parenthesized_set_variables() - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) { // Parenthesized assignments are handled in the `parse_set` function after // trying to parse list of assignments using this function. @@ -13325,7 +13360,7 @@ impl<'a> Parser<'a> { self.parse_object_name(false)? 
}; - if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) { + if !(self.consume_token(&BorrowedToken::Eq) || self.parse_keyword(Keyword::TO)) { return self.expected("assignment operator", self.peek_token()); } @@ -13334,7 +13369,7 @@ impl<'a> Parser<'a> { Ok(SetAssignment { scope, name, value }) } - fn parse_set(&mut self) -> Result { + fn parse_set(&self) -> Result { let hivevar = self.parse_keyword(Keyword::HIVEVAR); // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both @@ -13345,7 +13380,7 @@ impl<'a> Parser<'a> { }; if hivevar { - self.expect_token(&Token::Colon)?; + self.expect_token(&BorrowedToken::Colon)?; } if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? { @@ -13356,7 +13391,7 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) || self.parse_keyword(Keyword::TIMEZONE) { - if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + if self.consume_token(&BorrowedToken::Eq) || self.parse_keyword(Keyword::TO) { return Ok(Set::SingleAssignment { scope, hivevar, @@ -13456,21 +13491,21 @@ impl<'a> Parser<'a> { } let variables = if self.dialect.supports_parenthesized_set_variables() - && self.consume_token(&Token::LParen) + && self.consume_token(&BorrowedToken::LParen) { let vars = OneOrManyWithParens::Many( - self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())? + self.parse_comma_separated(|parser: &Parser<'a>| parser.parse_identifier())? .into_iter() .map(|ident| ObjectName::from(vec![ident])) .collect(), ); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; vars } else { OneOrManyWithParens::One(self.parse_object_name(false)?) 
}; - if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + if self.consume_token(&BorrowedToken::Eq) || self.parse_keyword(Keyword::TO) { let stmt = match variables { OneOrManyWithParens::One(var) => Set::SingleAssignment { scope, @@ -13495,7 +13530,7 @@ impl<'a> Parser<'a> { self.expected("equals sign or TO", self.peek_token()) } - pub fn parse_set_session_params(&mut self) -> Result { + pub fn parse_set_session_params(&self) -> Result { if self.parse_keyword(Keyword::STATISTICS) { let topic = match self.parse_one_of_keywords(&[ Keyword::IO, @@ -13528,7 +13563,7 @@ impl<'a> Parser<'a> { let keywords = self.parse_comma_separated(|parser| { let next_token = parser.next_token(); match &next_token.token { - Token::Word(w) => Ok(w.to_string()), + BorrowedToken::Word(w) => Ok(w.to_string()), _ => parser.expected("SQL keyword", next_token), } })?; @@ -13544,7 +13579,7 @@ impl<'a> Parser<'a> { let names = self.parse_comma_separated(|parser| { let next_token = parser.next_token(); match next_token.token { - Token::Word(w) => Ok(w.to_string()), + BorrowedToken::Word(w) => Ok(w.to_string()), _ => parser.expected("Session param name", next_token), } })?; @@ -13559,7 +13594,7 @@ impl<'a> Parser<'a> { } } - fn parse_session_param_value(&mut self) -> Result { + fn parse_session_param_value(&self) -> Result { if self.parse_keyword(Keyword::ON) { Ok(SessionParamValue::On) } else if self.parse_keyword(Keyword::OFF) { @@ -13569,7 +13604,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_show(&mut self) -> Result { + pub fn parse_show(&self) -> Result { let terse = self.parse_keyword(Keyword::TERSE); let extended = self.parse_keyword(Keyword::EXTENDED); let full = self.parse_keyword(Keyword::FULL); @@ -13628,7 +13663,7 @@ impl<'a> Parser<'a> { } } - fn parse_show_charset(&mut self, is_shorthand: bool) -> Result { + fn parse_show_charset(&self, is_shorthand: bool) -> Result { // parse one of keywords Ok(Statement::ShowCharset(ShowCharset { is_shorthand, @@ -13636,7 
+13671,7 @@ impl<'a> Parser<'a> { })) } - fn parse_show_databases(&mut self, terse: bool) -> Result { + fn parse_show_databases(&self, terse: bool) -> Result { let history = self.parse_keyword(Keyword::HISTORY); let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowDatabases { @@ -13646,7 +13681,7 @@ impl<'a> Parser<'a> { }) } - fn parse_show_schemas(&mut self, terse: bool) -> Result { + fn parse_show_schemas(&self, terse: bool) -> Result { let history = self.parse_keyword(Keyword::HISTORY); let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowSchemas { @@ -13656,7 +13691,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_show_create(&mut self) -> Result { + pub fn parse_show_create(&self) -> Result { let obj_type = match self.expect_one_of_keywords(&[ Keyword::TABLE, Keyword::TRIGGER, @@ -13681,11 +13716,7 @@ impl<'a> Parser<'a> { Ok(Statement::ShowCreate { obj_type, obj_name }) } - pub fn parse_show_columns( - &mut self, - extended: bool, - full: bool, - ) -> Result { + pub fn parse_show_columns(&self, extended: bool, full: bool) -> Result { let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowColumns { extended, @@ -13695,7 +13726,7 @@ impl<'a> Parser<'a> { } fn parse_show_tables( - &mut self, + &self, terse: bool, extended: bool, full: bool, @@ -13713,11 +13744,7 @@ impl<'a> Parser<'a> { }) } - fn parse_show_views( - &mut self, - terse: bool, - materialized: bool, - ) -> Result { + fn parse_show_views(&self, terse: bool, materialized: bool) -> Result { let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowViews { materialized, @@ -13726,19 +13753,17 @@ impl<'a> Parser<'a> { }) } - pub fn parse_show_functions(&mut self) -> Result { + pub fn parse_show_functions(&self) -> Result { let filter = self.parse_show_statement_filter()?; Ok(Statement::ShowFunctions { filter }) } - pub fn parse_show_collation(&mut self) -> Result { + pub fn parse_show_collation(&self) -> Result { let filter = 
self.parse_show_statement_filter()?; Ok(Statement::ShowCollation { filter }) } - pub fn parse_show_statement_filter( - &mut self, - ) -> Result, ParserError> { + pub fn parse_show_statement_filter(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::LIKE) { Ok(Some(ShowStatementFilter::Like( self.parse_literal_string()?, @@ -13759,7 +13784,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_use(&mut self) -> Result { + pub fn parse_use(&self) -> Result { // Determine which keywords are recognized by the current dialect let parsed_keyword = if dialect_of!(self is HiveDialect) { // HiveDialect accepts USE DEFAULT; statement without any db specified @@ -13798,7 +13823,7 @@ impl<'a> Parser<'a> { Ok(Statement::Use(result)) } - fn parse_secondary_roles(&mut self) -> Result { + fn parse_secondary_roles(&self) -> Result { self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?; if self.parse_keyword(Keyword::NONE) { Ok(Use::SecondaryRoles(SecondaryRoles::None)) @@ -13810,7 +13835,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_table_and_joins(&mut self) -> Result { + pub fn parse_table_and_joins(&self) -> Result { let relation = self.parse_table_factor()?; // Note that for keywords to be properly handled here, they need to be // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as @@ -13819,7 +13844,7 @@ impl<'a> Parser<'a> { Ok(TableWithJoins { relation, joins }) } - fn parse_joins(&mut self) -> Result, ParserError> { + fn parse_joins(&self) -> Result, ParserError> { let mut joins = vec![]; loop { let global = self.parse_keyword(Keyword::GLOBAL); @@ -13869,7 +13894,7 @@ impl<'a> Parser<'a> { } } else { let natural = self.parse_keyword(Keyword::NATURAL); - let peek_keyword = if let Token::Word(w) = self.peek_token().token { + let peek_keyword = if let BorrowedToken::Word(w) = self.peek_token().token { w.keyword } else { Keyword::NoKeyword @@ -13990,7 +14015,7 @@ impl<'a> Parser<'a> { fn peek_parens_less_nested_join(&self) -> bool { matches!( 
self.peek_token_ref().token, - Token::Word(Word { + BorrowedToken::Word(Word { keyword: Keyword::JOIN | Keyword::INNER | Keyword::LEFT @@ -14002,14 +14027,14 @@ impl<'a> Parser<'a> { } /// A table name or a parenthesized subquery, followed by optional `[AS] alias` - pub fn parse_table_factor(&mut self) -> Result { + pub fn parse_table_factor(&self) -> Result { if self.parse_keyword(Keyword::LATERAL) { // LATERAL must always be followed by a subquery or table function. - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { self.parse_derived_table_factor(Lateral) } else { let name = self.parse_object_name(false)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let args = self.parse_optional_args()?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Function { @@ -14021,12 +14046,12 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::TABLE) { // parse table function (SELECT * FROM TABLE () [ AS ]) - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::TableFunction { expr, alias }) - } else if self.consume_token(&Token::LParen) { + } else if self.consume_token(&BorrowedToken::LParen) { // A left paren introduces either a derived table (i.e., a subquery) // or a nested join. It's nearly impossible to determine ahead of // time which it is... so we just try to parse both. 
@@ -14072,7 +14097,7 @@ impl<'a> Parser<'a> { #[allow(clippy::if_same_then_else)] if !table_and_joins.joins.is_empty() { - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), @@ -14085,7 +14110,7 @@ impl<'a> Parser<'a> { { // (B): `table_and_joins` (what we found inside the parentheses) // is a nested join `(foo JOIN bar)`, not followed by other joins. - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), @@ -14098,7 +14123,7 @@ impl<'a> Parser<'a> { // around lone table names (e.g. `FROM (mytable [AS alias])`) // and around derived tables (e.g. `FROM ((SELECT ...) // [AS alias])`) as well. - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; if let Some(outer_alias) = self.maybe_parse_table_alias()? { // Snowflake also allows specifying an alias *after* parens @@ -14141,11 +14166,11 @@ impl<'a> Parser<'a> { && matches!( self.peek_tokens(), [ - Token::Word(Word { + BorrowedToken::Word(Word { keyword: Keyword::VALUES, .. 
}), - Token::LParen + BorrowedToken::LParen ] ) { @@ -14175,9 +14200,9 @@ impl<'a> Parser<'a> { } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect) && self.parse_keyword(Keyword::UNNEST) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let array_exprs = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]); let alias = match self.maybe_parse_table_alias() { @@ -14208,15 +14233,15 @@ impl<'a> Parser<'a> { with_offset_alias, with_ordinality, }) - } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) { + } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[BorrowedToken::LParen]) { let json_expr = self.parse_expr()?; - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let json_path = self.parse_value()?.value; self.expect_keyword_is(Keyword::COLUMNS)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::JsonTable { json_expr, @@ -14224,21 +14249,23 @@ impl<'a> Parser<'a> { columns, alias, }) - } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) { + } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[BorrowedToken::LParen]) { self.prev_token(); self.parse_open_json_table_factor() - } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) { + } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[BorrowedToken::LParen]) { self.prev_token(); self.parse_xml_table_factor() } else if 
self.dialect.supports_semantic_view_table_factor() - && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen]) + && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[BorrowedToken::LParen]) { self.parse_semantic_view_table_factor() } else { let name = self.parse_object_name(true)?; let json_path = match self.peek_token().token { - Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?), + BorrowedToken::LBracket if self.dialect.supports_partiql() => { + Some(self.parse_json_path()?) + } _ => None, }; @@ -14254,7 +14281,7 @@ impl<'a> Parser<'a> { let version = self.maybe_parse_table_version()?; // Postgres, MSSQL, ClickHouse: table-valued functions: - let args = if self.consume_token(&Token::LParen) { + let args = if self.consume_token(&BorrowedToken::LParen) { Some(self.parse_table_function_args()?) } else { None @@ -14282,9 +14309,9 @@ impl<'a> Parser<'a> { // MSSQL-specific table hints: let mut with_hints = vec![]; if self.parse_keyword(Keyword::WITH) { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { with_hints = self.parse_comma_separated(Parser::parse_expr)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } else { // rewind, as WITH may belong to the next statement's CTE self.prev_token(); @@ -14328,7 +14355,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_table_sample(&mut self) -> Result>, ParserError> { + fn maybe_parse_table_sample(&self) -> Result>, ParserError> { let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) { TableSampleModifier::TableSample } else if self.parse_keyword(Keyword::SAMPLE) { @@ -14340,7 +14367,7 @@ impl<'a> Parser<'a> { } fn parse_table_sample( - &mut self, + &self, modifier: TableSampleModifier, ) -> Result, ParserError> { let name = match self.parse_one_of_keywords(&[ @@ -14356,7 +14383,7 @@ impl<'a> Parser<'a> { _ => None, }; - let parenthesized = self.consume_token(&Token::LParen); + let 
parenthesized = self.consume_token(&BorrowedToken::LParen); let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) { let selected_bucket = self.parse_number_value()?.value; @@ -14380,7 +14407,7 @@ impl<'a> Parser<'a> { Some(num) => num, None => { let next_token = self.next_token(); - if let Token::Word(w) = next_token.token { + if let BorrowedToken::Word(w) = next_token.token { Expr::Value(Value::Placeholder(w.value).with_span(next_token.span)) } else { return parser_err!( @@ -14407,7 +14434,7 @@ impl<'a> Parser<'a> { ) }; if parenthesized { - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } let seed = if self.parse_keyword(Keyword::REPEATABLE) { @@ -14435,30 +14462,30 @@ impl<'a> Parser<'a> { } fn parse_table_sample_seed( - &mut self, + &self, modifier: TableSampleSeedModifier, ) -> Result { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let value = self.parse_number_value()?.value; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(TableSampleSeed { modifier, value }) } /// Parses `OPENJSON( jsonExpression [ , path ] ) [ ]` clause, /// assuming the `OPENJSON` keyword was already consumed. 
- fn parse_open_json_table_factor(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + fn parse_open_json_table_factor(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let json_expr = self.parse_expr()?; - let json_path = if self.consume_token(&Token::Comma) { + let json_path = if self.consume_token(&BorrowedToken::Comma) { Some(self.parse_value()?.value) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let columns = if self.parse_keyword(Keyword::WITH) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; columns } else { Vec::new() @@ -14472,13 +14499,13 @@ impl<'a> Parser<'a> { }) } - fn parse_xml_table_factor(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + fn parse_xml_table_factor(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::Comma)?; namespaces } else { vec![] @@ -14487,7 +14514,7 @@ impl<'a> Parser<'a> { let passing = self.parse_xml_passing_clause()?; self.expect_keyword_is(Keyword::COLUMNS)?; let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::XmlTable { namespaces, @@ -14498,14 +14525,14 @@ impl<'a> Parser<'a> { }) } - fn parse_xml_namespace_definition(&mut self) -> Result { + fn 
parse_xml_namespace_definition(&self) -> Result { let uri = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; let name = self.parse_identifier()?; Ok(XmlNamespaceDefinition { uri, name }) } - fn parse_xml_table_column(&mut self) -> Result { + fn parse_xml_table_column(&self) -> Result { let name = self.parse_identifier()?; let option = if self.parse_keyword(Keyword::FOR) { @@ -14540,7 +14567,7 @@ impl<'a> Parser<'a> { Ok(XmlTableColumn { name, option }) } - fn parse_xml_passing_clause(&mut self) -> Result { + fn parse_xml_passing_clause(&self) -> Result { let mut arguments = vec![]; if self.parse_keyword(Keyword::PASSING) { loop { @@ -14557,7 +14584,7 @@ impl<'a> Parser<'a> { alias, by_value, }); - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { break; } } @@ -14566,9 +14593,9 @@ impl<'a> Parser<'a> { } /// Parse a [TableFactor::SemanticView] - fn parse_semantic_view_table_factor(&mut self) -> Result { + fn parse_semantic_view_table_factor(&self) -> Result { self.expect_keyword(Keyword::SEMANTIC_VIEW)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let name = self.parse_object_name(true)?; @@ -14578,7 +14605,7 @@ impl<'a> Parser<'a> { let mut facts = Vec::new(); let mut where_clause = None; - while self.peek_token().token != Token::RParen { + while self.peek_token().token != BorrowedToken::RParen { if self.parse_keyword(Keyword::DIMENSIONS) { if !dimensions.is_empty() { return Err(ParserError::ParserError( @@ -14618,7 +14645,7 @@ impl<'a> Parser<'a> { } } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; @@ -14632,8 +14659,8 @@ impl<'a> Parser<'a> { }) } - fn parse_match_recognize(&mut self, table: TableFactor) -> Result { - self.expect_token(&Token::LParen)?; + fn parse_match_recognize(&self, table: TableFactor) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let partition_by = if 
self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { self.parse_comma_separated(Parser::parse_expr)? @@ -14720,7 +14747,7 @@ impl<'a> Parser<'a> { Ok(SymbolDefinition { symbol, definition }) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; @@ -14737,34 +14764,34 @@ impl<'a> Parser<'a> { }) } - fn parse_base_pattern(&mut self) -> Result { + fn parse_base_pattern(&self) -> Result { match self.next_token().token { - Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)), - Token::Placeholder(s) if s == "$" => { + BorrowedToken::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)), + BorrowedToken::Placeholder(s) if s == "$" => { Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End)) } - Token::LBrace => { - self.expect_token(&Token::Minus)?; + BorrowedToken::LBrace => { + self.expect_token(&BorrowedToken::Minus)?; let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?; - self.expect_token(&Token::Minus)?; - self.expect_token(&Token::RBrace)?; + self.expect_token(&BorrowedToken::Minus)?; + self.expect_token(&BorrowedToken::RBrace)?; Ok(MatchRecognizePattern::Exclude(symbol)) } - Token::Word(Word { + BorrowedToken::Word(Word { value, quote_style: None, .. 
}) if value == "PERMUTE" => { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let symbols = self.parse_comma_separated(|p| { p.parse_identifier().map(MatchRecognizeSymbol::Named) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(MatchRecognizePattern::Permute(symbols)) } - Token::LParen => { + BorrowedToken::LParen => { let pattern = self.parse_pattern()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(MatchRecognizePattern::Group(Box::new(pattern))) } _ => { @@ -14776,37 +14803,39 @@ impl<'a> Parser<'a> { } } - fn parse_repetition_pattern(&mut self) -> Result { + fn parse_repetition_pattern(&self) -> Result { let mut pattern = self.parse_base_pattern()?; loop { let token = self.next_token(); let quantifier = match token.token { - Token::Mul => RepetitionQuantifier::ZeroOrMore, - Token::Plus => RepetitionQuantifier::OneOrMore, - Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne, - Token::LBrace => { + BorrowedToken::Mul => RepetitionQuantifier::ZeroOrMore, + BorrowedToken::Plus => RepetitionQuantifier::OneOrMore, + BorrowedToken::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne, + BorrowedToken::LBrace => { // quantifier is a range like {n} or {n,} or {,m} or {n,m} let token = self.next_token(); match token.token { - Token::Comma => { + BorrowedToken::Comma => { let next_token = self.next_token(); - let Token::Number(n, _) = next_token.token else { + let BorrowedToken::Number(n, _) = next_token.token else { return self.expected("literal number", next_token); }; - self.expect_token(&Token::RBrace)?; + self.expect_token(&BorrowedToken::RBrace)?; RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?) 
} - Token::Number(n, _) if self.consume_token(&Token::Comma) => { + BorrowedToken::Number(n, _) + if self.consume_token(&BorrowedToken::Comma) => + { let next_token = self.next_token(); match next_token.token { - Token::Number(m, _) => { - self.expect_token(&Token::RBrace)?; + BorrowedToken::Number(m, _) => { + self.expect_token(&BorrowedToken::RBrace)?; RepetitionQuantifier::Range( Self::parse(n, token.span.start)?, Self::parse(m, token.span.start)?, ) } - Token::RBrace => { + BorrowedToken::RBrace => { RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?) } _ => { @@ -14814,8 +14843,8 @@ impl<'a> Parser<'a> { } } } - Token::Number(n, _) => { - self.expect_token(&Token::RBrace)?; + BorrowedToken::Number(n, _) => { + self.expect_token(&BorrowedToken::RBrace)?; RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?) } _ => return self.expected("quantifier range", token), @@ -14831,9 +14860,12 @@ impl<'a> Parser<'a> { Ok(pattern) } - fn parse_concat_pattern(&mut self) -> Result { + fn parse_concat_pattern(&self) -> Result { let mut patterns = vec![self.parse_repetition_pattern()?]; - while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) { + while !matches!( + self.peek_token().token, + BorrowedToken::RParen | BorrowedToken::Pipe + ) { patterns.push(self.parse_repetition_pattern()?); } match <[MatchRecognizePattern; 1]>::try_from(patterns) { @@ -14842,9 +14874,9 @@ impl<'a> Parser<'a> { } } - fn parse_pattern(&mut self) -> Result { + fn parse_pattern(&self) -> Result { let pattern = self.parse_concat_pattern()?; - if self.consume_token(&Token::Pipe) { + if self.consume_token(&BorrowedToken::Pipe) { match self.parse_pattern()? { // flatten nested alternations MatchRecognizePattern::Alternation(mut patterns) => { @@ -14859,7 +14891,7 @@ impl<'a> Parser<'a> { } /// Parses a the timestamp version specifier (i.e. 
query historical data) - pub fn maybe_parse_table_version(&mut self) -> Result, ParserError> { + pub fn maybe_parse_table_version(&self) -> Result, ParserError> { if self.dialect.supports_timestamp_versioning() { if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) { @@ -14876,7 +14908,7 @@ impl<'a> Parser<'a> { /// Parses MySQL's JSON_TABLE column definition. /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR` - pub fn parse_json_table_column_def(&mut self) -> Result { + pub fn parse_json_table_column_def(&self) -> Result { if self.parse_keyword(Keyword::NESTED) { let _has_path_keyword = self.parse_keyword(Keyword::PATH); let path = self.parse_value()?.value; @@ -14925,10 +14957,10 @@ impl<'a> Parser<'a> { /// ``` /// /// Reference: - pub fn parse_openjson_table_column_def(&mut self) -> Result { + pub fn parse_openjson_table_column_def(&self) -> Result { let name = self.parse_identifier()?; let r#type = self.parse_data_type()?; - let path = if let Token::SingleQuotedString(path) = self.peek_token().token { + let path = if let BorrowedToken::SingleQuotedString(path) = self.peek_token().token { self.next_token(); Some(path) } else { @@ -14947,7 +14979,7 @@ impl<'a> Parser<'a> { } fn parse_json_table_column_error_handling( - &mut self, + &self, ) -> Result, ParserError> { let res = if self.parse_keyword(Keyword::NULL) { JsonTableColumnErrorHandling::Null @@ -14963,11 +14995,11 @@ impl<'a> Parser<'a> { } pub fn parse_derived_table_factor( - &mut self, + &self, lateral: IsLateral, ) -> Result { let subquery = self.parse_query()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Derived { lateral: match lateral { @@ -14979,9 +15011,9 @@ impl<'a> Parser<'a> { }) } - fn parse_aliased_function_call(&mut self) -> Result { + fn parse_aliased_function_call(&self) -> Result { let function_name = match 
self.next_token().token { - Token::Word(w) => Ok(w.value), + BorrowedToken::Word(w) => Ok(w.value), _ => self.expected("a function identifier", self.peek_token()), }?; let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?; @@ -15015,7 +15047,7 @@ impl<'a> Parser<'a> { /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value)); /// # Ok(()) /// # } - pub fn parse_expr_with_alias(&mut self) -> Result { + pub fn parse_expr_with_alias(&self) -> Result { let expr = self.parse_expr()?; let alias = if self.parse_keyword(Keyword::AS) { Some(self.parse_identifier()?) @@ -15026,14 +15058,11 @@ impl<'a> Parser<'a> { Ok(ExprWithAlias { expr, alias }) } - pub fn parse_pivot_table_factor( - &mut self, - table: TableFactor, - ) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_pivot_table_factor(&self, table: TableFactor) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?; self.expect_keyword_is(Keyword::FOR)?; - let value_column = if self.peek_token_ref().token == Token::LParen { + let value_column = if self.peek_token_ref().token == BorrowedToken::LParen { self.parse_parenthesized_column_list_inner(Mandatory, false, |p| { p.parse_subexpr(self.dialect.prec_value(Precedence::Between)) })? @@ -15042,7 +15071,7 @@ impl<'a> Parser<'a> { }; self.expect_keyword_is(Keyword::IN)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let value_source = if self.parse_keyword(Keyword::ANY) { let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { self.parse_comma_separated(Parser::parse_order_by_expr)? @@ -15055,19 +15084,19 @@ impl<'a> Parser<'a> { } else { PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?) 
}; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let default_on_null = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let expr = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(expr) } else { None }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Pivot { table: Box::new(table), @@ -15080,7 +15109,7 @@ impl<'a> Parser<'a> { } pub fn parse_unpivot_table_factor( - &mut self, + &self, table: TableFactor, ) -> Result { let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) { @@ -15092,7 +15121,7 @@ impl<'a> Parser<'a> { } else { None }; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let value = self.parse_expr()?; self.expect_keyword_is(Keyword::FOR)?; let name = self.parse_identifier()?; @@ -15100,7 +15129,7 @@ impl<'a> Parser<'a> { let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| { p.parse_expr_with_alias() })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Unpivot { table: Box::new(table), @@ -15112,7 +15141,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_join_constraint(&mut self, natural: bool) -> Result { + pub fn parse_join_constraint(&self, natural: bool) -> Result { if natural { Ok(JoinConstraint::Natural) } else if self.parse_keyword(Keyword::ON) { @@ -15128,7 +15157,7 @@ impl<'a> Parser<'a> { } /// Parse a GRANT statement. 
- pub fn parse_grant(&mut self) -> Result { + pub fn parse_grant(&self) -> Result { let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; self.expect_keyword_is(Keyword::TO)?; @@ -15169,7 +15198,7 @@ impl<'a> Parser<'a> { }) } - fn parse_grantees(&mut self) -> Result, ParserError> { + fn parse_grantees(&self) -> Result, ParserError> { let mut values = vec![]; let mut grantee_type = GranteesType::None; loop { @@ -15210,7 +15239,7 @@ impl<'a> Parser<'a> { } } else { let mut name = self.parse_grantee_name()?; - if self.consume_token(&Token::Colon) { + if self.consume_token(&BorrowedToken::Colon) { // Redshift supports namespace prefix for external users and groups: // : or : // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html @@ -15229,7 +15258,7 @@ impl<'a> Parser<'a> { values.push(grantee); - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { break; } } @@ -15238,7 +15267,7 @@ impl<'a> Parser<'a> { } pub fn parse_grant_deny_revoke_privileges_objects( - &mut self, + &self, ) -> Result<(Privileges, Option), ParserError> { let privileges = if self.parse_keyword(Keyword::ALL) { Privileges::All { @@ -15424,13 +15453,13 @@ impl<'a> Parser<'a> { } fn parse_grant_procedure_or_function( - &mut self, + &self, name: &ObjectName, kw: &Option, ) -> Result, ParserError> { - let arg_types = if self.consume_token(&Token::LParen) { - let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?; - self.expect_token(&Token::RParen)?; + let arg_types = if self.consume_token(&BorrowedToken::LParen) { + let list = self.parse_comma_separated0(Self::parse_data_type, BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; list } else { vec![] @@ -15448,8 +15477,8 @@ impl<'a> Parser<'a> { } } - pub fn parse_grant_permission(&mut self) -> Result { - fn parse_columns(parser: &mut Parser) -> Result>, ParserError> { + pub fn parse_grant_permission(&self) -> 
Result { + fn parse_columns(parser: &Parser) -> Result>, ParserError> { let columns = parser.parse_parenthesized_column_list(Optional, false)?; if columns.is_empty() { Ok(None) @@ -15568,7 +15597,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_action_create_object_type(&mut self) -> Option { + fn maybe_parse_action_create_object_type(&self) -> Option { // Multi-word object types if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) { Some(ActionCreateObjectType::ApplicationPackage) @@ -15611,7 +15640,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_apply_type(&mut self) -> Result { + fn parse_action_apply_type(&self) -> Result { if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) { Ok(ActionApplyType::AggregationPolicy) } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) { @@ -15637,7 +15666,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_action_execute_obj_type(&mut self) -> Option { + fn maybe_parse_action_execute_obj_type(&self) -> Option { if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) { Some(ActionExecuteObjectType::DataMetricFunction) } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) { @@ -15653,7 +15682,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_manage_type(&mut self) -> Result { + fn parse_action_manage_type(&self) -> Result { if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) { Ok(ActionManageType::AccountSupportCases) } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) { @@ -15673,7 +15702,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_modify_type(&mut self) -> Option { + fn parse_action_modify_type(&self) -> Option { if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) { Some(ActionModifyType::LogLevel) } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) { @@ -15687,7 +15716,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_monitor_type(&mut self) -> Option { + fn parse_action_monitor_type(&self) 
-> Option { if self.parse_keyword(Keyword::EXECUTION) { Some(ActionMonitorType::Execution) } else if self.parse_keyword(Keyword::SECURITY) { @@ -15699,12 +15728,12 @@ impl<'a> Parser<'a> { } } - pub fn parse_grantee_name(&mut self) -> Result { + pub fn parse_grantee_name(&self) -> Result { let mut name = self.parse_object_name(false)?; if self.dialect.supports_user_host_grantee() && name.0.len() == 1 && name.0[0].as_ident().is_some() - && self.consume_token(&Token::AtSign) + && self.consume_token(&BorrowedToken::AtSign) { let user = name.0.pop().unwrap().as_ident().unwrap().clone(); let host = self.parse_identifier()?; @@ -15715,7 +15744,7 @@ impl<'a> Parser<'a> { } /// Parse [`Statement::Deny`] - pub fn parse_deny(&mut self) -> Result { + pub fn parse_deny(&self) -> Result { self.expect_keyword(Keyword::DENY)?; let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; @@ -15748,7 +15777,7 @@ impl<'a> Parser<'a> { } /// Parse a REVOKE statement - pub fn parse_revoke(&mut self) -> Result { + pub fn parse_revoke(&self) -> Result { let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; self.expect_keyword_is(Keyword::FROM)?; @@ -15772,10 +15801,7 @@ impl<'a> Parser<'a> { } /// Parse an REPLACE statement - pub fn parse_replace( - &mut self, - replace_token: TokenWithSpan, - ) -> Result { + pub fn parse_replace(&self, replace_token: TokenWithSpan) -> Result { if !dialect_of!(self is MySqlDialect | GenericDialect) { return parser_err!( "Unsupported statement REPLACE", @@ -15795,14 +15821,14 @@ impl<'a> Parser<'a> { /// /// This is used to reduce the size of the stack frames in debug builds fn parse_insert_setexpr_boxed( - &mut self, + &self, insert_token: TokenWithSpan, ) -> Result, ParserError> { Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?))) } /// Parse an INSERT statement - pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result { + pub fn parse_insert(&self, insert_token: TokenWithSpan) -> 
Result { let or = self.parse_conflict_clause(); let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) { None @@ -15922,7 +15948,7 @@ impl<'a> Parser<'a> { let conflict_target = if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) { Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?)) - } else if self.peek_token() == Token::LParen { + } else if self.peek_token() == BorrowedToken::LParen { Some(ConflictTarget::Columns( self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?, )) @@ -15971,7 +15997,7 @@ impl<'a> Parser<'a> { }; Ok(Statement::Insert(Insert { - insert_token: insert_token.into(), + insert_token: insert_token.to_static().into(), or, table: table_object, table_alias, @@ -15998,7 +16024,7 @@ impl<'a> Parser<'a> { // Parses input format clause used for [ClickHouse]. // // - pub fn parse_input_format_clause(&mut self) -> Result { + pub fn parse_input_format_clause(&self) -> Result { let ident = self.parse_identifier()?; let values = self .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))? @@ -16009,13 +16035,13 @@ impl<'a> Parser<'a> { /// Returns true if the immediate tokens look like the /// beginning of a subquery. 
`(SELECT ...` - fn peek_subquery_start(&mut self) -> bool { + fn peek_subquery_start(&self) -> bool { let [maybe_lparen, maybe_select] = self.peek_tokens(); - Token::LParen == maybe_lparen - && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT) + BorrowedToken::LParen == maybe_lparen + && matches!(maybe_select, BorrowedToken::Word(w) if w.keyword == Keyword::SELECT) } - fn parse_conflict_clause(&mut self) -> Option { + fn parse_conflict_clause(&self) -> Option { if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { Some(SqliteOnConflict::Replace) } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { @@ -16033,20 +16059,18 @@ impl<'a> Parser<'a> { } } - pub fn parse_insert_partition(&mut self) -> Result>, ParserError> { + pub fn parse_insert_partition(&self) -> Result>, ParserError> { if self.parse_keyword(Keyword::PARTITION) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(partition_cols) } else { Ok(None) } } - pub fn parse_load_data_table_format( - &mut self, - ) -> Result, ParserError> { + pub fn parse_load_data_table_format(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::INPUTFORMAT) { let input_format = self.parse_expr()?; self.expect_keyword_is(Keyword::SERDE)?; @@ -16064,13 +16088,13 @@ impl<'a> Parser<'a> { /// /// This is used to reduce the size of the stack frames in debug builds fn parse_update_setexpr_boxed( - &mut self, + &self, update_token: TokenWithSpan, ) -> Result, ParserError> { Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?))) } - pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result { + pub fn parse_update(&self, update_token: TokenWithSpan) -> Result { let or = self.parse_conflict_clause(); let table = self.parse_table_and_joins()?; let from_before_set = if 
self.parse_keyword(Keyword::FROM) { @@ -16105,7 +16129,7 @@ impl<'a> Parser<'a> { None }; Ok(Update { - update_token: update_token.into(), + update_token: update_token.to_static().into(), table, assignments, from, @@ -16118,18 +16142,18 @@ impl<'a> Parser<'a> { } /// Parse a `var = expr` assignment, used in an UPDATE statement - pub fn parse_assignment(&mut self) -> Result { + pub fn parse_assignment(&self) -> Result { let target = self.parse_assignment_target()?; - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let value = self.parse_expr()?; Ok(Assignment { target, value }) } /// Parse the left-hand side of an assignment, used in an UPDATE statement - pub fn parse_assignment_target(&mut self) -> Result { - if self.consume_token(&Token::LParen) { + pub fn parse_assignment_target(&self) -> Result { + if self.consume_token(&BorrowedToken::LParen) { let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(AssignmentTarget::Tuple(columns)) } else { let column = self.parse_object_name(false)?; @@ -16137,7 +16161,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_function_args(&mut self) -> Result { + pub fn parse_function_args(&self) -> Result { let arg = if self.dialect.supports_named_fn_args_with_expr_name() { self.maybe_parse(|p| { let name = p.parse_expr()?; @@ -16167,26 +16191,26 @@ impl<'a> Parser<'a> { Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) } - fn parse_function_named_arg_operator(&mut self) -> Result { + fn parse_function_named_arg_operator(&self) -> Result { if self.parse_keyword(Keyword::VALUE) { return Ok(FunctionArgOperator::Value); } let tok = self.next_token(); match tok.token { - Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => { + BorrowedToken::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => { Ok(FunctionArgOperator::RightArrow) } - Token::Eq if 
self.dialect.supports_named_fn_args_with_eq_operator() => { + BorrowedToken::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => { Ok(FunctionArgOperator::Equals) } - Token::Assignment + BorrowedToken::Assignment if self .dialect .supports_named_fn_args_with_assignment_operator() => { Ok(FunctionArgOperator::Assignment) } - Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => { + BorrowedToken::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => { Ok(FunctionArgOperator::Colon) } _ => { @@ -16196,18 +16220,18 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_args(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::RParen) { + pub fn parse_optional_args(&self) -> Result, ParserError> { + if self.consume_token(&BorrowedToken::RParen) { Ok(vec![]) } else { let args = self.parse_comma_separated(Parser::parse_function_args)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(args) } } - fn parse_table_function_args(&mut self) -> Result { - if self.consume_token(&Token::RParen) { + fn parse_table_function_args(&self) -> Result { + if self.consume_token(&BorrowedToken::RParen) { return Ok(TableFunctionArgs { args: vec![], settings: None, @@ -16223,7 +16247,7 @@ impl<'a> Parser<'a> { break None; } }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(TableFunctionArgs { args, settings }) } @@ -16235,7 +16259,7 @@ impl<'a> Parser<'a> { /// FIRST_VALUE(x ORDER BY 1,2,3); /// FIRST_VALUE(x IGNORE NULL); /// ``` - fn parse_function_argument_list(&mut self) -> Result { + fn parse_function_argument_list(&self) -> Result { let mut clauses = vec![]; // Handle clauses that may exist with an empty argument list @@ -16250,7 +16274,7 @@ impl<'a> Parser<'a> { )); } - if self.consume_token(&Token::RParen) { + if self.consume_token(&BorrowedToken::RParen) { return Ok(FunctionArgumentList { duplicate_treatment: None, args: vec![], @@ -16311,7 
+16335,7 @@ impl<'a> Parser<'a> { )); } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(FunctionArgumentList { duplicate_treatment, args, @@ -16319,7 +16343,7 @@ impl<'a> Parser<'a> { }) } - fn parse_json_null_clause(&mut self) -> Option { + fn parse_json_null_clause(&self) -> Option { if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) { Some(JsonNullClause::AbsentOnNull) } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) { @@ -16330,7 +16354,7 @@ impl<'a> Parser<'a> { } fn maybe_parse_json_returning_clause( - &mut self, + &self, ) -> Result, ParserError> { if self.parse_keyword(Keyword::RETURNING) { let data_type = self.parse_data_type()?; @@ -16340,7 +16364,7 @@ impl<'a> Parser<'a> { } } - fn parse_duplicate_treatment(&mut self) -> Result, ParserError> { + fn parse_duplicate_treatment(&self) -> Result, ParserError> { let loc = self.peek_token().span.start; match ( self.parse_keyword(Keyword::ALL), @@ -16354,7 +16378,7 @@ impl<'a> Parser<'a> { } /// Parse a comma-delimited list of projections after SELECT - pub fn parse_select_item(&mut self) -> Result { + pub fn parse_select_item(&self) -> Result { let prefix = self .parse_one_of_keywords( self.dialect @@ -16395,7 +16419,7 @@ impl<'a> Parser<'a> { }) } expr if self.dialect.supports_select_expr_star() - && self.consume_tokens(&[Token::Period, Token::Mul]) => + && self.consume_tokens(&[BorrowedToken::Period, BorrowedToken::Mul]) => { let wildcard_token = self.get_previous_token().clone(); Ok(SelectItem::QualifiedWildcard( @@ -16419,7 +16443,7 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. 
pub fn parse_wildcard_additional_options( - &mut self, + &self, wildcard_token: TokenWithSpan, ) -> Result { let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) { @@ -16451,7 +16475,7 @@ impl<'a> Parser<'a> { }; Ok(WildcardAdditionalOptions { - wildcard_token: wildcard_token.into(), + wildcard_token: wildcard_token.to_static().into(), opt_ilike, opt_exclude, opt_except, @@ -16463,13 +16487,11 @@ impl<'a> Parser<'a> { /// Parse an [`Ilike`](IlikeSelectItem) information for wildcard select items. /// /// If it is not possible to parse it, will return an option. - pub fn parse_optional_select_item_ilike( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_select_item_ilike(&self) -> Result, ParserError> { let opt_ilike = if self.parse_keyword(Keyword::ILIKE) { let next_token = self.next_token(); let pattern = match next_token.token { - Token::SingleQuotedString(s) => s, + BorrowedToken::SingleQuotedString(s) => s, _ => return self.expected("ilike pattern", next_token), }; Some(IlikeSelectItem { pattern }) @@ -16483,12 +16505,12 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. pub fn parse_optional_select_item_exclude( - &mut self, + &self, ) -> Result, ParserError> { let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(ExcludeSelectItem::Multiple(columns)) } else { let column = self.parse_identifier()?; @@ -16505,10 +16527,10 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. 
pub fn parse_optional_select_item_except( - &mut self, + &self, ) -> Result, ParserError> { let opt_except = if self.parse_keyword(Keyword::EXCEPT) { - if self.peek_token().token == Token::LParen { + if self.peek_token().token == BorrowedToken::LParen { let idents = self.parse_parenthesized_column_list(Mandatory, false)?; match &idents[..] { [] => { @@ -16539,13 +16561,13 @@ impl<'a> Parser<'a> { /// Parse a [`Rename`](RenameSelectItem) information for wildcard select items. pub fn parse_optional_select_item_rename( - &mut self, + &self, ) -> Result, ParserError> { let opt_rename = if self.parse_keyword(Keyword::RENAME) { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let idents = self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(RenameSelectItem::Multiple(idents)) } else { let ident = self.parse_identifier_with_alias()?; @@ -16560,14 +16582,14 @@ impl<'a> Parser<'a> { /// Parse a [`Replace`](ReplaceSelectItem) information for wildcard select items. 
pub fn parse_optional_select_item_replace( - &mut self, + &self, ) -> Result, ParserError> { let opt_replace = if self.parse_keyword(Keyword::REPLACE) { - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let items = self.parse_comma_separated(|parser| { Ok(Box::new(parser.parse_replace_elements()?)) })?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(ReplaceSelectItem { items }) } else { let tok = self.next_token(); @@ -16579,7 +16601,7 @@ impl<'a> Parser<'a> { Ok(opt_replace) } - pub fn parse_replace_elements(&mut self) -> Result { + pub fn parse_replace_elements(&self) -> Result { let expr = self.parse_expr()?; let as_keyword = self.parse_keyword(Keyword::AS); let ident = self.parse_identifier()?; @@ -16592,7 +16614,7 @@ impl<'a> Parser<'a> { /// Parse ASC or DESC, returns an Option with true if ASC, false of DESC or `None` if none of /// them. - pub fn parse_asc_desc(&mut self) -> Option { + pub fn parse_asc_desc(&self) -> Option { if self.parse_keyword(Keyword::ASC) { Some(true) } else if self.parse_keyword(Keyword::DESC) { @@ -16603,13 +16625,13 @@ impl<'a> Parser<'a> { } /// Parse an [OrderByExpr] expression. - pub fn parse_order_by_expr(&mut self) -> Result { + pub fn parse_order_by_expr(&self) -> Result { self.parse_order_by_expr_inner(false) .map(|(order_by, _)| order_by) } /// Parse an [IndexColumn]. 
- pub fn parse_create_index_expr(&mut self) -> Result { + pub fn parse_create_index_expr(&self) -> Result { self.parse_order_by_expr_inner(true) .map(|(column, operator_class)| IndexColumn { column, @@ -16618,7 +16640,7 @@ impl<'a> Parser<'a> { } fn parse_order_by_expr_inner( - &mut self, + &self, with_operator_class: bool, ) -> Result<(OrderByExpr, Option), ParserError> { let expr = self.parse_expr()?; @@ -16658,7 +16680,7 @@ impl<'a> Parser<'a> { )) } - fn parse_order_by_options(&mut self) -> Result { + fn parse_order_by_options(&self) -> Result { let asc = self.parse_asc_desc(); let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) { @@ -16674,7 +16696,7 @@ impl<'a> Parser<'a> { // Parse a WITH FILL clause (ClickHouse dialect) // that follow the WITH FILL keywords in a ORDER BY clause - pub fn parse_with_fill(&mut self) -> Result { + pub fn parse_with_fill(&self) -> Result { let from = if self.parse_keyword(Keyword::FROM) { Some(self.parse_expr()?) } else { @@ -16698,15 +16720,15 @@ impl<'a> Parser<'a> { // Parse a set of comma separated INTERPOLATE expressions (ClickHouse dialect) // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier - pub fn parse_interpolations(&mut self) -> Result, ParserError> { + pub fn parse_interpolations(&self) -> Result, ParserError> { if !self.parse_keyword(Keyword::INTERPOLATE) { return Ok(None); } - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let interpolations = - self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?; - self.expect_token(&Token::RParen)?; + self.parse_comma_separated0(|p| p.parse_interpolation(), BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; // INTERPOLATE () and INTERPOLATE ( ... 
) variants return Ok(Some(Interpolate { exprs: Some(interpolations), @@ -16718,7 +16740,7 @@ impl<'a> Parser<'a> { } // Parse a INTERPOLATE expression (ClickHouse dialect) - pub fn parse_interpolation(&mut self) -> Result { + pub fn parse_interpolation(&self) -> Result { let column = self.parse_identifier()?; let expr = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) @@ -16730,15 +16752,15 @@ impl<'a> Parser<'a> { /// Parse a TOP clause, MSSQL equivalent of LIMIT, /// that follows after `SELECT [DISTINCT]`. - pub fn parse_top(&mut self) -> Result { - let quantity = if self.consume_token(&Token::LParen) { + pub fn parse_top(&self) -> Result { + let quantity = if self.consume_token(&BorrowedToken::LParen) { let quantity = self.parse_expr()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(TopQuantity::Expr(quantity)) } else { let next_token = self.next_token(); let quantity = match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.span.start)?, + BorrowedToken::Number(s, _) => Self::parse::(s, next_token.span.start)?, _ => self.expected("literal int", next_token)?, }; Some(TopQuantity::Constant(quantity)) @@ -16756,7 +16778,7 @@ impl<'a> Parser<'a> { } /// Parse a LIMIT clause - pub fn parse_limit(&mut self) -> Result, ParserError> { + pub fn parse_limit(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::ALL) { Ok(None) } else { @@ -16765,7 +16787,7 @@ impl<'a> Parser<'a> { } /// Parse an OFFSET clause - pub fn parse_offset(&mut self) -> Result { + pub fn parse_offset(&self) -> Result { let value = self.parse_expr()?; let rows = if self.parse_keyword(Keyword::ROW) { OffsetRows::Row @@ -16778,7 +16800,7 @@ impl<'a> Parser<'a> { } /// Parse a FETCH clause - pub fn parse_fetch(&mut self) -> Result { + pub fn parse_fetch(&self) -> Result { let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]); let (quantity, percent) = if self @@ -16807,7 +16829,7 @@ impl<'a> 
Parser<'a> { } /// Parse a FOR UPDATE/FOR SHARE clause - pub fn parse_lock(&mut self) -> Result { + pub fn parse_lock(&self) -> Result { let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? { Keyword::UPDATE => LockType::Update, Keyword::SHARE => LockType::Share, @@ -16833,7 +16855,7 @@ impl<'a> Parser<'a> { } pub fn parse_values( - &mut self, + &self, allow_empty: bool, value_keyword: bool, ) -> Result { @@ -16844,13 +16866,13 @@ impl<'a> Parser<'a> { explicit_row = true; } - parser.expect_token(&Token::LParen)?; - if allow_empty && parser.peek_token().token == Token::RParen { + parser.expect_token(&BorrowedToken::LParen)?; + if allow_empty && parser.peek_token().token == BorrowedToken::RParen { parser.next_token(); Ok(vec![]) } else { let exprs = parser.parse_comma_separated(Parser::parse_expr)?; - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::RParen)?; Ok(exprs) } })?; @@ -16861,7 +16883,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_start_transaction(&mut self) -> Result { + pub fn parse_start_transaction(&self) -> Result { self.expect_keyword_is(Keyword::TRANSACTION)?; Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, @@ -16874,7 +16896,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_begin(&mut self) -> Result { + pub fn parse_begin(&self) -> Result { let modifier = if !self.dialect.supports_start_transaction_modifier() { None } else if self.parse_keyword(Keyword::DEFERRED) { @@ -16906,7 +16928,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_begin_exception_end(&mut self) -> Result { + pub fn parse_begin_exception_end(&self) -> Result { let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?; let exception = if self.parse_keyword(Keyword::EXCEPTION) { @@ -16925,7 +16947,7 @@ impl<'a> Parser<'a> { let ident = self.parse_identifier()?; idents.push(ident); - self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?; + let _ = self.parse_keyword(Keyword::OR); } 
let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?; @@ -16951,7 +16973,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_end(&mut self) -> Result { + pub fn parse_end(&self) -> Result { let modifier = if !self.dialect.supports_end_transaction_modifier() { None } else if self.parse_keyword(Keyword::TRY) { @@ -16968,7 +16990,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_transaction_modes(&mut self) -> Result, ParserError> { + pub fn parse_transaction_modes(&self) -> Result, ParserError> { let mut modes = vec![]; let mut required = false; loop { @@ -17001,12 +17023,12 @@ impl<'a> Parser<'a> { // PostgreSQL, for historical reasons, does not. We follow // PostgreSQL in making the comma optional, since that is strictly // more general. - required = self.consume_token(&Token::Comma); + required = self.consume_token(&BorrowedToken::Comma); } Ok(modes) } - pub fn parse_commit(&mut self) -> Result { + pub fn parse_commit(&self) -> Result { Ok(Statement::Commit { chain: self.parse_commit_rollback_chain()?, end: false, @@ -17014,14 +17036,14 @@ impl<'a> Parser<'a> { }) } - pub fn parse_rollback(&mut self) -> Result { + pub fn parse_rollback(&self) -> Result { let chain = self.parse_commit_rollback_chain()?; let savepoint = self.parse_rollback_savepoint()?; Ok(Statement::Rollback { chain, savepoint }) } - pub fn parse_commit_rollback_chain(&mut self) -> Result { + pub fn parse_commit_rollback_chain(&self) -> Result { let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); if self.parse_keyword(Keyword::AND) { let chain = !self.parse_keyword(Keyword::NO); @@ -17032,7 +17054,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_rollback_savepoint(&mut self) -> Result, ParserError> { + pub fn parse_rollback_savepoint(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::TO) { let _ = self.parse_keyword(Keyword::SAVEPOINT); let savepoint = self.parse_identifier()?; @@ -17044,19 +17066,19 @@ impl<'a> Parser<'a> { } /// Parse a 'RAISERROR' 
statement - pub fn parse_raiserror(&mut self) -> Result { - self.expect_token(&Token::LParen)?; + pub fn parse_raiserror(&self) -> Result { + self.expect_token(&BorrowedToken::LParen)?; let message = Box::new(self.parse_expr()?); - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let severity = Box::new(self.parse_expr()?); - self.expect_token(&Token::Comma)?; + self.expect_token(&BorrowedToken::Comma)?; let state = Box::new(self.parse_expr()?); - let arguments = if self.consume_token(&Token::Comma) { + let arguments = if self.consume_token(&BorrowedToken::Comma) { self.parse_comma_separated(Parser::parse_expr)? } else { vec![] }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; let options = if self.parse_keyword(Keyword::WITH) { self.parse_comma_separated(Parser::parse_raiserror_option)? } else { @@ -17071,7 +17093,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_raiserror_option(&mut self) -> Result { + pub fn parse_raiserror_option(&self) -> Result { match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? 
{ Keyword::LOG => Ok(RaisErrorOption::Log), Keyword::NOWAIT => Ok(RaisErrorOption::NoWait), @@ -17083,13 +17105,13 @@ impl<'a> Parser<'a> { } } - pub fn parse_deallocate(&mut self) -> Result { + pub fn parse_deallocate(&self) -> Result { let prepare = self.parse_keyword(Keyword::PREPARE); let name = self.parse_identifier()?; Ok(Statement::Deallocate { name, prepare }) } - pub fn parse_execute(&mut self) -> Result { + pub fn parse_execute(&self) -> Result { let name = if self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE) { @@ -17099,20 +17121,22 @@ impl<'a> Parser<'a> { Some(name) }; - let has_parentheses = self.consume_token(&Token::LParen); + let has_parentheses = self.consume_token(&BorrowedToken::LParen); let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT]; let end_token = match (has_parentheses, self.peek_token().token) { - (true, _) => Token::RParen, - (false, Token::EOF) => Token::EOF, - (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w), - (false, _) => Token::SemiColon, + (true, _) => BorrowedToken::RParen, + (false, BorrowedToken::EOF) => BorrowedToken::EOF, + (false, BorrowedToken::Word(w)) if end_kws.contains(&w.keyword) => { + BorrowedToken::Word(w) + } + (false, _) => BorrowedToken::SemiColon, }; let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?; if has_parentheses { - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } let into = if self.parse_keyword(Keyword::INTO) { @@ -17143,13 +17167,13 @@ impl<'a> Parser<'a> { }) } - pub fn parse_prepare(&mut self) -> Result { + pub fn parse_prepare(&self) -> Result { let name = self.parse_identifier()?; let mut data_types = vec![]; - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { data_types = self.parse_comma_separated(Parser::parse_data_type)?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } 
self.expect_keyword_is(Keyword::AS)?; @@ -17161,16 +17185,18 @@ impl<'a> Parser<'a> { }) } - pub fn parse_unload(&mut self) -> Result { + pub fn parse_unload(&self) -> Result { self.expect_keyword(Keyword::UNLOAD)?; - self.expect_token(&Token::LParen)?; - let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) - { + self.expect_token(&BorrowedToken::LParen)?; + let (query, query_text) = if matches!( + self.peek_token().token, + BorrowedToken::SingleQuotedString(_) + ) { (None, Some(self.parse_literal_string()?)) } else { (Some(self.parse_query()?), None) }; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; self.expect_keyword_is(Keyword::TO)?; let to = self.parse_identifier()?; @@ -17194,7 +17220,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_merge_clauses(&mut self) -> Result, ParserError> { + pub fn parse_merge_clauses(&self) -> Result, ParserError> { let mut clauses = vec![]; loop { if !(self.parse_keyword(Keyword::WHEN)) { @@ -17293,7 +17319,7 @@ impl<'a> Parser<'a> { Ok(clauses) } - fn parse_output(&mut self, start_keyword: Keyword) -> Result { + fn parse_output(&self, start_keyword: Keyword) -> Result { let select_items = self.parse_projection()?; let into_table = if start_keyword == Keyword::OUTPUT && self.peek_keyword(Keyword::INTO) { self.expect_keyword_is(Keyword::INTO)?; @@ -17312,7 +17338,7 @@ impl<'a> Parser<'a> { }) } - fn parse_select_into(&mut self) -> Result { + fn parse_select_into(&self) -> Result { let temporary = self .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) .is_some(); @@ -17328,7 +17354,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_merge(&mut self) -> Result { + pub fn parse_merge(&self) -> Result { let into = self.parse_keyword(Keyword::INTO); let table = self.parse_table_factor()?; @@ -17353,7 +17379,7 @@ impl<'a> Parser<'a> { }) } - fn parse_pragma_value(&mut self) -> Result { + fn parse_pragma_value(&self) -> Result { match 
self.parse_value()?.value { v @ Value::SingleQuotedString(_) => Ok(v), v @ Value::DoubleQuotedString(_) => Ok(v), @@ -17367,17 +17393,17 @@ impl<'a> Parser<'a> { } // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')'] - pub fn parse_pragma(&mut self) -> Result { + pub fn parse_pragma(&self) -> Result { let name = self.parse_object_name(false)?; - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { let value = self.parse_pragma_value()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Statement::Pragma { name, value: Some(value), is_eq: false, }) - } else if self.consume_token(&Token::Eq) { + } else if self.consume_token(&BorrowedToken::Eq) { Ok(Statement::Pragma { name, value: Some(self.parse_pragma_value()?), @@ -17393,14 +17419,14 @@ impl<'a> Parser<'a> { } /// `INSTALL [extension_name]` - pub fn parse_install(&mut self) -> Result { + pub fn parse_install(&self) -> Result { let extension_name = self.parse_identifier()?; Ok(Statement::Install { extension_name }) } /// Parse a SQL LOAD statement - pub fn parse_load(&mut self) -> Result { + pub fn parse_load(&self) -> Result { if self.dialect.supports_load_extension() { let extension_name = self.parse_identifier()?; Ok(Statement::Load { extension_name }) @@ -17434,7 +17460,7 @@ impl<'a> Parser<'a> { /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]] /// ``` /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize) - pub fn parse_optimize_table(&mut self) -> Result { + pub fn parse_optimize_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let name = self.parse_object_name(false)?; let on_cluster = self.parse_optional_on_cluster()?; @@ -17474,7 +17500,7 @@ impl<'a> Parser<'a> { /// ``` /// /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for 
more details. - pub fn parse_create_sequence(&mut self, temporary: bool) -> Result { + pub fn parse_create_sequence(&self, temporary: bool) -> Result { //[ IF NOT EXISTS ] let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); //name @@ -17505,7 +17531,7 @@ impl<'a> Parser<'a> { }) } - fn parse_create_sequence_options(&mut self) -> Result, ParserError> { + fn parse_create_sequence_options(&self) -> Result, ParserError> { let mut sequence_options = vec![]; //[ INCREMENT [ BY ] increment ] if self.parse_keywords(&[Keyword::INCREMENT]) { @@ -17553,7 +17579,7 @@ impl<'a> Parser<'a> { /// Parse a `CREATE SERVER` statement. /// /// See [Statement::CreateServer] - pub fn parse_pg_create_server(&mut self) -> Result { + pub fn parse_pg_create_server(&self) -> Result { let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_object_name(false)?; @@ -17574,13 +17600,13 @@ impl<'a> Parser<'a> { let mut options = None; if self.parse_keyword(Keyword::OPTIONS) { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; options = Some(self.parse_comma_separated(|p| { let key = p.parse_identifier()?; let value = p.parse_identifier()?; Ok(CreateServerOption { key, value }) })?); - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; } Ok(Statement::CreateServer(CreateServerStatement { @@ -17595,14 +17621,14 @@ impl<'a> Parser<'a> { /// The index of the first unprocessed token. pub fn index(&self) -> usize { - self.index + self.index.get() } - pub fn parse_named_window(&mut self) -> Result { + pub fn parse_named_window(&self) -> Result { let ident = self.parse_identifier()?; self.expect_keyword_is(Keyword::AS)?; - let window_expr = if self.consume_token(&Token::LParen) { + let window_expr = if self.consume_token(&BorrowedToken::LParen) { NamedWindowExpr::WindowSpec(self.parse_window_spec()?) 
} else if self.dialect.supports_window_clause_named_window_reference() { NamedWindowExpr::NamedWindow(self.parse_identifier()?) @@ -17613,7 +17639,7 @@ impl<'a> Parser<'a> { Ok(NamedWindowDefinition(ident, window_expr)) } - pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result { + pub fn parse_create_procedure(&self, or_alter: bool) -> Result { let name = self.parse_object_name(false)?; let params = self.parse_optional_procedure_parameters()?; @@ -17636,9 +17662,9 @@ impl<'a> Parser<'a> { }) } - pub fn parse_window_spec(&mut self) -> Result { + pub fn parse_window_spec(&self) -> Result { let window_name = match self.peek_token().token { - Token::Word(word) if word.keyword == Keyword::NoKeyword => { + BorrowedToken::Word(word) if word.keyword == Keyword::NoKeyword => { self.parse_optional_ident()? } _ => None, @@ -17655,9 +17681,9 @@ impl<'a> Parser<'a> { vec![] }; - let window_frame = if !self.consume_token(&Token::RParen) { + let window_frame = if !self.consume_token(&BorrowedToken::RParen) { let window_frame = self.parse_window_frame()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Some(window_frame) } else { None @@ -17670,7 +17696,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_create_type(&mut self) -> Result { + pub fn parse_create_type(&self) -> Result { let name = self.parse_object_name(false)?; // Check if we have AS keyword @@ -17678,10 +17704,10 @@ impl<'a> Parser<'a> { if !has_as { // Two cases: CREATE TYPE name; or CREATE TYPE name (options); - if self.consume_token(&Token::LParen) { + if self.consume_token(&BorrowedToken::LParen) { // CREATE TYPE name (options) - SQL definition without AS let options = self.parse_create_type_sql_definition_options()?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; return Ok(Statement::CreateType { name, representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }), @@ -17702,7 +17728,7 @@ impl<'a> Parser<'a> { } 
else if self.parse_keyword(Keyword::RANGE) { // CREATE TYPE name AS RANGE (options) self.parse_create_type_range(name) - } else if self.consume_token(&Token::LParen) { + } else if self.consume_token(&BorrowedToken::LParen) { // CREATE TYPE name AS (attributes) - Composite self.parse_create_type_composite(name) } else { @@ -17713,8 +17739,8 @@ impl<'a> Parser<'a> { /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type) /// /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) - fn parse_create_type_composite(&mut self, name: ObjectName) -> Result { - if self.consume_token(&Token::RParen) { + fn parse_create_type_composite(&self, name: ObjectName) -> Result { + if self.consume_token(&BorrowedToken::RParen) { // Empty composite type return Ok(Statement::CreateType { name, @@ -17739,11 +17765,11 @@ impl<'a> Parser<'a> { collation: attr_collation, }); - if !self.consume_token(&Token::Comma) { + if !self.consume_token(&BorrowedToken::Comma) { break; } } - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Statement::CreateType { name, @@ -17754,10 +17780,11 @@ impl<'a> Parser<'a> { /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type]) /// /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) - pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result { - self.expect_token(&Token::LParen)?; - let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; - self.expect_token(&Token::RParen)?; + pub fn parse_create_type_enum(&self, name: ObjectName) -> Result { + self.expect_token(&BorrowedToken::LParen)?; + let labels = + self.parse_comma_separated0(|p| p.parse_identifier(), BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Statement::CreateType { name, @@ -17768,10 +17795,11 @@ impl<'a> Parser<'a> { /// Parse remainder of `CREATE TYPE AS RANGE` 
statement /// /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) - fn parse_create_type_range(&mut self, name: ObjectName) -> Result { - self.expect_token(&Token::LParen)?; - let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?; - self.expect_token(&Token::RParen)?; + fn parse_create_type_range(&self, name: ObjectName) -> Result { + self.expect_token(&BorrowedToken::LParen)?; + let options = + self.parse_comma_separated0(|p| p.parse_range_option(), BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(Statement::CreateType { name, @@ -17780,7 +17808,7 @@ impl<'a> Parser<'a> { } /// Parse a single range option for a `CREATE TYPE AS RANGE` statement - fn parse_range_option(&mut self) -> Result { + fn parse_range_option(&self) -> Result { let keyword = self.parse_one_of_keywords(&[ Keyword::SUBTYPE, Keyword::SUBTYPE_OPCLASS, @@ -17792,32 +17820,32 @@ impl<'a> Parser<'a> { match keyword { Some(Keyword::SUBTYPE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let data_type = self.parse_data_type()?; Ok(UserDefinedTypeRangeOption::Subtype(data_type)) } Some(Keyword::SUBTYPE_OPCLASS) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name)) } Some(Keyword::COLLATION) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeRangeOption::Collation(name)) } Some(Keyword::CANONICAL) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeRangeOption::Canonical(name)) } Some(Keyword::SUBTYPE_DIFF) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeRangeOption::SubtypeDiff(name)) } 
Some(Keyword::MULTIRANGE_TYPE_NAME) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name)) } @@ -17827,14 +17855,14 @@ impl<'a> Parser<'a> { /// Parse SQL definition options for CREATE TYPE (options) fn parse_create_type_sql_definition_options( - &mut self, + &self, ) -> Result, ParserError> { - self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen) + self.parse_comma_separated0(|p| p.parse_sql_definition_option(), BorrowedToken::RParen) } /// Parse a single SQL definition option for CREATE TYPE (options) fn parse_sql_definition_option( - &mut self, + &self, ) -> Result { let keyword = self.parse_one_of_keywords(&[ Keyword::INPUT, @@ -17860,47 +17888,47 @@ impl<'a> Parser<'a> { match keyword { Some(Keyword::INPUT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Input(name)) } Some(Keyword::OUTPUT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Output(name)) } Some(Keyword::RECEIVE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Receive(name)) } Some(Keyword::SEND) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Send(name)) } Some(Keyword::TYPMOD_IN) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name)) } Some(Keyword::TYPMOD_OUT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = 
self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name)) } Some(Keyword::ANALYZE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name)) } Some(Keyword::SUBSCRIPT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name)) } Some(Keyword::INTERNALLENGTH) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; if self.parse_keyword(Keyword::VARIABLE) { Ok(UserDefinedTypeSqlDefinitionOption::InternalLength( UserDefinedTypeInternalLength::Variable, @@ -17914,7 +17942,7 @@ impl<'a> Parser<'a> { } Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue), Some(Keyword::ALIGNMENT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let align_keyword = self.parse_one_of_keywords(&[ Keyword::CHAR, Keyword::INT2, @@ -17941,7 +17969,7 @@ impl<'a> Parser<'a> { } } Some(Keyword::STORAGE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let storage_keyword = self.parse_one_of_keywords(&[ Keyword::PLAIN, Keyword::EXTERNAL, @@ -17968,12 +17996,12 @@ impl<'a> Parser<'a> { } } Some(Keyword::LIKE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let name = self.parse_object_name(false)?; Ok(UserDefinedTypeSqlDefinitionOption::Like(name)) } Some(Keyword::CATEGORY) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let category_str = self.parse_literal_string()?; let category_char = category_str.chars().next().ok_or_else(|| { ParserError::ParserError( @@ -17983,28 +18011,28 @@ impl<'a> Parser<'a> { Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char)) } Some(Keyword::PREFERRED) => { - self.expect_token(&Token::Eq)?; + 
self.expect_token(&BorrowedToken::Eq)?; let value = self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE); Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value)) } Some(Keyword::DEFAULT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let expr = self.parse_expr()?; Ok(UserDefinedTypeSqlDefinitionOption::Default(expr)) } Some(Keyword::ELEMENT) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let data_type = self.parse_data_type()?; Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type)) } Some(Keyword::DELIMITER) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let delimiter = self.parse_literal_string()?; Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter)) } Some(Keyword::COLLATABLE) => { - self.expect_token(&Token::Eq)?; + self.expect_token(&BorrowedToken::Eq)?; let value = self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE); Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value)) @@ -18013,14 +18041,15 @@ impl<'a> Parser<'a> { } } - fn parse_parenthesized_identifiers(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; - let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; - self.expect_token(&Token::RParen)?; + fn parse_parenthesized_identifiers(&self) -> Result, ParserError> { + self.expect_token(&BorrowedToken::LParen)?; + let idents = + self.parse_comma_separated0(|p| p.parse_identifier(), BorrowedToken::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; Ok(idents) } - fn parse_column_position(&mut self) -> Result, ParserError> { + fn parse_column_position(&self) -> Result, ParserError> { if dialect_of!(self is MySqlDialect | GenericDialect) { if self.parse_keyword(Keyword::FIRST) { Ok(Some(MySQLColumnPosition::First)) @@ -18036,14 +18065,14 @@ impl<'a> Parser<'a> { } /// Parse [Statement::Print] - fn parse_print(&mut self) -> Result { + fn 
parse_print(&self) -> Result { Ok(Statement::Print(PrintStatement { message: Box::new(self.parse_expr()?), })) } /// Parse [Statement::Return] - fn parse_return(&mut self) -> Result { + fn parse_return(&self) -> Result { match self.maybe_parse(|p| p.parse_expr())? { Some(expr) => Ok(Statement::Return(ReturnStatement { value: Some(ReturnStatementValue::Expr(expr)), @@ -18055,7 +18084,7 @@ impl<'a> Parser<'a> { /// /// Parse a `EXPORT DATA` statement. /// /// See [Statement::ExportData] - fn parse_export_data(&mut self) -> Result { + fn parse_export_data(&self) -> Result { self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?; let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) { @@ -18064,9 +18093,9 @@ impl<'a> Parser<'a> { None }; self.expect_keyword(Keyword::OPTIONS)?; - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; let options = self.parse_comma_separated(|p| p.parse_sql_option())?; - self.expect_token(&Token::RParen)?; + self.expect_token(&BorrowedToken::RParen)?; self.expect_keyword(Keyword::AS)?; let query = self.parse_query()?; Ok(Statement::ExportData(ExportData { @@ -18076,7 +18105,7 @@ impl<'a> Parser<'a> { })) } - fn parse_vacuum(&mut self) -> Result { + fn parse_vacuum(&self) -> Result { self.expect_keyword(Keyword::VACUUM)?; let full = self.parse_keyword(Keyword::FULL); let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]); @@ -18111,12 +18140,12 @@ impl<'a> Parser<'a> { } /// Consume the parser and return its underlying token buffer - pub fn into_tokens(self) -> Vec { + pub fn into_tokens(self) -> Vec> { self.tokens } /// Returns true if the next keyword indicates a sub query, i.e. 
SELECT or WITH - fn peek_sub_query(&mut self) -> bool { + fn peek_sub_query(&self) -> bool { if self .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH]) .is_some() @@ -18127,7 +18156,7 @@ impl<'a> Parser<'a> { false } - pub(crate) fn parse_show_stmt_options(&mut self) -> Result { + pub(crate) fn parse_show_stmt_options(&self) -> Result { let show_in; let mut filter_position = None; if self.dialect.supports_show_like_before_in() { @@ -18153,7 +18182,7 @@ impl<'a> Parser<'a> { }) } - fn maybe_parse_show_stmt_in(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_in(&self) -> Result, ParserError> { let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { Some(Keyword::FROM) => ShowStatementInClause::FROM, Some(Keyword::IN) => ShowStatementInClause::IN, @@ -18223,7 +18252,7 @@ impl<'a> Parser<'a> { })) } - fn maybe_parse_show_stmt_starts_with(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_starts_with(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) { Ok(Some(self.parse_value()?.value)) } else { @@ -18231,7 +18260,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_show_stmt_limit(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_limit(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::LIMIT) { Ok(self.parse_limit()?) } else { @@ -18239,7 +18268,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_show_stmt_from(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_from(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::FROM) { Ok(Some(self.parse_value()?.value)) } else { @@ -18248,7 +18277,7 @@ impl<'a> Parser<'a> { } pub(crate) fn in_column_definition_state(&self) -> bool { - matches!(self.state, ColumnDefinition) + matches!(self.state.get(), ColumnDefinition) } /// Parses options provided in key-value format. 
@@ -18256,33 +18285,33 @@ impl<'a> Parser<'a> { /// * `parenthesized` - true if the options are enclosed in parenthesis /// * `end_words` - a list of keywords that any of them indicates the end of the options section pub(crate) fn parse_key_value_options( - &mut self, + &self, parenthesized: bool, end_words: &[Keyword], ) -> Result { let mut options: Vec = Vec::new(); let mut delimiter = KeyValueOptionsDelimiter::Space; if parenthesized { - self.expect_token(&Token::LParen)?; + self.expect_token(&BorrowedToken::LParen)?; } loop { match self.next_token().token { - Token::RParen => { + BorrowedToken::RParen => { if parenthesized { break; } else { return self.expected(" another option or EOF", self.peek_token()); } } - Token::EOF => break, - Token::Comma => { + BorrowedToken::EOF => break, + BorrowedToken::Comma => { delimiter = KeyValueOptionsDelimiter::Comma; continue; } - Token::Word(w) if !end_words.contains(&w.keyword) => { + BorrowedToken::Word(w) if !end_words.contains(&w.keyword) => { options.push(self.parse_key_value_option(&w)?) 
} - Token::Word(w) if end_words.contains(&w.keyword) => { + BorrowedToken::Word(w) if end_words.contains(&w.keyword) => { self.prev_token(); break; } @@ -18294,17 +18323,14 @@ impl<'a> Parser<'a> { } /// Parses a `KEY = VALUE` construct based on the specified key - pub(crate) fn parse_key_value_option( - &mut self, - key: &Word, - ) -> Result { - self.expect_token(&Token::Eq)?; + pub(crate) fn parse_key_value_option(&self, key: &Word) -> Result { + self.expect_token(&BorrowedToken::Eq)?; match self.peek_token().token { - Token::SingleQuotedString(_) => Ok(KeyValueOption { + BorrowedToken::SingleQuotedString(_) => Ok(KeyValueOption { option_name: key.value.clone(), option_value: KeyValueOptionKind::Single(self.parse_value()?.into()), }), - Token::Word(word) + BorrowedToken::Word(word) if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE => { Ok(KeyValueOption { @@ -18312,11 +18338,11 @@ impl<'a> Parser<'a> { option_value: KeyValueOptionKind::Single(self.parse_value()?.into()), }) } - Token::Number(..) => Ok(KeyValueOption { + BorrowedToken::Number(..) => Ok(KeyValueOption { option_name: key.value.clone(), option_value: KeyValueOptionKind::Single(self.parse_value()?.into()), }), - Token::Word(word) => { + BorrowedToken::Word(word) => { self.next_token(); Ok(KeyValueOption { option_name: key.value.clone(), @@ -18325,14 +18351,15 @@ impl<'a> Parser<'a> { )), }) } - Token::LParen => { + BorrowedToken::LParen => { // Can be a list of values or a list of key value properties. // Try to parse a list of values and if that fails, try to parse // a list of key-value properties. 
match self.maybe_parse(|parser| { - parser.expect_token(&Token::LParen)?; - let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen); - parser.expect_token(&Token::RParen)?; + parser.expect_token(&BorrowedToken::LParen)?; + let values = + parser.parse_comma_separated0(|p| p.parse_value(), BorrowedToken::RParen); + parser.expect_token(&BorrowedToken::RParen)?; values })? { Some(values) => { @@ -18355,7 +18382,7 @@ impl<'a> Parser<'a> { } /// Parses a RESET statement - fn parse_reset(&mut self) -> Result { + fn parse_reset(&self) -> Result { if self.parse_keyword(Keyword::ALL) { return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL })); } @@ -18408,19 +18435,31 @@ mod tests { fn test_prev_index() { let sql = "SELECT version"; all_dialects().run_parser_method(sql, |parser| { - assert_eq!(parser.peek_token(), Token::make_keyword("SELECT")); - assert_eq!(parser.next_token(), Token::make_keyword("SELECT")); + assert_eq!(parser.peek_token(), BorrowedToken::make_keyword("SELECT")); + assert_eq!(parser.next_token(), BorrowedToken::make_keyword("SELECT")); parser.prev_token(); - assert_eq!(parser.next_token(), Token::make_keyword("SELECT")); - assert_eq!(parser.next_token(), Token::make_word("version", None)); + assert_eq!(parser.next_token(), BorrowedToken::make_keyword("SELECT")); + assert_eq!( + parser.next_token(), + BorrowedToken::make_word("version", None) + ); parser.prev_token(); - assert_eq!(parser.peek_token(), Token::make_word("version", None)); - assert_eq!(parser.next_token(), Token::make_word("version", None)); - assert_eq!(parser.peek_token(), Token::EOF); + assert_eq!( + parser.peek_token(), + BorrowedToken::make_word("version", None) + ); + assert_eq!( + parser.next_token(), + BorrowedToken::make_word("version", None) + ); + assert_eq!(parser.peek_token(), BorrowedToken::EOF); parser.prev_token(); - assert_eq!(parser.next_token(), Token::make_word("version", None)); - assert_eq!(parser.next_token(), Token::EOF); - 
assert_eq!(parser.next_token(), Token::EOF); + assert_eq!( + parser.next_token(), + BorrowedToken::make_word("version", None) + ); + assert_eq!(parser.next_token(), BorrowedToken::EOF); + assert_eq!(parser.next_token(), BorrowedToken::EOF); parser.prev_token(); }); } @@ -18430,7 +18469,7 @@ mod tests { all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| { assert!(matches!( parser.peek_tokens(), - [Token::Word(Word { + [BorrowedToken::Word(Word { keyword: Keyword::SELECT, .. })] @@ -18439,12 +18478,12 @@ mod tests { assert!(matches!( parser.peek_tokens(), [ - Token::Word(Word { + BorrowedToken::Word(Word { keyword: Keyword::SELECT, .. }), - Token::Word(_), - Token::Word(Word { + BorrowedToken::Word(_), + BorrowedToken::Word(Word { keyword: Keyword::AS, .. }), @@ -18458,13 +18497,13 @@ mod tests { assert!(matches!( parser.peek_tokens(), [ - Token::Word(Word { + BorrowedToken::Word(Word { keyword: Keyword::FROM, .. }), - Token::Word(_), - Token::EOF, - Token::EOF, + BorrowedToken::Word(_), + BorrowedToken::EOF, + BorrowedToken::EOF, ] )) }) diff --git a/src/test_utils.rs b/src/test_utils.rs index b6100d498..b534981d3 100644 --- a/src/test_utils.rs +++ b/src/test_utils.rs @@ -126,7 +126,8 @@ impl TestedDialects { if let Some(options) = &self.options { tokenizer = tokenizer.with_unescape(options.unescape); } - let tokens = tokenizer.tokenize()?; + + let tokens = tokenizer.tokenized_owned()?; self.new_parser(dialect) .with_tokens(tokens) .parse_statements() diff --git a/src/tokenizer.rs b/src/tokenizer.rs index 745c735b3..fef4fe106 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -51,11 +51,11 @@ use crate::dialect::{ use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX}; use crate::{ast::DollarQuotedString, dialect::HiveDialect}; -/// SQL Token enumeration +/// SQL Token enumeration with lifetime parameter for future zero-copy support #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", 
derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum Token { +pub enum BorrowedToken<'a> { /// An end-of-file marker, not a real token EOF, /// A keyword (like SELECT) or an optionally quoted SQL identifier @@ -280,126 +280,284 @@ pub enum Token { /// This is used to represent any custom binary operator that is not part of the SQL standard. /// PostgreSQL allows defining custom binary operators using CREATE OPERATOR. CustomBinaryOperator(String), + /// Marker to carry the lifetime parameter (never constructed) + _Phantom(Cow<'a, str>), } -impl fmt::Display for Token { +/// Type alias for backward compatibility - Token without explicit lifetime uses 'static +pub type Token = BorrowedToken<'static>; + +impl<'a> fmt::Display for BorrowedToken<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Token::EOF => f.write_str("EOF"), - Token::Word(ref w) => write!(f, "{w}"), - Token::Number(ref n, l) => write!(f, "{}{long}", n, long = if *l { "L" } else { "" }), - Token::Char(ref c) => write!(f, "{c}"), - Token::SingleQuotedString(ref s) => write!(f, "'{s}'"), - Token::TripleSingleQuotedString(ref s) => write!(f, "'''{s}'''"), - Token::DoubleQuotedString(ref s) => write!(f, "\"{s}\""), - Token::TripleDoubleQuotedString(ref s) => write!(f, "\"\"\"{s}\"\"\""), - Token::DollarQuotedString(ref s) => write!(f, "{s}"), - Token::NationalStringLiteral(ref s) => write!(f, "N'{s}'"), - Token::EscapedStringLiteral(ref s) => write!(f, "E'{s}'"), - Token::UnicodeStringLiteral(ref s) => write!(f, "U&'{s}'"), - Token::HexStringLiteral(ref s) => write!(f, "X'{s}'"), - Token::SingleQuotedByteStringLiteral(ref s) => write!(f, "B'{s}'"), - Token::TripleSingleQuotedByteStringLiteral(ref s) => write!(f, "B'''{s}'''"), - Token::DoubleQuotedByteStringLiteral(ref s) => write!(f, "B\"{s}\""), - Token::TripleDoubleQuotedByteStringLiteral(ref s) => write!(f, "B\"\"\"{s}\"\"\""), - Token::SingleQuotedRawStringLiteral(ref s) => 
write!(f, "R'{s}'"), - Token::DoubleQuotedRawStringLiteral(ref s) => write!(f, "R\"{s}\""), - Token::TripleSingleQuotedRawStringLiteral(ref s) => write!(f, "R'''{s}'''"), - Token::TripleDoubleQuotedRawStringLiteral(ref s) => write!(f, "R\"\"\"{s}\"\"\""), - Token::Comma => f.write_str(","), - Token::Whitespace(ws) => write!(f, "{ws}"), - Token::DoubleEq => f.write_str("=="), - Token::Spaceship => f.write_str("<=>"), - Token::Eq => f.write_str("="), - Token::Neq => f.write_str("<>"), - Token::Lt => f.write_str("<"), - Token::Gt => f.write_str(">"), - Token::LtEq => f.write_str("<="), - Token::GtEq => f.write_str(">="), - Token::Plus => f.write_str("+"), - Token::Minus => f.write_str("-"), - Token::Mul => f.write_str("*"), - Token::Div => f.write_str("/"), - Token::DuckIntDiv => f.write_str("//"), - Token::StringConcat => f.write_str("||"), - Token::Mod => f.write_str("%"), - Token::LParen => f.write_str("("), - Token::RParen => f.write_str(")"), - Token::Period => f.write_str("."), - Token::Colon => f.write_str(":"), - Token::DoubleColon => f.write_str("::"), - Token::Assignment => f.write_str(":="), - Token::SemiColon => f.write_str(";"), - Token::Backslash => f.write_str("\\"), - Token::LBracket => f.write_str("["), - Token::RBracket => f.write_str("]"), - Token::Ampersand => f.write_str("&"), - Token::Caret => f.write_str("^"), - Token::Pipe => f.write_str("|"), - Token::LBrace => f.write_str("{"), - Token::RBrace => f.write_str("}"), - Token::RArrow => f.write_str("=>"), - Token::Sharp => f.write_str("#"), - Token::DoubleSharp => f.write_str("##"), - Token::ExclamationMark => f.write_str("!"), - Token::DoubleExclamationMark => f.write_str("!!"), - Token::Tilde => f.write_str("~"), - Token::TildeAsterisk => f.write_str("~*"), - Token::ExclamationMarkTilde => f.write_str("!~"), - Token::ExclamationMarkTildeAsterisk => f.write_str("!~*"), - Token::DoubleTilde => f.write_str("~~"), - Token::DoubleTildeAsterisk => f.write_str("~~*"), - 
Token::ExclamationMarkDoubleTilde => f.write_str("!~~"), - Token::ExclamationMarkDoubleTildeAsterisk => f.write_str("!~~*"), - Token::AtSign => f.write_str("@"), - Token::CaretAt => f.write_str("^@"), - Token::ShiftLeft => f.write_str("<<"), - Token::ShiftRight => f.write_str(">>"), - Token::Overlap => f.write_str("&&"), - Token::PGSquareRoot => f.write_str("|/"), - Token::PGCubeRoot => f.write_str("||/"), - Token::AtDashAt => f.write_str("@-@"), - Token::QuestionMarkDash => f.write_str("?-"), - Token::AmpersandLeftAngleBracket => f.write_str("&<"), - Token::AmpersandRightAngleBracket => f.write_str("&>"), - Token::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"), - Token::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"), - Token::VerticalBarRightAngleBracket => f.write_str("|>"), - Token::TwoWayArrow => f.write_str("<->"), - Token::LeftAngleBracketCaret => f.write_str("<^"), - Token::RightAngleBracketCaret => f.write_str(">^"), - Token::QuestionMarkSharp => f.write_str("?#"), - Token::QuestionMarkDashVerticalBar => f.write_str("?-|"), - Token::QuestionMarkDoubleVerticalBar => f.write_str("?||"), - Token::TildeEqual => f.write_str("~="), - Token::ShiftLeftVerticalBar => f.write_str("<<|"), - Token::VerticalBarShiftRight => f.write_str("|>>"), - Token::Placeholder(ref s) => write!(f, "{s}"), - Token::Arrow => write!(f, "->"), - Token::LongArrow => write!(f, "->>"), - Token::HashArrow => write!(f, "#>"), - Token::HashLongArrow => write!(f, "#>>"), - Token::AtArrow => write!(f, "@>"), - Token::ArrowAt => write!(f, "<@"), - Token::HashMinus => write!(f, "#-"), - Token::AtQuestion => write!(f, "@?"), - Token::AtAt => write!(f, "@@"), - Token::Question => write!(f, "?"), - Token::QuestionAnd => write!(f, "?&"), - Token::QuestionPipe => write!(f, "?|"), - Token::CustomBinaryOperator(s) => f.write_str(s), + BorrowedToken::EOF => f.write_str("EOF"), + BorrowedToken::Word(ref w) => write!(f, "{w}"), + BorrowedToken::Number(ref n, l) => { + write!(f, 
"{}{long}", n, long = if *l { "L" } else { "" }) + } + BorrowedToken::Char(ref c) => write!(f, "{c}"), + BorrowedToken::SingleQuotedString(ref s) => write!(f, "'{s}'"), + BorrowedToken::TripleSingleQuotedString(ref s) => write!(f, "'''{s}'''"), + BorrowedToken::DoubleQuotedString(ref s) => write!(f, "\"{s}\""), + BorrowedToken::TripleDoubleQuotedString(ref s) => write!(f, "\"\"\"{s}\"\"\""), + BorrowedToken::DollarQuotedString(ref s) => write!(f, "{s}"), + BorrowedToken::NationalStringLiteral(ref s) => write!(f, "N'{s}'"), + BorrowedToken::EscapedStringLiteral(ref s) => write!(f, "E'{s}'"), + BorrowedToken::UnicodeStringLiteral(ref s) => write!(f, "U&'{s}'"), + BorrowedToken::HexStringLiteral(ref s) => write!(f, "X'{s}'"), + BorrowedToken::SingleQuotedByteStringLiteral(ref s) => write!(f, "B'{s}'"), + BorrowedToken::TripleSingleQuotedByteStringLiteral(ref s) => write!(f, "B'''{s}'''"), + BorrowedToken::DoubleQuotedByteStringLiteral(ref s) => write!(f, "B\"{s}\""), + BorrowedToken::TripleDoubleQuotedByteStringLiteral(ref s) => { + write!(f, "B\"\"\"{s}\"\"\"") + } + BorrowedToken::SingleQuotedRawStringLiteral(ref s) => write!(f, "R'{s}'"), + BorrowedToken::DoubleQuotedRawStringLiteral(ref s) => write!(f, "R\"{s}\""), + BorrowedToken::TripleSingleQuotedRawStringLiteral(ref s) => write!(f, "R'''{s}'''"), + BorrowedToken::TripleDoubleQuotedRawStringLiteral(ref s) => { + write!(f, "R\"\"\"{s}\"\"\"") + } + BorrowedToken::Comma => f.write_str(","), + BorrowedToken::Whitespace(ws) => write!(f, "{ws}"), + BorrowedToken::DoubleEq => f.write_str("=="), + BorrowedToken::Spaceship => f.write_str("<=>"), + BorrowedToken::Eq => f.write_str("="), + BorrowedToken::Neq => f.write_str("<>"), + BorrowedToken::Lt => f.write_str("<"), + BorrowedToken::Gt => f.write_str(">"), + BorrowedToken::LtEq => f.write_str("<="), + BorrowedToken::GtEq => f.write_str(">="), + BorrowedToken::Plus => f.write_str("+"), + BorrowedToken::Minus => f.write_str("-"), + BorrowedToken::Mul => 
f.write_str("*"), + BorrowedToken::Div => f.write_str("/"), + BorrowedToken::DuckIntDiv => f.write_str("//"), + BorrowedToken::StringConcat => f.write_str("||"), + BorrowedToken::Mod => f.write_str("%"), + BorrowedToken::LParen => f.write_str("("), + BorrowedToken::RParen => f.write_str(")"), + BorrowedToken::Period => f.write_str("."), + BorrowedToken::Colon => f.write_str(":"), + BorrowedToken::DoubleColon => f.write_str("::"), + BorrowedToken::Assignment => f.write_str(":="), + BorrowedToken::SemiColon => f.write_str(";"), + BorrowedToken::Backslash => f.write_str("\\"), + BorrowedToken::LBracket => f.write_str("["), + BorrowedToken::RBracket => f.write_str("]"), + BorrowedToken::Ampersand => f.write_str("&"), + BorrowedToken::Caret => f.write_str("^"), + BorrowedToken::Pipe => f.write_str("|"), + BorrowedToken::LBrace => f.write_str("{"), + BorrowedToken::RBrace => f.write_str("}"), + BorrowedToken::RArrow => f.write_str("=>"), + BorrowedToken::Sharp => f.write_str("#"), + BorrowedToken::DoubleSharp => f.write_str("##"), + BorrowedToken::ExclamationMark => f.write_str("!"), + BorrowedToken::DoubleExclamationMark => f.write_str("!!"), + BorrowedToken::Tilde => f.write_str("~"), + BorrowedToken::TildeAsterisk => f.write_str("~*"), + BorrowedToken::ExclamationMarkTilde => f.write_str("!~"), + BorrowedToken::ExclamationMarkTildeAsterisk => f.write_str("!~*"), + BorrowedToken::DoubleTilde => f.write_str("~~"), + BorrowedToken::DoubleTildeAsterisk => f.write_str("~~*"), + BorrowedToken::ExclamationMarkDoubleTilde => f.write_str("!~~"), + BorrowedToken::ExclamationMarkDoubleTildeAsterisk => f.write_str("!~~*"), + BorrowedToken::AtSign => f.write_str("@"), + BorrowedToken::CaretAt => f.write_str("^@"), + BorrowedToken::ShiftLeft => f.write_str("<<"), + BorrowedToken::ShiftRight => f.write_str(">>"), + BorrowedToken::Overlap => f.write_str("&&"), + BorrowedToken::PGSquareRoot => f.write_str("|/"), + BorrowedToken::PGCubeRoot => f.write_str("||/"), + 
BorrowedToken::AtDashAt => f.write_str("@-@"), + BorrowedToken::QuestionMarkDash => f.write_str("?-"), + BorrowedToken::AmpersandLeftAngleBracket => f.write_str("&<"), + BorrowedToken::AmpersandRightAngleBracket => f.write_str("&>"), + BorrowedToken::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"), + BorrowedToken::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"), + BorrowedToken::VerticalBarRightAngleBracket => f.write_str("|>"), + BorrowedToken::TwoWayArrow => f.write_str("<->"), + BorrowedToken::LeftAngleBracketCaret => f.write_str("<^"), + BorrowedToken::RightAngleBracketCaret => f.write_str(">^"), + BorrowedToken::QuestionMarkSharp => f.write_str("?#"), + BorrowedToken::QuestionMarkDashVerticalBar => f.write_str("?-|"), + BorrowedToken::QuestionMarkDoubleVerticalBar => f.write_str("?||"), + BorrowedToken::TildeEqual => f.write_str("~="), + BorrowedToken::ShiftLeftVerticalBar => f.write_str("<<|"), + BorrowedToken::VerticalBarShiftRight => f.write_str("|>>"), + BorrowedToken::Placeholder(ref s) => write!(f, "{s}"), + BorrowedToken::Arrow => write!(f, "->"), + BorrowedToken::LongArrow => write!(f, "->>"), + BorrowedToken::HashArrow => write!(f, "#>"), + BorrowedToken::HashLongArrow => write!(f, "#>>"), + BorrowedToken::AtArrow => write!(f, "@>"), + BorrowedToken::ArrowAt => write!(f, "<@"), + BorrowedToken::HashMinus => write!(f, "#-"), + BorrowedToken::AtQuestion => write!(f, "@?"), + BorrowedToken::AtAt => write!(f, "@@"), + BorrowedToken::Question => write!(f, "?"), + BorrowedToken::QuestionAnd => write!(f, "?&"), + BorrowedToken::QuestionPipe => write!(f, "?|"), + BorrowedToken::CustomBinaryOperator(s) => f.write_str(s), + BorrowedToken::_Phantom(_) => unreachable!("_Phantom should never be constructed"), } } } -impl Token { +impl<'a> BorrowedToken<'a> { + /// Converts a borrowed token to a static token by taking ownership and moving the data + pub fn to_static(self) -> Token { + match self { + BorrowedToken::EOF => BorrowedToken::EOF, 
+ BorrowedToken::Word(w) => BorrowedToken::Word(w), + BorrowedToken::Number(n, l) => BorrowedToken::Number(n, l), + BorrowedToken::Char(c) => BorrowedToken::Char(c), + BorrowedToken::SingleQuotedString(s) => BorrowedToken::SingleQuotedString(s), + BorrowedToken::DoubleQuotedString(s) => BorrowedToken::DoubleQuotedString(s), + BorrowedToken::TripleSingleQuotedString(s) => { + BorrowedToken::TripleSingleQuotedString(s) + } + BorrowedToken::TripleDoubleQuotedString(s) => { + BorrowedToken::TripleDoubleQuotedString(s) + } + BorrowedToken::DollarQuotedString(s) => BorrowedToken::DollarQuotedString(s), + BorrowedToken::SingleQuotedByteStringLiteral(s) => { + BorrowedToken::SingleQuotedByteStringLiteral(s) + } + BorrowedToken::DoubleQuotedByteStringLiteral(s) => { + BorrowedToken::DoubleQuotedByteStringLiteral(s) + } + BorrowedToken::TripleSingleQuotedByteStringLiteral(s) => { + BorrowedToken::TripleSingleQuotedByteStringLiteral(s) + } + BorrowedToken::TripleDoubleQuotedByteStringLiteral(s) => { + BorrowedToken::TripleDoubleQuotedByteStringLiteral(s) + } + BorrowedToken::SingleQuotedRawStringLiteral(s) => { + BorrowedToken::SingleQuotedRawStringLiteral(s) + } + BorrowedToken::DoubleQuotedRawStringLiteral(s) => { + BorrowedToken::DoubleQuotedRawStringLiteral(s) + } + BorrowedToken::TripleSingleQuotedRawStringLiteral(s) => { + BorrowedToken::TripleSingleQuotedRawStringLiteral(s) + } + BorrowedToken::TripleDoubleQuotedRawStringLiteral(s) => { + BorrowedToken::TripleDoubleQuotedRawStringLiteral(s) + } + BorrowedToken::NationalStringLiteral(s) => BorrowedToken::NationalStringLiteral(s), + BorrowedToken::EscapedStringLiteral(s) => BorrowedToken::EscapedStringLiteral(s), + BorrowedToken::UnicodeStringLiteral(s) => BorrowedToken::UnicodeStringLiteral(s), + BorrowedToken::HexStringLiteral(s) => BorrowedToken::HexStringLiteral(s), + BorrowedToken::Comma => BorrowedToken::Comma, + BorrowedToken::Whitespace(ws) => BorrowedToken::Whitespace(ws), + BorrowedToken::DoubleEq => 
BorrowedToken::DoubleEq, + BorrowedToken::Eq => BorrowedToken::Eq, + BorrowedToken::Neq => BorrowedToken::Neq, + BorrowedToken::Lt => BorrowedToken::Lt, + BorrowedToken::Gt => BorrowedToken::Gt, + BorrowedToken::LtEq => BorrowedToken::LtEq, + BorrowedToken::GtEq => BorrowedToken::GtEq, + BorrowedToken::Spaceship => BorrowedToken::Spaceship, + BorrowedToken::Plus => BorrowedToken::Plus, + BorrowedToken::Minus => BorrowedToken::Minus, + BorrowedToken::Mul => BorrowedToken::Mul, + BorrowedToken::Div => BorrowedToken::Div, + BorrowedToken::DuckIntDiv => BorrowedToken::DuckIntDiv, + BorrowedToken::Mod => BorrowedToken::Mod, + BorrowedToken::StringConcat => BorrowedToken::StringConcat, + BorrowedToken::LParen => BorrowedToken::LParen, + BorrowedToken::RParen => BorrowedToken::RParen, + BorrowedToken::Period => BorrowedToken::Period, + BorrowedToken::Colon => BorrowedToken::Colon, + BorrowedToken::DoubleColon => BorrowedToken::DoubleColon, + BorrowedToken::Assignment => BorrowedToken::Assignment, + BorrowedToken::SemiColon => BorrowedToken::SemiColon, + BorrowedToken::Backslash => BorrowedToken::Backslash, + BorrowedToken::LBracket => BorrowedToken::LBracket, + BorrowedToken::RBracket => BorrowedToken::RBracket, + BorrowedToken::Ampersand => BorrowedToken::Ampersand, + BorrowedToken::Pipe => BorrowedToken::Pipe, + BorrowedToken::Caret => BorrowedToken::Caret, + BorrowedToken::LBrace => BorrowedToken::LBrace, + BorrowedToken::RBrace => BorrowedToken::RBrace, + BorrowedToken::RArrow => BorrowedToken::RArrow, + BorrowedToken::Sharp => BorrowedToken::Sharp, + BorrowedToken::DoubleSharp => BorrowedToken::DoubleSharp, + BorrowedToken::Tilde => BorrowedToken::Tilde, + BorrowedToken::TildeAsterisk => BorrowedToken::TildeAsterisk, + BorrowedToken::ExclamationMarkTilde => BorrowedToken::ExclamationMarkTilde, + BorrowedToken::ExclamationMarkTildeAsterisk => { + BorrowedToken::ExclamationMarkTildeAsterisk + } + BorrowedToken::DoubleTilde => BorrowedToken::DoubleTilde, + 
BorrowedToken::DoubleTildeAsterisk => BorrowedToken::DoubleTildeAsterisk, + BorrowedToken::ExclamationMarkDoubleTilde => BorrowedToken::ExclamationMarkDoubleTilde, + BorrowedToken::ExclamationMarkDoubleTildeAsterisk => { + BorrowedToken::ExclamationMarkDoubleTildeAsterisk + } + BorrowedToken::ShiftLeft => BorrowedToken::ShiftLeft, + BorrowedToken::ShiftRight => BorrowedToken::ShiftRight, + BorrowedToken::Overlap => BorrowedToken::Overlap, + BorrowedToken::ExclamationMark => BorrowedToken::ExclamationMark, + BorrowedToken::DoubleExclamationMark => BorrowedToken::DoubleExclamationMark, + BorrowedToken::AtSign => BorrowedToken::AtSign, + BorrowedToken::CaretAt => BorrowedToken::CaretAt, + BorrowedToken::PGSquareRoot => BorrowedToken::PGSquareRoot, + BorrowedToken::PGCubeRoot => BorrowedToken::PGCubeRoot, + BorrowedToken::Placeholder(s) => BorrowedToken::Placeholder(s), + BorrowedToken::Arrow => BorrowedToken::Arrow, + BorrowedToken::LongArrow => BorrowedToken::LongArrow, + BorrowedToken::HashArrow => BorrowedToken::HashArrow, + BorrowedToken::AtDashAt => BorrowedToken::AtDashAt, + BorrowedToken::QuestionMarkDash => BorrowedToken::QuestionMarkDash, + BorrowedToken::AmpersandLeftAngleBracket => BorrowedToken::AmpersandLeftAngleBracket, + BorrowedToken::AmpersandRightAngleBracket => BorrowedToken::AmpersandRightAngleBracket, + BorrowedToken::AmpersandLeftAngleBracketVerticalBar => { + BorrowedToken::AmpersandLeftAngleBracketVerticalBar + } + BorrowedToken::VerticalBarAmpersandRightAngleBracket => { + BorrowedToken::VerticalBarAmpersandRightAngleBracket + } + BorrowedToken::TwoWayArrow => BorrowedToken::TwoWayArrow, + BorrowedToken::LeftAngleBracketCaret => BorrowedToken::LeftAngleBracketCaret, + BorrowedToken::RightAngleBracketCaret => BorrowedToken::RightAngleBracketCaret, + BorrowedToken::QuestionMarkSharp => BorrowedToken::QuestionMarkSharp, + BorrowedToken::QuestionMarkDashVerticalBar => { + BorrowedToken::QuestionMarkDashVerticalBar + } + 
BorrowedToken::QuestionMarkDoubleVerticalBar => { + BorrowedToken::QuestionMarkDoubleVerticalBar + } + BorrowedToken::TildeEqual => BorrowedToken::TildeEqual, + BorrowedToken::ShiftLeftVerticalBar => BorrowedToken::ShiftLeftVerticalBar, + BorrowedToken::VerticalBarShiftRight => BorrowedToken::VerticalBarShiftRight, + BorrowedToken::VerticalBarRightAngleBracket => { + BorrowedToken::VerticalBarRightAngleBracket + } + BorrowedToken::HashLongArrow => BorrowedToken::HashLongArrow, + BorrowedToken::AtArrow => BorrowedToken::AtArrow, + BorrowedToken::ArrowAt => BorrowedToken::ArrowAt, + BorrowedToken::HashMinus => BorrowedToken::HashMinus, + BorrowedToken::AtQuestion => BorrowedToken::AtQuestion, + BorrowedToken::AtAt => BorrowedToken::AtAt, + BorrowedToken::Question => BorrowedToken::Question, + BorrowedToken::QuestionAnd => BorrowedToken::QuestionAnd, + BorrowedToken::QuestionPipe => BorrowedToken::QuestionPipe, + BorrowedToken::CustomBinaryOperator(s) => BorrowedToken::CustomBinaryOperator(s), + BorrowedToken::_Phantom(_) => unreachable!("_Phantom should never be constructed"), + } + } +} + +impl BorrowedToken<'static> { pub fn make_keyword(keyword: &str) -> Self { - Token::make_word(keyword, None) + BorrowedToken::make_word(keyword, None) } pub fn make_word(word: &str, quote_style: Option) -> Self { let word_uppercase = word.to_uppercase(); - Token::Word(Word { + BorrowedToken::Word(Word { value: word.to_string(), quote_style, keyword: if quote_style.is_none() { @@ -656,7 +814,7 @@ impl Span { /// Backwards compatibility struct for [`TokenWithSpan`] #[deprecated(since = "0.53.0", note = "please use `TokenWithSpan` instead")] -pub type TokenWithLocation = TokenWithSpan; +pub type TokenWithLocation<'a> = TokenWithSpan<'a>; /// A [Token] with [Span] attached to it /// @@ -683,46 +841,58 @@ pub type TokenWithLocation = TokenWithSpan; #[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TokenWithSpan { - pub token: Token, +pub struct TokenWithSpan<'a> { + pub token: BorrowedToken<'a>, pub span: Span, } -impl TokenWithSpan { - /// Create a new [`TokenWithSpan`] from a [`Token`] and a [`Span`] - pub fn new(token: Token, span: Span) -> Self { +impl<'a> TokenWithSpan<'a> { + /// Create a new [`TokenWithSpan`] from a [`BorrowedToken`] and a [`Span`] + pub fn new(token: BorrowedToken<'a>, span: Span) -> Self { Self { token, span } } /// Wrap a token with an empty span - pub fn wrap(token: Token) -> Self { + pub fn wrap(token: BorrowedToken<'a>) -> Self { Self::new(token, Span::empty()) } /// Wrap a token with a location from `start` to `end` - pub fn at(token: Token, start: Location, end: Location) -> Self { + pub fn at(token: BorrowedToken<'a>, start: Location, end: Location) -> Self { Self::new(token, Span::new(start, end)) } /// Return an EOF token with no location pub fn new_eof() -> Self { - Self::wrap(Token::EOF) + Self::wrap(BorrowedToken::EOF) + } + + /// Convert to a `'static` lifetime by cloning the underlying data. + /// + /// This is used when tokens need to be stored in AST nodes that must be owned. + /// Currently all data is already owned (String), so this is just a clone. + /// When Cow is introduced, this will convert Cow::Borrowed → Cow::Owned. 
+ pub fn to_static(self) -> TokenWithSpan<'static> { + TokenWithSpan { + token: self.token.to_static(), + span: self.span, + } } } -impl PartialEq for TokenWithSpan { - fn eq(&self, other: &Token) -> bool { +impl<'a> PartialEq> for TokenWithSpan<'a> { + fn eq(&self, other: &BorrowedToken<'a>) -> bool { &self.token == other } } -impl PartialEq for Token { - fn eq(&self, other: &TokenWithSpan) -> bool { +impl<'a> PartialEq> for BorrowedToken<'a> { + fn eq(&self, other: &TokenWithSpan<'a>) -> bool { self == &other.token } } -impl fmt::Display for TokenWithSpan { +impl<'a> fmt::Display for TokenWithSpan<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.token.fmt(f) } @@ -892,23 +1062,35 @@ impl<'a> Tokenizer<'a> { } /// Tokenize the statement and produce a vector of tokens - pub fn tokenize(&mut self) -> Result, TokenizerError> { + pub fn tokenize(&mut self) -> Result>, TokenizerError> { let twl = self.tokenize_with_location()?; Ok(twl.into_iter().map(|t| t.token).collect()) } + pub fn tokenized_owned(&mut self) -> Result, TokenizerError> { + let tokens = self.tokenize()?; + Ok(tokens.into_iter().map(|t| t.to_static()).collect()) + } + /// Tokenize the statement and produce a vector of tokens with location information - pub fn tokenize_with_location(&mut self) -> Result, TokenizerError> { - let mut tokens: Vec = vec![]; + pub fn tokenize_with_location(&mut self) -> Result>, TokenizerError> { + let mut tokens: Vec> = vec![]; self.tokenize_with_location_into_buf(&mut tokens) .map(|_| tokens) } + pub fn tokenized_with_location_owned( + &mut self, + ) -> Result>, TokenizerError> { + let tokens = self.tokenize_with_location()?; + Ok(tokens.into_iter().map(|t| t.to_static()).collect()) + } + /// Tokenize the statement and append tokens with location information into the provided buffer. /// If an error is thrown, the buffer will contain all tokens that were successfully parsed before the error. 
pub fn tokenize_with_location_into_buf( &mut self, - buf: &mut Vec, + buf: &mut Vec>, ) -> Result<(), TokenizerError> { let mut state = State { peekable: self.query.chars().peekable(), @@ -961,7 +1143,7 @@ impl<'a> Tokenizer<'a> { fn next_token( &self, chars: &mut State<'a>, - prev_token: Option<&Token>, + prev_token: Option<&BorrowedToken<'a>>, ) -> Result, TokenizerError> { match chars.peek() { Some(&ch) => match ch { @@ -1219,7 +1401,7 @@ impl<'a> Tokenizer<'a> { // if the prev token is not a word, then this is not a valid sql // word or number. if ch == '.' && chars.peekable.clone().nth(1) == Some('_') { - if let Some(Token::Word(_)) = prev_token { + if let Some(&BorrowedToken::Word(_)) = prev_token { chars.next(); return Ok(Some(Token::Period)); } @@ -1263,7 +1445,7 @@ impl<'a> Tokenizer<'a> { // we should yield the dot as a dedicated token so compound identifiers // starting with digits can be parsed correctly. if s == "." && self.dialect.supports_numeric_prefix() { - if let Some(Token::Word(_)) = prev_token { + if let Some(&BorrowedToken::Word(_)) = prev_token { return Ok(Some(Token::Period)); } } @@ -1322,7 +1504,7 @@ impl<'a> Tokenizer<'a> { s += word.as_str(); return Ok(Some(Token::make_word(s.as_str(), None))); } - } else if prev_token == Some(&Token::Period) { + } else if matches!(prev_token, Some(&BorrowedToken::Period)) { // If the previous token was a period, thus not belonging to a number, // the value we have is part of an identifier. return Ok(Some(Token::make_word(s.as_str(), None))); diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs index b06f1141a..93a7ccc51 100644 --- a/tests/sqlparser_common.rs +++ b/tests/sqlparser_common.rs @@ -311,8 +311,7 @@ fn parse_insert_default_values() { fn parse_insert_select_returning() { // Dialects that support `RETURNING` as a column identifier do // not support this syntax. 
- let dialects = - all_dialects_where(|d| !d.is_column_alias(&Keyword::RETURNING, &mut Parser::new(d))); + let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::RETURNING, &Parser::new(d))); dialects.verified_stmt("INSERT INTO t SELECT 1 RETURNING 2"); let stmt = dialects.verified_stmt("INSERT INTO t SELECT x RETURNING x AS y"); @@ -5655,7 +5654,7 @@ fn parse_named_window_functions() { WINDOW w AS (PARTITION BY x), win AS (ORDER BY y)"; supported_dialects.verified_stmt(sql); - let select = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))) + let select = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &Parser::new(d))) .verified_only_select(sql); const EXPECTED_PROJ_QTY: usize = 2; @@ -5686,7 +5685,7 @@ fn parse_named_window_functions() { #[test] fn parse_window_clause() { - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))); + let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &Parser::new(d))); let sql = "SELECT * \ FROM mytable \ WINDOW \ @@ -5705,7 +5704,7 @@ fn parse_window_clause() { let dialects = all_dialects_except(|d| { d.is::() || d.is::() - || d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d)) + || d.is_table_alias(&Keyword::WINDOW, &Parser::new(d)) }); let res = dialects.parse_sql_statements(sql); assert_eq!( @@ -5716,7 +5715,7 @@ fn parse_window_clause() { #[test] fn test_parse_named_window() { - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))); + let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &Parser::new(d))); let sql = "SELECT \ MIN(c12) OVER window1 AS min1, \ MAX(c12) OVER window2 AS max1 \ @@ -5875,8 +5874,8 @@ fn test_parse_named_window() { #[test] fn parse_window_and_qualify_clause() { let dialects = all_dialects_except(|d| { - d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d)) - || d.is_table_alias(&Keyword::QUALIFY, &mut Parser::new(d)) + 
d.is_table_alias(&Keyword::WINDOW, &Parser::new(d)) + || d.is_table_alias(&Keyword::QUALIFY, &Parser::new(d)) }); let sql = "SELECT \ MIN(c12) OVER window1 AS min1 \ @@ -7614,7 +7613,7 @@ fn parse_join_syntax_variants() { "SELECT c1 FROM t1 FULL JOIN t2 USING(c1)", ); - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::OUTER, &mut Parser::new(d))); + let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::OUTER, &Parser::new(d))); let res = dialects.parse_sql_statements("SELECT * FROM a OUTER JOIN b ON 1"); assert_eq!( ParserError::ParserError("Expected: APPLY, found: JOIN".to_string()), @@ -7829,7 +7828,7 @@ fn parse_union_except_intersect_minus() { // Dialects that support `MINUS` as column identifier // do not support `MINUS` as a set operator. - let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::MINUS, &mut Parser::new(d))); + let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::MINUS, &Parser::new(d))); dialects.verified_stmt("SELECT 1 MINUS SELECT 2"); dialects.verified_stmt("SELECT 1 MINUS ALL SELECT 2"); dialects.verified_stmt("SELECT 1 MINUS DISTINCT SELECT 1"); @@ -8592,8 +8591,7 @@ fn parse_invalid_subquery_without_parens() { fn parse_offset() { // Dialects that support `OFFSET` as column identifiers // don't support this syntax. 
- let dialects = - all_dialects_where(|d| !d.is_column_alias(&Keyword::OFFSET, &mut Parser::new(d))); + let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::OFFSET, &Parser::new(d))); let expected_limit_clause = &Some(LimitClause::LimitOffset { limit: None, @@ -12454,7 +12452,7 @@ fn test_buffer_reuse() { Tokenizer::new(&d, q) .tokenize_with_location_into_buf(&mut buf) .unwrap(); - let mut p = Parser::new(&d).with_tokens_with_locations(buf); + let p = Parser::new(&d).with_tokens_with_locations(buf); p.parse_statements().unwrap(); let _ = p.into_tokens(); } @@ -15368,7 +15366,7 @@ fn parse_case_statement() { #[test] fn test_case_statement_span() { let sql = "CASE 1 WHEN 2 THEN SELECT 1; SELECT 2; ELSE SELECT 3; END CASE"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + let parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) @@ -15449,7 +15447,7 @@ fn parse_if_statement() { #[test] fn test_if_statement_span() { let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + let parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) @@ -15463,7 +15461,7 @@ fn test_if_statement_multiline_span() { let sql_line3 = "ELSE SELECT 3;"; let sql_line4 = "END IF"; let sql = [sql_line1, sql_line2, sql_line3, sql_line4].join("\n"); - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(&sql).unwrap(); + let parser = Parser::new(&GenericDialect {}).try_with_sql(&sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new( @@ -15476,7 +15474,7 @@ fn test_if_statement_multiline_span() { #[test] fn test_conditional_statement_span() { 
let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + let parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); match parser.parse_statement().unwrap() { Statement::If(IfStatement { if_block, @@ -16958,7 +16956,7 @@ fn test_select_exclude() { let dialects = all_dialects_where(|d| { d.supports_select_wildcard_exclude() && !d.supports_select_exclude() - && d.is_column_alias(&Keyword::EXCLUDE, &mut Parser::new(d)) + && d.is_column_alias(&Keyword::EXCLUDE, &Parser::new(d)) }); assert_eq!( dialects @@ -16973,7 +16971,7 @@ fn test_select_exclude() { let dialects = all_dialects_where(|d| { d.supports_select_wildcard_exclude() && !d.supports_select_exclude() - && !d.is_column_alias(&Keyword::EXCLUDE, &mut Parser::new(d)) + && !d.is_column_alias(&Keyword::EXCLUDE, &Parser::new(d)) }); assert_eq!( dialects diff --git a/tests/sqlparser_custom_dialect.rs b/tests/sqlparser_custom_dialect.rs index cee604aca..600d717be 100644 --- a/tests/sqlparser_custom_dialect.rs +++ b/tests/sqlparser_custom_dialect.rs @@ -39,7 +39,7 @@ fn custom_prefix_parser() -> Result<(), ParserError> { is_identifier_part(ch) } - fn parse_prefix(&self, parser: &mut Parser) -> Option<Result<Expr, ParserError>> { + fn parse_prefix(&self, parser: &Parser) -> Option<Result<Expr, ParserError>> { if parser.consume_token(&Token::Number("1".to_string(), false)) { Some(Ok(Expr::Value(Value::Null.with_empty_span()))) } else { @@ -72,7 +72,7 @@ fn custom_infix_parser() -> Result<(), ParserError> { fn parse_infix( &self, - parser: &mut Parser, + parser: &Parser, expr: &Expr, _precedence: u8, ) -> Option<Result<Expr, ParserError>> { @@ -110,7 +110,7 @@ fn custom_statement_parser() -> Result<(), ParserError> { is_identifier_part(ch) } - fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> { + fn parse_statement(&self, parser: &Parser) -> Option<Result<Statement, ParserError>> { if parser.parse_keyword(Keyword::SELECT) { for _ in 0..3 { let _ = parser.next_token(); diff --git 
a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index a947db49b..24937d0a2 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -2213,7 +2213,7 @@ fn parse_mssql_if_else() { #[test] fn test_mssql_if_else_span() { let sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); + let parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) @@ -2226,7 +2226,7 @@ fn test_mssql_if_else_multiline_span() { let sql_line2 = "SELECT '1'"; let sql_line3 = "ELSE SELECT '2'"; let sql = [sql_line1, sql_line2, sql_line3].join("\n"); - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); + let parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new( diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs index fbfa66588..94b42c6d9 100644 --- a/tests/sqlparser_postgres.rs +++ b/tests/sqlparser_postgres.rs @@ -2045,7 +2045,7 @@ fn parse_pg_returning() { fn test_operator(operator: &str, dialect: &TestedDialects, expected: BinaryOperator) { let operator_tokens = sqlparser::tokenizer::Tokenizer::new(&PostgreSqlDialect {}, &format!("a{operator}b")) - .tokenize() + .tokenized_owned() .unwrap(); assert_eq!( operator_tokens.len(),