Skip to content

Commit 6e2cd53

Browse files
authored
Merge pull request #6 from Akanoa/carriage_return
feat: Add support for carriage return and tab recognizers
2 parents 49d38b7 + a0491d2 commit 6e2cd53

7 files changed

Lines changed: 29 additions & 7 deletions

File tree

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "noa-parser"
3-
version = "0.3.0"
3+
version = "0.4.0"
44
edition = "2024"
55
homepage = "https://github.com/Akanoa/noa-parser"
66
repository = "https://github.com/Akanoa/noa-parser"

Changelog.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,9 @@
11
# Changelog
22

3+
**0.4.0**
4+
5+
- Add support for carriage return and tab recognizers
6+
37
**0.3.0**
48

59
- Add support to delimited group

examples/delimited_group.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,9 @@ use noa_parser::peek::peek;
44
fn main() {
55
let data = b"(2 * 3)";
66
let mut scanner = noa_parser::scanner::Scanner::new(data);
7-
let result = peek(GroupKind::Parenthesis, &mut scanner).expect("failed to parse").expect("failed to peek");
7+
let result = peek(GroupKind::Parenthesis, &mut scanner)
8+
.expect("failed to parse")
9+
.expect("failed to peek");
810
println!(
911
"{}",
1012
String::from_utf8_lossy(result.peeked_slice()) // 2 * 3

src/bytes/components/groups.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -233,9 +233,9 @@ impl<'a> Peekable<'a, u8, Token, Token> for GroupKind {
233233

234234
#[cfg(test)]
235235
mod tests {
236-
use crate::bytes::components::groups::{match_for_delimited_group, match_group, GroupKind};
236+
use crate::bytes::components::groups::{GroupKind, match_for_delimited_group, match_group};
237237
use crate::bytes::token::Token;
238-
use crate::peek::{peek, PeekResult, Peeking};
238+
use crate::peek::{PeekResult, Peeking, peek};
239239
use crate::scanner::Scanner;
240240

241241
#[test]

src/bytes/token.rs

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
//! Classic tokens
22
3-
use crate::bytes::matchers::match_char;
3+
use crate::bytes::matchers::{match_char, match_pattern};
44
use crate::matcher::{Match, MatchSize};
55

66
#[derive(Copy, Clone)]
@@ -65,6 +65,14 @@ pub enum Token {
6565
Underscore,
6666
/// The `#` character
6767
Sharp,
68+
/// The `\n` character
69+
Ln,
70+
/// The `\r` character
71+
Cr,
72+
/// The `\t` character
73+
Tab,
74+
/// The `\r\n` sequence (two characters)
75+
CrLn,
6876
}
6977

7078
impl Match<u8> for Token {
@@ -99,6 +107,10 @@ impl Match<u8> for Token {
99107
Token::Backslash => match_char('\\', data),
100108
Token::Underscore => match_char('_', data),
101109
Token::Sharp => match_char('#', data),
110+
Token::Ln => match_char('\n', data),
111+
Token::Cr => match_char('\r', data),
112+
Token::Tab => match_char('\t', data),
113+
Token::CrLn => match_pattern(b"\r\n", data),
102114
}
103115
}
104116
}
@@ -135,6 +147,10 @@ impl MatchSize for Token {
135147
Token::Backslash => 1,
136148
Token::Underscore => 1,
137149
Token::Sharp => 1,
150+
Token::Ln => 1,
151+
Token::Cr => 1,
152+
Token::Tab => 1,
153+
Token::CrLn => 2,
138154
}
139155
}
140156
}

src/peek.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
66
use crate::errors::ParseResult;
77
use crate::matcher::MatchSize;
8-
use crate::recognizer::{recognize, Recognizable};
8+
use crate::recognizer::{Recognizable, recognize};
99
use crate::scanner::Scanner;
1010
use std::marker::PhantomData;
1111

0 commit comments

Comments (0)