
Commit e13ee6b

Report columns as 1-based
Change cssparser to report columns as being 1-based, for Gecko compatibility. Formerly this offsetting was handled in Servo, but it wasn't done on all code paths, so we agreed to put it into rust-cssparser. For some background, see: https://bugzilla.mozilla.org/show_bug.cgi?id=1398869
1 parent: 7c6c986
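
The observable effect on the public API: the very first position in a stylesheet is now reported as column 1 rather than column 0, and every column value is one greater than before. A minimal sketch of what a caller sees (not part of this commit; it assumes the same ParserInput, Parser and SourceLocation API that the test suite below uses, including ParserInput::new):

    extern crate cssparser;
    use cssparser::{Parser, ParserInput, SourceLocation};

    fn main() {
        let mut input = ParserInput::new("foo");
        let mut parser = Parser::new(&mut input);
        // With this change, the location before any token is consumed is
        // line 0 (lines stay 0-based) and column 1 (columns are now 1-based).
        assert_eq!(parser.current_source_location(),
                   SourceLocation { line: 0, column: 1 });
    }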

File tree

4 files changed, +34 -34 lines changed:

Cargo.toml
src/parser.rs
src/tests.rs
src/tokenizer.rs

Cargo.toml
+1 -1

@@ -1,6 +1,6 @@
 [package]
 name = "cssparser"
-version = "0.20.2"
+version = "0.21.0"
 authors = [ "Simon Sapin <[email protected]>" ]

 description = "Rust implementation of CSS Syntax Level 3"

src/parser.rs
+1 -1

@@ -35,7 +35,7 @@ impl ParserState {
     pub fn source_location(&self) -> SourceLocation {
         SourceLocation {
             line: self.current_line_number,
-            column: (self.position - self.current_line_start_position) as u32,
+            column: (self.position - self.current_line_start_position + 1) as u32,
         }
     }
 }

src/tests.rs
+30 -30

@@ -498,29 +498,29 @@ fn line_numbers() {
         "b\""
     ));
     let mut input = Parser::new(&mut input);
-    assert_eq!(input.current_source_location(), SourceLocation { line: 0, column: 0 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 0, column: 1 });
     assert_eq!(input.next_including_whitespace(), Ok(&Token::Ident("fo00o".into())));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 2 });
-    assert_eq!(input.next_including_whitespace(), Ok(&Token::WhiteSpace(" ")));
     assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 3 });
+    assert_eq!(input.next_including_whitespace(), Ok(&Token::WhiteSpace(" ")));
+    assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 4 });
     assert_eq!(input.next_including_whitespace(), Ok(&Token::Ident("bar".into())));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 6 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 7 });
     assert_eq!(input.next_including_whitespace_and_comments(), Ok(&Token::Comment("\n")));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 2, column: 2 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 2, column: 3 });
     assert_eq!(input.next_including_whitespace(), Ok(&Token::Ident("baz".into())));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 2, column: 5 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 2, column: 6 });
     let state = input.state();

     assert_eq!(input.next_including_whitespace(), Ok(&Token::WhiteSpace("\r\n\n")));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 4, column: 0 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 4, column: 1 });

-    assert_eq!(state.source_location(), SourceLocation { line: 2, column: 5 });
+    assert_eq!(state.source_location(), SourceLocation { line: 2, column: 6 });

     assert_eq!(input.next_including_whitespace(), Ok(&Token::UnquotedUrl("u".into())));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 6, column: 1 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 6, column: 2 });

     assert_eq!(input.next_including_whitespace(), Ok(&Token::QuotedString("ab".into())));
-    assert_eq!(input.current_source_location(), SourceLocation { line: 7, column: 2 });
+    assert_eq!(input.current_source_location(), SourceLocation { line: 7, column: 3 });
     assert!(input.next_including_whitespace().is_err());
 }

@@ -1000,14 +1000,14 @@ fn parser_maintains_current_line() {
 fn parser_with_line_number_offset() {
     let mut input = ParserInput::new_with_line_number_offset("ident\nident", 72);
     let mut parser = Parser::new(&mut input);
-    assert_eq!(parser.current_source_location(), SourceLocation { line: 72, column: 0 });
+    assert_eq!(parser.current_source_location(), SourceLocation { line: 72, column: 1 });
     assert_eq!(parser.next_including_whitespace_and_comments(), Ok(&Token::Ident("ident".into())));
-    assert_eq!(parser.current_source_location(), SourceLocation { line: 72, column: 5 });
+    assert_eq!(parser.current_source_location(), SourceLocation { line: 72, column: 6 });
     assert_eq!(parser.next_including_whitespace_and_comments(),
                Ok(&Token::WhiteSpace("\n".into())));
-    assert_eq!(parser.current_source_location(), SourceLocation { line: 73, column: 0 });
+    assert_eq!(parser.current_source_location(), SourceLocation { line: 73, column: 1 });
     assert_eq!(parser.next_including_whitespace_and_comments(), Ok(&Token::Ident("ident".into())));
-    assert_eq!(parser.current_source_location(), SourceLocation { line: 73, column: 5 });
+    assert_eq!(parser.current_source_location(), SourceLocation { line: 73, column: 6 });
 }

 #[test]
@@ -1088,24 +1088,24 @@ fn utf16_columns() {
     // the column is in units of UTF-16, the 4-byte sequence results
     // in two columns.
     let tests = vec![
-        ("", 0),
-        ("ascii", 5),
-        ("/*QΡ✈🆒*/", 9),
-        ("'QΡ✈🆒*'", 8),
-        ("\"\\\"'QΡ✈🆒*'", 11),
-        ("\\Q\\Ρ\\\\🆒", 9),
-        ("QΡ✈🆒", 5),
-        ("QΡ✈🆒\\Q\\Ρ\\\\🆒", 14),
-        ("newline\r\nQΡ✈🆒", 5),
-        ("url(QΡ✈🆒\\Q\\Ρ\\\\🆒)", 19),
-        ("url(QΡ✈🆒)", 10),
-        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒)", 15),
-        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒", 14),
-        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒 x", 16),
-        ("QΡ✈🆒()", 7),
+        ("", 1),
+        ("ascii", 6),
+        ("/*QΡ✈🆒*/", 10),
+        ("'QΡ✈🆒*'", 9),
+        ("\"\\\"'QΡ✈🆒*'", 12),
+        ("\\Q\\Ρ\\\\🆒", 10),
+        ("QΡ✈🆒", 6),
+        ("QΡ✈🆒\\Q\\Ρ\\\\🆒", 15),
+        ("newline\r\nQΡ✈🆒", 6),
+        ("url(QΡ✈🆒\\Q\\Ρ\\\\🆒)", 20),
+        ("url(QΡ✈🆒)", 11),
+        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒)", 16),
+        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒", 15),
+        ("url(\r\nQΡ✈🆒\\Q\\Ρ\\\\🆒 x", 17),
+        ("QΡ✈🆒()", 8),
         // Test that under/over-flow of current_line_start_position is
         // handled properly; see the special case in consume_4byte_intro.
-        ("🆒", 2),
+        ("🆒", 3),
     ];

     for test in tests {
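
The utf16_columns expectations combine the new 1-based origin with column units of UTF-16 code units: the single 4-byte character 🆒 occupies two UTF-16 units, so after tokenizing it the column is 1 + 2 = 3. A small sketch of that behaviour using the plain public API (an illustration under the same assumptions as above, not the test harness itself):

    extern crate cssparser;
    use cssparser::{Parser, ParserInput, SourceLocation};

    // Tokenize the whole input, then report the column the tokenizer ended on.
    fn utf16_column_of(css: &str) -> u32 {
        let mut input = ParserInput::new(css);
        let mut parser = Parser::new(&mut input);
        while parser.next_including_whitespace_and_comments().is_ok() {}
        parser.current_source_location().column
    }

    fn main() {
        // These match the expectations in the table above.
        assert_eq!(utf16_column_of(""), 1);
        assert_eq!(utf16_column_of("ascii"), 6);
        assert_eq!(utf16_column_of("🆒"), 3);
    }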

src/tokenizer.rs
+2 -2

@@ -276,7 +276,7 @@ impl<'a> Tokenizer<'a> {
     pub fn current_source_location(&self) -> SourceLocation {
         SourceLocation {
             line: self.current_line_number,
-            column: (self.position - self.current_line_start_position) as u32,
+            column: (self.position - self.current_line_start_position + 1) as u32,
         }
     }

@@ -521,7 +521,7 @@ pub struct SourceLocation {
     /// The line number, starting at 0 for the first line, unless `with_first_line_number` was used.
     pub line: u32,

-    /// The column number within a line, starting at 0 for the first character of the line.
+    /// The column number within a line, starting at 1 for the first character of the line.
     /// Column numbers are in units of UTF-16 characters.
     pub column: u32,
 }