
Commit 31bcfdd

Merge #39

39: Tools r=matklad a=matklad

closes #34

2 parents: 3c70ae2 + a5a6973

File tree: 15 files changed (+184 lines, -36 lines)

.cargo/config

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
 [alias]
 parse = "run --package tools --bin parse"
 gen = "run --package tools --bin gen"
+collect-tests = "run --package tools --bin collect-tests --"
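Note the trailing `--` in the new alias: when the alias is invoked as `cargo collect-tests --verify`, the `--` ends `cargo run`'s own argument parsing, so `--verify` is forwarded to the collect-tests binary itself rather than being interpreted by cargo. This is what lets the appveyor.yml step below pass the flag through.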

appveyor.yml

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@ install:
 build: false

 test_script:
+  - cargo collect-tests --verify
   - cargo test

 branches:

docs/TESTS.md

Lines changed: 15 additions & 1 deletion
@@ -19,12 +19,26 @@ files to have the same name except for the leading number. In general,
 test suite should be append-only: old tests should not be modified,
 new tests should be created instead.

-
 Note that only `ok` tests are normative: `err` tests test error
 recovery and it is totally ok for a parser to not implement any error
 recovery at all. However, for libsyntax2.0 we do care about error
 recovery, and we do care about precise and useful error messages.

+There are also so-called "inline tests". They appear as comments
+with a `test` header in the source code, like this:
+
+```rust
+// test fn_basic
+// fn foo() {}
+fn fn_item(p: &mut Parser) {
+    // ...
+}
+```
+
+You can run the `cargo collect-tests` command to collect all inline tests
+into the `tests/data/parser/inline` directory. The main advantage of inline
+tests is that they help illustrate what the relevant code is doing.
+

 Contribution opportunity: design and implement testing infrastructure
 for validators.
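For reference, the collector added below in tools/src/bin/collect-tests.rs derives each recorded file's name from a running index plus the test name. A minimal sketch of that naming scheme, with the index value chosen purely for illustration:

```rust
// Illustration only: in collect-tests.rs the index is existing.len() + i + 1.
fn main() {
    let index = 1;
    let file_name = format!("{:04}_{}.rs", index, "const_fn");
    assert_eq!(file_name, "0001_const_fn.rs");
}
```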

src/parser/event_parser/grammar/items/mod.rs

Lines changed: 4 additions & 0 deletions
@@ -52,11 +52,15 @@ fn item(p: &mut Parser) {
             STATIC_ITEM
         }
         CONST_KW => match p.nth(1) {
+            // test const_fn
+            // const fn foo() {}
             FN_KW => {
                 p.bump();
                 fn_item(p);
                 FN_ITEM
             }
+            // test const_unsafe_fn
+            // const unsafe fn foo() {}
             UNSAFE_KW if p.nth(2) == FN_KW => {
                 p.bump();
                 p.bump();

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+const unsafe fn foo() {}

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+FILE@[0; 25)
+  FN_ITEM@[0; 25)
+    CONST_KW@[0; 5)
+    WHITESPACE@[5; 6)
+    UNSAFE_KW@[6; 12)
+    WHITESPACE@[12; 13)
+    FN_KW@[13; 15)
+    WHITESPACE@[15; 16)
+    IDENT@[16; 19) "foo"
+    L_PAREN@[19; 20)
+    R_PAREN@[20; 21)
+    WHITESPACE@[21; 22)
+    L_CURLY@[22; 23)
+    R_CURLY@[23; 24)
+    WHITESPACE@[24; 25)

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+const fn foo() {}

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+FILE@[0; 18)
+  FN_ITEM@[0; 18)
+    CONST_KW@[0; 5)
+    WHITESPACE@[5; 6)
+    FN_KW@[6; 8)
+    WHITESPACE@[8; 9)
+    IDENT@[9; 12) "foo"
+    L_PAREN@[12; 13)
+    R_PAREN@[13; 14)
+    WHITESPACE@[14; 15)
+    L_CURLY@[15; 16)
+    R_CURLY@[16; 17)
+    WHITESPACE@[17; 18)

tests/data/parser/ok/0024_const_fn.rs

Lines changed: 0 additions & 5 deletions
This file was deleted.

tests/data/parser/ok/0024_const_fn.txt

Lines changed: 0 additions & 29 deletions
This file was deleted.

tests/parser.rs

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ use testutils::dir_tests;

 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/ok", "parser/err"], |text| {
+    dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
         let tokens = tokenize(text);
         let file = parse(text.to_string(), &tokens);
         dump_tree(&file)

tools/Cargo.toml

Lines changed: 2 additions & 0 deletions
@@ -9,4 +9,6 @@ serde = "1.0.26"
 serde_derive = "1.0.26"
 file = "1.1.1"
 ron = "0.1.5"
+walkdir = "2"
+itertools = "0.7"
 libsyntax2 = { path = "../" }

tools/src/bin/collect-tests.rs

Lines changed: 130 additions & 0 deletions
@@ -0,0 +1,130 @@
+extern crate file;
+extern crate itertools;
+extern crate walkdir;
+
+use walkdir::WalkDir;
+use itertools::Itertools;
+
+use std::path::{Path, PathBuf};
+use std::collections::HashSet;
+use std::fs;
+
+fn main() {
+    let verify = ::std::env::args().any(|arg| arg == "--verify");
+
+    let d = grammar_dir();
+    let tests = tests_from_dir(&d);
+    let existing = existing_tests();
+
+    for t in existing.difference(&tests) {
+        panic!("Test is deleted: {}\n{}", t.name, t.text);
+    }
+
+    let new_tests = tests.difference(&existing);
+    for (i, t) in new_tests.enumerate() {
+        if verify {
+            panic!("Inline test is not recorded: {}", t.name);
+        }
+
+        let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
+        println!("Creating {}", name);
+        let path = inline_tests_dir().join(name);
+        file::put_text(&path, &t.text).unwrap();
+    }
+}
+
+#[derive(Debug, Eq)]
+struct Test {
+    name: String,
+    text: String,
+}
+
+impl PartialEq for Test {
+    fn eq(&self, other: &Test) -> bool {
+        self.name.eq(&other.name)
+    }
+}
+
+impl ::std::hash::Hash for Test {
+    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
+        self.name.hash(state)
+    }
+}
+
+fn tests_from_dir(dir: &Path) -> HashSet<Test> {
+    let mut res = HashSet::new();
+    for entry in WalkDir::new(dir) {
+        let entry = entry.unwrap();
+        if !entry.file_type().is_file() {
+            continue;
+        }
+        if entry.path().extension().unwrap_or_default() != "rs" {
+            continue;
+        }
+        let text = file::get_text(entry.path()).unwrap();
+
+        for test in collect_tests(&text) {
+            if let Some(old_test) = res.replace(test) {
+                panic!("Duplicate test: {}", old_test.name)
+            }
+        }
+    }
+    res
+}
+
+fn collect_tests(s: &str) -> Vec<Test> {
+    let mut res = vec![];
+    let prefix = "// ";
+    let comment_blocks = s.lines()
+        .map(str::trim_left)
+        .group_by(|line| line.starts_with(prefix));
+
+    for (is_comment, block) in comment_blocks.into_iter() {
+        if !is_comment {
+            continue;
+        }
+        let mut block = block.map(|line| &line[prefix.len()..]);
+        let first = block.next().unwrap();
+        if !first.starts_with("test ") {
+            continue;
+        }
+        let name = first["test ".len()..].to_string();
+        let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
+        assert!(!text.trim().is_empty() && text.ends_with("\n"));
+        res.push(Test { name, text })
+    }
+    res
+}
+
+fn existing_tests() -> HashSet<Test> {
+    let mut res = HashSet::new();
+    for file in fs::read_dir(&inline_tests_dir()).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() != "rs" {
+            continue;
+        }
+        let name = path.file_name().unwrap().to_str().unwrap();
+        let name = name["0000_".len()..name.len() - 3].to_string();
+        let text = file::get_text(&path).unwrap();
+        res.insert(Test { name, text });
+    }
+    res
+}
+
+fn inline_tests_dir() -> PathBuf {
+    let res = base_dir().join("tests/data/parser/inline");
+    if !res.is_dir() {
+        fs::create_dir_all(&res).unwrap();
+    }
+    res
+}
+
+fn grammar_dir() -> PathBuf {
+    base_dir().join("src/parser/event_parser/grammar")
+}
+
+fn base_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir).parent().unwrap().to_owned()
+}
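For a sense of what `collect_tests` extracts, here is a minimal sketch (not part of the commit) of a unit test one could place next to the function in the same file; the input snippet mirrors the inline-test shape described in docs/TESTS.md:

```rust
// Hypothetical test, assuming it lives alongside collect_tests() in collect-tests.rs.
#[test]
fn collects_single_inline_test() {
    let src = "\
// test const_fn
// const fn foo() {}
fn fn_item() {}
";
    let tests = collect_tests(src);
    assert_eq!(tests.len(), 1);
    assert_eq!(tests[0].name, "const_fn");
    // The recorded text is the comment body with the `// ` prefix stripped,
    // plus a trailing newline.
    assert_eq!(tests[0].text, "const fn foo() {}\n");
}
```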
