./libsyntax/parse/mod.rs
git branch: * master 5200215 auto merge of #14035 : alexcrichton/rust/experimental, r=huonw
modified: Fri May 9 13:02:28 2014
1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! The main parser interface
12
13
14 use ast;
15 use codemap::{Span, CodeMap, FileMap};
16 use diagnostic::{SpanHandler, mk_span_handler, default_handler};
17 use parse::attr::ParserAttr;
18 use parse::parser::Parser;
19
20 use std::cell::RefCell;
21 use std::io::File;
22 use std::rc::Rc;
23 use std::str;
24
25 pub mod lexer;
26 pub mod parser;
27 pub mod token;
28 pub mod comments;
29 pub mod attr;
30
31 pub mod common;
32 pub mod classify;
33 pub mod obsolete;
34
35 // info about a parsing session.
36 pub struct ParseSess {
37 pub span_diagnostic: SpanHandler, // better be the same as the one in the reader!
38 /// Used to determine and report recursive mod inclusions
39 included_mod_stack: RefCell<Vec<Path>>,
40 }
41
42 pub fn new_parse_sess() -> ParseSess {
43 ParseSess {
44 span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()),
45 included_mod_stack: RefCell::new(Vec::new()),
46 }
47 }
48
49 pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess {
50 ParseSess {
51 span_diagnostic: sh,
52 included_mod_stack: RefCell::new(Vec::new()),
53 }
54 }
55
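// --- Illustrative sketch, not part of the original file -------------------
// A minimal illustration of the two constructors above: new_parse_sess for
// the default diagnostics setup, new_parse_sess_special_handler when the
// caller already has a SpanHandler. The function name `example_sessions`
// is hypothetical; it only reuses items already imported by this module.
#[allow(dead_code)]
fn example_sessions() -> (ParseSess, ParseSess) {
    // default handler and a fresh CodeMap
    let plain = new_parse_sess();
    // reuse a SpanHandler built elsewhere (here, the same default pieces)
    let custom = new_parse_sess_special_handler(
        mk_span_handler(default_handler(), CodeMap::new()));
    (plain, custom)
}
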
56 // a bunch of utility functions of the form parse_<thing>_from_<source>
57 // where <thing> includes crate, expr, item, stmt, tts, and one that
58 // uses a HOF to parse anything, and <source> includes file and
59 // source_str.
60
61 pub fn parse_crate_from_file(
62 input: &Path,
63 cfg: ast::CrateConfig,
64 sess: &ParseSess
65 ) -> ast::Crate {
66 new_parser_from_file(sess, cfg, input).parse_crate_mod()
67 // why is there no p.abort_if_errors here?
68 }
69
70 pub fn parse_crate_attrs_from_file(
71 input: &Path,
72 cfg: ast::CrateConfig,
73 sess: &ParseSess
74 ) -> Vec<ast::Attribute> {
75 let mut parser = new_parser_from_file(sess, cfg, input);
76 let (inner, _) = parser.parse_inner_attrs_and_next();
77 inner
78 }
79
80 pub fn parse_crate_from_source_str(name: StrBuf,
81 source: StrBuf,
82 cfg: ast::CrateConfig,
83 sess: &ParseSess)
84 -> ast::Crate {
85 let mut p = new_parser_from_source_str(sess,
86 cfg,
87 name,
88 source);
89 maybe_aborted(p.parse_crate_mod(),p)
90 }
91
92 pub fn parse_crate_attrs_from_source_str(name: StrBuf,
93 source: StrBuf,
94 cfg: ast::CrateConfig,
95 sess: &ParseSess)
96 -> Vec<ast::Attribute> {
97 let mut p = new_parser_from_source_str(sess,
98 cfg,
99 name,
100 source);
101 let (inner, _) = maybe_aborted(p.parse_inner_attrs_and_next(),p);
102 inner
103 }
104
105 pub fn parse_expr_from_source_str(name: StrBuf,
106 source: StrBuf,
107 cfg: ast::CrateConfig,
108 sess: &ParseSess)
109 -> @ast::Expr {
110 let mut p = new_parser_from_source_str(sess, cfg, name, source);
111 maybe_aborted(p.parse_expr(), p)
112 }
113
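// --- Illustrative sketch, not part of the original file -------------------
// A minimal end-to-end use of the parse_<thing>_from_<source> helpers,
// assuming the API shown in this module. The function name
// `example_parse_expr` and the "<example>" filename are hypothetical.
#[allow(dead_code)]
fn example_parse_expr() -> @ast::Expr {
    let sess = new_parse_sess();
    // an empty crate config is enough for a standalone expression
    let cfg: ast::CrateConfig = Vec::new();
    parse_expr_from_source_str("<example>".to_strbuf(),
                               "1 + 2 * 3".to_strbuf(),
                               cfg,
                               &sess)
}
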
114 pub fn parse_item_from_source_str(name: StrBuf,
115 source: StrBuf,
116 cfg: ast::CrateConfig,
117 sess: &ParseSess)
118 -> Option<@ast::Item> {
119 let mut p = new_parser_from_source_str(sess, cfg, name, source);
120 let attrs = p.parse_outer_attributes();
121 maybe_aborted(p.parse_item(attrs),p)
122 }
123
124 pub fn parse_meta_from_source_str(name: StrBuf,
125 source: StrBuf,
126 cfg: ast::CrateConfig,
127 sess: &ParseSess)
128 -> @ast::MetaItem {
129 let mut p = new_parser_from_source_str(sess, cfg, name, source);
130 maybe_aborted(p.parse_meta_item(),p)
131 }
132
133 pub fn parse_stmt_from_source_str(name: StrBuf,
134 source: StrBuf,
135 cfg: ast::CrateConfig,
136 attrs: Vec<ast::Attribute> ,
137 sess: &ParseSess)
138 -> @ast::Stmt {
139 let mut p = new_parser_from_source_str(
140 sess,
141 cfg,
142 name,
143 source
144 );
145 maybe_aborted(p.parse_stmt(attrs),p)
146 }
147
148 pub fn parse_tts_from_source_str(name: StrBuf,
149 source: StrBuf,
150 cfg: ast::CrateConfig,
151 sess: &ParseSess)
152 -> Vec<ast::TokenTree> {
153 let mut p = new_parser_from_source_str(
154 sess,
155 cfg,
156 name,
157 source
158 );
159 p.quote_depth += 1u;
160 // right now this is re-creating the token trees from ... token trees.
161 maybe_aborted(p.parse_all_token_trees(),p)
162 }
163
164 // Create a new parser from a source string
165 pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
166 cfg: ast::CrateConfig,
167 name: StrBuf,
168 source: StrBuf)
169 -> Parser<'a> {
170 filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
171 }
172
173 /// Create a new parser, handling errors as appropriate
174 /// if the file doesn't exist
175 pub fn new_parser_from_file<'a>(sess: &'a ParseSess,
176 cfg: ast::CrateConfig,
177 path: &Path) -> Parser<'a> {
178 filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
179 }
180
181 /// Given a session, a crate config, a path, and a span, add
182 /// the file at the given path to the codemap, and return a parser.
183 /// On an error, use the given span as the source of the problem.
184 pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
185 cfg: ast::CrateConfig,
186 path: &Path,
187 sp: Span) -> Parser<'a> {
188 filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
189 }
190
191 /// Given a filemap and config, return a parser
192 pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
193 filemap: Rc<FileMap>,
194 cfg: ast::CrateConfig) -> Parser<'a> {
195 tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
196 }
197
198 // must preserve old name for now, because quote! from the *existing*
199 // compiler expands into it
200 pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
201 cfg: ast::CrateConfig,
202 tts: Vec<ast::TokenTree>) -> Parser<'a> {
203 tts_to_parser(sess, tts, cfg)
204 }
205
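// --- Illustrative sketch, not part of the original file -------------------
// Roughly how a syntax extension re-parses the token trees handed to it
// (compare the callers in libsyntax/ext/* listed in the references appendix
// below). The function name `example_reparse_tts` is hypothetical.
#[allow(dead_code)]
fn example_reparse_tts<'a>(sess: &'a ParseSess,
                           cfg: ast::CrateConfig,
                           tts: Vec<ast::TokenTree>) -> @ast::Expr {
    let mut p = new_parser_from_tts(sess, cfg, tts);
    // same idiom the helpers above use: parse, then abort on any errors
    maybe_aborted(p.parse_expr(), p)
}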
206
207 // base abstractions
208
209 /// Given a session and a path and an optional span (for error reporting),
210 /// add the path to the session's codemap and return the new filemap.
211 pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
212 -> Rc<FileMap> {
213 let err = |msg: &str| {
214 match spanopt {
215 Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
216 None => sess.span_diagnostic.handler().fatal(msg),
217 }
218 };
219 let bytes = match File::open(path).read_to_end() {
220 Ok(bytes) => bytes,
221 Err(e) => {
222 err(format!("couldn't read {}: {}", path.display(), e));
223 unreachable!()
224 }
225 };
226 match str::from_utf8(bytes.as_slice()) {
227 Some(s) => {
228 return string_to_filemap(sess, s.to_strbuf(),
229 path.as_str().unwrap().to_strbuf())
230 }
231 None => err(format!("{} is not UTF-8 encoded", path.display())),
232 }
233 unreachable!()
234 }
235
236 // given a session and a string, add the string to
237 // the session's codemap and return the new filemap
238 pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
239 -> Rc<FileMap> {
240 sess.span_diagnostic.cm.new_filemap(path, source)
241 }
242
243 // given a filemap, produce a sequence of token-trees
244 pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
245 -> Vec<ast::TokenTree> {
246 // it appears to me that the cfg doesn't matter here... indeed,
247 // parsing tt's probably shouldn't require a parser at all.
248 let cfg = Vec::new();
249 let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap);
250 let mut p1 = Parser(sess, cfg, box srdr);
251 p1.parse_all_token_trees()
252 }
253
254 // given tts and cfg, produce a parser
255 pub fn tts_to_parser<'a>(sess: &'a ParseSess,
256 tts: Vec<ast::TokenTree>,
257 cfg: ast::CrateConfig) -> Parser<'a> {
258 let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
259 Parser(sess, cfg, box trdr)
260 }
261
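// --- Illustrative sketch, not part of the original file -------------------
// The base abstractions above compose as
//   source string -> filemap -> token trees -> parser,
// the same pipeline that new_parser_from_source_str and filemap_to_parser
// walk. The function name `example_pipeline` is hypothetical.
#[allow(dead_code)]
fn example_pipeline<'a>(sess: &'a ParseSess, source: StrBuf) -> Parser<'a> {
    let filemap = string_to_filemap(sess, source, "<example>".to_strbuf());
    let tts = filemap_to_tts(sess, filemap);
    tts_to_parser(sess, tts, Vec::new())
}
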
262 // abort if necessary
263 pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
264 p.abort_if_errors();
265 result
266 }
267
268
269
270 #[cfg(test)]
271 mod test {
272 use super::*;
273 use serialize::{json, Encodable};
274 use std::io;
275 use std::io::MemWriter;
276 use std::str;
277 use codemap::{Span, BytePos, Spanned};
278 use owned_slice::OwnedSlice;
279 use ast;
280 use abi;
281 use parse::parser::Parser;
282 use parse::token::{str_to_ident};
283 use util::parser_testing::{string_to_tts, string_to_parser};
284 use util::parser_testing::{string_to_expr, string_to_item};
285 use util::parser_testing::string_to_stmt;
286
287 fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf {
288 let mut writer = MemWriter::new();
289 let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer);
290 let _ = val.encode(&mut encoder);
291 str::from_utf8(writer.unwrap().as_slice()).unwrap().to_strbuf()
292 }
293
294 // produce a codemap::span
295 fn sp(a: u32, b: u32) -> Span {
296 Span{lo:BytePos(a),hi:BytePos(b),expn_info:None}
297 }
298
299 #[test] fn path_exprs_1() {
300 assert!(string_to_expr("a".to_strbuf()) ==
301 @ast::Expr{
302 id: ast::DUMMY_NODE_ID,
303 node: ast::ExprPath(ast::Path {
304 span: sp(0, 1),
305 global: false,
306 segments: vec!(
307 ast::PathSegment {
308 identifier: str_to_ident("a"),
309 lifetimes: Vec::new(),
310 types: OwnedSlice::empty(),
311 }
312 ),
313 }),
314 span: sp(0, 1)
315 })
316 }
317
318 #[test] fn path_exprs_2 () {
319 assert!(string_to_expr("::a::b".to_strbuf()) ==
320 @ast::Expr {
321 id: ast::DUMMY_NODE_ID,
322 node: ast::ExprPath(ast::Path {
323 span: sp(0, 6),
324 global: true,
325 segments: vec!(
326 ast::PathSegment {
327 identifier: str_to_ident("a"),
328 lifetimes: Vec::new(),
329 types: OwnedSlice::empty(),
330 },
331 ast::PathSegment {
332 identifier: str_to_ident("b"),
333 lifetimes: Vec::new(),
334 types: OwnedSlice::empty(),
335 }
336 )
337 }),
338 span: sp(0, 6)
339 })
340 }
341
342 #[should_fail]
343 #[test] fn bad_path_expr_1() {
344 string_to_expr("::abc::def::return".to_strbuf());
345 }
346
347 // check the token-tree-ization of macros
348 #[test] fn string_to_tts_macro () {
349 let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_strbuf());
350 let tts: &[ast::TokenTree] = tts.as_slice();
351 match tts {
352 [ast::TTTok(_,_),
353 ast::TTTok(_,token::NOT),
354 ast::TTTok(_,_),
355 ast::TTDelim(ref delim_elts)] => {
356 let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
357 match delim_elts {
358 [ast::TTTok(_,token::LPAREN),
359 ast::TTDelim(ref first_set),
360 ast::TTTok(_,token::FAT_ARROW),
361 ast::TTDelim(ref second_set),
362 ast::TTTok(_,token::RPAREN)] => {
363 let first_set: &[ast::TokenTree] =
364 first_set.as_slice();
365 match first_set {
366 [ast::TTTok(_,token::LPAREN),
367 ast::TTTok(_,token::DOLLAR),
368 ast::TTTok(_,_),
369 ast::TTTok(_,token::RPAREN)] => {
370 let second_set: &[ast::TokenTree] =
371 second_set.as_slice();
372 match second_set {
373 [ast::TTTok(_,token::LPAREN),
374 ast::TTTok(_,token::DOLLAR),
375 ast::TTTok(_,_),
376 ast::TTTok(_,token::RPAREN)] => {
377 assert_eq!("correct","correct")
378 }
379 _ => assert_eq!("wrong 4","correct")
380 }
381 },
382 _ => {
383 error!("failing value 3: {:?}",first_set);
384 assert_eq!("wrong 3","correct")
385 }
386 }
387 },
388 _ => {
389 error!("failing value 2: {:?}",delim_elts);
390 assert_eq!("wrong","correct");
391 }
392 }
393 },
394 _ => {
395 error!("failing value: {:?}",tts);
396 assert_eq!("wrong 1","correct");
397 }
398 }
399 }
400
401 #[test] fn string_to_tts_1 () {
402 let tts = string_to_tts("fn a (b : int) { b; }".to_strbuf());
403 assert_eq!(to_json_str(&tts),
404 "[\
405 {\
406 \"variant\":\"TTTok\",\
407 \"fields\":[\
408 null,\
409 {\
410 \"variant\":\"IDENT\",\
411 \"fields\":[\
412 \"fn\",\
413 false\
414 ]\
415 }\
416 ]\
417 },\
418 {\
419 \"variant\":\"TTTok\",\
420 \"fields\":[\
421 null,\
422 {\
423 \"variant\":\"IDENT\",\
424 \"fields\":[\
425 \"a\",\
426 false\
427 ]\
428 }\
429 ]\
430 },\
431 {\
432 \"variant\":\"TTDelim\",\
433 \"fields\":[\
434 [\
435 {\
436 \"variant\":\"TTTok\",\
437 \"fields\":[\
438 null,\
439 \"LPAREN\"\
440 ]\
441 },\
442 {\
443 \"variant\":\"TTTok\",\
444 \"fields\":[\
445 null,\
446 {\
447 \"variant\":\"IDENT\",\
448 \"fields\":[\
449 \"b\",\
450 false\
451 ]\
452 }\
453 ]\
454 },\
455 {\
456 \"variant\":\"TTTok\",\
457 \"fields\":[\
458 null,\
459 \"COLON\"\
460 ]\
461 },\
462 {\
463 \"variant\":\"TTTok\",\
464 \"fields\":[\
465 null,\
466 {\
467 \"variant\":\"IDENT\",\
468 \"fields\":[\
469 \"int\",\
470 false\
471 ]\
472 }\
473 ]\
474 },\
475 {\
476 \"variant\":\"TTTok\",\
477 \"fields\":[\
478 null,\
479 \"RPAREN\"\
480 ]\
481 }\
482 ]\
483 ]\
484 },\
485 {\
486 \"variant\":\"TTDelim\",\
487 \"fields\":[\
488 [\
489 {\
490 \"variant\":\"TTTok\",\
491 \"fields\":[\
492 null,\
493 \"LBRACE\"\
494 ]\
495 },\
496 {\
497 \"variant\":\"TTTok\",\
498 \"fields\":[\
499 null,\
500 {\
501 \"variant\":\"IDENT\",\
502 \"fields\":[\
503 \"b\",\
504 false\
505 ]\
506 }\
507 ]\
508 },\
509 {\
510 \"variant\":\"TTTok\",\
511 \"fields\":[\
512 null,\
513 \"SEMI\"\
514 ]\
515 },\
516 {\
517 \"variant\":\"TTTok\",\
518 \"fields\":[\
519 null,\
520 \"RBRACE\"\
521 ]\
522 }\
523 ]\
524 ]\
525 }\
526 ]".to_strbuf()
527 );
528 }
529
530 #[test] fn ret_expr() {
531 assert!(string_to_expr("return d".to_strbuf()) ==
532 @ast::Expr{
533 id: ast::DUMMY_NODE_ID,
534 node:ast::ExprRet(Some(@ast::Expr{
535 id: ast::DUMMY_NODE_ID,
536 node:ast::ExprPath(ast::Path{
537 span: sp(7, 8),
538 global: false,
539 segments: vec!(
540 ast::PathSegment {
541 identifier: str_to_ident("d"),
542 lifetimes: Vec::new(),
543 types: OwnedSlice::empty(),
544 }
545 ),
546 }),
547 span:sp(7,8)
548 })),
549 span:sp(0,8)
550 })
551 }
552
553 #[test] fn parse_stmt_1 () {
554 assert!(string_to_stmt("b;".to_strbuf()) ==
555 @Spanned{
556 node: ast::StmtExpr(@ast::Expr {
557 id: ast::DUMMY_NODE_ID,
558 node: ast::ExprPath(ast::Path {
559 span:sp(0,1),
560 global:false,
561 segments: vec!(
562 ast::PathSegment {
563 identifier: str_to_ident("b"),
564 lifetimes: Vec::new(),
565 types: OwnedSlice::empty(),
566 }
567 ),
568 }),
569 span: sp(0,1)},
570 ast::DUMMY_NODE_ID),
571 span: sp(0,1)})
572
573 }
574
575 fn parser_done(p: Parser){
576 assert_eq!(p.token.clone(), token::EOF);
577 }
578
579 #[test] fn parse_ident_pat () {
580 let sess = new_parse_sess();
581 let mut parser = string_to_parser(&sess, "b".to_strbuf());
582 assert!(parser.parse_pat() ==
583 @ast::Pat{id: ast::DUMMY_NODE_ID,
584 node: ast::PatIdent(
585 ast::BindByValue(ast::MutImmutable),
586 ast::Path {
587 span:sp(0,1),
588 global:false,
589 segments: vec!(
590 ast::PathSegment {
591 identifier: str_to_ident("b"),
592 lifetimes: Vec::new(),
593 types: OwnedSlice::empty(),
594 }
595 ),
596 },
597 None /* no idea */),
598 span: sp(0,1)});
599 parser_done(parser);
600 }
601
602 // check the contents of the tt manually:
603 #[test] fn parse_fundecl () {
604 // this test depends on the intern order of "fn" and "int"
605 assert!(string_to_item("fn a (b : int) { b; }".to_strbuf()) ==
606 Some(
607 @ast::Item{ident:str_to_ident("a"),
608 attrs:Vec::new(),
609 id: ast::DUMMY_NODE_ID,
610 node: ast::ItemFn(ast::P(ast::FnDecl {
611 inputs: vec!(ast::Arg{
612 ty: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID,
613 node: ast::TyPath(ast::Path{
614 span:sp(10,13),
615 global:false,
616 segments: vec!(
617 ast::PathSegment {
618 identifier:
619 str_to_ident("int"),
620 lifetimes: Vec::new(),
621 types: OwnedSlice::empty(),
622 }
623 ),
624 }, None, ast::DUMMY_NODE_ID),
625 span:sp(10,13)
626 }),
627 pat: @ast::Pat {
628 id: ast::DUMMY_NODE_ID,
629 node: ast::PatIdent(
630 ast::BindByValue(ast::MutImmutable),
631 ast::Path {
632 span:sp(6,7),
633 global:false,
634 segments: vec!(
635 ast::PathSegment {
636 identifier:
637 str_to_ident("b"),
638 lifetimes: Vec::new(),
639 types: OwnedSlice::empty(),
640 }
641 ),
642 },
643 None // no idea
644 ),
645 span: sp(6,7)
646 },
647 id: ast::DUMMY_NODE_ID
648 }),
649 output: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID,
650 node: ast::TyNil,
651 span:sp(15,15)}), // not sure
652 cf: ast::Return,
653 variadic: false
654 }),
655 ast::NormalFn,
656 abi::Rust,
657 ast::Generics{ // no idea on either of these:
658 lifetimes: Vec::new(),
659 ty_params: OwnedSlice::empty(),
660 },
661 ast::P(ast::Block {
662 view_items: Vec::new(),
663 stmts: vec!(@Spanned{
664 node: ast::StmtSemi(@ast::Expr{
665 id: ast::DUMMY_NODE_ID,
666 node: ast::ExprPath(
667 ast::Path{
668 span:sp(17,18),
669 global:false,
670 segments: vec!(
671 ast::PathSegment {
672 identifier:
673 str_to_ident(
674 "b"),
675 lifetimes:
676 Vec::new(),
677 types:
678 OwnedSlice::empty()
679 }
680 ),
681 }),
682 span: sp(17,18)},
683 ast::DUMMY_NODE_ID),
684 span: sp(17,19)}),
685 expr: None,
686 id: ast::DUMMY_NODE_ID,
687 rules: ast::DefaultBlock, // no idea
688 span: sp(15,21),
689 })),
690 vis: ast::Inherited,
691 span: sp(0,21)}));
692 }
693
694
695 #[test] fn parse_exprs () {
696 // just make sure that they parse....
697 string_to_expr("3 + 4".to_strbuf());
698 string_to_expr("a::z.froob(b,@(987+3))".to_strbuf());
699 }
700
701 #[test] fn attrs_fix_bug () {
702 string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
703 -> Result<@Writer, StrBuf> {
704 #[cfg(windows)]
705 fn wb() -> c_int {
706 (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
707 }
708
709 #[cfg(unix)]
710 fn wb() -> c_int { O_WRONLY as c_int }
711
712 let mut fflags: c_int = wb();
713 }".to_strbuf());
714 }
715
716 }
libsyntax/parse/mod.rs:164:44-164:44 -fn- definition:
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
references:- 7
109: -> @ast::Expr {
110: let mut p = new_parser_from_source_str(sess, cfg, name, source);
111: maybe_aborted(p.parse_expr(), p)
--
152: -> Vec<ast::TokenTree> {
153: let mut p = new_parser_from_source_str(
154: sess,
libsyntax/parse/mod.rs:199:28-199:28 -fn- definition:
// compiler expands into it
pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
references:- 5
libsyntax/ext/asm.rs:
49: -> Box<base::MacResult> {
50: let mut p = parse::new_parser_from_tts(cx.parse_sess(),
51: cx.cfg(),
libsyntax/ext/base.rs:
562: tts: &[ast::TokenTree]) -> Option<Vec<@ast::Expr> > {
563: let mut p = parse::new_parser_from_tts(cx.parse_sess(),
564: cx.cfg(),
libsyntax/ext/quote.rs:
622: let mut p = parse::new_parser_from_tts(cx.parse_sess(),
623: cx.cfg(),
libsyntax/ext/cfg.rs:
31: -> Box<base::MacResult> {
32: let mut p = parse::new_parser_from_tts(cx.parse_sess(),
33: cx.cfg(),
libsyntax/ext/format.rs:
77: let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
78: ecx.cfg(),
libsyntax/parse/mod.rs:113:1-113:1 -fn- definition:
pub fn parse_item_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
references:- 2
libsyntax/ext/quote.rs:
282: fn parse_item(&self, s: StrBuf) -> @ast::Item {
283: let res = parse::parse_item_from_source_str(
284: "<quote expansion>".to_strbuf(),
libsyntax/ext/expand.rs:
498: for source in macros.iter() {
499: let item = parse::parse_item_from_source_str(name.clone(),
500: (*source).clone(),
libsyntax/parse/mod.rs:254:39-254:39 -fn- definition:
// given tts and cfg, produce a parser
pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<ast::TokenTree>,
references:- 2
194: cfg: ast::CrateConfig) -> Parser<'a> {
195: tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
196: }
--
202: tts: Vec<ast::TokenTree>) -> Parser<'a> {
203: tts_to_parser(sess, tts, cfg)
204: }
libsyntax/parse/mod.rs:183:66-183:66 -fn- definition:
/// On an error, use the given span as the source of the problem.
pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
references:- 2libsyntax/parse/parser.rs:
4220: let mut p0 =
4221: new_sub_parser_from_file(self.sess,
4222: self.cfg.clone(),
libsyntax/ext/source_util.rs:
92: let mut p =
93: parse::new_sub_parser_from_file(cx.parse_sess(),
94: cx.cfg(),
libsyntax/parse/mod.rs:237:52-237:52 -fn- definition:
// the session's codemap and return the new filemap
pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
-> Rc<FileMap> {
references:- 2
227: Some(s) => {
228: return string_to_filemap(sess, s.to_strbuf(),
229: path.as_str().unwrap().to_strbuf())
libsyntax/parse/mod.rs:174:30-174:30 -fn- definition:
/// if the file doesn't exist
pub fn new_parser_from_file<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
references:- 2
65: ) -> ast::Crate {
66: new_parser_from_file(sess, cfg, input).parse_crate_mod()
67: // why is there no p.abort_if_errors here?
--
74: ) -> Vec<ast::Attribute> {
75: let mut parser = new_parser_from_file(sess, cfg, input);
76: let (inner, _) = parser.parse_inner_attrs_and_next();
libsyntax/parse/mod.rs:35:33-35:33 -struct- definition:
// info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: SpanHandler, // better be the same as the one in the reader!
references:- 32
libsyntax/parse/parser.rs:
libsyntax/ext/base.rs:
libsyntax/ext/tt/macro_parser.rs:
libsyntax/ext/expand.rs:
libsyntax/parse/mod.rs:262:22-262:22 -fn- definition:
// abort if necessary
pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
p.abort_if_errors();
references:- 7
100: source);
101: let (inner, _) = maybe_aborted(p.parse_inner_attrs_and_next(),p);
102: inner
--
129: let mut p = new_parser_from_source_str(sess, cfg, name, source);
130: maybe_aborted(p.parse_meta_item(),p)
131: }
--
160: // right now this is re-creating the token trees from ... token trees.
161: maybe_aborted(p.parse_all_token_trees(),p)
162: }
libsyntax/parse/mod.rs:210:70-210:70 -fn- definition:
/// add the path to the session's codemap and return the new filemap.
pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-> Rc<FileMap> {
references:- 2
177: path: &Path) -> Parser<'a> {
178: filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
179: }
--
187: sp: Span) -> Parser<'a> {
188: filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
189: }
libsyntax/parse/mod.rs:191:48-191:48 -fn- definition:
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
filemap: Rc<FileMap>,
references:- 3
169: -> Parser<'a> {
170: filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
171: }
--
177: path: &Path) -> Parser<'a> {
178: filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
179: }
--
187: sp: Span) -> Parser<'a> {
188: filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
189: }