diff --git a/benches/src/lib.rs b/benches/src/lib.rs
index cd7c6959..2c713393 100644
--- a/benches/src/lib.rs
+++ b/benches/src/lib.rs
@@ -93,7 +93,7 @@ impl Parser for Sulk {
         let sess = Session::with_tty_emitter(source_map.into());
         let filename = PathBuf::from("test.sol");
         let mut parser =
-            sulk_parse::Parser::from_source_code(&sess, filename.into(), src.into())?;
+            sulk_parse::Parser::from_source_code(&sess, filename.into(), || Ok(src.into()))?;
         let result = parser.parse_file().map_err(|e| e.emit())?;
         sess.dcx.has_errors()?;
         black_box(result);
diff --git a/crates/interface/src/lib.rs b/crates/interface/src/lib.rs
index d317ec82..4ee9ef40 100644
--- a/crates/interface/src/lib.rs
+++ b/crates/interface/src/lib.rs
@@ -52,31 +52,3 @@ pub type Result<T = (), E = ErrorGuaranteed> = std::result::Result<T, E>;
 pub fn enter<R>(f: impl FnOnce() -> R) -> R {
     SessionGlobals::with_or_default(|_| f())
 }
-
-#[macro_export]
-macro_rules! time {
-    ($level:expr, $what:literal, || $e:expr) => {
-        if enabled!($level) {
-            let timer = std::time::Instant::now();
-            let res = $e;
-            event!($level, elapsed=?timer.elapsed(), $what);
-            res
-        } else {
-            $e
-        }
-    };
-}
-
-#[macro_export]
-macro_rules! debug_time {
-    ($what:literal, || $e:expr) => {
-        $crate::time!(tracing::Level::DEBUG, $what, || $e)
-    };
-}
-
-#[macro_export]
-macro_rules! trace_time {
-    ($what:literal, || $e:expr) => {
-        $crate::time!(tracing::Level::TRACE, $what, || $e)
-    };
-}
diff --git a/crates/interface/src/source_map/file.rs b/crates/interface/src/source_map/file.rs
index 504df551..242c8a80 100644
--- a/crates/interface/src/source_map/file.rs
+++ b/crates/interface/src/source_map/file.rs
@@ -221,7 +221,7 @@ pub struct SourceFile {
 impl SourceFile {
     pub fn new(
         name: FileName,
-        src: String,
+        mut src: String,
         hash_kind: SourceFileHashAlgorithm,
     ) -> Result<Self, OffsetOverflowError> {
         // Compute the file hash before any normalization.
@@ -234,6 +234,7 @@ impl SourceFile {
         let (lines, multibyte_chars, non_narrow_chars) =
             super::analyze::analyze_source_file(&src);
 
+        src.shrink_to_fit();
         Ok(Self {
             name,
             src: Lrc::new(src),
diff --git a/crates/interface/src/source_map/file_resolver.rs b/crates/interface/src/source_map/file_resolver.rs
index 6d321947..8ac77b0b 100644
--- a/crates/interface/src/source_map/file_resolver.rs
+++ b/crates/interface/src/source_map/file_resolver.rs
@@ -78,6 +78,7 @@ impl<'a> FileResolver<'a> {
     }
 
     /// Resolves an import path. `parent` is the path of the file that contains the import, if any.
+    #[instrument(level = "debug", skip_all, fields(path = %path.display()))]
     pub fn resolve_file(
         &self,
         path: &Path,
@@ -140,7 +141,7 @@
     }
 
     /// Applies the import path mappings to `path`.
-    #[instrument(level = "trace", skip(self), ret)]
+    #[instrument(level = "trace", skip_all, ret)]
     pub fn remap_path<'b>(&self, path: &'b Path) -> Cow<'b, Path> {
         let orig = path;
         let mut remapped = Cow::Borrowed(path);
@@ -160,7 +161,7 @@
     }
 
     /// Loads `path` into the source map. Returns `None` if the file doesn't exist.
- #[instrument(level = "trace", skip(self))] + #[instrument(level = "debug", skip_all)] pub fn try_file(&self, path: &Path) -> Result>, ResolveError> { let cache_path = path.normalize(); if let Ok(file) = self.source_map().load_file(&cache_path) { diff --git a/crates/interface/src/source_map/mod.rs b/crates/interface/src/source_map/mod.rs index b9ea0e7d..d8880cdd 100644 --- a/crates/interface/src/source_map/mod.rs +++ b/crates/interface/src/source_map/mod.rs @@ -3,7 +3,7 @@ use crate::{BytePos, CharPos, Pos, Span}; use std::{ io::{self, Read}, - path::Path, + path::{Path, PathBuf}, }; use sulk_data_structures::{ map::FxHashMap, @@ -124,16 +124,52 @@ impl SourceMap { /// Loads a file from the given path. pub fn load_file(&self, path: &Path) -> io::Result> { - let src = std::fs::read_to_string(path)?; let filename = path.to_owned().into(); - self.new_source_file(filename, src).map_err(Into::into) + self.new_source_file(filename, || std::fs::read_to_string(path)) } /// Loads `stdin`. pub fn load_stdin(&self) -> io::Result> { - let mut src = String::new(); - io::stdin().read_to_string(&mut src)?; - self.new_source_file(FileName::Stdin, src).map_err(Into::into) + self.new_source_file(FileName::Stdin, || { + let mut src = String::new(); + io::stdin().read_to_string(&mut src)?; + Ok(src) + }) + } + + /// Loads a file with the given source string. + /// + /// This is useful for testing. + pub fn new_dummy_source_file(&self, path: PathBuf, src: String) -> io::Result> { + self.new_source_file(path.into(), || Ok(src)) + } + + /// Creates a new `SourceFile`. + /// + /// If a file already exists in the `SourceMap` with the same ID, that file is returned + /// unmodified. + /// + /// Returns an error if the file is larger than 4GiB or other errors occur while creating the + /// `SourceFile`. + #[instrument(level = "debug", skip_all, fields(filename = %filename.display()))] + pub fn new_source_file( + &self, + filename: FileName, + get_src: impl FnOnce() -> io::Result, + ) -> io::Result> { + let stable_id = StableSourceFileId::from_filename_in_current_crate(&filename); + match self.source_file_by_stable_id(stable_id) { + Some(lrc_sf) => Ok(lrc_sf), + None => { + let source_file = SourceFile::new(filename, get_src()?, self.hash_kind)?; + + // Let's make sure the file_id we generated above actually matches + // the ID we generate for the SourceFile we just created. + debug_assert_eq!(source_file.stable_id, stable_id); + + self.register_source_file(stable_id, source_file).map_err(Into::into) + } + } } pub fn files(&self) -> MappedReadGuard<'_, Vec>> { @@ -174,33 +210,6 @@ impl SourceMap { Ok(file) } - /// Creates a new `SourceFile`. - /// - /// If a file already exists in the `SourceMap` with the same ID, that file is returned - /// unmodified. - /// - /// Returns an error if the file is larger than 4GiB or other errors occur while creating the - /// `SourceFile`. - pub fn new_source_file( - &self, - filename: FileName, - src: String, - ) -> Result, OffsetOverflowError> { - let stable_id = StableSourceFileId::from_filename_in_current_crate(&filename); - match self.source_file_by_stable_id(stable_id) { - Some(lrc_sf) => Ok(lrc_sf), - None => { - let source_file = SourceFile::new(filename, src, self.hash_kind)?; - - // Let's make sure the file_id we generated above actually matches - // the ID we generate for the SourceFile we just created. 
-                debug_assert_eq!(source_file.stable_id, stable_id);
-
-                self.register_source_file(stable_id, source_file)
-            }
-        }
-    }
-
     pub fn filename_for_diagnostics<'a>(&self, filename: &'a FileName) -> FileNameDisplay<'a> {
         filename.display()
     }
diff --git a/crates/interface/src/source_map/tests.rs b/crates/interface/src/source_map/tests.rs
index f2175cd9..1dfa74dd 100644
--- a/crates/interface/src/source_map/tests.rs
+++ b/crates/interface/src/source_map/tests.rs
@@ -3,10 +3,10 @@ use std::path::PathBuf;
 
 fn init_source_map() -> SourceMap {
     let sm = SourceMap::empty();
-    sm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string())
+    sm.new_dummy_source_file(PathBuf::from("blork.rs"), "first line.\nsecond line".to_string())
         .unwrap();
-    sm.new_source_file(PathBuf::from("empty.rs").into(), String::new()).unwrap();
-    sm.new_source_file(PathBuf::from("blork2.rs").into(), "first line.\nsecond line".to_string())
+    sm.new_dummy_source_file(PathBuf::from("empty.rs"), String::new()).unwrap();
+    sm.new_dummy_source_file(PathBuf::from("blork2.rs"), "first line.\nsecond line".to_string())
         .unwrap();
     sm
 }
@@ -105,13 +105,13 @@ fn t5() {
 fn init_source_map_mbc() -> SourceMap {
     let sm = SourceMap::empty();
     // "€" is a three-byte UTF8 char.
-    sm.new_source_file(
-        PathBuf::from("blork.rs").into(),
+    sm.new_dummy_source_file(
+        PathBuf::from("blork.rs"),
         "fir€st €€€€ line.\nsecond line".to_string(),
     )
     .unwrap();
-    sm.new_source_file(
-        PathBuf::from("blork2.rs").into(),
+    sm.new_dummy_source_file(
+        PathBuf::from("blork2.rs"),
         "first line€€.\n€ second line".to_string(),
     )
     .unwrap();
@@ -166,7 +166,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
     let sm = SourceMap::empty();
     let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
     let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
-    sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()).unwrap();
+    sm.new_dummy_source_file(Path::new("blork.rs").to_owned(), inputtext.to_string()).unwrap();
     let span = span_from_selection(inputtext, selection);
 
     // Check that we are extracting the text we thought we were extracting.
@@ -209,7 +209,7 @@ fn span_merging_fail() {
     let inputtext = "bbbb BB\ncc CCC\n";
     let selection1 = "     ~~\n      \n";
     let selection2 = "       \n   ~~~\n";
-    sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()).unwrap();
+    sm.new_dummy_source_file(Path::new("blork.rs").to_owned(), inputtext.to_owned()).unwrap();
     let span1 = span_from_selection(inputtext, selection1);
     let span2 = span_from_selection(inputtext, selection2);
 
@@ -224,7 +224,8 @@ fn t10() {
     let unnormalized = "first line.\r\nsecond line";
     let normalized = "first line.\nsecond line";
 
-    let src_file = sm.new_source_file(PathBuf::from("blork.rs").into(), unnormalized.to_string());
+    let src_file =
+        sm.new_dummy_source_file(PathBuf::from("blork.rs").into(), unnormalized.to_string());
 
     assert_eq!(src_file.src.as_ref().unwrap().as_ref(), normalized);
     assert!(
@@ -554,7 +555,7 @@ fn path_prefix_remapping_reverse() {
 #[test]
 fn test_next_point() {
     let sm = SourceMap::empty();
-    sm.new_source_file(PathBuf::from("example.rs").into(), "a…b".to_string());
+    sm.new_dummy_source_file(PathBuf::from("example.rs").into(), "a…b".to_string());
 
     // Dummy spans don't advance.
     let span = DUMMY_SP;
diff --git a/crates/parse/src/lexer/mod.rs b/crates/parse/src/lexer/mod.rs
index 9c950890..668cca34 100644
--- a/crates/parse/src/lexer/mod.rs
+++ b/crates/parse/src/lexer/mod.rs
@@ -85,7 +85,9 @@ impl<'sess, 'src> Lexer<'sess, 'src> {
     /// Note that this skips comments, as [required by the parser](crate::Parser::new).
     ///
    /// Prefer using this method instead of manually collecting tokens using [`Iterator`].
+    #[instrument(name = "lex", level = "debug", skip_all)]
     pub fn into_tokens(mut self) -> Vec<Token> {
+        // `src.len() / 8` is an estimate of the number of tokens in the source.
         let mut tokens = Vec::with_capacity(self.src.len() / 8);
         loop {
             let token = self.next_token();
@@ -387,11 +389,11 @@ impl<'sess, 'src> Lexer<'sess, 'src> {
         let content_end = end - 1;
         let lit_content = self.str_from_to(content_start, content_end);
 
-        let mut has_fatal_err = false;
+        let mut has_err = false;
         unescape::unescape_literal(lit_content, mode, |range, result| {
             // Here we only check for errors. The actual unescaping is done later.
             if let Err(err) = result {
-                has_fatal_err = true;
+                has_err = true;
                 let (start, end) = (range.start as u32, range.end as u32);
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
@@ -402,11 +404,9 @@ impl<'sess, 'src> Lexer<'sess, 'src> {
 
         // We normally exclude the quotes for the symbol, but for errors we
         // include it because it results in clearer error messages.
-        if has_fatal_err {
-            (TokenLitKind::Err, self.symbol_from_to(start, end))
-        } else {
-            (kind, Symbol::intern(lit_content))
-        }
+        let symbol =
+            if has_err { self.symbol_from_to(start, end) } else { Symbol::intern(lit_content) };
+        (kind, symbol)
     }
 
     #[inline]
diff --git a/crates/parse/src/lexer/unescape/mod.rs b/crates/parse/src/lexer/unescape/mod.rs
index 4820f0dd..8c352081 100644
--- a/crates/parse/src/lexer/unescape/mod.rs
+++ b/crates/parse/src/lexer/unescape/mod.rs
@@ -1,6 +1,6 @@
 //! Utilities for validating string and char literals and turning them into values they represent.
 
-use std::{ops::Range, slice, str::Chars};
+use std::{borrow::Cow, ops::Range, slice, str::Chars};
 
 mod errors;
 pub(crate) use errors::emit_unescape_error;
@@ -18,33 +18,43 @@ pub enum Mode {
 }
 
 /// Parses a string literal (without quotes) into a byte array.
-pub fn parse_literal<F>(src: &str, mode: Mode, f: F) -> Vec<u8>
+#[instrument(level = "debug", skip_all)]
+pub fn parse_string_literal<F>(src: &str, mode: Mode, f: F) -> Vec<u8>
 where
     F: FnMut(Range<usize>, EscapeError),
 {
     let mut bytes = if needs_unescape(src, mode) {
-        let mut bytes = Vec::with_capacity(src.len());
-        parse_literal_unescape(src, mode, f, &mut bytes);
-        bytes
+        Cow::Owned(parse_literal_unescape(src, mode, f))
     } else {
-        src.as_bytes().to_vec()
+        Cow::Borrowed(src.as_bytes())
     };
 
     if mode == Mode::HexStr {
         // Currently this should never fail, but it's a good idea to check anyway.
         if let Ok(decoded) = hex::decode(&bytes) {
-            bytes = decoded;
+            bytes = Cow::Owned(decoded);
         }
     }
+
+    bytes.into_owned()
+}
+
+#[cold]
+fn parse_literal_unescape<F>(src: &str, mode: Mode, f: F) -> Vec<u8>
+where
+    F: FnMut(Range<usize>, EscapeError),
+{
+    let mut bytes = Vec::with_capacity(src.len());
+    parse_literal_unescape_into(src, mode, f, &mut bytes);
     bytes
 }
 
-#[inline]
-fn parse_literal_unescape<F>(src: &str, mode: Mode, mut f: F, dst_buf: &mut Vec<u8>)
+fn parse_literal_unescape_into<F>(src: &str, mode: Mode, mut f: F, dst_buf: &mut Vec<u8>)
 where
     F: FnMut(Range<usize>, EscapeError),
 {
     // `src.len()` is enough capacity for the unescaped string, so we can just use a slice.
     // SAFETY: The buffer is never read from.
+    debug_assert!(dst_buf.is_empty());
+    debug_assert!(dst_buf.capacity() >= src.len());
     let mut dst = unsafe { slice::from_raw_parts_mut(dst_buf.as_mut_ptr(), dst_buf.capacity()) };
     unescape_literal(src, mode, |range, res| match res {
         Ok(c) => {
@@ -67,6 +77,7 @@ where
 /// Unescapes the contents of a string literal (without quotes).
 ///
 /// The callback is invoked with a range and either a unicode code point or an error.
+#[instrument(level = "debug", skip_all)]
 pub fn unescape_literal<F>(src: &str, mode: Mode, mut callback: F)
 where
     F: FnMut(Range<usize>, Result<char, EscapeError>),
@@ -297,7 +308,7 @@ mod tests {
         assert_eq!(ok, expected_str, "{panic_str}");
 
         let mut errs2 = Vec::with_capacity(errs.len());
-        let out = parse_literal(src, mode, |range, e| {
+        let out = parse_string_literal(src, mode, |range, e| {
             errs2.push((range, e));
         });
         assert_eq!(errs2, errs, "{panic_str}");
diff --git a/crates/parse/src/parser/expr.rs b/crates/parse/src/parser/expr.rs
index 3d73ad05..78c61d5b 100644
--- a/crates/parse/src/parser/expr.rs
+++ b/crates/parse/src/parser/expr.rs
@@ -9,6 +9,7 @@ impl<'a> Parser<'a> {
         self.parse_expr_with(None)
     }
 
+    #[instrument(name = "parse_expr", level = "debug", skip_all)]
     pub(super) fn parse_expr_with(&mut self, with: Option<Box<Expr>>) -> PResult<'a, Box<Expr>> {
         let expr = self.parse_binary_expr(4, with)?;
         if self.eat(&TokenKind::Question) {
diff --git a/crates/parse/src/parser/item.rs b/crates/parse/src/parser/item.rs
index 8a85cba6..9dc4a660 100644
--- a/crates/parse/src/parser/item.rs
+++ b/crates/parse/src/parser/item.rs
@@ -7,6 +7,7 @@ use sulk_interface::{error_code, kw, sym, Ident, Span};
 
 impl<'a> Parser<'a> {
     /// Parses a source unit.
+    #[instrument(level = "debug", skip_all)]
     pub fn parse_file(&mut self) -> PResult<'a, SourceUnit> {
         let items = self.parse_items(&TokenKind::Eof)?;
         Ok(SourceUnit { items })
@@ -49,6 +50,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an item.
+    #[instrument(level = "debug", skip_all)]
     pub fn parse_item(&mut self) -> PResult<'a, Option<Item>> {
         let docs = self.parse_doc_comments()?;
         self.parse_spanned(Self::parse_item_kind)
@@ -1226,7 +1228,8 @@ mod tests {
         for (i, &(v, req_s, res)) in tests.iter().enumerate() {
             let name = i.to_string();
             let src = format!("{v} {req_s}");
-            let mut parser = Parser::from_source_code(&sess, FileName::Custom(name), src)?;
+            let mut parser =
+                Parser::from_source_code(&sess, FileName::Custom(name), || Ok(src))?;
 
             let version = parser.parse_semver_version().map_err(|e| e.emit()).unwrap();
             assert_eq!(version.to_string(), v);
diff --git a/crates/parse/src/parser/lit.rs b/crates/parse/src/parser/lit.rs
index f35c9308..a4b5a4a1 100644
--- a/crates/parse/src/parser/lit.rs
+++ b/crates/parse/src/parser/lit.rs
@@ -1,4 +1,4 @@
-use crate::{unescape, PErr, PResult, Parser};
+use crate::{unescape, PResult, Parser};
 use alloy_primitives::Address;
 use num_bigint::BigInt;
 use num_rational::BigRational;
@@ -9,6 +9,7 @@ use sulk_interface::{kw, Symbol};
 
 impl<'a> Parser<'a> {
     /// Parses a literal.
+    #[instrument(level = "debug", skip_all)]
     pub fn parse_lit(&mut self) -> PResult<'a, Lit> {
         self.parse_spanned(Self::parse_lit_inner).map(|(span, (symbol, kind))| Lit {
             span,
@@ -51,7 +52,15 @@ impl<'a> Parser<'a> {
 
     /// Parses a subdenomination.
     pub fn parse_subdenomination(&mut self) -> Option<SubDenomination> {
-        let sub = match self.token.ident()?.name {
+        let sub = self.subdenomination();
+        if sub.is_some() {
+            self.bump();
+        }
+        sub
+    }
+
+    fn subdenomination(&self) -> Option<SubDenomination> {
+        match self.token.ident()?.name {
             kw::Wei => Some(SubDenomination::Ether(EtherSubDenomination::Wei)),
             kw::Gwei => Some(SubDenomination::Ether(EtherSubDenomination::Gwei)),
             kw::Ether => Some(SubDenomination::Ether(EtherSubDenomination::Ether)),
@@ -64,25 +73,17 @@ impl<'a> Parser<'a> {
             kw::Years => Some(SubDenomination::Time(TimeSubDenomination::Years)),
 
             _ => None,
-        };
-        if sub.is_some() {
-            self.bump();
         }
-        sub
     }
 
     /// Emits an error if a subdenomination was parsed.
     pub(super) fn expect_no_subdenomination(&mut self) {
         if let Some(_sub) = self.parse_subdenomination() {
-            self.no_subdenomination_error().emit();
+            let span = self.prev_token.span;
+            self.dcx().err("subdenominations aren't allowed here").span(span).emit();
         }
     }
 
-    pub(super) fn no_subdenomination_error(&mut self) -> PErr<'a> {
-        let span = self.prev_token.span;
-        self.dcx().err("subdenominations aren't allowed here").span(span)
-    }
-
     fn parse_lit_inner(&mut self) -> PResult<'a, (Symbol, LitKind)> {
         if let TokenKind::Ident(symbol @ (kw::True | kw::False)) = self.token.kind {
             self.bump();
@@ -153,14 +154,14 @@ impl<'a> Parser<'a> {
             TokenLitKind::HexStr => unescape::Mode::HexStr,
             _ => unreachable!(),
         };
-        let unescape = |s: Symbol| unescape::parse_literal(s.as_str(), mode, |_, _| {});
+        let parse = |s: Symbol| unescape::parse_string_literal(s.as_str(), mode, |_, _| {});
 
-        let mut value = unescape(lit.symbol);
+        let mut value = parse(lit.symbol);
         while let Some(TokenLit { symbol, kind }) = self.token.lit() {
             if kind != lit.kind {
                 break;
             }
-            value.append(&mut unescape(symbol));
+            value.append(&mut parse(symbol));
             self.bump();
         }
diff --git a/crates/parse/src/parser/mod.rs b/crates/parse/src/parser/mod.rs
index 65fc4a0a..d24568dc 100644
--- a/crates/parse/src/parser/mod.rs
+++ b/crates/parse/src/parser/mod.rs
@@ -138,10 +138,14 @@ impl<'a> Parser<'a> {
     }
 
     /// Creates a new parser from a source code string.
-    pub fn from_source_code(sess: &'a Session, filename: FileName, src: String) -> Result<Self> {
+    pub fn from_source_code(
+        sess: &'a Session,
+        filename: FileName,
+        get_src: impl FnOnce() -> std::io::Result<String>,
+    ) -> Result<Self> {
         let file = sess
             .source_map()
-            .new_source_file(filename, src)
+            .new_source_file(filename, get_src)
             .map_err(|e| sess.dcx.err(e.to_string()).emit())?;
         Ok(Self::from_source_file(sess, &file))
     }
@@ -658,6 +662,7 @@ impl<'a> Parser<'a> {
     ///
     /// [`Eof`](Token::EOF) will be returned if the look-ahead is any distance past the end of the
     /// tokens.
+    #[inline]
     pub fn look_ahead(&self, dist: usize) -> &Token {
         if dist == 0 {
             &self.token
@@ -669,6 +674,7 @@ impl<'a> Parser<'a> {
     /// Calls `f` with the token `dist` tokens ahead of the current one.
     ///
     /// See [`look_ahead`](Self::look_ahead) for more information.
+    #[inline]
     pub fn look_ahead_with<R>(&self, dist: usize, f: impl FnOnce(&Token) -> R) -> R {
         f(self.look_ahead(dist))
     }
diff --git a/crates/parse/src/parser/stmt.rs b/crates/parse/src/parser/stmt.rs
index f760966b..ec40a926 100644
--- a/crates/parse/src/parser/stmt.rs
+++ b/crates/parse/src/parser/stmt.rs
@@ -5,6 +5,7 @@ use sulk_interface::{kw, Ident, Span};
 
 impl<'a> Parser<'a> {
     /// Parses a statement.
+ #[instrument(level = "debug", skip_all)] pub fn parse_stmt(&mut self) -> PResult<'a, Stmt> { let docs = self.parse_doc_comments()?; self.parse_spanned(Self::parse_stmt_kind).map(|(span, kind)| Stmt { docs, kind, span }) @@ -442,7 +443,7 @@ mod tests { for (i, &(s, results)) in tests.iter().enumerate() { let name = i.to_string(); let mut parser = - Parser::from_source_code(&sess, FileName::Custom(name), s.into())?; + Parser::from_source_code(&sess, FileName::Custom(name), || Ok(s.into()))?; let list = parser .parse_optional_items_seq(Delimiter::Parenthesis, Parser::parse_ident) diff --git a/crates/parse/src/parser/ty.rs b/crates/parse/src/parser/ty.rs index 0c4a83f9..6116044f 100644 --- a/crates/parse/src/parser/ty.rs +++ b/crates/parse/src/parser/ty.rs @@ -6,6 +6,7 @@ use sulk_interface::kw; impl<'a> Parser<'a> { /// Parses a type. + #[instrument(level = "debug", skip_all)] pub fn parse_type(&mut self) -> PResult<'a, Ty> { let mut ty = self.parse_spanned(Self::parse_basic_ty_kind).map(|(span, kind)| Ty { span, kind })?; diff --git a/crates/parse/src/parser/yul.rs b/crates/parse/src/parser/yul.rs index 695eb97a..c4e0834f 100644 --- a/crates/parse/src/parser/yul.rs +++ b/crates/parse/src/parser/yul.rs @@ -11,6 +11,7 @@ impl<'a> Parser<'a> { /// /// The plain block gets returned as a Yul object named "object", with a single `code` block. /// See: + #[instrument(level = "debug", skip_all)] pub fn parse_yul_file_object(&mut self) -> PResult<'a, Object> { let docs = self.parse_doc_comments()?; let object = if self.check_keyword(sym::object) { diff --git a/crates/sema/src/lib.rs b/crates/sema/src/lib.rs index a3fe4145..ae2fe967 100644 --- a/crates/sema/src/lib.rs +++ b/crates/sema/src/lib.rs @@ -18,10 +18,9 @@ use sulk_data_structures::{ sync::Lrc, }; use sulk_interface::{ - debug_time, diagnostics::DiagCtxt, source_map::{FileName, FileResolver, ResolveError, SourceFile}, - trace_time, Result, Session, + Result, Session, }; use sulk_parse::{Lexer, Parser}; @@ -98,6 +97,7 @@ impl<'a> Resolver<'a> { &self.sess.dcx } + #[instrument(level = "debug", skip_all)] pub fn add_files_from_args( &mut self, stdin: bool, @@ -112,9 +112,11 @@ impl<'a> Resolver<'a> { } for path in paths { let path = path.as_ref(); - // Base paths from arguments to the current directory for shorter diagnostics output. + // Paths must be canonicalized before passing to the resolver. let path = match path.canonicalize() { Ok(path) => { + // Base paths from arguments to the current directory for shorter diagnostics + // output. match path.strip_prefix(std::env::current_dir().unwrap_or(PathBuf::from(""))) { Ok(path) => path.to_path_buf(), Err(_) => path, @@ -136,18 +138,19 @@ impl<'a> Resolver<'a> { } pub fn parse_and_resolve(&mut self) -> Result<()> { - debug_time!("parse all files", || self.parse_all_files()); + self.parse(); if self.sess.language.is_yul() || self.sess.stop_after.is_some_and(|s| s.is_parsing()) { return Ok(()); } - debug_time!("validate ASTs", || self.validate_asts()); + self.validate_asts(); Ok(()) } - fn parse_all_files(&mut self) { + #[instrument(level = "debug", skip_all)] + fn parse(&mut self) { let mut sources = std::mem::take(&mut self.sources); for i in 0.. { let current_file = SourceId::from_usize(i); @@ -165,20 +168,16 @@ impl<'a> Resolver<'a> { } /// Parses a single file. 
- #[instrument(name = "parse", level = "debug", skip_all, fields(file = %file.name.display()))] + #[instrument(level = "debug", skip_all, fields(file = %file.name.display()))] fn parse_one(&self, file: &SourceFile) -> Option { let lexer = Lexer::from_source_file(self.sess, file); - let tokens = trace_time!("lex file", || lexer.into_tokens()); - - let mut parser = Parser::new(self.sess, tokens); - trace_time!("parse file", || { - if self.sess.language.is_yul() { - let _file = parser.parse_yul_file_object().map_err(|e| e.emit()); - None - } else { - parser.parse_file().map_err(|e| e.emit()).ok() - } - }) + let mut parser = Parser::from_lexer(lexer); + if self.sess.language.is_yul() { + let _file = parser.parse_yul_file_object().map_err(|e| e.emit()); + None + } else { + parser.parse_file().map_err(|e| e.emit()).ok() + } } /// Resolves the imports of the given file, returning an iterator over all the imported files. @@ -214,6 +213,7 @@ impl<'a> Resolver<'a> { }) } + #[instrument(level = "debug", skip_all)] fn validate_asts(&self) { self.sources.par_asts().for_each(|ast| AstValidator::validate(self.sess, ast)); }