perf: instrument more code with tracing and optimize it
DaniPopes committed Jun 26, 2024
1 parent ae133bc commit 67aea44
Showing 19 changed files with 236 additions and 152 deletions.
99 changes: 88 additions & 11 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion benches/src/lib.rs
@@ -93,7 +93,7 @@ impl Parser for Sulk {
let sess = Session::with_tty_emitter(source_map.into());
let filename = PathBuf::from("test.sol");
let mut parser =
sulk_parse::Parser::from_source_code(&sess, filename.into(), src.into())?;
sulk_parse::Parser::from_source_code(&sess, filename.into(), || Ok(src.into()))?;
let result = parser.parse_file().map_err(|e| e.emit())?;
sess.dcx.has_errors()?;
black_box(result);
28 changes: 0 additions & 28 deletions crates/interface/src/lib.rs
@@ -52,31 +52,3 @@ pub type Result<T = (), E = ErrorGuaranteed> = std::result::Result<T, E>;
pub fn enter<R>(f: impl FnOnce() -> R) -> R {
SessionGlobals::with_or_default(|_| f())
}

#[macro_export]
macro_rules! time {
($level:expr, $what:literal, || $e:expr) => {
if enabled!($level) {
let timer = std::time::Instant::now();
let res = $e;
event!($level, elapsed=?timer.elapsed(), $what);
res
} else {
$e
}
};
}

#[macro_export]
macro_rules! debug_time {
($what:literal, || $e:expr) => {
$crate::time!(tracing::Level::DEBUG, $what, || $e)
};
}

#[macro_export]
macro_rules! trace_time {
($what:literal, || $e:expr) => {
$crate::time!(tracing::Level::TRACE, $what, || $e)
};
}
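
The hand-rolled `time!`, `debug_time!`, and `trace_time!` macros are deleted; the same timing information now comes from `tracing` spans added with `#[instrument]` in the hunks below, and a span-aware subscriber can report each span's elapsed time when it closes. A minimal sketch of the replacement pattern (illustrative only — it assumes the `tracing` and `tracing-subscriber` crates, and the function name is made up):

```rust
use tracing::instrument;
use tracing_subscriber::fmt::format::FmtSpan;

// Instead of wrapping call sites in `debug_time!("...", || expr)`, the function
// itself carries a span; timing is captured whenever DEBUG is enabled.
#[instrument(level = "debug", skip_all)]
fn parse_all_files(paths: &[&str]) -> usize {
    // Placeholder body; the real annotated functions are the `FileResolver`
    // and `SourceMap` methods shown in the hunks below.
    paths.len()
}

fn main() {
    // `FmtSpan::CLOSE` makes the subscriber print each span's elapsed time when
    // it ends, roughly what the removed `debug_time!` macro measured by hand.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .with_span_events(FmtSpan::CLOSE)
        .init();
    parse_all_files(&["a.sol", "b.sol"]);
}
```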
3 changes: 2 additions & 1 deletion crates/interface/src/source_map/file.rs
@@ -221,7 +221,7 @@ pub struct SourceFile {
impl SourceFile {
pub fn new(
name: FileName,
src: String,
mut src: String,
hash_kind: SourceFileHashAlgorithm,
) -> Result<Self, OffsetOverflowError> {
// Compute the file hash before any normalization.
@@ -234,6 +234,7 @@ impl SourceFile {

let (lines, multibyte_chars, non_narrow_chars) = super::analyze::analyze_source_file(&src);

src.shrink_to_fit();
Ok(Self {
name,
src: Lrc::new(src),
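
The added `src.shrink_to_fit()` releases whatever spare capacity was left over from reading and normalizing the source before the `String` is frozen inside an `Lrc` for the rest of the session. A standalone sketch of the effect (illustrative values, not code from this commit):

```rust
fn main() {
    // Reading or normalizing a file can leave the buffer with more capacity than it needs.
    let mut src = String::with_capacity(4096);
    src.push_str("contract C {}\n");
    assert!(src.capacity() >= 4096);

    // Drop the excess before the string is kept alive for the whole session.
    src.shrink_to_fit();
    assert!(src.capacity() >= src.len());
    println!("len = {}, capacity = {}", src.len(), src.capacity());
}
```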
5 changes: 3 additions & 2 deletions crates/interface/src/source_map/file_resolver.rs
@@ -78,6 +78,7 @@ impl<'a> FileResolver<'a> {
}

/// Resolves an import path. `parent` is the path of the file that contains the import, if any.
#[instrument(level = "debug", skip_all, fields(path = %path.display()))]
pub fn resolve_file(
&self,
path: &Path,
@@ -140,7 +141,7 @@ impl<'a> FileResolver<'a> {
}

/// Applies the import path mappings to `path`.
#[instrument(level = "trace", skip(self), ret)]
#[instrument(level = "trace", skip_all, ret)]
pub fn remap_path<'b>(&self, path: &'b Path) -> Cow<'b, Path> {
let orig = path;
let mut remapped = Cow::Borrowed(path);
@@ -160,7 +161,7 @@ impl<'a> FileResolver<'a> {
}

/// Loads `path` into the source map. Returns `None` if the file doesn't exist.
#[instrument(level = "trace", skip(self))]
#[instrument(level = "debug", skip_all)]
pub fn try_file(&self, path: &Path) -> Result<Option<Lrc<SourceFile>>, ResolveError> {
let cache_path = path.normalize();
if let Ok(file) = self.source_map().load_file(&cache_path) {
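
The spans above use `skip_all` so arguments (including `self`) are not captured wholesale; `resolve_file` records only the rendered path as an explicit field. A hedged sketch of that attribute style on a free function (name and body are made up):

```rust
use std::path::Path;
use tracing::instrument;

// `skip_all` suppresses automatic argument recording; `fields(...)` keeps just
// the useful data — the path, rendered with `Display` via the `%` sigil.
#[instrument(level = "debug", skip_all, fields(path = %path.display()))]
fn resolve_file(path: &Path) -> bool {
    path.exists()
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    resolve_file(Path::new("contracts/Token.sol"));
}
```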
75 changes: 42 additions & 33 deletions crates/interface/src/source_map/mod.rs
@@ -3,7 +3,7 @@
use crate::{BytePos, CharPos, Pos, Span};
use std::{
io::{self, Read},
path::Path,
path::{Path, PathBuf},
};
use sulk_data_structures::{
map::FxHashMap,
@@ -124,16 +124,52 @@ impl SourceMap {

/// Loads a file from the given path.
pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
let src = std::fs::read_to_string(path)?;
let filename = path.to_owned().into();
self.new_source_file(filename, src).map_err(Into::into)
self.new_source_file(filename, || std::fs::read_to_string(path))
}

/// Loads `stdin`.
pub fn load_stdin(&self) -> io::Result<Lrc<SourceFile>> {
let mut src = String::new();
io::stdin().read_to_string(&mut src)?;
self.new_source_file(FileName::Stdin, src).map_err(Into::into)
self.new_source_file(FileName::Stdin, || {
let mut src = String::new();
io::stdin().read_to_string(&mut src)?;
Ok(src)
})
}

/// Loads a file with the given source string.
///
/// This is useful for testing.
pub fn new_dummy_source_file(&self, path: PathBuf, src: String) -> io::Result<Lrc<SourceFile>> {
self.new_source_file(path.into(), || Ok(src))
}

/// Creates a new `SourceFile`.
///
/// If a file already exists in the `SourceMap` with the same ID, that file is returned
/// unmodified.
///
/// Returns an error if the file is larger than 4GiB or other errors occur while creating the
/// `SourceFile`.
#[instrument(level = "debug", skip_all, fields(filename = %filename.display()))]
pub fn new_source_file(
&self,
filename: FileName,
get_src: impl FnOnce() -> io::Result<String>,
) -> io::Result<Lrc<SourceFile>> {
let stable_id = StableSourceFileId::from_filename_in_current_crate(&filename);
match self.source_file_by_stable_id(stable_id) {
Some(lrc_sf) => Ok(lrc_sf),
None => {
let source_file = SourceFile::new(filename, get_src()?, self.hash_kind)?;

// Let's make sure the file_id we generated above actually matches
// the ID we generate for the SourceFile we just created.
debug_assert_eq!(source_file.stable_id, stable_id);

self.register_source_file(stable_id, source_file).map_err(Into::into)
}
}
}

pub fn files(&self) -> MappedReadGuard<'_, Vec<Lrc<SourceFile>>> {
@@ -174,33 +210,6 @@ impl SourceMap {
Ok(file)
}

/// Creates a new `SourceFile`.
///
/// If a file already exists in the `SourceMap` with the same ID, that file is returned
/// unmodified.
///
/// Returns an error if the file is larger than 4GiB or other errors occur while creating the
/// `SourceFile`.
pub fn new_source_file(
&self,
filename: FileName,
src: String,
) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
let stable_id = StableSourceFileId::from_filename_in_current_crate(&filename);
match self.source_file_by_stable_id(stable_id) {
Some(lrc_sf) => Ok(lrc_sf),
None => {
let source_file = SourceFile::new(filename, src, self.hash_kind)?;

// Let's make sure the file_id we generated above actually matches
// the ID we generate for the SourceFile we just created.
debug_assert_eq!(source_file.stable_id, stable_id);

self.register_source_file(stable_id, source_file)
}
}
}

pub fn filename_for_diagnostics<'a>(&self, filename: &'a FileName) -> FileNameDisplay<'a> {
filename.display()
}
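
The signature change is the main optimization in this file: `new_source_file` now takes a `get_src` closure instead of an already-read `String`, so the source is only produced on a cache miss — re-importing a file that is already in the map never touches the disk. A self-contained sketch of the pattern with made-up names (not the crate's API):

```rust
use std::collections::HashMap;
use std::io;

/// Toy cache illustrating the lazy-source pattern used by `new_source_file`.
struct Cache {
    files: HashMap<String, String>,
}

impl Cache {
    fn get_or_insert_with(
        &mut self,
        name: &str,
        get_src: impl FnOnce() -> io::Result<String>,
    ) -> io::Result<&String> {
        if !self.files.contains_key(name) {
            // Only evaluated on a miss; this is where a real implementation
            // would call `std::fs::read_to_string`.
            let src = get_src()?;
            self.files.insert(name.to_string(), src);
        }
        Ok(&self.files[name])
    }
}

fn main() -> io::Result<()> {
    let mut cache = Cache { files: HashMap::new() };
    cache.get_or_insert_with("a.sol", || Ok("contract A {}".to_string()))?;
    // Second lookup: the closure (a potential disk read) is never called.
    cache.get_or_insert_with("a.sol", || unreachable!("cache hit"))?;
    Ok(())
}
```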
23 changes: 12 additions & 11 deletions crates/interface/src/source_map/tests.rs
@@ -3,10 +3,10 @@ use std::path::PathBuf;

fn init_source_map() -> SourceMap {
let sm = SourceMap::empty();
sm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string())
sm.new_dummy_source_file(PathBuf::from("blork.rs"), "first line.\nsecond line".to_string())
.unwrap();
sm.new_source_file(PathBuf::from("empty.rs").into(), String::new()).unwrap();
sm.new_source_file(PathBuf::from("blork2.rs").into(), "first line.\nsecond line".to_string())
sm.new_dummy_source_file(PathBuf::from("empty.rs"), String::new()).unwrap();
sm.new_dummy_source_file(PathBuf::from("blork2.rs"), "first line.\nsecond line".to_string())
.unwrap();
sm
}
@@ -105,13 +105,13 @@ fn t5() {
fn init_source_map_mbc() -> SourceMap {
let sm = SourceMap::empty();
// "€" is a three-byte UTF8 char.
sm.new_source_file(
PathBuf::from("blork.rs").into(),
sm.new_dummy_source_file(
PathBuf::from("blork.rs"),
"fir€st €€€€ line.\nsecond line".to_string(),
)
.unwrap();
sm.new_source_file(
PathBuf::from("blork2.rs").into(),
sm.new_dummy_source_file(
PathBuf::from("blork2.rs"),
"first line€€.\n€ second line".to_string(),
)
.unwrap();
@@ -166,7 +166,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
let sm = SourceMap::empty();
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()).unwrap();
sm.new_dummy_source_file(Path::new("blork.rs").to_owned(), inputtext.to_string()).unwrap();
let span = span_from_selection(inputtext, selection);

// Check that we are extracting the text we thought we were extracting.
@@ -209,7 +209,7 @@ fn span_merging_fail() {
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()).unwrap();
sm.new_dummy_source_file(Path::new("blork.rs").to_owned(), inputtext.to_owned()).unwrap();
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);

@@ -224,7 +224,8 @@ fn t10() {
let unnormalized = "first line.\r\nsecond line";
let normalized = "first line.\nsecond line";

let src_file = sm.new_source_file(PathBuf::from("blork.rs").into(), unnormalized.to_string());
let src_file =
sm.new_dummy_source_file(PathBuf::from("blork.rs").into(), unnormalized.to_string());

assert_eq!(src_file.src.as_ref().unwrap().as_ref(), normalized);
assert!(
@@ -554,7 +555,7 @@ fn path_prefix_remapping_reverse() {
#[test]
fn test_next_point() {
let sm = SourceMap::empty();
sm.new_source_file(PathBuf::from("example.rs").into(), "a…b".to_string());
sm.new_dummy_source_file(PathBuf::from("example.rs").into(), "a…b".to_string());

// Dummy spans don't advance.
let span = DUMMY_SP;
(Remaining changed files not rendered.)
