-
Notifications
You must be signed in to change notification settings - Fork 13.6k
[rustdoc] Display total time and compilation time of merged doctests #144308
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -11,7 +11,8 @@ use std::path::{Path, PathBuf}; | |
use std::process::{self, Command, Stdio}; | ||
use std::sync::atomic::{AtomicUsize, Ordering}; | ||
use std::sync::{Arc, Mutex}; | ||
use std::{panic, str}; | ||
use std::time::{Duration, Instant}; | ||
use std::{fmt, panic, str}; | ||
|
||
pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder}; | ||
pub(crate) use markdown::test as test_markdown; | ||
|
@@ -36,6 +37,50 @@ use crate::config::{Options as RustdocOptions, OutputFormat}; | |
use crate::html::markdown::{ErrorCodes, Ignore, LangString, MdRelLine}; | ||
use crate::lint::init_lints; | ||
|
||
/// Tracks and displays timing information (total and compilation) for merged doctests.
struct MergedDoctestTimes {
    /// Instant at which doctest processing started. Despite the name, this is the
    /// *starting point* used to compute the total elapsed time, not a duration.
    total_time: Instant,
    /// Total time spent compiling all merged doctests.
    compilation_time: Duration,
    /// Number of merged doctests we (tried to) compile; used to decide whether
    /// there is anything worth displaying.
    added_compilation_times: usize,
}

impl MergedDoctestTimes {
    /// Starts the total-time clock and zeroes the compilation counters.
    fn new() -> Self {
        Self {
            total_time: Instant::now(),
            compilation_time: Duration::ZERO,
            added_compilation_times: 0,
        }
    }

    /// Records the compilation time of one merged-doctest compilation attempt.
    fn add_compilation_time(&mut self, duration: Duration) {
        self.compilation_time += duration;
        self.added_compilation_times += 1;
    }

    /// Prints the timing summary, but only if at least one merged doctest was
    /// compiled: the numbers displayed by `libtest` for standalone tests are
    /// already accurate (they include both compilation and runtime), so there
    /// is nothing extra to report in that case.
    fn display_times(&self) {
        if self.added_compilation_times > 0 {
            println!("{self}");
        }
    }
}

impl fmt::Display for MergedDoctestTimes {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "all doctests ran in {:.2}s; merged doctests compilation took {:.2}s",
            self.total_time.elapsed().as_secs_f64(),
            self.compilation_time.as_secs_f64(),
        )
    }
}
|
||
/// Options that apply to all doctests in a crate or Markdown file (for `rustdoc foo.md`). | ||
#[derive(Clone)] | ||
pub(crate) struct GlobalTestOptions { | ||
|
@@ -295,6 +340,7 @@ pub(crate) fn run_tests( | |
|
||
let mut nb_errors = 0; | ||
let mut ran_edition_tests = 0; | ||
let mut times = MergedDoctestTimes::new(); | ||
let target_str = rustdoc_options.target.to_string(); | ||
|
||
for (MergeableTestKey { edition, global_crate_attrs_hash }, mut doctests) in mergeable_tests { | ||
|
@@ -314,13 +360,15 @@ pub(crate) fn run_tests( | |
for (doctest, scraped_test) in &doctests { | ||
tests_runner.add_test(doctest, scraped_test, &target_str); | ||
} | ||
if let Ok(success) = tests_runner.run_merged_tests( | ||
let (duration, ret) = tests_runner.run_merged_tests( | ||
rustdoc_test_options, | ||
edition, | ||
&opts, | ||
&test_args, | ||
rustdoc_options, | ||
) { | ||
); | ||
times.add_compilation_time(duration); | ||
if let Ok(success) = ret { | ||
ran_edition_tests += 1; | ||
if !success { | ||
nb_errors += 1; | ||
|
@@ -354,11 +402,13 @@ pub(crate) fn run_tests( | |
test::test_main_with_exit_callback(&test_args, standalone_tests, None, || { | ||
// We ensure temp dir destructor is called. | ||
std::mem::drop(temp_dir.take()); | ||
times.display_times(); | ||
}); | ||
} | ||
if nb_errors != 0 { | ||
// We ensure temp dir destructor is called. | ||
std::mem::drop(temp_dir); | ||
times.display_times(); | ||
// libtest::ERROR_EXIT_CODE is not public but it's the same value. | ||
std::process::exit(101); | ||
} | ||
|
@@ -496,16 +546,19 @@ impl RunnableDocTest { | |
/// | ||
/// This is the function that calculates the compiler command line, invokes the compiler, then | ||
/// invokes the test or tests in a separate executable (if applicable). | ||
/// | ||
/// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test. | ||
fn run_test( | ||
doctest: RunnableDocTest, | ||
rustdoc_options: &RustdocOptions, | ||
supports_color: bool, | ||
report_unused_externs: impl Fn(UnusedExterns), | ||
) -> Result<(), TestFailure> { | ||
) -> (Duration, Result<(), TestFailure>) { | ||
GuillaumeGomez marked this conversation as resolved.
Show resolved
Hide resolved
|
||
let langstr = &doctest.langstr; | ||
// Make sure we emit well-formed executable names for our target. | ||
let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target); | ||
let output_file = doctest.test_opts.outdir.path().join(rust_out); | ||
let instant = Instant::now(); | ||
|
||
// Common arguments used for compiling the doctest runner. | ||
// On merged doctests, the compiler is invoked twice: once for the test code itself, | ||
|
@@ -589,7 +642,7 @@ fn run_test( | |
if std::fs::write(&input_file, &doctest.full_test_code).is_err() { | ||
// If we cannot write this file for any reason, we leave. All combined tests will be | ||
// tested as standalone tests. | ||
return Err(TestFailure::CompileError); | ||
return (Duration::default(), Err(TestFailure::CompileError)); | ||
} | ||
if !rustdoc_options.nocapture { | ||
// If `nocapture` is disabled, then we don't display rustc's output when compiling | ||
|
@@ -660,7 +713,7 @@ fn run_test( | |
if std::fs::write(&runner_input_file, merged_test_code).is_err() { | ||
// If we cannot write this file for any reason, we leave. All combined tests will be | ||
// tested as standalone tests. | ||
return Err(TestFailure::CompileError); | ||
return (instant.elapsed(), Err(TestFailure::CompileError)); | ||
} | ||
if !rustdoc_options.nocapture { | ||
// If `nocapture` is disabled, then we don't display rustc's output when compiling | ||
|
@@ -713,7 +766,7 @@ fn run_test( | |
let _bomb = Bomb(&out); | ||
match (output.status.success(), langstr.compile_fail) { | ||
(true, true) => { | ||
return Err(TestFailure::UnexpectedCompilePass); | ||
return (instant.elapsed(), Err(TestFailure::UnexpectedCompilePass)); | ||
} | ||
(true, false) => {} | ||
(false, true) => { | ||
|
@@ -729,17 +782,18 @@ fn run_test( | |
.collect(); | ||
|
||
if !missing_codes.is_empty() { | ||
return Err(TestFailure::MissingErrorCodes(missing_codes)); | ||
return (instant.elapsed(), Err(TestFailure::MissingErrorCodes(missing_codes))); | ||
} | ||
} | ||
} | ||
(false, false) => { | ||
return Err(TestFailure::CompileError); | ||
return (instant.elapsed(), Err(TestFailure::CompileError)); | ||
} | ||
} | ||
|
||
let duration = instant.elapsed(); | ||
if doctest.no_run { | ||
return Ok(()); | ||
return (duration, Ok(())); | ||
} | ||
|
||
// Run the code! | ||
|
@@ -771,17 +825,17 @@ fn run_test( | |
cmd.output() | ||
}; | ||
match result { | ||
Err(e) => return Err(TestFailure::ExecutionError(e)), | ||
Err(e) => return (duration, Err(TestFailure::ExecutionError(e))), | ||
Ok(out) => { | ||
if langstr.should_panic && out.status.success() { | ||
return Err(TestFailure::UnexpectedRunPass); | ||
return (duration, Err(TestFailure::UnexpectedRunPass)); | ||
} else if !langstr.should_panic && !out.status.success() { | ||
return Err(TestFailure::ExecutionFailure(out)); | ||
return (duration, Err(TestFailure::ExecutionFailure(out))); | ||
} | ||
} | ||
} | ||
|
||
Ok(()) | ||
(duration, Ok(())) | ||
} | ||
|
||
/// Converts a path intended to use as a command to absolute if it is | ||
|
@@ -1071,7 +1125,7 @@ fn doctest_run_fn( | |
no_run: scraped_test.no_run(&rustdoc_options), | ||
merged_test_code: None, | ||
}; | ||
let res = | ||
let (_, res) = | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.

Is there a reason we're not also tracking this, so we can report it as well? […]

I believe doctests are run with some level of parallelism, so currently we would be reporting the total absolute wall-clock time for the total compilation time, but for merged doctests, we're reporting the sum of the amount of time each thread spent compiling. This inconsistency could over-report the impact of merged doctests. Also, if parallelism is involved, then I think perhaps a more useful metric for minimizing compilation time would be how long the longest test took.

There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.

We only compile and run standalone tests in parallel; merged doctests are never compiled in parallel (but still run in parallel). We report raw numbers. Also, we cannot get which tests took the longest to run, as only […] |
||
run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs); | ||
|
||
if let Err(err) = res { | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,12 +1,13 @@ | ||
|
||
running 1 test | ||
test $DIR/2024-doctests-checks.rs - Foo (line 8) ... ok | ||
test $DIR/2024-doctests-checks.rs - Foo (line 10) ... ok | ||
|
||
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME | ||
|
||
|
||
running 1 test | ||
test $DIR/2024-doctests-checks.rs - Foo (line 15) ... ok | ||
test $DIR/2024-doctests-checks.rs - Foo (line 17) ... ok | ||
|
||
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME | ||
|
||
all doctests ran in $TIME; merged doctests compilation took $TIME |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,12 +1,13 @@ | ||
|
||
running 1 test | ||
test $DIR/2024-doctests-crate-attribute.rs - Foo (line 20) ... ok | ||
test $DIR/2024-doctests-crate-attribute.rs - Foo (line 22) ... ok | ||
|
||
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME | ||
|
||
|
||
running 1 test | ||
test $DIR/2024-doctests-crate-attribute.rs - Foo (line 11) ... ok | ||
test $DIR/2024-doctests-crate-attribute.rs - Foo (line 13) ... ok | ||
|
||
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME | ||
|
||
all doctests ran in $TIME; merged doctests compilation took $TIME |
Uh oh!
There was an error while loading. Please reload this page.