Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ ignore.workspace = true
im-rc.workspace = true
indexmap.workspace = true
itertools.workspace = true
jiff.workspace = true
jiff = { workspace = true, features = ["serde", "std"] }
jobserver.workspace = true
lazycell.workspace = true
libgit2-sys.workspace = true
Expand Down
14 changes: 1 addition & 13 deletions src/cargo/core/compiler/build_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,18 +115,6 @@ impl BuildConfig {
(None, _) => false,
};

let timing_outputs = match (cfg.analysis.as_ref(), gctx.cli_unstable().build_analysis) {
// Enable HTML output to pretend we are persisting timing data for now.
(Some(analysis), true) if analysis.enabled => vec![TimingOutput::Html],
(Some(_), false) => {
gctx.shell().warn(
"ignoring 'build.analysis' config, pass `-Zbuild-analysis` to enable it",
)?;
Vec::new()
}
_ => Vec::new(),
};

Ok(BuildConfig {
requested_kinds,
jobs,
Expand All @@ -142,7 +130,7 @@ impl BuildConfig {
rustfix_diagnostic_server: Rc::new(RefCell::new(None)),
export_dir: None,
future_incompat_report: false,
timing_outputs,
timing_outputs: Vec::new(),
sbom,
compile_time_deps_only: false,
})
Expand Down
19 changes: 19 additions & 0 deletions src/cargo/ops/cargo_compile/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,10 @@ use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
use crate::drop_println;
use crate::ops;
use crate::ops::resolve::{SpecsAndResolvedFeatures, WorkspaceResolve};
use crate::util::BuildLogger;
use crate::util::context::{GlobalContext, WarningHandling};
use crate::util::interning::InternedString;
use crate::util::log_message::LogMessage;
use crate::util::{CargoResult, StableHasher};

mod compile_filter;
Expand Down Expand Up @@ -155,7 +157,24 @@ pub fn compile_ws<'a>(
exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
let interner = UnitInterner::new();
let logger = BuildLogger::maybe_new(ws)?;

if let Some(ref logger) = logger {
let rustc = ws.gctx().load_global_rustc(Some(ws))?;
logger.log(LogMessage::BuildStarted {
cwd: ws.gctx().cwd().to_path_buf(),
host: rustc.host.to_string(),
jobs: options.build_config.jobs,
profile: options.build_config.requested_profile.to_string(),
rustc_version: rustc.version.to_string(),
rustc_version_verbose: rustc.verbose_version.clone(),
target_dir: ws.target_dir().as_path_unlocked().to_path_buf(),
workspace_root: ws.root().to_path_buf(),
});
}

let bcx = create_bcx(ws, options, &interner)?;

if options.build_config.unit_graph {
unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
return Compilation::new(&bcx);
Expand Down
49 changes: 49 additions & 0 deletions src/cargo/util/log_message.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
//! Messages for logging.

use std::io::Write;
use std::path::PathBuf;

use jiff::Timestamp;
use serde::Serialize;

/// A log message.
///
/// Each variant represents a different type of event.
#[derive(Serialize)]
#[serde(tag = "reason", rename_all = "kebab-case")]
pub enum LogMessage {
    /// Emitted when a build starts.
    BuildStarted {
        /// Current working directory of the Cargo invocation.
        cwd: PathBuf,
        /// Host triple reported by the `rustc` in use.
        host: String,
        /// Number of parallel jobs configured for this build.
        jobs: u32,
        /// Name of the requested build profile (e.g. `dev`).
        profile: String,
        /// Short `rustc` version string.
        rustc_version: String,
        /// Verbose `rustc` version output.
        rustc_version_verbose: String,
        /// Resolved target directory for build artifacts.
        target_dir: PathBuf,
        /// Root directory of the workspace being built.
        workspace_root: PathBuf,
    },
}

impl LogMessage {
    /// Writes this message to `writer` as a single JSON line (JSONL),
    /// wrapped with the given `run_id` and the current timestamp.
    pub fn write_json_log<W: Write>(&self, writer: &mut W, run_id: &str) -> std::io::Result<()> {
        // Envelope adding `run_id` and `timestamp` alongside the flattened
        // message fields.
        #[derive(Serialize)]
        struct Envelope<'a> {
            run_id: &'a str,
            timestamp: Timestamp,
            #[serde(flatten)]
            msg: &'a LogMessage,
        }

        serde_json::to_writer(
            &mut *writer,
            &Envelope {
                run_id,
                timestamp: Timestamp::now(),
                msg: self,
            },
        )?;
        // Terminate the record so the file is valid JSONL.
        writer.write_all(b"\n")
    }
}
105 changes: 105 additions & 0 deletions src/cargo/util/logger.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
//! Build analysis logging infrastructure.

use std::io::{BufWriter, Write};
use std::mem::ManuallyDrop;
use std::path::Path;
use std::sync::mpsc;
use std::sync::mpsc::Sender;
use std::thread::JoinHandle;

use cargo_util::paths;

use crate::CargoResult;
use crate::core::Workspace;
use crate::util::log_message::LogMessage;
use crate::util::short_hash;

/// Logger for `-Zbuild-analysis`.
///
/// Messages are sent over a channel to a background writer thread, which
/// appends them as JSON lines to a per-session log file. Dropping the
/// logger disconnects the channel and joins the thread.
pub struct BuildLogger {
    // Sender half of the logging channel. Wrapped in `ManuallyDrop` so
    // `Drop` can drop it explicitly (disconnecting the channel) before
    // joining the writer thread.
    tx: ManuallyDrop<Sender<LogMessage>>,
    // Unique ID for this build session; also the log file name stem.
    run_id: String,
    // Writer thread handle; taken (set to `None`) when joined in `Drop`.
    handle: Option<JoinHandle<()>>,
}

impl BuildLogger {
    /// Creates a logger if `-Zbuild-analysis` is enabled.
    ///
    /// Returns `Ok(None)` when analysis is not fully enabled. If
    /// `build.analysis` is configured but the unstable flag was not passed,
    /// a warning is emitted and `Ok(None)` is returned.
    pub fn maybe_new(ws: &Workspace<'_>) -> CargoResult<Option<Self>> {
        let analysis = ws.gctx().build_config()?.analysis.as_ref();
        match (analysis, ws.gctx().cli_unstable().build_analysis) {
            // Config present, `-Zbuild-analysis` passed, and explicitly enabled.
            (Some(analysis), true) if analysis.enabled => Ok(Some(Self::new(ws)?)),
            // Config present but the unstable flag is missing: warn and skip.
            (Some(_), false) => {
                ws.gctx().shell().warn(
                    "ignoring 'build.analysis' config, pass `-Zbuild-analysis` to enable it",
                )?;
                Ok(None)
            }
            _ => Ok(None),
        }
    }

    /// Creates the log file and spawns the background writer thread.
    fn new(ws: &Workspace<'_>) -> CargoResult<Self> {
        let run_id = Self::generate_run_id(ws)?;

        // Logs live under the cargo home `log` directory, one file per
        // build session.
        let log_dir = ws.gctx().home().join("log");
        paths::create_dir_all(log_dir.as_path_unlocked())?;

        let filename = format!("{run_id}.jsonl");
        let log_file = log_dir.open_rw_exclusive_create(
            Path::new(&filename),
            ws.gctx(),
            "build analysis log",
        )?;

        let (tx, rx) = mpsc::channel::<LogMessage>();

        // The writer thread drains the channel until every sender is gone
        // (see `Drop for BuildLogger`), then flushes and exits. Individual
        // write errors are ignored: logging is best-effort.
        let run_id_clone = run_id.clone();
        let handle = std::thread::spawn(move || {
            let mut writer = BufWriter::new(log_file);
            for msg in rx {
                let _ = msg.write_json_log(&mut writer, &run_id_clone);
            }
            let _ = writer.flush();
        });

        Ok(Self {
            tx: ManuallyDrop::new(tx),
            run_id,
            handle: Some(handle),
        })
    }

    /// Generates a unique run ID.
    ///
    /// The format is `{timestamp}-{hash}`, with `:` and `.` in the timestamp
    /// removed to make it safe for filenames. Note that only `:` and `.` are
    /// stripped, so the `-` separators in the date portion remain.
    /// For example, `2025-10-24T194502773638Z-f891d525d52ecab3`.
    pub fn generate_run_id(ws: &Workspace<'_>) -> CargoResult<String> {
        // Hashing the workspace root disambiguates builds of different
        // workspaces started at the same instant.
        let hash = short_hash(&ws.root());
        let timestamp = jiff::Timestamp::now().to_string().replace([':', '.'], "");
        Ok(format!("{timestamp}-{hash}"))
    }

    /// Returns the run ID for this build session.
    pub fn run_id(&self) -> &str {
        &self.run_id
    }

    /// Logs a message.
    ///
    /// Best-effort: send errors (e.g. the writer thread has exited) are
    /// silently ignored.
    pub fn log(&self, msg: LogMessage) {
        let _ = self.tx.send(msg);
    }
}

impl Drop for BuildLogger {
    fn drop(&mut self) {
        // Move the sender out and drop it so the channel disconnects,
        // ending the writer thread's receive loop.
        // SAFETY: `tx` is taken exactly once, here; `ManuallyDrop` ensures
        // no second drop occurs after this impl runs.
        let sender = unsafe { ManuallyDrop::take(&mut self.tx) };
        drop(sender);

        // Wait for the writer thread to drain remaining messages and flush.
        if let Some(worker) = self.handle.take() {
            let _ = worker.join();
        }
    }
}
3 changes: 3 additions & 0 deletions src/cargo/util/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ pub use self::into_url::IntoUrl;
pub use self::into_url_with_base::IntoUrlWithBase;
pub(crate) use self::io::LimitErrorReader;
pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted};
pub use self::logger::BuildLogger;
pub use self::once::OnceExt;
pub use self::progress::{Progress, ProgressStyle};
pub use self::queue::Queue;
Expand Down Expand Up @@ -55,6 +56,8 @@ mod io;
pub mod job;
pub mod lints;
mod lockserver;
pub mod log_message;
mod logger;
pub mod machine_message;
pub mod network;
mod once;
Expand Down
84 changes: 82 additions & 2 deletions tests/testsuite/build_analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
use crate::prelude::*;

use cargo_test_support::basic_manifest;
use cargo_test_support::compare::assert_e2e;
use cargo_test_support::paths;
use cargo_test_support::project;
use cargo_test_support::str;

Expand All @@ -26,20 +28,98 @@ fn gated() {
}

#[cargo_test]
fn simple() {
fn one_logfile_per_invocation() {
    let p = project()
        .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
        .file("src/lib.rs", "")
        .build();

    let log_dir = paths::cargo_home().join("log");

    // Counts the `*.jsonl` log files currently present in `dir`.
    let count_logs = |dir: &std::path::Path| {
        std::fs::read_dir(dir)
            .unwrap()
            .filter_map(Result::ok)
            .filter(|e| e.path().extension().and_then(|s| s.to_str()) == Some("jsonl"))
            .count()
    };

    // First invocation creates the log directory with a single log file.
    p.cargo("check -Zbuild-analysis")
        .env("CARGO_BUILD_ANALYSIS_ENABLED", "true")
        .masquerade_as_nightly_cargo(&["build-analysis"])
        .with_stderr_data(str![[r#"
[CHECKING] foo v0.0.0 ([ROOT]/foo)
Timing report saved to [ROOT]/foo/target/cargo-timings/cargo-timing-[..].html
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s

"#]])
        .run();

    assert!(log_dir.exists());
    assert_eq!(count_logs(&log_dir), 1);

    // Second invocation writes a fresh log file instead of reusing the first.
    p.cargo("check -Zbuild-analysis")
        .env("CARGO_BUILD_ANALYSIS_ENABLED", "true")
        .masquerade_as_nightly_cargo(&["build-analysis"])
        .with_stderr_data(str![[r#"
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s

"#]])
        .run();

    assert_eq!(count_logs(&log_dir), 2);
}

#[cargo_test]
fn log_msg_build_started() {
    let p = project()
        .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
        .file("src/lib.rs", "")
        .build();

    p.cargo("check -Zbuild-analysis")
        .env("CARGO_BUILD_ANALYSIS_ENABLED", "true")
        .masquerade_as_nightly_cargo(&["build-analysis"])
        .run();

    // The logger writes a `<run-id>.jsonl` file under the cargo home `log`
    // directory.
    let log_dir = paths::cargo_home().join("log");
    assert!(log_dir.exists());

    let jsonl_path = std::fs::read_dir(&log_dir)
        .unwrap()
        .filter_map(Result::ok)
        .map(|entry| entry.path())
        .find(|path| path.extension().and_then(|ext| ext.to_str()) == Some("jsonl"))
        .unwrap();

    let content = std::fs::read_to_string(&jsonl_path).unwrap();

    // The log contains a single `build-started` event with the expected
    // envelope (`run_id`, `timestamp`) and payload fields.
    assert_e2e().eq(
        &content,
        str![[r#"
[
{
"cwd": "[ROOT]/foo",
"host": "[HOST_TARGET]",
"jobs": "{...}",
"profile": "dev",
"reason": "build-started",
"run_id": "[..]T[..]Z-[..]",
"rustc_version": "1.[..]",
"rustc_version_verbose": "{...}",
"target_dir": "[ROOT]/foo/target",
"timestamp": "[..]T[..]Z",
"workspace_root": "[ROOT]/foo"
}
]
"#]]
        .is_json()
        .against_jsonlines(),
    );
}