Bye bye as_command_mut #143354

Draft · wants to merge 4 commits into master
src/bootstrap/src/core/build_steps/compile.rs (25 changes: 12 additions & 13 deletions)
@@ -12,7 +12,6 @@ use std::ffi::OsStr;
use std::io::BufReader;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::{env, fs, str};

use serde_derive::Deserialize;
@@ -2511,11 +2510,11 @@ pub fn stream_cargo(
cb: &mut dyn FnMut(CargoMessage<'_>),
) -> bool {
let mut cmd = cargo.into_cmd();
cmd.do_not_cache();

#[cfg(feature = "tracing")]
let _run_span = crate::trace_cmd!(cmd);

let cargo = cmd.as_command_mut();
// Instruct Cargo to give us json messages on stdout, critically leaving
// stderr as piped so we can get those pretty colors.
let mut message_format = if builder.config.json_output {
@@ -2527,27 +2526,26 @@
message_format.push_str(",json-diagnostic-");
message_format.push_str(s);
}
cargo.arg("--message-format").arg(message_format).stdout(Stdio::piped());
cmd.arg("--message-format").arg(message_format);

for arg in tail_args {
cargo.arg(arg);
cmd.arg(arg);
}

builder.verbose(|| println!("running: {cargo:?}"));
builder.verbose(|| println!("running: {cmd:?}"));

if builder.config.dry_run() {
let streaming_command = cmd.start_capture_stdout(&builder.config.exec_ctx).stream();

if streaming_command.is_none() {
Review comment (Member):
let Some(mut streaming_command) = streaming_command else { return true; };
return true;
}

let mut child = match cargo.spawn() {
Ok(child) => child,
Err(e) => panic!("failed to execute command: {cargo:?}\nERROR: {e}"),
};
let mut streaming_command = streaming_command.unwrap();

// Spawn Cargo slurping up its JSON output. We'll start building up the
// `deps` array of all files it generated along with a `toplevel` array of
// files we need to probe for later.
let stdout = BufReader::new(child.stdout.take().unwrap());
let stdout = BufReader::new(streaming_command.stdout.take().unwrap());
for line in stdout.lines() {
let line = t!(line);
match serde_json::from_str::<CargoMessage<'_>>(&line) {
@@ -2564,13 +2562,14 @@
}

// Make sure Cargo actually succeeded after we read all of its stdout.
let status = t!(child.wait());
let status = t!(streaming_command.wait());
Review comment (Member): This is good enough for now, but for profiling we'll want to have a unified way of finishing even the streaming commands in the execution context. But for now we can keep it like this.

if builder.is_verbose() && !status.success() {
eprintln!(
"command did not execute successfully: {cargo:?}\n\
"command did not execute successfully: {cmd:?}\n\
expected success, got: {status}"
);
}

status.success()
}

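The review comment above notes that, for profiling, bootstrap will eventually want a unified way of finishing even streamed commands inside the execution context. The following is a rough, standalone illustration of that idea only; StreamedChild, start_streaming, finish_streaming, and the profiled field are invented names that exist neither in bootstrap nor in this PR.

```rust
use std::process::{Child, Command, ExitStatus, Stdio};
use std::time::{Duration, Instant};

// Invented names for illustration only; none of this exists in bootstrap.
struct StreamedChild {
    child: Child,
    started_at: Instant,
}

#[derive(Default)]
struct ExecutionContext {
    profiled: Vec<(String, Duration)>,
}

impl ExecutionContext {
    fn start_streaming(&self, mut cmd: Command) -> std::io::Result<StreamedChild> {
        cmd.stdout(Stdio::piped());
        Ok(StreamedChild { child: cmd.spawn()?, started_at: Instant::now() })
    }

    /// The single shared place where streamed commands are finished, so command
    /// profiling does not need to special-case them.
    fn finish_streaming(
        &mut self,
        label: &str,
        mut streamed: StreamedChild,
    ) -> std::io::Result<ExitStatus> {
        let status = streamed.child.wait()?;
        self.profiled.push((label.to_string(), streamed.started_at.elapsed()));
        Ok(status)
    }
}

fn main() -> std::io::Result<()> {
    let mut ctx = ExecutionContext::default();
    let mut cmd = Command::new("cargo");
    cmd.arg("--version");
    let streamed = ctx.start_streaming(cmd)?;
    // A real caller would read `streamed.child.stdout` line by line here.
    let status = ctx.finish_streaming("cargo --version", streamed)?;
    println!("exit: {status}; profiled: {:?}", ctx.profiled);
    Ok(())
}
```

The design point is that callers never call wait() on the child themselves, so whatever bookkeeping profiling needs happens in exactly one place.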
src/bootstrap/src/utils/exec.rs (62 changes: 50 additions & 12 deletions)
@@ -13,7 +13,9 @@ use std::fmt::{Debug, Formatter};
use std::hash::Hash;
use std::panic::Location;
use std::path::Path;
use std::process::{Child, Command, CommandArgs, CommandEnvs, ExitStatus, Output, Stdio};
use std::process::{
Child, ChildStderr, ChildStdout, Command, CommandArgs, CommandEnvs, ExitStatus, Output, Stdio,
};
use std::sync::{Arc, Mutex};

use build_helper::ci::CiEnv;
@@ -209,17 +211,6 @@ impl<'a> BootstrapCommand {
exec_ctx.as_ref().start(self, OutputMode::Capture, OutputMode::Print)
}

/// Provides access to the stdlib Command inside.
/// FIXME: This function should be eventually removed from bootstrap.
pub fn as_command_mut(&mut self) -> &mut Command {
// We proactively mark this command as executed since we can't be certain how the returned
// command will be handled. Caching must also be avoided here, as the inner command could be
// modified externally without us being aware.
self.mark_as_executed();
self.do_not_cache();
&mut self.command
}

/// Mark the command as being executed, disarming the drop bomb.
/// If this method is not called before the command is dropped, its drop will panic.
pub fn mark_as_executed(&mut self) {
@@ -449,6 +440,12 @@ enum CommandState<'a> {
},
}

pub struct CommandStreaming {
Review comment (Member): Maybe StreamingCommand? To keep it consistent with DeferredCommand.

child: Child,
pub stdout: Option<ChildStdout>,
pub stderr: Option<ChildStderr>,
}

#[must_use]
pub struct DeferredCommand<'a> {
state: CommandState<'a>,
@@ -625,7 +622,48 @@ impl AsRef<ExecutionContext> for ExecutionContext {
}
}

impl CommandStreaming {
pub fn wait(mut self) -> Result<ExitStatus, std::io::Error> {
self.child.wait()
}

pub fn wait_with_output(self) -> Result<Output, std::io::Error> {
self.child.wait_with_output()
}
}

impl<'a> DeferredCommand<'a> {
pub fn stream(self) -> Option<CommandStreaming> {
if let CommandState::Deferred {
process,
command,
stdout: _,
stderr: _,
executed_at: _,
cache_key: _,
} = self.state
{
command.mark_as_executed();

let child = match process {
Review comment (Member): We should explicitly panic here and let the user know that stream cannot be combined with caching and that they should use do_not_cache. Actually, it would be even better if stream were a method on BootstrapCommand, which would disable caching and return the streaming command directly, rather than going through DeferredCommand.

Some(child) => child,
None => {
return None;
}
};

let mut child = match child {
Ok(child) => child,
Err(e) => panic!("failed to execute command: {:?}\nERROR: {e}", command),
};

let stdout = child.stdout.take();
let stderr = child.stderr.take();
return Some(CommandStreaming { child, stdout, stderr });
}
None
}

pub fn wait_for_output(self, exec_ctx: impl AsRef<ExecutionContext>) -> CommandOutput {
match self.state {
CommandState::Cached(output) => output,
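The review comment above suggests making stream a method on BootstrapCommand itself, so that it can disable caching up front and hand back the streaming command directly instead of going through DeferredCommand. The sketch below is a standalone, hypothetical mock of that shape; its BootstrapCommand and StreamingCommand are simplified stand-ins rather than bootstrap's real types, and nothing in it is part of this PR.

```rust
use std::io::{BufRead, BufReader};
use std::process::{Child, ChildStdout, Command, ExitStatus, Stdio};

// Simplified stand-ins for bootstrap's types; the real BootstrapCommand carries
// far more state (drop bomb, failure behaviour, cache key, ...).
struct BootstrapCommand {
    command: Command,
    should_cache: bool,
}

struct StreamingCommand {
    child: Child,
    stdout: Option<ChildStdout>,
}

impl BootstrapCommand {
    fn new(program: &str) -> Self {
        Self { command: Command::new(program), should_cache: true }
    }

    fn arg(&mut self, arg: &str) -> &mut Self {
        self.command.arg(arg);
        self
    }

    /// Streamed output can never be served from a cache, so `stream` owns the
    /// decision to disable caching instead of leaving it to the caller.
    fn stream(mut self) -> std::io::Result<StreamingCommand> {
        if self.should_cache {
            // A real implementation could also warn or panic here with a clear
            // message, as the review comment suggests; this mock just opts out.
            self.should_cache = false;
        }
        self.command.stdout(Stdio::piped());
        let mut child = self.command.spawn()?;
        let stdout = child.stdout.take();
        Ok(StreamingCommand { child, stdout })
    }
}

impl StreamingCommand {
    fn wait(mut self) -> std::io::Result<ExitStatus> {
        self.child.wait()
    }
}

fn main() -> std::io::Result<()> {
    let mut cmd = BootstrapCommand::new("cargo");
    cmd.arg("--version");
    // The command is consumed directly; there is no deferred state to unwrap.
    let mut streaming = cmd.stream()?;
    for line in BufReader::new(streaming.stdout.take().unwrap()).lines() {
        println!("{}", line?);
    }
    println!("exit status: {}", streaming.wait()?);
    Ok(())
}
```

In this shape the incompatibility between streaming and caching is resolved where the command is configured, instead of surfacing later as a missing child process inside DeferredCommand::stream.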
src/bootstrap/src/utils/render_tests.rs (12 changes: 6 additions & 6 deletions)
@@ -7,7 +7,7 @@
//! to reimplement all the rendering logic in this module because of that.

use std::io::{BufRead, BufReader, Read, Write};
use std::process::{ChildStdout, Stdio};
use std::process::ChildStdout;
use std::time::Duration;

use termcolor::{Color, ColorSpec, WriteColor};
@@ -52,23 +52,23 @@ pub(crate) fn try_run_tests(
}

fn run_tests(builder: &Builder<'_>, cmd: &mut BootstrapCommand, stream: bool) -> bool {
let cmd = cmd.as_command_mut();
cmd.stdout(Stdio::piped());
let cmd = cmd.do_not_cache();

builder.verbose(|| println!("running: {cmd:?}"));

let mut process = cmd.spawn().unwrap();
let mut streaming_command =
cmd.start_capture_stdout(&builder.config.exec_ctx).stream().unwrap();

// This runs until the stdout of the child is closed, which means the child exited. We don't
// run this on another thread since the builder is not Sync.
let renderer = Renderer::new(process.stdout.take().unwrap(), builder);
let renderer = Renderer::new(streaming_command.stdout.take().unwrap(), builder);
if stream {
renderer.stream_all();
} else {
renderer.render_all();
}

let result = process.wait_with_output().unwrap();
let result = streaming_command.wait_with_output().unwrap();
if !result.status.success() && builder.is_verbose() {
println!(
"\n\ncommand did not execute successfully: {cmd:?}\n\