1,546 changes: 879 additions & 667 deletions .github/workflows/openvmm-ci.yaml

Large diffs are not rendered by default.

1,546 changes: 879 additions & 667 deletions .github/workflows/openvmm-pr-release.yaml

Large diffs are not rendered by default.

1,579 changes: 896 additions & 683 deletions .github/workflows/openvmm-pr.yaml

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions Cargo.lock
@@ -1950,6 +1950,7 @@ dependencies = [
"igvmfilegen_config",
"log",
"powershell_builder",
"quick-xml",
"serde",
"serde_json",
"target-lexicon",
@@ -6007,6 +6008,15 @@ dependencies = [
"syn 2.0.106",
]

[[package]]
name = "quick-xml"
version = "0.38.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89"
dependencies = [
"memchr",
]

[[package]]
name = "quote"
version = "1.0.40"
1 change: 1 addition & 0 deletions Cargo.toml
@@ -496,6 +496,7 @@ proc-macro2 = "1.0"
prost = "0.11"
prost-build = "0.11"
prost-types = "0.11"
quick-xml = "0.38.3"
quote = "1.0"
range_map_vec = "0.2.0"
rayon = "1.5"
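The hunks above only register the new `quick-xml` dependency; the code that exercises it is in parts of the PR not rendered here. As background, a minimal sketch of quick-xml's pull-based reader API (the XML sample and element handling are illustrative, not from this PR):

```rust
use quick_xml::Reader;
use quick_xml::events::Event;

fn main() -> Result<(), quick_xml::Error> {
    // Hypothetical input; the PR only adds the dependency, so the actual
    // XML being parsed is not shown in the rendered hunks.
    let xml = r#"<suite name="example"><case name="one"/></suite>"#;
    let mut reader = Reader::from_str(xml);

    // Pull events one at a time; events borrow from the input string.
    loop {
        match reader.read_event()? {
            Event::Start(e) | Event::Empty(e) => {
                println!("element: {}", String::from_utf8_lossy(e.name().as_ref()));
            }
            Event::Eof => break,
            _ => {}
        }
    }
    Ok(())
}
```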
23 changes: 17 additions & 6 deletions flowey/flowey_cli/src/pipeline_resolver/ado_yaml.rs
@@ -6,7 +6,6 @@ use super::common_yaml::FloweySource;
use super::common_yaml::check_generated_yaml_and_json;
use super::common_yaml::job_flowey_bootstrap_source;
use super::common_yaml::write_generated_yaml_and_json;
use super::generic::ResolvedJobArtifact;
use super::generic::ResolvedJobUseParameter;
use crate::cli::exec_snippet::FloweyPipelineStaticDb;
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_WORKING_DIR;
@@ -16,6 +15,8 @@ use crate::flow_resolver::stage1_dag::OutputGraphEntry;
use crate::flow_resolver::stage1_dag::Step;
use crate::pipeline_resolver::generic::ResolvedPipeline;
use crate::pipeline_resolver::generic::ResolvedPipelineJob;
use crate::pipeline_resolver::generic::ResolvedPublishedArtifact;
use crate::pipeline_resolver::generic::ResolvedUsedArtifact;
use anyhow::Context;
use flowey_core::node::FlowArch;
use flowey_core::node::FlowBackend;
@@ -190,7 +191,7 @@ pub fn ado_yaml(
}

// also download any artifacts that'll be used
for ResolvedJobArtifact {
for ResolvedUsedArtifact {
flowey_var: _,
name,
} in artifacts_used
@@ -310,7 +311,10 @@ EOF

// next, emit ado steps to create dirs for artifacts which will be
// published
for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
for ResolvedPublishedArtifact {
flowey_var, name, ..
} in artifacts_published
{
writeln!(
flowey_bootstrap_bash,
r#"mkdir -p "$(AgentTempDirNormal)/publish_artifacts/{name}""#
@@ -324,7 +328,7 @@

// lastly, emit ado steps that report the dirs for any artifacts which
// are used by this job
for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
// do NOT use ADO macro syntax $(...), since this is in the same
// bootstrap block as where those ADO vars get defined, meaning it's
// not available yet!
@@ -422,20 +426,27 @@ EOF

// ..and once that's done, the last order of business is to emit some
// ado steps to publish the various artifacts created by this job
for ResolvedJobArtifact {
for ResolvedPublishedArtifact {
flowey_var: _,
name,
force_upload,
} in artifacts_published
{
ado_steps.push({
let map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
let mut map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
r#"
publish: $(FLOWEY_TEMP_DIR)/publish_artifacts/{name}
displayName: '🌼📦 Publish {name}'
artifact: {name}
"#
))
.unwrap();
if *force_upload {
map.insert(
"condition".into(),
serde_yaml::Value::String("always()".into()),
);
}
map.into()
});
}
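Net effect of the ADO change: when `force_upload` is set, the generated publish step gains `condition: always()`, so ADO runs it even after an earlier step in the job fails. A standalone sketch of the mapping-mutation pattern used above (step contents abbreviated):

```rust
use serde_yaml::{Mapping, Value};

fn main() {
    // Parse the step template into a mutable mapping, as the resolver does.
    let mut map: Mapping = serde_yaml::from_str(
        "publish: $(FLOWEY_TEMP_DIR)/publish_artifacts/logs\nartifact: logs\n",
    )
    .unwrap();

    // For force-published artifacts, run the step unconditionally.
    let force_upload = true;
    if force_upload {
        map.insert("condition".into(), Value::String("always()".into()));
    }

    // The finished mapping becomes one YAML step in the pipeline.
    println!("{}", serde_yaml::to_string(&Value::Mapping(map)).unwrap());
}
```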
10 changes: 7 additions & 3 deletions flowey/flowey_cli/src/pipeline_resolver/direct_run.rs
@@ -4,10 +4,11 @@
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_PERSISTENT_STORAGE_DIR;
use crate::flow_resolver::stage1_dag::OutputGraphEntry;
use crate::flow_resolver::stage1_dag::Step;
use crate::pipeline_resolver::generic::ResolvedJobArtifact;
use crate::pipeline_resolver::generic::ResolvedJobUseParameter;
use crate::pipeline_resolver::generic::ResolvedPipeline;
use crate::pipeline_resolver::generic::ResolvedPipelineJob;
use crate::pipeline_resolver::generic::ResolvedPublishedArtifact;
use crate::pipeline_resolver::generic::ResolvedUsedArtifact;
use flowey_core::node::FlowArch;
use flowey_core::node::FlowBackend;
use flowey_core::node::FlowPlatform;
@@ -275,7 +276,10 @@ fn direct_run_do_work(
serde_json::to_string(&persist_dir).unwrap().into(),
);

for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
for ResolvedPublishedArtifact {
flowey_var, name, ..
} in artifacts_published
{
let path = out_dir.join("artifacts").join(name);
fs_err::create_dir_all(&path)?;

@@ -291,7 +295,7 @@
}
fs_err::create_dir_all(out_dir.join(".job_artifacts"))?;

for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
let path = out_dir.join(".job_artifacts").join(name);
fs_err::create_dir_all(&path)?;
copy_dir_all(out_dir.join("artifacts").join(name), &path)?;
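For local runs, `force_upload` is ignored (note the `..` in the destructuring): artifacts are just directories under the out dir, and used artifacts are wired up by copying the published directory into `.job_artifacts`. The crate's `copy_dir_all` helper isn't part of this diff; a conventional recursive copy matching that call shape looks like:

```rust
use std::fs;
use std::io;
use std::path::Path;

// Sketch of a recursive directory copy with the same signature as the
// `copy_dir_all` call above; the real helper lives elsewhere in flowey_cli.
fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
    fs::create_dir_all(&dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let dest = dst.as_ref().join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_all(entry.path(), dest)?;
        } else {
            fs::copy(entry.path(), dest)?;
        }
    }
    Ok(())
}
```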
32 changes: 23 additions & 9 deletions flowey/flowey_cli/src/pipeline_resolver/generic.rs
@@ -47,7 +47,14 @@ pub struct ResolvedPipeline {
}

#[derive(Debug, Clone)]
pub struct ResolvedJobArtifact {
pub struct ResolvedPublishedArtifact {
pub flowey_var: String,
pub name: String,
pub force_upload: bool,
}

#[derive(Debug, Clone)]
pub struct ResolvedUsedArtifact {
pub flowey_var: String,
pub name: String,
}
@@ -76,9 +83,9 @@ pub struct ResolvedPipelineJob {

pub parameters_used: Vec<ResolvedJobUseParameter>,
// correspond to injected download nodes at the start of the job
pub artifacts_used: Vec<ResolvedJobArtifact>,
pub artifacts_used: Vec<ResolvedUsedArtifact>,
// correspond to injected publish nodes at the end of the job
pub artifacts_published: Vec<ResolvedJobArtifact>,
pub artifacts_published: Vec<ResolvedPublishedArtifact>,
}

pub fn resolve_pipeline(pipeline: Pipeline) -> anyhow::Result<ResolvedPipeline> {
@@ -112,6 +119,7 @@ pub fn resolve_pipeline(pipeline: Pipeline) -> anyhow::Result<ResolvedPipeline>
name,
published_by_job,
used_by_jobs,
force_published: _,
} in &artifacts
{
let no_existing = m
@@ -183,16 +191,21 @@

let artifacts_published: Vec<_> = artifacts_published
.into_iter()
.map(|a| ResolvedJobArtifact {
flowey_var: flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
&a, false,
),
name: a,
.map(|a| {
let artifact_meta = artifacts.iter().find(|meta| meta.name == a).unwrap();
ResolvedPublishedArtifact {
flowey_var:
flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
&a, false,
),
name: a,
force_upload: artifact_meta.force_published,
}
})
.collect();
let artifacts_used: Vec<_> = artifacts_used
.into_iter()
.map(|a| ResolvedJobArtifact {
.map(|a| ResolvedUsedArtifact {
flowey_var: flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
&a, true,
),
@@ -245,6 +258,7 @@
name: _,
published_by_job,
used_by_jobs,
force_published: _,
} in artifacts
{
let published_idx = job_graph_idx[published_by_job.expect("checked in loop above")];
18 changes: 13 additions & 5 deletions flowey/flowey_cli/src/pipeline_resolver/github_yaml/mod.rs
@@ -6,8 +6,9 @@
use super::common_yaml::BashCommands;
use super::common_yaml::check_generated_yaml_and_json;
use super::common_yaml::write_generated_yaml_and_json;
use super::generic::ResolvedJobArtifact;
use super::generic::ResolvedJobUseParameter;
use super::generic::ResolvedPublishedArtifact;
use super::generic::ResolvedUsedArtifact;
use crate::cli::exec_snippet::FloweyPipelineStaticDb;
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_WORKING_DIR;
use crate::cli::pipeline::CheckMode;
@@ -368,7 +369,10 @@ EOF

// next, emit GitHub steps to create dirs for artifacts which will be
// published
for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
for ResolvedPublishedArtifact {
flowey_var, name, ..
} in artifacts_published
{
writeln!(
flowey_bootstrap_bash,
r#"mkdir -p "$AgentTempDirNormal/publish_artifacts/{name}""#
@@ -392,7 +396,7 @@

// lastly, emit GitHub steps that report the dirs for any artifacts which
// are used by this job
for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
let var_db_inject_cmd = bootstrap_bash_var_db_inject(flowey_var, true);
match platform.kind() {
FlowPlatformKind::Windows => {
@@ -427,13 +431,14 @@ EOF

// ..and once that's done, the last order of business is to emit some
// GitHub steps to publish the various artifacts created by this job
for ResolvedJobArtifact {
for ResolvedPublishedArtifact {
flowey_var: _,
name,
force_upload,
} in artifacts_published
{
gh_steps.push({
let map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
let mut map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
r#"
name: 🌼📦 Publish {name}
uses: actions/upload-artifact@v4
Expand All @@ -444,6 +449,9 @@ EOF
"#
))
.unwrap();
if *force_upload {
map.insert("if".into(), serde_yaml::Value::String("always()".into()));
}
map.into()
});
}
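The GitHub backend mirrors the ADO change; the only difference is the per-step conditional key, `if` rather than `condition`. A minimal check of the rendered output (step fields abbreviated from the template above):

```rust
use serde_yaml::{Mapping, Value};

fn main() {
    let mut map: Mapping = serde_yaml::from_str(
        "name: publish logs\nuses: actions/upload-artifact@v4\n",
    )
    .unwrap();

    // GitHub Actions uses `if:` for step-level conditions.
    map.insert("if".into(), Value::String("always()".into()));

    let rendered = serde_yaml::to_string(&Value::Mapping(map)).unwrap();
    assert!(rendered.contains("always()"));
    println!("{rendered}");
}
```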
26 changes: 26 additions & 0 deletions flowey/flowey_core/src/pipeline.rs
@@ -641,6 +641,7 @@
name: owned_name,
published_by_job: None,
used_by_jobs: BTreeSet::new(),
force_published: false,
});

(PublishArtifact { idx }, UseArtifact { idx })
@@ -660,6 +661,30 @@
)
}

/// Mark an artifact to be force published, meaning it will be published
/// even if the job fails.
///
/// This is useful for artifacts that contain diagnostic information or logs
/// that are needed to debug failures.
#[track_caller]
pub fn force_publish_artifact(&mut self, artifact: &PublishArtifact) -> &mut Self {
self.artifacts[artifact.idx].force_published = true;
self
}

/// Mark a typed artifact to be force published, meaning it will be published
/// even if the job fails.
///
/// This is useful for artifacts that contain diagnostic information or logs
/// that are needed to debug failures.
#[track_caller]
pub fn force_publish_typed_artifact<T: Artifact>(
&mut self,
artifact: &PublishTypedArtifact<T>,
) -> &mut Self {
self.force_publish_artifact(&artifact.0)
}

/// (ADO only) Set the pipeline-level name.
///
/// <https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number?view=azure-devops&tabs=yaml>
@@ -1332,6 +1357,7 @@ pub mod internal {
pub name: String,
pub published_by_job: Option<usize>,
pub used_by_jobs: BTreeSet<usize>,
pub force_published: bool,
}

#[derive(Debug)]
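Taken together, the new API is exercised at pipeline-definition time. A sketch of a call site (the artifact name and wiring are illustrative; `new_artifact` returns the publish/use handle pair built in the hunk above):

```rust
use flowey_core::pipeline::Pipeline;

// Illustrative only: force-publish the log artifact so the upload step
// runs even when the job fails (`condition: always()` on ADO,
// `if: always()` on GitHub).
fn define_pipeline(pipeline: &mut Pipeline) {
    let (publish_logs, _use_logs) = pipeline.new_artifact("test-logs");
    pipeline.force_publish_artifact(&publish_logs);
}
```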