flowey_hvlite/pipelines/build_docs.rs

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

//! See [`BuildDocsCli`]

use flowey::node::prelude::FlowPlatformLinuxDistro;
use flowey::node::prelude::GhPermission;
use flowey::node::prelude::GhPermissionValue;
use flowey::node::prelude::ReadVar;
use flowey::pipeline::prelude::*;
use flowey_lib_common::git_checkout::RepoSource;
use flowey_lib_hvlite::run_cargo_build::common::CommonTriple;

#[derive(Copy, Clone, clap::ValueEnum)]
enum PipelineConfig {
    /// Run on all PRs targeting the OpenVMM `main` branch.
    Pr,
    /// Run on all commits that land in OpenVMM's `main` branch.
    ///
    /// The CI pipeline also publishes the guide to openvmm.dev.
    Ci,
}

/// A pipeline defining documentation CI and PR jobs.
#[derive(clap::Args)]
pub struct BuildDocsCli {
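    /// Which pipeline flavor to generate: PR or CI.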
    #[clap(long)]
    config: PipelineConfig,

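    /// Extra parameters available when running the pipeline locally.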
    #[clap(flatten)]
    local_run_args: Option<crate::pipelines_shared::cfg_common_params::LocalRunArgs>,
}

impl IntoPipeline for BuildDocsCli {
    fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result<Pipeline> {
        let Self {
            config,
            local_run_args,
        } = self;

        let mut pipeline = Pipeline::new();

        // The docs pipeline should only run on the main branch.
        {
            let branches = vec!["main".into()];
            match config {
                PipelineConfig::Ci => {
                    pipeline
                        .gh_set_ci_triggers(GhCiTriggers {
                            branches,
                            ..Default::default()
                        })
                        .gh_set_name("[flowey] OpenVMM Docs CI");
                }
                PipelineConfig::Pr => {
                    pipeline
                        .gh_set_pr_triggers(GhPrTriggers {
                            branches,
                            ..GhPrTriggers::new_draftable()
                        })
                        .gh_set_name("[flowey] OpenVMM Docs PR");
                }
            }
        }

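        // Resolve where the OpenVMM repo comes from: reuse the existing local
        // clone when running locally, or check out the triggering repo itself
        // when running on GitHub.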
        let openvmm_repo_source = {
            if matches!(backend_hint, PipelineBackendHint::Local) {
                RepoSource::ExistingClone(ReadVar::from_static(crate::repo_root()))
            } else if matches!(backend_hint, PipelineBackendHint::Github) {
                RepoSource::GithubSelf
            } else {
                anyhow::bail!(
                    "Unsupported backend: Docs Pipeline only supports Local and GitHub backends"
                );
            }
        };

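        // When running on GitHub, register the shared flowey bootstrap template
        // so the generated workflow can set up flowey before running any jobs.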
        if let RepoSource::GithubSelf = &openvmm_repo_source {
            pipeline.gh_set_flowey_bootstrap_template(
                crate::pipelines_shared::gh_flowey_bootstrap_template::get_template(),
            );
        }

        let cfg_common_params = crate::pipelines_shared::cfg_common_params::get_cfg_common_params(
            &mut pipeline,
            backend_hint,
            local_run_args,
        )?;

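        // Wire shared dependencies into every job in the pipeline: common
        // configuration, version info, the repo source, and the GitHub
        // permissions required for checkout and Azure login.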
        pipeline.inject_all_jobs_with(move |job| {
            job.dep_on(&cfg_common_params)
                .dep_on(|_| flowey_lib_hvlite::_jobs::cfg_versions::Request {})
                .dep_on(
                    |_| flowey_lib_hvlite::_jobs::cfg_hvlite_reposource::Params {
                        hvlite_repo_source: openvmm_repo_source.clone(),
                    },
                )
                .gh_grant_permissions::<flowey_lib_common::git_checkout::Node>([(
                    GhPermission::Contents,
                    GhPermissionValue::Read,
                )])
                .gh_grant_permissions::<flowey_lib_common::gh_task_azure_login::Node>([(
                    GhPermission::IdToken,
                    GhPermissionValue::Write,
                )])
        });

        // We need to maintain a list of all jobs so that we can hang the "all
        // good" job off of them. This is required because GitHub status checks
        // only allow specifying jobs, not workflows.
        // There's more info in the following discussion:
        // <https://github.com/orgs/community/discussions/12395>
        let mut all_jobs = Vec::new();

        // emit mdbook guide build job
        let (pub_guide, use_guide) = pipeline.new_typed_artifact("guide");
        let job = pipeline
            .new_job(
                FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                FlowArch::X86_64,
                "build mdbook guide",
            )
            .gh_set_pool(crate::pipelines_shared::gh_pools::default_gh_hosted(
                FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
            ))
            .dep_on(|ctx| flowey_lib_hvlite::build_guide::Request {
                built_guide: ctx.publish_typed_artifact(pub_guide),
            })
            .finish();

        all_jobs.push(job);

        // emit rustdoc jobs
        let (pub_rustdoc_linux, use_rustdoc_linux) =
            pipeline.new_typed_artifact("x64-linux-rustdoc");
        let (pub_rustdoc_win, use_rustdoc_win) = pipeline.new_typed_artifact("x64-windows-rustdoc");
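        // One rustdoc job per target: x64 Windows (MSVC) and x64 Linux (GNU).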
        for (target, platform, pub_rustdoc) in [
            (
                CommonTriple::X86_64_WINDOWS_MSVC,
                FlowPlatform::Windows,
                pub_rustdoc_win,
            ),
            (
                CommonTriple::X86_64_LINUX_GNU,
                FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                pub_rustdoc_linux,
            ),
        ] {
            let job = pipeline
                .new_job(
                    platform,
                    FlowArch::X86_64,
                    format!("build and check docs [x64-{platform}]"),
                )
                .gh_set_pool(crate::pipelines_shared::gh_pools::default_gh_hosted(
                    platform,
                ))
                .dep_on(|ctx| flowey_lib_hvlite::build_rustdoc::Request {
                    target_triple: target.as_triple(),
                    docs: ctx.publish_typed_artifact(pub_rustdoc),
                })
                .finish();

            all_jobs.push(job);
        }

        // emit consolidated gh pages publish job
        if matches!(config, PipelineConfig::Ci) {
            let pub_artifact = if matches!(backend_hint, PipelineBackendHint::Local) {
                let (publish, _use) = pipeline.new_typed_artifact("gh-pages");
                Some(publish)
            } else {
                None
            };

            let job = pipeline
                .new_job(
                    FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                    FlowArch::X86_64,
                    "publish openvmm.dev",
                )
                .gh_set_pool(crate::pipelines_shared::gh_pools::default_gh_hosted(
                    FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                ))
                .dep_on(
                    |ctx| flowey_lib_hvlite::_jobs::consolidate_and_publish_gh_pages::Params {
                        rustdoc_linux: ctx.use_typed_artifact(&use_rustdoc_linux),
                        rustdoc_windows: ctx.use_typed_artifact(&use_rustdoc_win),
                        guide: ctx.use_typed_artifact(&use_guide),
                        output: if let Some(pub_artifact) = pub_artifact {
                            ctx.publish_typed_artifact(pub_artifact)
                        } else {
                            ctx.new_done_handle().discard_result()
                        },
                    },
                )
                .gh_grant_permissions::<flowey_lib_hvlite::_jobs::consolidate_and_publish_gh_pages::Node>([
                    (GhPermission::IdToken, GhPermissionValue::Write),
                    (GhPermission::Pages, GhPermissionValue::Write),
                ])
                .finish();

            all_jobs.push(job);
        }

        if matches!(config, PipelineConfig::Pr) {
            // Add a job that depends on all others as a workaround for
            // https://github.com/orgs/community/discussions/12395.
            //
            // This workaround in turn requires _another_ workaround: using
            // `gh_dangerous_override_if` and some additional custom job logic
            // to deal with https://github.com/actions/runner/issues/2566.
            //
            // TODO: Add a way for this job to skip flowey setup and become a true
            // no-op.
            let all_good_job = pipeline
                .new_job(
                    FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                    FlowArch::X86_64,
                    "openvmm build docs gates",
                )
                .gh_set_pool(crate::pipelines_shared::gh_pools::default_gh_hosted(
                    FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
                ))
                // always run this job, regardless of whether any previous jobs failed
                .gh_dangerous_override_if("always() && github.event.pull_request.draft == false")
                .gh_dangerous_global_env_var("ANY_JOBS_FAILED", "${{ contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'failure') }}")
                .dep_on(|ctx| flowey_lib_hvlite::_jobs::all_good_job::Params {
                    did_fail_env_var: "ANY_JOBS_FAILED".into(),
                    done: ctx.new_done_handle(),
                })
                .finish();

            for job in all_jobs.iter() {
                pipeline.non_artifact_dep(&all_good_job, job);
            }
        }

        Ok(pipeline)
    }
}