flowey_core/pipeline.rs
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

//! Core types and traits used to create and work with flowey pipelines.

mod artifact;

pub use artifact::Artifact;

use self::internal::*;
use crate::node::FlowArch;
use crate::node::FlowNodeBase;
use crate::node::FlowPlatform;
use crate::node::FlowPlatformLinuxDistro;
use crate::node::GhUserSecretVar;
use crate::node::IntoRequest;
use crate::node::NodeHandle;
use crate::node::ReadVar;
use crate::node::WriteVar;
use crate::node::steps::ado::AdoResourcesRepositoryId;
use crate::node::user_facing::AdoRuntimeVar;
use crate::node::user_facing::GhPermission;
use crate::node::user_facing::GhPermissionValue;
use crate::patch::PatchResolver;
use crate::patch::ResolvedPatches;
use serde::Serialize;
use serde::de::DeserializeOwned;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::PathBuf;

/// Pipeline types which are considered "user facing", and included in the
/// `flowey` prelude.
pub mod user_facing {
    pub use super::AdoCiTriggers;
    pub use super::AdoPrTriggers;
    pub use super::AdoResourcesRepository;
    pub use super::AdoResourcesRepositoryRef;
    pub use super::AdoResourcesRepositoryType;
    pub use super::AdoScheduleTriggers;
    pub use super::GhCiTriggers;
    pub use super::GhPrTriggers;
    pub use super::GhRunner;
    pub use super::GhRunnerOsLabel;
    pub use super::GhScheduleTriggers;
    pub use super::HostExt;
    pub use super::IntoPipeline;
    pub use super::ParameterKind;
    pub use super::Pipeline;
    pub use super::PipelineBackendHint;
    pub use super::PipelineJob;
    pub use super::PipelineJobCtx;
    pub use super::PipelineJobHandle;
    pub use super::PublishArtifact;
    pub use super::PublishTypedArtifact;
    pub use super::UseArtifact;
    pub use super::UseParameter;
    pub use super::UseTypedArtifact;
    pub use crate::node::FlowArch;
    pub use crate::node::FlowPlatform;
}

fn linux_distro() -> FlowPlatformLinuxDistro {
    if let Ok(etc_os_release) = fs_err::read_to_string("/etc/os-release") {
        if etc_os_release.contains("ID=ubuntu") {
            FlowPlatformLinuxDistro::Ubuntu
        } else if etc_os_release.contains("ID=fedora") {
            FlowPlatformLinuxDistro::Fedora
        } else {
            FlowPlatformLinuxDistro::Unknown
        }
    } else {
        FlowPlatformLinuxDistro::Unknown
    }
}

pub trait HostExt: Sized {
    /// Return the value for the current host machine.
    ///
    /// Will panic on non-local backends.
    fn host(backend_hint: PipelineBackendHint) -> Self;
}

impl HostExt for FlowPlatform {
    /// Return the platform of the current host machine.
    ///
    /// Will panic on non-local backends.
    fn host(backend_hint: PipelineBackendHint) -> Self {
        if !matches!(backend_hint, PipelineBackendHint::Local) {
            panic!("can only use `FlowPlatform::host` when defining a local-only pipeline");
        }

        if cfg!(target_os = "windows") {
            Self::Windows
        } else if cfg!(target_os = "linux") {
            Self::Linux(linux_distro())
        } else if cfg!(target_os = "macos") {
            Self::MacOs
        } else {
            panic!("no valid host-os")
        }
    }
}

impl HostExt for FlowArch {
    /// Return the arch of the current host machine.
    ///
    /// Will panic on non-local backends.
    fn host(backend_hint: PipelineBackendHint) -> Self {
        if !matches!(backend_hint, PipelineBackendHint::Local) {
            panic!("can only use `FlowArch::host` when defining a local-only pipeline");
        }

        // xtask-fmt allow-target-arch oneoff-flowey
        if cfg!(target_arch = "x86_64") {
            Self::X86_64
        // xtask-fmt allow-target-arch oneoff-flowey
        } else if cfg!(target_arch = "aarch64") {
            Self::Aarch64
        } else {
            panic!("no valid host-arch")
        }
    }
}

/// Trigger ADO pipelines on a schedule.
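///
/// A minimal usage sketch (branch and cron values are illustrative):
///
/// ```ignore
/// pipeline.ado_add_schedule_trigger(AdoScheduleTriggers {
///     display_name: "Nightly run".into(),
///     branches: vec!["main".into()],
///     exclude_branches: Vec::new(),
///     cron: "0 8 * * *".into(), // every day at 08:00 UTC
/// });
/// ```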
#[derive(Default, Debug)]
pub struct AdoScheduleTriggers {
    /// Friendly name for the scheduled run
    pub display_name: String,
    /// Run the pipeline whenever there is a commit on these specified branches
    /// (supports glob syntax)
    pub branches: Vec<String>,
    /// Specify any branches which should be filtered out from the list of
    /// `branches` (supports glob syntax)
    pub exclude_branches: Vec<String>,
    /// Run the pipeline on a schedule, as specified by a cron string
    pub cron: String,
}

/// Trigger ADO pipelines per PR
#[derive(Debug)]
pub struct AdoPrTriggers {
    /// Run the pipeline whenever there is a PR to these specified branches
    /// (supports glob syntax)
    pub branches: Vec<String>,
    /// Specify any branches which should be filtered out from the list of
    /// `branches` (supports glob syntax)
    pub exclude_branches: Vec<String>,
    /// Run the pipeline even if the PR is a draft PR. Defaults to `false`.
    pub run_on_draft: bool,
    /// Automatically cancel the pipeline run if a new commit lands in the
    /// branch. Defaults to `true`.
    pub auto_cancel: bool,
}

/// Trigger ADO pipelines via Continuous Integration
#[derive(Debug, Default)]
pub struct AdoCiTriggers {
    /// Run the pipeline whenever there is a commit on these specified
    /// branches (supports glob syntax)
    pub branches: Vec<String>,
    /// Specify any branches which should be filtered out from the list of
    /// `branches` (supports glob syntax)
    pub exclude_branches: Vec<String>,
    /// Whether to batch changes per branch.
    pub batch: bool,
}

impl Default for AdoPrTriggers {
    fn default() -> Self {
        Self {
            branches: Vec::new(),
            exclude_branches: Vec::new(),
            run_on_draft: false,
            auto_cancel: true,
        }
    }
}

/// ADO repository resource.
#[derive(Debug)]
pub struct AdoResourcesRepository {
    /// Type of repo that is being connected to.
    pub repo_type: AdoResourcesRepositoryType,
    /// Repository name. Format depends on `repo_type`.
    pub name: String,
    /// git ref to checkout.
    pub git_ref: AdoResourcesRepositoryRef,
    /// (optional) ID of the service endpoint connecting to this repository.
    pub endpoint: Option<String>,
}
/// ADO repository resource type
#[derive(Debug)]
pub enum AdoResourcesRepositoryType {
    /// Azure Repos Git repository
    AzureReposGit,
    /// GitHub repository
    GitHub,
}

/// ADO repository ref
#[derive(Debug)]
pub enum AdoResourcesRepositoryRef<P = UseParameter<String>> {
    /// Hard-coded ref (e.g: refs/heads/main)
    Fixed(String),
    /// Connected to pipeline-level parameter
    Parameter(P),
}

/// Trigger GitHub Actions pipelines on a schedule.
///
/// NOTE: GitHub Actions doesn't support specifying the branch when triggered by `schedule`.
/// To run on a specific branch, modify the branch checked out in the pipeline.
#[derive(Default, Debug)]
pub struct GhScheduleTriggers {
    /// Run the pipeline on a schedule, as specified by a cron string
    pub cron: String,
}

/// Trigger GitHub Actions pipelines per PR
#[derive(Debug)]
pub struct GhPrTriggers {
    /// Run the pipeline whenever there is a PR to these specified branches
    /// (supports glob syntax)
    pub branches: Vec<String>,
    /// Specify any branches which should be filtered out from the list of
    /// `branches` (supports glob syntax)
    pub exclude_branches: Vec<String>,
    /// Automatically cancel the pipeline run if a new commit lands in the
    /// branch. Defaults to `true`.
    pub auto_cancel: bool,
    /// Run the pipeline whenever the PR trigger matches the specified types
    pub types: Vec<String>,
}

/// Trigger GitHub Actions pipelines via Continuous Integration
#[derive(Debug, Default)]
pub struct GhCiTriggers {
    /// Run the pipeline whenever there is a commit on these specified
    /// branches (supports glob syntax)
    pub branches: Vec<String>,
    /// Specify any branches which should be filtered out from the list of
    /// `branches` (supports glob syntax)
    pub exclude_branches: Vec<String>,
    /// Run the pipeline whenever one of these specified tags is pushed
    /// (supports glob syntax)
    pub tags: Vec<String>,
    /// Specify any tags which should be filtered out from the list of `tags`
    /// (supports glob syntax)
    pub exclude_tags: Vec<String>,
}

impl GhPrTriggers {
    /// Triggers the pipeline on the default PR events, plus when a draft is
    /// marked as ready for review.
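    ///
    /// A usage sketch (assumes a `pipeline: Pipeline` is in scope):
    ///
    /// ```ignore
    /// pipeline.gh_set_pr_triggers(GhPrTriggers {
    ///     branches: vec!["main".into()],
    ///     ..GhPrTriggers::new_draftable()
    /// });
    /// ```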
    pub fn new_draftable() -> Self {
        Self {
            branches: Vec::new(),
            exclude_branches: Vec::new(),
            types: vec![
                "opened".into(),
                "synchronize".into(),
                "reopened".into(),
                "ready_for_review".into(),
            ],
            auto_cancel: true,
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub enum GhRunnerOsLabel {
    UbuntuLatest,
    Ubuntu2204,
    Ubuntu2004,
    WindowsLatest,
    Windows2022,
    Windows2019,
    MacOsLatest,
    MacOs14,
    MacOs13,
    MacOs12,
    MacOs11,
    Custom(String),
}

/// GitHub runner type
#[derive(Debug, Clone, PartialEq)]
pub enum GhRunner {
    /// GitHub-hosted runner, selected by OS label.
    ///
    /// See <https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#choosing-github-hosted-runners>
    /// for more details.
    GhHosted(GhRunnerOsLabel),
    /// Self-hosted runners are selected by matching runner labels to `labels`.
    /// 'self-hosted' is a common label for self-hosted runners, but is not
    /// required. Labels are case-insensitive and can take the form of
    /// arbitrary strings.
    ///
    /// See <https://docs.github.com/en/actions/hosting-your-own-runners> for more details.
    SelfHosted(Vec<String>),
    /// Uses a runner belonging to `group` that matches all labels in `labels`.
    ///
    /// See <https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#choosing-github-hosted-runners>
    /// for more details.
    RunnerGroup { group: String, labels: Vec<String> },
}

impl GhRunner {
    /// Whether this is a self-hosted runner with the provided label
    pub fn is_self_hosted_with_label(&self, label: &str) -> bool {
        matches!(self, GhRunner::SelfHosted(labels) if labels.iter().any(|s| s.as_str() == label))
    }
}

/// Parameter type (unstable / stable).
#[derive(Debug, Clone)]
pub enum ParameterKind {
    /// The parameter is considered an unstable API, and should not be
    /// taken as a dependency.
    Unstable,
    /// The parameter is considered a stable API, and can be used by
    /// external pipelines to control behavior of the pipeline.
    Stable,
}

#[derive(Clone, Debug)]
#[must_use]
pub struct UseParameter<T> {
    idx: usize,
    _kind: std::marker::PhantomData<T>,
}

/// Opaque handle to an artifact which must be published by a single job.
#[must_use]
pub struct PublishArtifact {
    idx: usize,
}

/// Opaque handle to an artifact which can be used by one or more jobs.
#[derive(Clone)]
#[must_use]
pub struct UseArtifact {
    idx: usize,
}

/// Opaque handle to an artifact of type `T` which must be published by a single job.
#[must_use]
pub struct PublishTypedArtifact<T>(PublishArtifact, std::marker::PhantomData<fn() -> T>);

/// Opaque handle to an artifact of type `T` which can be used by one or more
/// jobs.
#[must_use]
pub struct UseTypedArtifact<T>(UseArtifact, std::marker::PhantomData<fn(T)>);

impl<T> Clone for UseTypedArtifact<T> {
    fn clone(&self) -> Self {
        UseTypedArtifact(self.0.clone(), std::marker::PhantomData)
    }
}

#[derive(Default)]
pub struct Pipeline {
    jobs: Vec<PipelineJobMetadata>,
    artifacts: Vec<ArtifactMeta>,
    parameters: Vec<ParameterMeta>,
    extra_deps: BTreeSet<(usize, usize)>,
    // builder internal
    artifact_names: BTreeSet<String>,
    dummy_done_idx: usize,
    artifact_map_idx: usize,
    global_patchfns: Vec<crate::patch::PatchFn>,
    inject_all_jobs_with: Option<Box<dyn for<'a> Fn(PipelineJob<'a>) -> PipelineJob<'a>>>,
    // backend specific
    ado_name: Option<String>,
    ado_job_id_overrides: BTreeMap<usize, String>,
    ado_schedule_triggers: Vec<AdoScheduleTriggers>,
    ado_ci_triggers: Option<AdoCiTriggers>,
    ado_pr_triggers: Option<AdoPrTriggers>,
    ado_resources_repository: Vec<InternalAdoResourcesRepository>,
    ado_bootstrap_template: String,
    ado_variables: BTreeMap<String, String>,
    ado_post_process_yaml_cb: Option<Box<dyn FnOnce(serde_yaml::Value) -> serde_yaml::Value>>,
    gh_name: Option<String>,
    gh_schedule_triggers: Vec<GhScheduleTriggers>,
    gh_ci_triggers: Option<GhCiTriggers>,
    gh_pr_triggers: Option<GhPrTriggers>,
    gh_bootstrap_template: String,
}

impl Pipeline {
    pub fn new() -> Pipeline {
        Pipeline::default()
    }

    /// Inject all pipeline jobs with some common logic. (e.g: to resolve common
    /// configuration requirements shared by all jobs).
    ///
    /// Can only be invoked once per pipeline.
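    ///
    /// A sketch of injecting a common request into every job (the
    /// `common_cfg::Request` node request type is hypothetical):
    ///
    /// ```ignore
    /// pipeline.inject_all_jobs_with(|job| {
    ///     job.dep_on(|_ctx| common_cfg::Request { verbose: false })
    /// });
    /// ```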
    #[track_caller]
    pub fn inject_all_jobs_with(
        &mut self,
        cb: impl for<'a> Fn(PipelineJob<'a>) -> PipelineJob<'a> + 'static,
    ) -> &mut Self {
        if self.inject_all_jobs_with.is_some() {
            panic!("can only call inject_all_jobs_with once!")
        }
        self.inject_all_jobs_with = Some(Box::new(cb));
        self
    }

    /// (ADO only) Provide a YAML template used to bootstrap flowey at the start
    /// of an ADO pipeline.
    ///
    /// The template has access to the following vars, which will be statically
    /// interpolated into the template's text:
    ///
    /// - `{{FLOWEY_OUTDIR}}`
    ///     - Directory to copy artifacts into.
    ///     - NOTE: this var will include `\` on Windows, and `/` on Linux!
    /// - `{{FLOWEY_BIN_EXTENSION}}`
    ///     - Extension of the expected flowey bin (either "", or ".exe")
    /// - `{{FLOWEY_CRATE}}`
    ///     - Name of the project-specific flowey crate to be built
    /// - `{{FLOWEY_TARGET}}`
    ///     - The target-triple flowey is being built for
    /// - `{{FLOWEY_PIPELINE_PATH}}`
    ///     - Repo-root relative path to the pipeline (as provided when
    ///       generating the pipeline via the flowey CLI)
    ///
    /// The template's sole responsibility is to copy 3 files into the
    /// `{{FLOWEY_OUTDIR}}`:
    ///
    /// 1. The bootstrapped flowey binary, with the file name
    ///    `flowey{{FLOWEY_BIN_EXTENSION}}`
    /// 2. Two files called `pipeline.yaml` and `pipeline.json`, which are
    ///    copies of the pipeline YAML and pipeline JSON currently being run.
    ///    `{{FLOWEY_PIPELINE_PATH}}` is provided as a way to disambiguate in
    ///    cases where the same template is being used for multiple pipelines
    ///    (e.g: a debug vs. release pipeline).
    pub fn ado_set_flowey_bootstrap_template(&mut self, template: String) -> &mut Self {
        self.ado_bootstrap_template = template;
        self
    }

    /// (ADO only) Provide a callback function which will be used to
    /// post-process any YAML flowey generates for the pipeline.
    ///
    /// Until flowey defines a stable API for maintaining out-of-tree backends,
    /// this method can be used to integrate the output from the generic ADO
    /// backend with any organization-specific templates that one may be
    /// required to use (e.g: for compliance reasons).
    pub fn ado_post_process_yaml(
        &mut self,
        cb: impl FnOnce(serde_yaml::Value) -> serde_yaml::Value + 'static,
    ) -> &mut Self {
        self.ado_post_process_yaml_cb = Some(Box::new(cb));
        self
    }

    /// (ADO only) Add a new scheduled CI trigger. Can be called multiple times
    /// to set up multiple scheduled runs.
    pub fn ado_add_schedule_trigger(&mut self, triggers: AdoScheduleTriggers) -> &mut Self {
        self.ado_schedule_triggers.push(triggers);
        self
    }

    /// (ADO only) Set a PR trigger. Calling this method multiple times will
    /// overwrite any previously set triggers.
    pub fn ado_set_pr_triggers(&mut self, triggers: AdoPrTriggers) -> &mut Self {
        self.ado_pr_triggers = Some(triggers);
        self
    }

    /// (ADO only) Set a CI trigger. Calling this method multiple times will
    /// overwrite any previously set triggers.
    pub fn ado_set_ci_triggers(&mut self, triggers: AdoCiTriggers) -> &mut Self {
        self.ado_ci_triggers = Some(triggers);
        self
    }

    /// (ADO only) Declare a new repository resource, returning a type-safe
    /// handle which downstream ADO steps are able to consume via
    /// [`AdoStepServices::resolve_repository_id`](crate::node::user_facing::AdoStepServices::resolve_repository_id).
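    ///
    /// A minimal sketch declaring a GitHub-backed repo pinned to a fixed ref
    /// (names are illustrative):
    ///
    /// ```ignore
    /// let repo_id = pipeline.ado_add_resources_repository(AdoResourcesRepository {
    ///     repo_type: AdoResourcesRepositoryType::GitHub,
    ///     name: "contoso/widgets".into(),
    ///     git_ref: AdoResourcesRepositoryRef::Fixed("refs/heads/main".into()),
    ///     endpoint: Some("contoso-github-connection".into()),
    /// });
    /// ```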
    pub fn ado_add_resources_repository(
        &mut self,
        repo: AdoResourcesRepository,
    ) -> AdoResourcesRepositoryId {
        let AdoResourcesRepository {
            repo_type,
            name,
            git_ref,
            endpoint,
        } = repo;

        let repo_id = format!("repo{}", self.ado_resources_repository.len());

        self.ado_resources_repository
            .push(InternalAdoResourcesRepository {
                repo_id: repo_id.clone(),
                repo_type,
                name,
                git_ref: match git_ref {
                    AdoResourcesRepositoryRef::Fixed(s) => AdoResourcesRepositoryRef::Fixed(s),
                    AdoResourcesRepositoryRef::Parameter(p) => {
                        AdoResourcesRepositoryRef::Parameter(p.idx)
                    }
                },
                endpoint,
            });
        AdoResourcesRepositoryId { repo_id }
    }

    /// (GitHub Actions only) Set the pipeline-level name.
    ///
    /// <https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#name>
    pub fn gh_set_name(&mut self, name: impl AsRef<str>) -> &mut Self {
        self.gh_name = Some(name.as_ref().into());
        self
    }

    /// (GitHub Actions only) Provide a YAML template used to bootstrap flowey
    /// at the start of a GitHub pipeline.
    ///
    /// The template has access to the following vars, which will be statically
    /// interpolated into the template's text:
    ///
    /// - `{{FLOWEY_OUTDIR}}`
    ///     - Directory to copy artifacts into.
    ///     - NOTE: this var will include `\` on Windows, and `/` on Linux!
    /// - `{{FLOWEY_BIN_EXTENSION}}`
    ///     - Extension of the expected flowey bin (either "", or ".exe")
    /// - `{{FLOWEY_CRATE}}`
    ///     - Name of the project-specific flowey crate to be built
    /// - `{{FLOWEY_TARGET}}`
    ///     - The target-triple flowey is being built for
    /// - `{{FLOWEY_PIPELINE_PATH}}`
    ///     - Repo-root relative path to the pipeline (as provided when
    ///       generating the pipeline via the flowey CLI)
    ///
    /// The template's sole responsibility is to copy 3 files into the
    /// `{{FLOWEY_OUTDIR}}`:
    ///
    /// 1. The bootstrapped flowey binary, with the file name
    ///    `flowey{{FLOWEY_BIN_EXTENSION}}`
    /// 2. Two files called `pipeline.yaml` and `pipeline.json`, which are
    ///    copies of the pipeline YAML and pipeline JSON currently being run.
    ///    `{{FLOWEY_PIPELINE_PATH}}` is provided as a way to disambiguate in
    ///    cases where the same template is being used for multiple pipelines
    ///    (e.g: a debug vs. release pipeline).
    pub fn gh_set_flowey_bootstrap_template(&mut self, template: String) -> &mut Self {
        self.gh_bootstrap_template = template;
        self
    }

    /// (GitHub Actions only) Add a new scheduled CI trigger. Can be called
    /// multiple times to set up multiple scheduled runs.
    pub fn gh_add_schedule_trigger(&mut self, triggers: GhScheduleTriggers) -> &mut Self {
        self.gh_schedule_triggers.push(triggers);
        self
    }

    /// (GitHub Actions only) Set a PR trigger. Calling this method multiple
    /// times will overwrite any previously set triggers.
    pub fn gh_set_pr_triggers(&mut self, triggers: GhPrTriggers) -> &mut Self {
        self.gh_pr_triggers = Some(triggers);
        self
    }

    /// (GitHub Actions only) Set a CI trigger. Calling this method multiple
    /// times will overwrite any previously set triggers.
    pub fn gh_set_ci_triggers(&mut self, triggers: GhCiTriggers) -> &mut Self {
        self.gh_ci_triggers = Some(triggers);
        self
    }

    /// (GitHub Actions only) Use a pre-defined GitHub Actions secret variable.
    ///
    /// For more information on defining secrets for use in GitHub Actions, see
    /// <https://docs.github.com/en/actions/security-guides/using-secrets-in-github-actions>
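    ///
    /// A usage sketch (the secret name is illustrative; the returned
    /// [`GhUserSecretVar`] can later be resolved into a runtime variable by a
    /// node):
    ///
    /// ```ignore
    /// let my_token = pipeline.gh_use_secret("MY_API_TOKEN");
    /// ```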
    pub fn gh_use_secret(&mut self, secret_name: impl AsRef<str>) -> GhUserSecretVar {
        GhUserSecretVar(secret_name.as_ref().to_string())
    }

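    /// Declare a new job in the pipeline, which will run on the given
    /// `platform` and `arch`, identified in backend output by `label`.
    ///
    /// A minimal sketch (platform, arch, and label are illustrative):
    ///
    /// ```ignore
    /// let job = pipeline
    ///     .new_job(
    ///         FlowPlatform::Linux(FlowPlatformLinuxDistro::Ubuntu),
    ///         FlowArch::X86_64,
    ///         "example job",
    ///     )
    ///     .finish();
    /// ```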
    pub fn new_job(
        &mut self,
        platform: FlowPlatform,
        arch: FlowArch,
        label: impl AsRef<str>,
    ) -> PipelineJob<'_> {
        let idx = self.jobs.len();
        self.jobs.push(PipelineJobMetadata {
            root_nodes: BTreeMap::new(),
            patches: ResolvedPatches::build(),
            label: label.as_ref().into(),
            platform,
            arch,
            cond_param_idx: None,
            ado_pool: None,
            ado_variables: BTreeMap::new(),
            gh_override_if: None,
            gh_global_env: BTreeMap::new(),
            gh_pool: None,
            gh_permissions: BTreeMap::new(),
        });

        PipelineJob {
            pipeline: self,
            job_idx: idx,
        }
    }

    /// Declare a dependency between two jobs that is not the result of an
    /// artifact.
    pub fn non_artifact_dep(
        &mut self,
        job: &PipelineJobHandle,
        depends_on_job: &PipelineJobHandle,
    ) -> &mut Self {
        self.extra_deps
            .insert((depends_on_job.job_idx, job.job_idx));
        self
    }

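    /// Declare a new artifact, returning a pair of opaque handles: a
    /// [`PublishArtifact`] handle for the single job that produces it, and a
    /// [`UseArtifact`] handle which any number of downstream jobs can clone
    /// and consume.
    ///
    /// Panics if an artifact with the same `name` was already declared.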
    #[track_caller]
    pub fn new_artifact(&mut self, name: impl AsRef<str>) -> (PublishArtifact, UseArtifact) {
        let name = name.as_ref();
        let owned_name = name.to_string();

        let not_exists = self.artifact_names.insert(owned_name.clone());
        if !not_exists {
            panic!("duplicate artifact name: {}", name)
        }

        let idx = self.artifacts.len();
        self.artifacts.push(ArtifactMeta {
            name: owned_name,
            published_by_job: None,
            used_by_jobs: BTreeSet::new(),
        });

        (PublishArtifact { idx }, UseArtifact { idx })
    }

    /// Returns a pair of opaque handles to a new artifact for use across jobs
    /// in the pipeline.
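    ///
    /// A sketch of wiring a typed artifact between two jobs (`BuildOutput` is
    /// a hypothetical type implementing [`Artifact`]):
    ///
    /// ```ignore
    /// let (publish, use_artifact) = pipeline.new_typed_artifact::<BuildOutput>("build-output");
    /// // in the producing job: ctx.publish_typed_artifact(publish) -> WriteVar<BuildOutput>
    /// // in a consuming job:   ctx.use_typed_artifact(&use_artifact) -> ReadVar<BuildOutput>
    /// ```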
    #[track_caller]
    pub fn new_typed_artifact<T: Artifact>(
        &mut self,
        name: impl AsRef<str>,
    ) -> (PublishTypedArtifact<T>, UseTypedArtifact<T>) {
        let (publish, use_artifact) = self.new_artifact(name);
        (
            PublishTypedArtifact(publish, std::marker::PhantomData),
            UseTypedArtifact(use_artifact, std::marker::PhantomData),
        )
    }

    /// (ADO only) Set the pipeline-level name.
    ///
    /// <https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number?view=azure-devops&tabs=yaml>
    pub fn ado_add_name(&mut self, name: String) -> &mut Self {
        self.ado_name = Some(name);
        self
    }

    /// (ADO only) Declare a pipeline-level, named, read-only ADO variable.
    ///
    /// `name` and `value` are both arbitrary strings.
    ///
    /// Returns an instance of [`AdoRuntimeVar`], which, if need be, can be
    /// converted into a [`ReadVar<String>`] using
    /// [`NodeCtx::get_ado_variable`].
    ///
    /// NOTE: Unless required by some particular third-party task, it's strongly
    /// recommended to _avoid_ using this method, and to simply use
    /// [`ReadVar::from_static`] to obtain a static variable.
    ///
    /// [`NodeCtx::get_ado_variable`]: crate::node::NodeCtx::get_ado_variable
    pub fn ado_new_named_variable(
        &mut self,
        name: impl AsRef<str>,
        value: impl AsRef<str>,
    ) -> AdoRuntimeVar {
        let name = name.as_ref();
        let value = value.as_ref();

        self.ado_variables.insert(name.into(), value.into());

        // safe, since we'll ensure that the global exists in the ADO backend
        AdoRuntimeVar::dangerous_from_global(name, false)
    }

    /// (ADO only) Declare multiple pipeline-level, named, read-only ADO
    /// variables at once.
    ///
    /// This is a convenience method to streamline invoking
    /// [`Self::ado_new_named_variable`] multiple times.
    ///
    /// NOTE: Unless required by some particular third-party task, it's strongly
    /// recommended to _avoid_ using this method, and to simply use
    /// [`ReadVar::from_static`] to obtain a static variable.
    ///
    /// DEVNOTE: In the future, this API may be updated to return a handle that
    /// will allow resolving the resulting `AdoRuntimeVar`, but for
    /// implementation expediency, this API does not currently do this. If you
    /// need to read the value of this variable at runtime, you may need to
    /// invoke [`AdoRuntimeVar::dangerous_from_global`] manually.
    ///
    /// [`NodeCtx::get_ado_variable`]: crate::node::NodeCtx::get_ado_variable
    pub fn ado_new_named_variables<K, V>(
        &mut self,
        vars: impl IntoIterator<Item = (K, V)>,
    ) -> &mut Self
    where
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.ado_variables.extend(
            vars.into_iter()
                .map(|(k, v)| (k.as_ref().into(), v.as_ref().into())),
        );
        self
    }

    /// Declare a pipeline-level runtime parameter with type `bool`.
    ///
    /// To obtain a [`ReadVar<bool>`] that can be used within a node, use the
    /// [`PipelineJobCtx::use_parameter`] method.
    ///
    /// `name` is the name of the parameter.
    ///
    /// `description` is an arbitrary string, which will be shown to users.
    ///
    /// `kind` is the type of parameter and if it should be treated as a stable
    /// external API to callers of the pipeline.
    ///
    /// `default` is the default value for the parameter. If none is provided,
    /// the parameter _must_ be specified in order for the pipeline to run.
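    ///
    /// A usage sketch (parameter name and description are illustrative):
    ///
    /// ```ignore
    /// let verbose = pipeline.new_parameter_bool(
    ///     "verbose",
    ///     "run with verbose logging",
    ///     ParameterKind::Unstable,
    ///     Some(false), // not verbose, unless explicitly requested
    /// );
    /// // later, within a job: ctx.use_parameter(verbose) -> ReadVar<bool>
    /// ```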
    pub fn new_parameter_bool(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<bool>,
    ) -> UseParameter<bool> {
        let idx = self.parameters.len();
        let name = new_parameter_name(name, kind.clone());
        self.parameters.push(ParameterMeta {
            parameter: Parameter::Bool {
                name,
                description: description.as_ref().into(),
                kind,
                default,
            },
            used_by_jobs: BTreeSet::new(),
        });

        UseParameter {
            idx,
            _kind: std::marker::PhantomData,
        }
    }

    /// Declare a pipeline-level runtime parameter with type `i64`.
    ///
    /// To obtain a [`ReadVar<i64>`] that can be used within a node, use the
    /// [`PipelineJobCtx::use_parameter`] method.
    ///
    /// `name` is the name of the parameter.
    ///
    /// `description` is an arbitrary string, which will be shown to users.
    ///
    /// `kind` is the type of parameter and if it should be treated as a stable
    /// external API to callers of the pipeline.
    ///
    /// `default` is the default value for the parameter. If none is provided,
    /// the parameter _must_ be specified in order for the pipeline to run.
    ///
    /// `possible_values` can be used to limit the set of valid values the
    /// parameter accepts.
    pub fn new_parameter_num(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<i64>,
        possible_values: Option<Vec<i64>>,
    ) -> UseParameter<i64> {
        let idx = self.parameters.len();
        let name = new_parameter_name(name, kind.clone());
        self.parameters.push(ParameterMeta {
            parameter: Parameter::Num {
                name,
                description: description.as_ref().into(),
                kind,
                default,
                possible_values,
            },
            used_by_jobs: BTreeSet::new(),
        });

        UseParameter {
            idx,
            _kind: std::marker::PhantomData,
        }
    }

    /// Declare a pipeline-level runtime parameter with type `String`.
    ///
    /// To obtain a [`ReadVar<String>`] that can be used within a node, use the
    /// [`PipelineJobCtx::use_parameter`] method.
    ///
    /// `name` is the name of the parameter.
    ///
    /// `description` is an arbitrary string, which will be shown to users.
    ///
    /// `kind` is the type of parameter and if it should be treated as a stable
    /// external API to callers of the pipeline.
    ///
    /// `default` is the default value for the parameter. If none is provided,
    /// the parameter _must_ be specified in order for the pipeline to run.
    ///
    /// `possible_values` allows restricting inputs to a set of possible values.
    /// Depending on the backend, these options may be presented as a set of
    /// radio buttons, a dropdown menu, or something in that vein. If `None`,
    /// then any string is allowed.
    pub fn new_parameter_string(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<impl AsRef<str>>,
        possible_values: Option<Vec<String>>,
    ) -> UseParameter<String> {
        let idx = self.parameters.len();
        let name = new_parameter_name(name, kind.clone());
        self.parameters.push(ParameterMeta {
            parameter: Parameter::String {
                name,
                description: description.as_ref().into(),
                kind,
                default: default.map(|x| x.as_ref().into()),
                possible_values,
            },
            used_by_jobs: BTreeSet::new(),
        });

        UseParameter {
            idx,
            _kind: std::marker::PhantomData,
        }
    }
}

pub struct PipelineJobCtx<'a> {
    pipeline: &'a mut Pipeline,
    job_idx: usize,
}

impl PipelineJobCtx<'_> {
    /// Create a new `WriteVar<SideEffect>` anchored to the pipeline job.
    pub fn new_done_handle(&mut self) -> WriteVar<crate::node::SideEffect> {
        self.pipeline.dummy_done_idx += 1;
        crate::node::thin_air_write_runtime_var(format!("start{}", self.pipeline.dummy_done_idx))
    }

    /// Claim that this job will use this artifact, obtaining a path to a folder
    /// with the artifact's contents.
    pub fn use_artifact(&mut self, artifact: &UseArtifact) -> ReadVar<PathBuf> {
        self.pipeline.artifacts[artifact.idx]
            .used_by_jobs
            .insert(self.job_idx);

        crate::node::thin_air_read_runtime_var(consistent_artifact_runtime_var_name(
            &self.pipeline.artifacts[artifact.idx].name,
            true,
        ))
    }

    /// Claim that this job will publish this artifact, obtaining a path to a
    /// fresh, empty folder which will be published as the specified artifact
    /// at the end of the job.
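    ///
    /// A sketch of publishing from within a job (the `collect_logs::Request`
    /// type and its `output_dir` field are hypothetical):
    ///
    /// ```ignore
    /// let (publish, _use) = pipeline.new_artifact("logs");
    /// pipeline
    ///     .new_job(platform, arch, "collect logs")
    ///     .dep_on(|ctx| collect_logs::Request {
    ///         output_dir: ctx.publish_artifact(publish),
    ///     })
    ///     .finish();
    /// ```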
    pub fn publish_artifact(&mut self, artifact: PublishArtifact) -> ReadVar<PathBuf> {
        let existing = self.pipeline.artifacts[artifact.idx]
            .published_by_job
            .replace(self.job_idx);
        assert!(existing.is_none()); // PublishArtifact isn't cloneable

        crate::node::thin_air_read_runtime_var(consistent_artifact_runtime_var_name(
            &self.pipeline.artifacts[artifact.idx].name,
            false,
        ))
    }

    fn helper_request<R: IntoRequest>(&mut self, req: R)
    where
        R::Node: 'static,
    {
        self.pipeline.jobs[self.job_idx]
            .root_nodes
            .entry(NodeHandle::from_type::<R::Node>())
            .or_default()
            .push(serde_json::to_vec(&req.into_request()).unwrap().into());
    }

    fn new_artifact_map_vars<T: Artifact>(&mut self) -> (ReadVar<T>, WriteVar<T>) {
        let artifact_map_idx = self.pipeline.artifact_map_idx;
        self.pipeline.artifact_map_idx += 1;

        let backing_var = format!("artifact_map{}", artifact_map_idx);
        let read_var = crate::node::thin_air_read_runtime_var(backing_var.clone());
        let write_var = crate::node::thin_air_write_runtime_var(backing_var);
        (read_var, write_var)
    }

    /// Claim that this job will use this artifact, obtaining the resolved
    /// contents of the artifact.
    pub fn use_typed_artifact<T: Artifact>(
        &mut self,
        artifact: &UseTypedArtifact<T>,
    ) -> ReadVar<T> {
        let artifact_path = self.use_artifact(&artifact.0);
        let (read, write) = self.new_artifact_map_vars::<T>();
        self.helper_request(artifact::resolve::Request::new(artifact_path, write));
        read
    }

    /// Claim that this job will publish this artifact, obtaining a variable to
    /// write the artifact's contents to. The artifact will be published at
    /// the end of the job.
    pub fn publish_typed_artifact<T: Artifact>(
        &mut self,
        artifact: PublishTypedArtifact<T>,
    ) -> WriteVar<T> {
        let artifact_path = self.publish_artifact(artifact.0);
        let (read, write) = self.new_artifact_map_vars::<T>();
        let done = self.new_done_handle();
        self.helper_request(artifact::publish::Request::new(read, artifact_path, done));
        write
    }

    /// Obtain a `ReadVar<T>` corresponding to a pipeline parameter which is
    /// specified at runtime.
    pub fn use_parameter<T>(&mut self, param: UseParameter<T>) -> ReadVar<T>
    where
        T: Serialize + DeserializeOwned,
    {
        self.pipeline.parameters[param.idx]
            .used_by_jobs
            .insert(self.job_idx);

        crate::node::thin_air_read_runtime_var(
            self.pipeline.parameters[param.idx]
                .parameter
                .name()
                .to_string(),
        )
    }

    /// Shortcut which allows defining a bool pipeline parameter within a Job.
    ///
    /// To share a single parameter between multiple jobs, don't use this
    /// method; use [`Pipeline::new_parameter_bool`] + [`Self::use_parameter`]
    /// instead.
    pub fn new_parameter_bool(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<bool>,
    ) -> ReadVar<bool> {
        let param = self
            .pipeline
            .new_parameter_bool(name, description, kind, default);
        self.use_parameter(param)
    }

    /// Shortcut which allows defining a number pipeline parameter within a Job.
    ///
    /// To share a single parameter between multiple jobs, don't use this
    /// method; use [`Pipeline::new_parameter_num`] + [`Self::use_parameter`]
    /// instead.
    pub fn new_parameter_num(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<i64>,
        possible_values: Option<Vec<i64>>,
    ) -> ReadVar<i64> {
        let param =
            self.pipeline
                .new_parameter_num(name, description, kind, default, possible_values);
        self.use_parameter(param)
    }

    /// Shortcut which allows defining a string pipeline parameter within a Job.
    ///
    /// To share a single parameter between multiple jobs, don't use this
    /// method; use [`Pipeline::new_parameter_string`] + [`Self::use_parameter`]
    /// instead.
    pub fn new_parameter_string(
        &mut self,
        name: impl AsRef<str>,
        description: impl AsRef<str>,
        kind: ParameterKind,
        default: Option<String>,
        possible_values: Option<Vec<String>>,
    ) -> ReadVar<String> {
        let param =
            self.pipeline
                .new_parameter_string(name, description, kind, default, possible_values);
        self.use_parameter(param)
    }
}

#[must_use]
pub struct PipelineJob<'a> {
    pipeline: &'a mut Pipeline,
    job_idx: usize,
}

impl PipelineJob<'_> {
    /// (ADO only) specify which agent pool this job will be run on.
    pub fn ado_set_pool(self, pool: impl AsRef<str>) -> Self {
        self.ado_set_pool_with_demands(pool, Vec::new())
    }

    /// (ADO only) specify which agent pool this job will be run on, with
    /// additional special runner demands.
    pub fn ado_set_pool_with_demands(self, pool: impl AsRef<str>, demands: Vec<String>) -> Self {
        self.pipeline.jobs[self.job_idx].ado_pool = Some(AdoPool {
            name: pool.as_ref().into(),
            demands,
        });
        self
    }

    /// (ADO only) Declare a job-level, named, read-only ADO variable.
    ///
    /// `name` and `value` are both arbitrary strings, which may include ADO
    /// template expressions.
    ///
    /// NOTE: Unless required by some particular third-party task, it's strongly
    /// recommended to _avoid_ using this method, and to simply use
    /// [`ReadVar::from_static`] to obtain a static variable.
    ///
    /// DEVNOTE: In the future, this API may be updated to return a handle that
    /// will allow resolving the resulting `AdoRuntimeVar`, but for
    /// implementation expediency, this API does not currently do this. If you
    /// need to read the value of this variable at runtime, you may need to
    /// invoke [`AdoRuntimeVar::dangerous_from_global`] manually.
    ///
    /// [`NodeCtx::get_ado_variable`]: crate::node::NodeCtx::get_ado_variable
    pub fn ado_new_named_variable(self, name: impl AsRef<str>, value: impl AsRef<str>) -> Self {
        let name = name.as_ref();
        let value = value.as_ref();
        self.pipeline.jobs[self.job_idx]
            .ado_variables
            .insert(name.into(), value.into());
        self
    }

    /// (ADO only) Declare multiple job-level, named, read-only ADO variables at
    /// once.
    ///
    /// This is a convenience method to streamline invoking
    /// [`Self::ado_new_named_variable`] multiple times.
    ///
    /// NOTE: Unless required by some particular third-party task, it's strongly
    /// recommended to _avoid_ using this method, and to simply use
    /// [`ReadVar::from_static`] to obtain a static variable.
    ///
    /// DEVNOTE: In the future, this API may be updated to return a handle that
    /// will allow resolving the resulting `AdoRuntimeVar`, but for
    /// implementation expediency, this API does not currently do this. If you
    /// need to read the value of this variable at runtime, you may need to
    /// invoke [`AdoRuntimeVar::dangerous_from_global`] manually.
    ///
    /// [`NodeCtx::get_ado_variable`]: crate::node::NodeCtx::get_ado_variable
    pub fn ado_new_named_variables<K, V>(self, vars: impl IntoIterator<Item = (K, V)>) -> Self
    where
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.pipeline.jobs[self.job_idx].ado_variables.extend(
            vars.into_iter()
                .map(|(k, v)| (k.as_ref().into(), v.as_ref().into())),
        );
        self
    }

    /// Overrides the ID of the job.
    ///
    /// Flowey typically generates a reasonable job ID, but some use cases that
    /// depend on the ID may find it useful to override it to something custom.
    pub fn ado_override_job_id(self, name: impl AsRef<str>) -> Self {
        self.pipeline
            .ado_job_id_overrides
            .insert(self.job_idx, name.as_ref().into());
        self
    }

    /// (GitHub Actions only) specify which GitHub runner this job will be run on.
    pub fn gh_set_pool(self, pool: GhRunner) -> Self {
        self.pipeline.jobs[self.job_idx].gh_pool = Some(pool);
        self
    }

    /// (GitHub Actions only) Manually override the `if:` condition for this
    /// particular job.
    ///
    /// **This is dangerous**, as an improperly set `if` condition may break
    /// downstream flowey jobs which assume flowey is in control of the job's
    /// scheduling logic.
    ///
    /// See
    /// <https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#jobsjob_idif>
    /// for more info.
    pub fn gh_dangerous_override_if(self, condition: impl AsRef<str>) -> Self {
        self.pipeline.jobs[self.job_idx].gh_override_if = Some(condition.as_ref().into());
        self
    }

    /// (GitHub Actions only) Declare a global job-level environment variable,
    /// visible to all downstream steps.
    ///
    /// `name` and `value` are both arbitrary strings, which may include GitHub
    /// Actions template expressions.
    ///
    /// **This is dangerous**, as it is easy to misuse this API in order to
    /// write a node which takes an implicit dependency on there being a global
    /// variable set on its behalf by the top-level pipeline code, making it
    /// difficult to "locally reason" about the behavior of a node simply by
    /// reading its code.
    ///
    /// Whenever possible, nodes should "late bind" environment variables:
    /// accepting a compile-time / runtime flowey parameter, and then setting it
    /// prior to executing a child command that requires it.
    ///
    /// Only use this API in exceptional cases, such as obtaining an environment
    /// variable whose value is determined by a job-level GitHub Actions
    /// expression evaluation.
    pub fn gh_dangerous_global_env_var(
        self,
        name: impl AsRef<str>,
        value: impl AsRef<str>,
    ) -> Self {
        let name = name.as_ref();
        let value = value.as_ref();
        self.pipeline.jobs[self.job_idx]
            .gh_global_env
            .insert(name.into(), value.into());
        self
    }

    /// (GitHub Actions only) Grant permissions required by nodes in the job.
    ///
    /// For a given node handle, grant the specified permissions.
    /// The list provided must match the permissions specified within the node
    /// using `requires_permission`.
    ///
    /// NOTE: While this method is called at a node-level for auditability, the
    /// emitted YAML grants permissions at the job-level.
    ///
    /// This can lead to weird situations where node 1 might not specify a
    /// permission required according to GitHub Actions, but due to job-level
    /// granting of the permission by another node 2, the pipeline executes
    /// even though it wouldn't if node 2 was removed.
    ///
    /// For available permission scopes and their descriptions, see
    /// <https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions>.
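    ///
    /// A sketch of granting a node read access to repository contents (the
    /// node path and the exact permission variants are illustrative):
    ///
    /// ```ignore
    /// job.gh_grant_permissions::<some_node::Node>([(
    ///     GhPermission::Contents,
    ///     GhPermissionValue::Read,
    /// )])
    /// ```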
    pub fn gh_grant_permissions<N: FlowNodeBase + 'static>(
        self,
        permissions: impl IntoIterator<Item = (GhPermission, GhPermissionValue)>,
    ) -> Self {
        let node_handle = NodeHandle::from_type::<N>();
        for (permission, value) in permissions {
            self.pipeline.jobs[self.job_idx]
                .gh_permissions
                .entry(node_handle)
                .or_default()
                .insert(permission, value);
        }
        self
    }

    pub fn apply_patchfn(self, patchfn: crate::patch::PatchFn) -> Self {
        self.pipeline.jobs[self.job_idx]
            .patches
            .apply_patchfn(patchfn);
        self
    }

    /// Only run the job if the specified condition is true.
    ///
    /// When running locally, the `cond`'s default value will be used to
    /// determine if the job will be run.
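    ///
    /// A usage sketch, gating a job on a bool parameter:
    ///
    /// ```ignore
    /// let run_extra = pipeline.new_parameter_bool(
    ///     "run_extra",
    ///     "run the extra validation job",
    ///     ParameterKind::Stable,
    ///     Some(false), // skipped unless explicitly requested
    /// );
    /// let job = pipeline
    ///     .new_job(platform, arch, "extra validation")
    ///     .with_condition(run_extra)
    ///     .finish();
    /// ```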
    pub fn with_condition(self, cond: UseParameter<bool>) -> Self {
        self.pipeline.jobs[self.job_idx].cond_param_idx = Some(cond.idx);
        self
    }

    /// Add a flow node which will be run as part of the job.
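    ///
    /// A sketch with a hypothetical `my_node::Request` type that implements
    /// [`IntoRequest`]:
    ///
    /// ```ignore
    /// job.dep_on(|ctx| my_node::Request {
    ///     input_dir: ctx.use_artifact(&use_artifact),
    ///     done: ctx.new_done_handle(),
    /// })
    /// ```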
    pub fn dep_on<R: IntoRequest + 'static>(
        self,
        f: impl FnOnce(&mut PipelineJobCtx<'_>) -> R,
    ) -> Self {
        // PipelineJobCtx will ensure artifact deps are taken care of
        let req = f(&mut PipelineJobCtx {
            pipeline: self.pipeline,
            job_idx: self.job_idx,
        });

        self.pipeline.jobs[self.job_idx]
            .root_nodes
            .entry(NodeHandle::from_type::<R::Node>())
            .or_default()
            .push(serde_json::to_vec(&req.into_request()).unwrap().into());

        self
    }

    /// Finish describing the pipeline job.
    pub fn finish(self) -> PipelineJobHandle {
        PipelineJobHandle {
            job_idx: self.job_idx,
        }
    }

    /// Return the job's platform.
    pub fn get_platform(&self) -> FlowPlatform {
        self.pipeline.jobs[self.job_idx].platform
    }

    /// Return the job's architecture.
    pub fn get_arch(&self) -> FlowArch {
        self.pipeline.jobs[self.job_idx].arch
    }
}

#[derive(Clone)]
pub struct PipelineJobHandle {
    job_idx: usize,
}

impl PipelineJobHandle {
    pub fn is_handle_for(&self, job: &PipelineJob<'_>) -> bool {
        self.job_idx == job.job_idx
    }
}

#[derive(Clone, Copy)]
pub enum PipelineBackendHint {
    /// Pipeline is being run on the user's dev machine (via bash / direct run)
    Local,
    /// Pipeline is run on ADO
    Ado,
    /// Pipeline is run on GitHub Actions
    Github,
}

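/// Types which can be converted into a [`Pipeline`], given a hint about which
/// backend the pipeline is being generated for.
///
/// A minimal sketch of an implementation:
///
/// ```ignore
/// struct MyPipeline;
///
/// impl IntoPipeline for MyPipeline {
///     fn into_pipeline(self, _backend_hint: PipelineBackendHint) -> anyhow::Result<Pipeline> {
///         let mut pipeline = Pipeline::new();
///         // ...declare jobs, artifacts, and parameters here...
///         Ok(pipeline)
///     }
/// }
/// ```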
pub trait IntoPipeline {
    fn into_pipeline(self, backend_hint: PipelineBackendHint) -> anyhow::Result<Pipeline>;
}

fn new_parameter_name(name: impl AsRef<str>, kind: ParameterKind) -> String {
    match kind {
        ParameterKind::Unstable => format!("__unstable_{}", name.as_ref()),
        ParameterKind::Stable => name.as_ref().into(),
    }
}

/// Structs which should only be used by top-level flowey emitters. If you're a
/// pipeline author, these are not types you need to care about!
pub mod internal {
    use super::*;
    use std::collections::BTreeMap;

    pub fn consistent_artifact_runtime_var_name(artifact: impl AsRef<str>, is_use: bool) -> String {
        format!(
            "artifact_{}_{}",
            if is_use { "use_from" } else { "publish_from" },
            artifact.as_ref()
        )
    }

    #[derive(Debug)]
    pub struct InternalAdoResourcesRepository {
        /// flowey-generated unique repo identifier
        pub repo_id: String,
        /// Type of repo that is being connected to.
        pub repo_type: AdoResourcesRepositoryType,
        /// Repository name. Format depends on `repo_type`.
        pub name: String,
        /// git ref to checkout.
        pub git_ref: AdoResourcesRepositoryRef<usize>,
        /// (optional) ID of the service endpoint connecting to this repository.
        pub endpoint: Option<String>,
    }

    pub struct PipelineJobMetadata {
        pub root_nodes: BTreeMap<NodeHandle, Vec<Box<[u8]>>>,
        pub patches: PatchResolver,
        pub label: String,
        pub platform: FlowPlatform,
        pub arch: FlowArch,
        pub cond_param_idx: Option<usize>,
        // backend specific
        pub ado_pool: Option<AdoPool>,
        pub ado_variables: BTreeMap<String, String>,
        pub gh_override_if: Option<String>,
        pub gh_pool: Option<GhRunner>,
        pub gh_global_env: BTreeMap<String, String>,
        pub gh_permissions: BTreeMap<NodeHandle, BTreeMap<GhPermission, GhPermissionValue>>,
    }

    // TODO: support a more structured format for demands
    // See https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pool-demands
    #[derive(Debug, Clone)]
    pub struct AdoPool {
        pub name: String,
        pub demands: Vec<String>,
    }

    #[derive(Debug)]
    pub struct ArtifactMeta {
        pub name: String,
        pub published_by_job: Option<usize>,
        pub used_by_jobs: BTreeSet<usize>,
    }

    #[derive(Debug)]
    pub struct ParameterMeta {
        pub parameter: Parameter,
        pub used_by_jobs: BTreeSet<usize>,
    }

    /// Mirror of [`Pipeline`], except with all fields marked as `pub`.
    pub struct PipelineFinalized {
        pub jobs: Vec<PipelineJobMetadata>,
        pub artifacts: Vec<ArtifactMeta>,
        pub parameters: Vec<ParameterMeta>,
        pub extra_deps: BTreeSet<(usize, usize)>,
        // backend specific
        pub ado_name: Option<String>,
        pub ado_schedule_triggers: Vec<AdoScheduleTriggers>,
        pub ado_ci_triggers: Option<AdoCiTriggers>,
        pub ado_pr_triggers: Option<AdoPrTriggers>,
        pub ado_bootstrap_template: String,
        pub ado_resources_repository: Vec<InternalAdoResourcesRepository>,
        pub ado_post_process_yaml_cb:
            Option<Box<dyn FnOnce(serde_yaml::Value) -> serde_yaml::Value>>,
        pub ado_variables: BTreeMap<String, String>,
        pub ado_job_id_overrides: BTreeMap<usize, String>,
        pub gh_name: Option<String>,
        pub gh_schedule_triggers: Vec<GhScheduleTriggers>,
        pub gh_ci_triggers: Option<GhCiTriggers>,
        pub gh_pr_triggers: Option<GhPrTriggers>,
        pub gh_bootstrap_template: String,
    }

    impl PipelineFinalized {
        pub fn from_pipeline(mut pipeline: Pipeline) -> Self {
            if let Some(cb) = pipeline.inject_all_jobs_with.take() {
                for job_idx in 0..pipeline.jobs.len() {
                    let _ = cb(PipelineJob {
                        pipeline: &mut pipeline,
                        job_idx,
                    });
                }
            }

            let Pipeline {
                mut jobs,
                artifacts,
                parameters,
                extra_deps,
                ado_name,
                ado_bootstrap_template,
                ado_schedule_triggers,
                ado_ci_triggers,
                ado_pr_triggers,
                ado_resources_repository,
                ado_post_process_yaml_cb,
                ado_variables,
                ado_job_id_overrides,
                gh_name,
                gh_schedule_triggers,
                gh_ci_triggers,
                gh_pr_triggers,
                gh_bootstrap_template,
                // not relevant to consumer code
                dummy_done_idx: _,
                artifact_map_idx: _,
                artifact_names: _,
                global_patchfns,
                inject_all_jobs_with: _, // processed above
            } = pipeline;

            for patchfn in global_patchfns {
                for job in &mut jobs {
                    job.patches.apply_patchfn(patchfn)
                }
            }

            Self {
                jobs,
                artifacts,
                parameters,
                extra_deps,
                ado_name,
                ado_schedule_triggers,
                ado_ci_triggers,
                ado_pr_triggers,
                ado_bootstrap_template,
                ado_resources_repository,
                ado_post_process_yaml_cb,
                ado_variables,
                ado_job_id_overrides,
                gh_name,
                gh_schedule_triggers,
                gh_ci_triggers,
                gh_pr_triggers,
                gh_bootstrap_template,
            }
        }
    }

    #[derive(Debug, Clone)]
    pub enum Parameter {
        Bool {
            name: String,
            description: String,
            kind: ParameterKind,
            default: Option<bool>,
        },
        String {
            name: String,
            description: String,
            default: Option<String>,
            kind: ParameterKind,
            possible_values: Option<Vec<String>>,
        },
        Num {
            name: String,
            description: String,
            default: Option<i64>,
            kind: ParameterKind,
            possible_values: Option<Vec<i64>>,
        },
    }

    impl Parameter {
        pub fn name(&self) -> &str {
            match self {
                Parameter::Bool { name, .. } => name,
                Parameter::String { name, .. } => name,
                Parameter::Num { name, .. } => name,
            }
        }
    }
}
1459}