|
| 1 | +use std::{collections::HashMap, io::SeekFrom}; |
| 2 | + |
| 3 | +use camino::{Utf8Path, Utf8PathBuf}; |
| 4 | +use clap::{ArgAction, Parser}; |
| 5 | +use color_eyre::eyre::{Context, Report, Result}; |
| 6 | +use open_build_service_api as obs; |
| 7 | +use serde::{Deserialize, Serialize}; |
| 8 | +use tokio::io::{AsyncSeekExt, AsyncWriteExt}; |
| 9 | +use tracing::{debug, info, instrument}; |
| 10 | + |
| 11 | +use crate::{ |
| 12 | + artifacts::{ArtifactDirectory, ArtifactReader, ArtifactWriter, MissingArtifactToNone}, |
| 13 | + binaries::download_binaries, |
| 14 | + build_meta::{ |
| 15 | + BuildHistoryRetrieval, BuildMeta, BuildMetaOptions, CommitBuildInfo, DisabledRepos, |
| 16 | + RepoArch, |
| 17 | + }, |
| 18 | + monitor::{MonitoredPackage, ObsMonitor, PackageCompletion, PackageMonitoringOptions}, |
| 19 | + prune::prune_branch, |
| 20 | + retry_request, |
| 21 | + upload::ObsDscUploader, |
| 22 | +}; |
| 23 | + |
/// Default artifact filename for the serialized [`ObsBuildInfo`] document.
pub const DEFAULT_BUILD_INFO: &str = "build-info.yml";
/// Default artifact filename for the build log (see `MonitorAction::build_log_out`).
pub const DEFAULT_BUILD_LOG: &str = "build.log";
| 26 | + |
// Our flags can all take explicit values, because it makes it easier to
// conditionally set things in the pipelines.
/// Extension trait for [`clap::Arg`] that turns a plain boolean flag into one
/// that also accepts an explicit value: `--flag`, `--flag=true`, `--flag=false`.
pub trait FlagSupportingExplicitValue {
    fn flag_supporting_explicit_value(self) -> Self;
}
| 32 | + |
| 33 | +impl FlagSupportingExplicitValue for clap::Arg { |
| 34 | + fn flag_supporting_explicit_value(self) -> Self { |
| 35 | + self.num_args(0..=1) |
| 36 | + .require_equals(true) |
| 37 | + .required(false) |
| 38 | + .default_value("false") |
| 39 | + .default_missing_value("true") |
| 40 | + .action(ArgAction::Set) |
| 41 | + } |
| 42 | +} |
| 43 | + |
// Arguments for `dput`: upload a Debian source package (.dsc) to an OBS
// project. (Comments here are deliberately `//`, not `///`, so they don't
// become clap-generated help text.)
#[derive(Parser, Debug)]
pub struct DputAction {
    // Target OBS project (positional).
    pub project: String,
    // Path to the .dsc file to upload (positional).
    pub dsc: String,
    // If non-empty, branch the package into this project before uploading.
    #[clap(long, default_value = "")]
    pub branch_to: String,
    // Artifact path where the generated build info is written.
    #[clap(long, default_value_t = DEFAULT_BUILD_INFO.to_owned().into())]
    pub build_info_out: Utf8PathBuf,
    // Trigger an OBS rebuild when the upload left the package unchanged.
    #[clap(long, flag_supporting_explicit_value())]
    pub rebuild_if_unchanged: bool,
}
| 55 | + |
// Arguments for `monitor`: watch a single repository/arch build of one
// uploaded package revision until it finishes. (Comments are `//`, not `///`,
// to leave clap help text unchanged.)
#[derive(Parser, Debug)]
pub struct MonitorAction {
    #[clap(long)]
    pub project: String,
    #[clap(long)]
    pub package: String,
    // Package revision to monitor.
    #[clap(long)]
    pub rev: String,
    // srcmd5 of the uploaded commit being monitored.
    #[clap(long)]
    pub srcmd5: String,
    // OBS repository whose build should be watched.
    #[clap(long)]
    pub repository: String,
    // Architecture whose build should be watched.
    #[clap(long)]
    pub arch: String,
    // endtime of the previous build of this commit, if any; lets the monitor
    // avoid mistaking a stale build result for a fresh one.
    #[clap(long)]
    pub prev_endtime_for_commit: Option<u64>,
    // Artifact path where the build log should be saved.
    #[clap(long)]
    pub build_log_out: String,
}
| 75 | + |
// Arguments for `download-binaries`: fetch the binaries produced by one
// repository/arch build of a package. (Comments are `//`, not `///`, to leave
// clap help text unchanged.)
#[derive(Parser, Debug)]
pub struct DownloadBinariesAction {
    #[clap(long)]
    pub project: String,
    #[clap(long)]
    pub package: String,
    #[clap(long)]
    pub repository: String,
    #[clap(long)]
    pub arch: String,
    // Directory (within the artifact store) for the downloaded binaries.
    #[clap(long)]
    pub build_results_dir: Utf8PathBuf,
}
| 89 | + |
// Arguments for `prune`: remove a branched package recorded in a previously
// written build info artifact. (Comments are `//`, not `///`, to leave clap
// help text unchanged.)
#[derive(Parser, Debug)]
pub struct PruneAction {
    // Artifact path of the build info written by `dput`.
    #[clap(long, default_value_t = DEFAULT_BUILD_INFO.to_owned())]
    pub build_info: String,
    // Treat a missing build info artifact as "nothing to prune" instead of
    // an error.
    #[clap(long, flag_supporting_explicit_value())]
    pub ignore_missing_build_info: bool,
}
| 97 | + |
/// Metadata about an upload, serialized to YAML by `dput` and read back by
/// `prune`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ObsBuildInfo {
    /// OBS project the package lives in (the branched project when branching
    /// was requested).
    pub project: String,
    /// Package name within the project.
    pub package: String,
    /// Uploaded revision; `None` until the upload has completed.
    pub rev: Option<String>,
    /// srcmd5 of the uploaded commit; `None` until the upload has completed.
    pub srcmd5: Option<String>,
    /// Whether the package was branched (and should therefore be pruned).
    pub is_branched: bool,
    /// Repo/arch combinations enabled for this commit, with their per-commit
    /// build info.
    pub enabled_repos: HashMap<RepoArch, CommitBuildInfo>,
}
| 107 | + |
| 108 | +impl ObsBuildInfo { |
| 109 | + #[instrument(skip(artifacts))] |
| 110 | + async fn save(self, artifacts: &mut impl ArtifactDirectory, path: &Utf8Path) -> Result<()> { |
| 111 | + artifacts |
| 112 | + .save_with(path, async |file: &mut ArtifactWriter| { |
| 113 | + let data = |
| 114 | + serde_yaml::to_string(&self).wrap_err("Failed to serialize build info")?; |
| 115 | + file.write_all(data.as_bytes()) |
| 116 | + .await |
| 117 | + .wrap_err("Failed to write build info file")?; |
| 118 | + Ok::<_, Report>(()) |
| 119 | + }) |
| 120 | + .await |
| 121 | + } |
| 122 | +} |
| 123 | + |
/// Error returned by [`Actions::monitor`] when the monitored build failed.
#[derive(Debug, thiserror::Error)]
#[error("Failed build")]
pub struct FailedBuild;

/// Number of bytes of a failed build's log tail to replay (2 MiB).
pub const LOG_TAIL_2MB: u64 = 2 * 1024 * 1024;

/// High-level command implementations, bound to an OBS API client.
pub struct Actions {
    pub client: obs::Client,
}
| 133 | + |
impl Actions {
    /// Upload a .dsc to OBS, optionally branching the package first, and
    /// write an [`ObsBuildInfo`] artifact describing the result.
    ///
    /// The build info artifact is written twice: an initial version as soon
    /// as the project/package names are known (so pruning can run even if the
    /// upload fails), then a complete version with the revision, srcmd5, and
    /// enabled repo/arch set filled in.
    #[instrument(skip_all, fields(args))]
    pub async fn dput(
        &mut self,
        args: DputAction,
        artifacts: &mut impl ArtifactDirectory,
    ) -> Result<()> {
        // An empty --branch-to means "don't branch".
        let branch_to = if !args.branch_to.is_empty() {
            Some(args.branch_to)
        } else {
            None
        };
        let is_branched = branch_to.is_some();

        // The upload prep and actual upload are split in two so that we can
        // already tell what the project & package name are, so build-info.yaml
        // can be written and pruning can take place regardless of the actual
        // *upload* success.
        let uploader = ObsDscUploader::prepare(
            self.client.clone(),
            args.project.clone(),
            branch_to,
            args.dsc.as_str().into(),
            artifacts,
        )
        .await?;

        // Initial build info: no revision/srcmd5 yet, no enabled repos.
        let build_info = ObsBuildInfo {
            project: uploader.project().to_owned(),
            package: uploader.package().to_owned(),
            rev: None,
            srcmd5: None,
            is_branched,
            enabled_repos: HashMap::new(),
        };
        debug!("Saving initial build info: {:?}", build_info);
        build_info
            .clone()
            .save(artifacts, &args.build_info_out)
            .await?;

        // Returns None when the package doesn't exist yet (first upload).
        let initial_build_meta = BuildMeta::get_if_package_exists(
            self.client.clone(),
            build_info.project.clone(),
            build_info.package.clone(),
            &BuildMetaOptions {
                history_retrieval: BuildHistoryRetrieval::Full,
                // Getting disabled repos has to happen *after* the upload,
                // since the new version can change the supported architectures.
                disabled_repos: DisabledRepos::Keep,
            },
        )
        .await?;
        debug!(?initial_build_meta);

        let result = uploader.upload_package(artifacts).await?;

        // If we couldn't get the metadata before because the package didn't
        // exist yet, get it now but without history, so we leave the previous
        // endtime empty (if there was no previous package, there were no
        // previous builds).
        let mut build_meta = if let Some(mut build_meta) = initial_build_meta {
            build_meta
                .remove_disabled_repos(&Default::default())
                .await?;
            build_meta
        } else {
            BuildMeta::get(
                self.client.clone(),
                build_info.project.clone(),
                build_info.package.clone(),
                &BuildMetaOptions {
                    history_retrieval: BuildHistoryRetrieval::None,
                    disabled_repos: DisabledRepos::Skip {
                        wait_options: Default::default(),
                    },
                },
            )
            .await?
        };

        if result.unchanged {
            info!("Package unchanged at revision {}.", result.rev);

            if args.rebuild_if_unchanged {
                retry_request!(
                    self.client
                        .project(build_info.project.clone())
                        .package(build_info.package.clone())
                        .rebuild()
                        .await
                        .wrap_err("Failed to trigger rebuild")
                )?;
            } else {
                // Clear out the history used to track endtime values. This is
                // normally important to make sure the monitor doesn't
                // accidentally pick up an old build result...but if we didn't
                // rebuild anything, picking up the old result is *exactly* the
                // behavior we want.
                build_meta.clear_stored_history();
            }
        } else {
            info!("Package uploaded with revision {}.", result.rev);
        }

        // Final build info: same identity as before, now with the revision,
        // srcmd5, and the repos enabled for this commit.
        let enabled_repos = build_meta.get_commit_build_info(&result.build_srcmd5);
        let build_info = ObsBuildInfo {
            rev: Some(result.rev),
            srcmd5: Some(result.build_srcmd5),
            enabled_repos,
            ..build_info
        };
        debug!("Saving complete build info: {:?}", build_info);
        build_info.save(artifacts, &args.build_info_out).await?;

        Ok(())
    }

    /// Monitor one repository/arch build of the given package revision until
    /// it completes, saving the full build log as an artifact.
    ///
    /// On a failed build, seeks to the last `log_tail_bytes` bytes of the log,
    /// hands the reader to `log_tail_cb` so the tail can be displayed, and
    /// returns [`FailedBuild`].
    #[instrument(skip_all, fields(args))]
    pub async fn monitor<F: Future<Output = Result<()>> + Send>(
        &mut self,
        args: MonitorAction,
        monitoring_options: PackageMonitoringOptions,
        log_tail_cb: impl FnOnce(ArtifactReader) -> F,
        log_tail_bytes: u64,
        artifacts: &mut impl ArtifactDirectory,
    ) -> Result<()> {
        let monitor = ObsMonitor::new(
            self.client.clone(),
            MonitoredPackage {
                project: args.project.clone(),
                package: args.package.clone(),
                repository: args.repository.clone(),
                arch: args.arch.clone(),
                rev: args.rev.clone(),
                srcmd5: args.srcmd5.clone(),
                prev_endtime_for_commit: args.prev_endtime_for_commit,
            },
        );

        let completion = monitor.monitor_package(monitoring_options).await?;
        debug!("Completed with: {:?}", completion);

        // The log is downloaded unconditionally, so it's available as an
        // artifact regardless of the build outcome.
        let mut log_file = monitor
            .download_build_log(&args.build_log_out, artifacts)
            .await?;

        match completion {
            PackageCompletion::Succeeded => {
                info!("Build succeeded!");
            }
            PackageCompletion::Superceded => {
                info!("Build was superceded by a newer revision.");
            }
            PackageCompletion::Disabled => {
                info!("Package is disabled for this architecture.");
            }
            PackageCompletion::Failed(reason) => {
                // Seek so that at most `log_tail_bytes` bytes remain to be
                // read; min() keeps us from seeking before the start of a
                // short log.
                log_file
                    .file
                    .seek(SeekFrom::End(
                        -(std::cmp::min(log_tail_bytes, log_file.len) as i64),
                    ))
                    .await
                    .wrap_err("Failed to find length of log file")?;

                log_tail_cb(log_file.file).await?;

                info!("{}", "=".repeat(64));
                info!(
                    "Build failed with reason '{}'.",
                    reason.to_string().to_lowercase()
                );
                // NOTE(review): this message hard-codes "2MB"; it's only
                // accurate when callers pass LOG_TAIL_2MB as log_tail_bytes —
                // confirm, or derive the message from the parameter.
                info!("The last 2MB of the build log is printed above.");
                info!(
                    "(Full logs are available in the build artifact '{}'.)",
                    args.build_log_out
                );
                return Err(FailedBuild.into());
            }
        }

        Ok(())
    }

    /// Download the binaries produced by one repository/arch build of a
    /// package into `args.build_results_dir`, storing them via the artifact
    /// directory.
    #[instrument(skip_all, fields(args))]
    pub async fn download_binaries(
        &mut self,
        args: DownloadBinariesAction,
        // NOTE(review): named `actions` but used as the artifact directory;
        // renaming to `artifacts` would match the sibling methods.
        actions: &mut impl ArtifactDirectory,
    ) -> Result<()> {
        let binaries = download_binaries(
            self.client.clone(),
            &args.project,
            &args.package,
            &args.repository,
            &args.arch,
            actions,
            &args.build_results_dir,
        )
        .await?;

        info!("Downloaded {} artifact(s).", binaries.paths.len());
        Ok(())
    }

    /// Delete a branched package, using the project/package/revision recorded
    /// in the build info artifact written by `dput`. Does nothing when the
    /// package wasn't branched, or (with `ignore_missing_build_info`) when the
    /// build info artifact doesn't exist.
    #[instrument(skip_all, fields(args))]
    pub async fn prune(
        &mut self,
        args: PruneAction,
        artifacts: &impl ArtifactDirectory,
    ) -> Result<()> {
        let build_info_data = if args.ignore_missing_build_info {
            // Tolerate a missing artifact (e.g. an earlier step never
            // produced it) and treat it as "nothing to prune".
            if let Some(build_info_data) = artifacts
                .read_string(&args.build_info)
                .await
                .missing_artifact_to_none()?
            {
                build_info_data
            } else {
                info!(
                    "Skipping prune: build info file '{}' not found.",
                    args.build_info
                );
                return Ok(());
            }
        } else {
            artifacts.read_string(&args.build_info).await?
        };

        let build_info: ObsBuildInfo = serde_yaml::from_str(&build_info_data)
            .wrap_err("Failed to parse provided build info file")?;

        if build_info.is_branched {
            info!(
                "Pruning branched package {}/{}...",
                build_info.project, build_info.package
            );
            prune_branch(
                &self.client,
                &build_info.project,
                &build_info.package,
                build_info.rev.as_deref(),
            )
            .await?;
        } else {
            info!("Skipping prune: package was not branched.");
        }

        Ok(())
    }
}
0 commit comments