diff --git a/subprojects/crates/db/src/connection.rs b/subprojects/crates/db/src/connection.rs index a43b0f5e5..c99679738 100644 --- a/subprojects/crates/db/src/connection.rs +++ b/subprojects/crates/db/src/connection.rs @@ -395,6 +395,41 @@ impl Connection { Ok(results) } + /// Look up a single output of a derivation from the most recent + /// successful buildstep. + pub async fn resolve_drv_output( + &mut self, + store_dir: &StoreDir, + drv_path: &StorePath, + output_name: &OutputName, + ) -> sqlx::Result> { + let drv_display = store_dir.display(drv_path).to_string(); + let output_name_str: &str = output_name.as_ref(); + let row: Option<(String,)> = sqlx::query_as( + r"SELECT o.path + FROM buildsteps s + JOIN buildstepoutputs o + ON s.build = o.build AND s.stepnr = o.stepnr + WHERE s.drvPath = $1 + AND o.name = $2 + AND o.path IS NOT NULL + AND s.status = 0 + ORDER BY s.build DESC + LIMIT 1", + ) + .bind(&drv_display) + .bind(output_name_str) + .fetch_optional(&mut *self.conn) + .await?; + + row.map(|(path,)| { + store_dir + .parse(&path) + .map_err(|e| sqlx::Error::Decode(Box::new(e))) + }) + .transpose() + } + #[tracing::instrument(skip(self), err)] pub async fn get_status(&mut self) -> sqlx::Result> { Ok( @@ -1461,4 +1496,142 @@ mod tests { ] ); } + + // -- resolve_drv_output (depth-1) tests ------------------------------------ + + #[tokio::test] + async fn resolve_drv_output_basic() { + let (_pg, mut conn) = setup().await; + insert_step(&mut conn, 1, 1, &sp("foo.drv")).await; + insert_output(&mut conn, 1, 1, "out", &sp("result")).await; + + let result = conn + .resolve_drv_output(&test_store_dir(), &sp("foo.drv"), &on("out")) + .await + .unwrap(); + assert_eq!(result, Some(sp("result"))); + } + + #[tokio::test] + async fn resolve_drv_output_missing() { + let (_pg, mut conn) = setup().await; + let result = conn + .resolve_drv_output(&test_store_dir(), &sp("nonexistent.drv"), &on("out")) + .await + .unwrap(); + assert_eq!(result, None); + } + + #[tokio::test] + 
async fn resolve_drv_output_picks_latest_build() { + let (_pg, mut conn) = setup().await; + insert_step(&mut conn, 1, 1, &sp("foo.drv")).await; + insert_output(&mut conn, 1, 1, "out", &sp("old-result")).await; + insert_step(&mut conn, 5, 1, &sp("foo.drv")).await; + insert_output(&mut conn, 5, 1, "out", &sp("new-result")).await; + + let result = conn + .resolve_drv_output(&test_store_dir(), &sp("foo.drv"), &on("out")) + .await + .unwrap(); + assert_eq!(result, Some(sp("new-result"))); + } + + // -- Simulate the Rust-side loop that replaces the recursive SQL ---------- + // + // These mirror the resolved-step tests from the DB-column approach, + // but use resolve_drv_output + an in-memory map instead of + // resolvedDrvPath in the SQL. + + /// Helper: resolve a chain one level at a time using `resolve_drv_output`, + /// translating through `resolved_map` between levels. + async fn resolve_chain_with_map( + conn: &mut Connection, + resolved_map: &std::collections::HashMap, + root: &StorePath, + outputs: &[&OutputName], + ) -> Option { + let sd = test_store_dir(); + let mut current = root.clone(); + for output_name in outputs { + let translated = resolved_map.get(¤t).cloned().unwrap_or(current); + current = conn + .resolve_drv_output(&sd, &translated, output_name) + .await + .unwrap()?; + } + Some(current) + } + + /// Depth-1: unresolved.drv was resolved to resolved.drv, which has + /// the outputs. The in-memory map translates before lookup. 
+ #[tokio::test] + async fn resolve_with_map_depth_1() { + let (_pg, mut conn) = setup().await; + + // resolved.drv was built successfully + insert_step(&mut conn, 2, 1, &sp("resolved.drv")).await; + insert_output(&mut conn, 2, 1, "out", &sp("result")).await; + + let mut map = std::collections::HashMap::new(); + map.insert(sp("unresolved.drv"), sp("resolved.drv")); + + let result = + resolve_chain_with_map(&mut conn, &map, &sp("unresolved.drv"), &[&on("out")]).await; + assert_eq!(result, Some(sp("result"))); + } + + /// Depth-2: unresolved.drv was resolved to resolved.drv, whose output + /// is an intermediate.drv that has the final output. + #[tokio::test] + async fn resolve_with_map_depth_2() { + let (_pg, mut conn) = setup().await; + + insert_step(&mut conn, 2, 1, &sp("resolved.drv")).await; + insert_output(&mut conn, 2, 1, "out", &sp("intermediate.drv")).await; + insert_step(&mut conn, 3, 1, &sp("intermediate.drv")).await; + insert_output(&mut conn, 3, 1, "out", &sp("final")).await; + + let mut map = std::collections::HashMap::new(); + map.insert(sp("unresolved.drv"), sp("resolved.drv")); + + let result = resolve_chain_with_map( + &mut conn, + &map, + &sp("unresolved.drv"), + &[&on("out"), &on("out")], + ) + .await; + assert_eq!(result, Some(sp("final"))); + } + + /// Depth-2 where the intermediate result was also resolved: + /// root.drv.drv (not resolved) → intermediate.drv (resolved) → final + #[tokio::test] + async fn resolve_with_map_intermediate_resolved() { + let (_pg, mut conn) = setup().await; + + // root.drv.drv^out → unresolved-intermediate.drv + insert_step(&mut conn, 1, 1, &sp("root.drv.drv")).await; + insert_output(&mut conn, 1, 1, "out", &sp("unresolved-intermediate.drv")).await; + + // resolved-intermediate.drv^out → final-result + insert_step(&mut conn, 2, 1, &sp("resolved-intermediate.drv")).await; + insert_output(&mut conn, 2, 1, "out", &sp("final-result")).await; + + let mut map = std::collections::HashMap::new(); + map.insert( + 
sp("unresolved-intermediate.drv"), + sp("resolved-intermediate.drv"), + ); + + let result = resolve_chain_with_map( + &mut conn, + &map, + &sp("root.drv.drv"), + &[&on("out"), &on("out")], + ) + .await; + assert_eq!(result, Some(sp("final-result"))); + } } diff --git a/subprojects/hydra-queue-runner/src/state/drv.rs b/subprojects/hydra-queue-runner/src/state/drv.rs index 97af92693..681b683b2 100644 --- a/subprojects/hydra-queue-runner/src/state/drv.rs +++ b/subprojects/hydra-queue-runner/src/state/drv.rs @@ -1,3 +1,5 @@ +use std::collections::BTreeSet; + use nix_utils::SingleDerivedPath; /// Output names of intermediate derivations for a dynamic derivation @@ -42,3 +44,21 @@ pub fn flatten_chain( chain.0.push(output_name.clone()); (root, chain) } + +/// Extract `Built` input dependencies from a derivation. +/// +/// Returns `(root_drv_path, relation)` pairs. `Opaque` (source) inputs are +/// skipped — only derivation build dependencies are returned. For each +/// `Built` input, the outermost output name (what we consume) is discarded; +/// intermediate output names form the [`OutputNameChain`]. +pub fn input_drvs( + drv: &nix_utils::Derivation, +) -> BTreeSet<(nix_utils::StorePath, OutputNameChain)> { + drv.inputs + .iter() + .filter_map(|sdp| match sdp { + SingleDerivedPath::Opaque(_) => None, + SingleDerivedPath::Built { drv_path, .. } => Some(flatten_path(drv_path)), + }) + .collect() +} diff --git a/subprojects/hydra-queue-runner/src/state/mod.rs b/subprojects/hydra-queue-runner/src/state/mod.rs index 0bf627d18..60f324ac2 100644 --- a/subprojects/hydra-queue-runner/src/state/mod.rs +++ b/subprojects/hydra-queue-runner/src/state/mod.rs @@ -521,7 +521,7 @@ impl State { // original step's drv path differs from the resolved one, so // completing the resolved step wouldn't clear the dep). 
for rdep in step_info.step.clone_rdeps() { - if let Some(rdep) = rdep.upgrade() { + if let Some(rdep) = rdep.step.upgrade() { rdep.remove_dep(&step_info.step); resolved_step.make_rdep(&rdep); } @@ -1388,6 +1388,74 @@ impl State { tx.commit().await?; } + // Process dynamic rdeps first, as we must add new step dependencies for dynamically + // generated derivations + { + for (dep_step, output_name, relation) in item.step_info.step.pop_dynamic_rdeps() { + let Some(dependent_step) = dep_step.upgrade() else { + continue; + }; + + let resolved_drv = output.outputs.get(&output_name).cloned().ok_or_else(|| { + anyhow::anyhow!( + "Dynamic rdep references output `{output_name}` not produced by {drv_path}" + ) + })?; + + // Find a build associated with this step. For intermediate steps + // (not top-level), `direct` is empty, so we walk the dependency + // chain via `get_dependents` to find the owning build. + let build = if let Some(b) = direct.get(0) { + b.clone() + } else { + let mut dependents = HashSet::new(); + let mut visited_steps = HashSet::new(); + item.step_info + .step + .get_dependents(&mut dependents, &mut visited_steps); + let Some(b) = dependents.into_iter().next() else { + tracing::warn!("Finished step does not have associated build"); + continue; + }; + b + }; + + // Create the actual step for the new derivation. + // finished_drvs is not necessary as it is only a memoization table to reduce + // checks if a dependency is finished from the database. 
// new_steps is not necessary either, as the step created here is made runnable directly below.
return CreateStepResult::None; - }; let step2 = step.clone(); - let mut stream = futures::StreamExt::map(tokio_stream::iter(input_drvs), |i| { - let build = build.clone(); - let step = step2.clone(); - let finished_drvs = finished_drvs.clone(); - let new_steps = new_steps.clone(); - let new_runnable = new_runnable.clone(); - async move { - Box::pin(self.create_step( - // conn, - build, - i, - None, - Some(step), - finished_drvs, - new_steps, - new_runnable, - )) - .await - } - }) - .buffered(25); - while let Some(v) = tokio_stream::StreamExt::next(&mut stream).await { - match v { + let mut stream = + futures::StreamExt::map(tokio_stream::iter(input_drvs), |(input_path, relation)| { + let build = build.clone(); + let step = step2.clone(); + let finished_drvs = finished_drvs.clone(); + let new_steps = new_steps.clone(); + let new_runnable = new_runnable.clone(); + + async move { + Box::pin(self.create_step( + build, + input_path, + None, + Some((step, relation)), + finished_drvs, + new_steps, + new_runnable, + )) + .await + } + }) + .buffered(25); + while let Some(result) = tokio_stream::StreamExt::next(&mut stream).await { + match result { CreateStepResult::None => (), CreateStepResult::Valid(dep) => { if !dep.get_finished() && !dep.get_previous_failure() { diff --git a/subprojects/hydra-queue-runner/src/state/step.rs b/subprojects/hydra-queue-runner/src/state/step.rs index b6e0a087b..55e16263e 100644 --- a/subprojects/hydra-queue-runner/src/state/step.rs +++ b/subprojects/hydra-queue-runner/src/state/step.rs @@ -8,6 +8,15 @@ use hashbrown::{HashMap, HashSet}; use super::{Build, Jobset}; use db::models::BuildID; +use super::drv::OutputNameChain; + +#[derive(Debug, Clone)] +pub struct ReverseDep { + /// The step that depends on us + pub step: Weak, + pub relation: OutputNameChain, +} + #[derive(Debug)] pub struct StepAtomicState { /// Whether the step has finished initialisation. 
@@ -66,10 +75,11 @@ impl StepAtomicState { #[derive(Debug)] pub(super) struct StepState { - /// The build steps on which this step depends. + /// The resolved build steps on which this step depends deps: HashSet>, /// The build steps that depend on this step. - rdeps: Vec>, + /// An empty `relation` signifies a regular (non-dynamic) reverse dependency. + rdeps: Vec, /// Builds that have this step as the top-level derivation. builds: Vec>, /// Jobsets to which this step belongs. Used for determining scheduling priority. @@ -186,16 +196,6 @@ impl Step { }) } - pub fn get_input_drvs(&self) -> Option> { - let drv = self.drv.load_full(); - drv.as_ref().map(|drv| { - harmonia_store_core::derivation::DerivationInputs::from(&drv.inputs) - .drvs - .into_keys() - .collect::>() - }) - } - pub fn get_after(&self) -> jiff::Timestamp { self.atomic_state.after.load() } @@ -270,7 +270,9 @@ impl Step { }; for rdep in rdeps { - let Some(rdep) = rdep.upgrade() else { continue }; + let Some(rdep) = rdep.step.upgrade() else { + continue; + }; rdep.get_dependents(builds, steps); } } @@ -286,7 +288,7 @@ impl Step { let mut state = self.state.write(); state.rdeps.retain(|rdep| { - let Some(rdep) = rdep.upgrade() else { + let Some(rdep) = rdep.step.upgrade() else { return false; }; @@ -374,21 +376,45 @@ impl Step { pub fn make_rdep(self: &Arc, dep: &Arc) { dep.add_dep(self.clone()); let mut state = self.state.write(); - state.rdeps.push(Arc::downgrade(dep)); + state.rdeps.push(ReverseDep { + step: Arc::downgrade(dep), + relation: OutputNameChain::default(), + }); self.atomic_state .rdeps_len .store(state.rdeps.len() as u64, Ordering::Relaxed); } - pub fn clone_rdeps(&self) -> Vec> { + pub fn clone_rdeps(&self) -> Vec { let state = self.state.read(); state.rdeps.clone() } + /// Pop one level of dynamic indirection from each dynamic rdep, + /// returning `(dependent_step, popped_output_name, remaining_relation)` triples. 
+ /// + /// The rdep entries remain in the list (with shortened relations) so that + /// `make_rdeps_runnable` can still clean up forward deps. + /// + /// We collect into a `Vec` rather than returning an iterator because the + /// write lock on the step's state must be released before the caller can + /// do async work (e.g. `create_step`) with the results. + pub fn pop_dynamic_rdeps(&self) -> Vec<(Weak, nix_utils::OutputName, OutputNameChain)> { + let mut state = self.state.write(); + state + .rdeps + .iter_mut() + .filter_map(|rdep| { + let output_name = rdep.relation.pop()?; + Some((rdep.step.clone(), output_name, rdep.relation.clone())) + }) + .collect() + } + pub fn add_referring_data( &self, referring_build: Option<&Arc>, - referring_step: Option<&Arc>, + referring_step: Option<(&Arc, OutputNameChain)>, ) { if referring_build.is_none() && referring_step.is_none() { return; @@ -398,8 +424,11 @@ impl Step { if let Some(referring_build) = referring_build { state.builds.push(Arc::downgrade(referring_build)); } - if let Some(referring_step) = referring_step { - state.rdeps.push(Arc::downgrade(referring_step)); + if let Some((referring_step, relation)) = referring_step { + state.rdeps.push(ReverseDep { + step: Arc::downgrade(referring_step), + relation, + }); self.atomic_state .rdeps_len .store(state.rdeps.len() as u64, Ordering::Relaxed); @@ -531,7 +560,7 @@ impl Steps { &self, drv_path: &nix_utils::StorePath, referring_build: Option<&Arc>, - referring_step: Option<&Arc>, + referring_step: Option<(&Arc, OutputNameChain)>, ) -> (Arc, bool) { let mut is_new = false; let mut steps = self.inner.write(); diff --git a/subprojects/hydra-queue-runner/src/state/step_info.rs b/subprojects/hydra-queue-runner/src/state/step_info.rs index 6ef2d812d..134bc4a89 100644 --- a/subprojects/hydra-queue-runner/src/state/step_info.rs +++ b/subprojects/hydra-queue-runner/src/state/step_info.rs @@ -73,46 +73,55 @@ impl StepInfo { let mut conn = db.get().await.ok()?; + // Memoize depth-1 
lookups across all chains resolved in this call. + let mut memo = std::collections::HashMap::< + (nix_utils::StorePath, nix_utils::OutputName), + Option, + >::new(); + drv.try_resolve(store_dir, &mut |inputs| { tokio::task::block_in_place(|| { - // Flatten each SingleDerivedPath chain into (root, [outputs...]) - // and resolve everything in a single recursive SQL query. + let rt = tokio::runtime::Handle::current(); + let chains: Vec<_> = inputs .iter() .map(|(drv_path, output_name)| flatten_chain(drv_path, output_name)) .collect(); - // Translate unresolved drv paths to resolved ones using the - // in-memory map, so the SQL query finds outputs under the - // resolved drv path directly. - let translated_roots: Vec<_> = chains - .iter() - .map(|(root, _)| { - resolved_drv_map - .get(root) - .cloned() - .unwrap_or_else(|| root.clone()) - }) - .collect(); - - // SQL needs forward order; OutputNameChain stores reversed. - let chain_refs: Vec<_> = chains + // Resolve each chain one level at a time, translating + // through the in-memory resolved-drv map between levels. + chains .iter() - .zip(&translated_roots) - .map(|((_, chain), root)| (root, chain.0.iter().rev().collect::>())) - .collect(); - - let sql_input: Vec<_> = chain_refs - .iter() - .map(|(root, outputs)| (*root, outputs.as_slice())) - .collect(); - - tokio::runtime::Handle::current() - .block_on(conn.resolve_drv_output_chains(store_dir, &sql_input)) - .unwrap_or_else(|e| { - tracing::warn!("resolve_drv_output_chains failed: {e}"); - vec![None; inputs.len()] + .map(|(root, chain)| { + let mut current = root.clone(); + // OutputNameChain is in stack order; iterate + // reversed for forward (root-to-leaf) order. 
+ for output_name in chain.0.iter().rev() { + let translated = resolved_drv_map + .get(¤t) + .cloned() + .unwrap_or_else(|| current.clone()); + let key = (translated, output_name.clone()); + let result = match memo.get(&key) { + Some(cached) => cached.clone(), + None => { + let r = rt + .block_on( + conn.resolve_drv_output(store_dir, &key.0, &key.1), + ) + .unwrap_or_else(|e| { + tracing::warn!("resolve_drv_output failed: {e}"); + None + }); + memo.insert(key, r.clone()); + r + } + }; + current = result?; + } + Some(current) }) + .collect() }) }) } diff --git a/subprojects/hydra-tests/content-addressed/dyn-drv-non-trivial.t b/subprojects/hydra-tests/content-addressed/dyn-drv-non-trivial.t index 142989e68..da143006f 100644 --- a/subprojects/hydra-tests/content-addressed/dyn-drv-non-trivial.t +++ b/subprojects/hydra-tests/content-addressed/dyn-drv-non-trivial.t @@ -4,10 +4,6 @@ use warnings; use Setup; use Test2::V0; -# FIXME now that we're properly resolving things in Hydra rather than Nix, -# dynamic derivations stopped-fake working -plan skip_all => 'dynamic derivation resolution not yet implemented'; - # Adapted from https://github.com/NixOS/nix/blob/master/tests/functional/dyn-drv/non-trivial.nix # # A single derivation uses recursive-nix to dynamically create a DAG of @@ -43,6 +39,38 @@ if ($wrapper) { $wrapper->discard_changes; is($wrapper->finished, 1, "wrapper should be finished"); is($wrapper->buildstatus, 0, "wrapper should succeed"); + + # Full dynamic derivation chain: 12 steps total + # 1. make-derivations.drv.drv (status=0, build makeDerivations) + # 2. build-a.drv (status=0, build a) + # 3. build-c.drv (status=13, resolve c) + # 4. build-b.drv (status=13, resolve b) + # 5. build-b.drv (status=0, build resolved b) + # 6. build-c.drv (status=0, build resolved c) + # 7. build-d.drv (status=13, resolve d) + # 8. build-d.drv (status=0, build resolved d) + # 9. make-derivations.drv (status=13, resolve e — named after makeDerivations output) + # 10. 
make-derivations.drv (status=0, build resolved e) + # 11. wrapper.drv (status=13, resolve wrapper) + # 12. wrapper.drv (status=0, build resolved wrapper) + my @steps = $wrapper->buildsteps->search({}, { order_by => 'stepnr' })->all; + is(scalar @steps, 12, "wrapper should have 12 build steps"); + + # Check that derivations a-d each got a successful (status=0) build step. + # build-e is named make-derivations.drv (the output of makeDerivations), + # so we check for it separately. + my @built = sort map { + my $drv = $_->drvpath // ""; + (defined $_->status && $_->status == 0 && $drv =~ m{-build-([a-d])\.drv$}) ? $1 : () + } @steps; + is(\@built, [qw(a b c d)], "derivations a-d should each have a successful build step"); + + # build-e is the make-derivations.drv step (status=0, not the .drv.drv) + my @build_e = grep { + my $drv = $_->drvpath // ""; + defined $_->status && $_->status == 0 && $drv =~ m{-make-derivations\.drv$} + } @steps; + is(scalar @build_e, 1, "build-e (make-derivations.drv) should have a successful build step"); } done_testing; diff --git a/subprojects/hydra-tests/content-addressed/dyn-drv.t b/subprojects/hydra-tests/content-addressed/dyn-drv.t index cb6bf6c30..44e6af0cf 100644 --- a/subprojects/hydra-tests/content-addressed/dyn-drv.t +++ b/subprojects/hydra-tests/content-addressed/dyn-drv.t @@ -4,10 +4,6 @@ use warnings; use Setup; use Test2::V0; -# FIXME now that we're properly resolving things in Hydra rather than Nix, -# dynamic derivations stopped-fake working -plan skip_all => 'dynamic derivation resolution not yet implemented'; - # Based on https://github.com/NixOS/nix/blob/14ffc1787182b8702910788aea02bd5804afb32e/tests/functional/dyn-drv/text-hashed-output.nix # # A single derivation produces a .drv file as its output; another @@ -30,28 +26,51 @@ is(nrQueuedBuildsForJobset($jobset), 1, "Should queue 1 build (wrapper)"); my @builds = queuedBuildsForJobset($jobset); ok(runBuilds($ctx, @builds), "All dynamic derivation builds should 
complete"); -# hello and producingDrv are standard CA derivations, so they must succeed. -for my $build (grep { $_->job ne 'wrapper' } @builds) { - $build->discard_changes; - is($build->finished, 1, "Build '" . $build->job . "' should be finished"); - is($build->buildstatus, 0, "Build '" . $build->job . "' should succeed"); -} - -# wrapper is the dynamic derivation consumer. +# wrapper is the only queued build — it is the dynamic derivation consumer. # It exercises the full resolution path: build producingDrv, discover the .drv # at its output, resolve via try_resolve + flatten_chain, build the resolved drv. -my ($wrapper) = grep { $_->job eq 'wrapper' } @builds; +my ($wrapper) = @builds; ok(defined $wrapper, "wrapper (dynamic derivation consumer) build should exist"); if ($wrapper) { $wrapper->discard_changes; is($wrapper->finished, 1, "wrapper should be finished"); is($wrapper->buildstatus, 0, "wrapper should succeed"); - # Hydra currently doesn't understand the dynamic derivation structure, - # so it only sees 2 build steps (producingDrv + wrapper itself) rather - # than the full chain (producingDrv + dynamic hello + wrapper). - my $nrSteps = $wrapper->buildsteps->count; - is($nrSteps, 2, "wrapper should have 2 build steps (dynamic structure not yet tracked)"); + # Full dynamic derivation chain: 4 steps total + # 1. hello.drv.drv (status=0, build producingDrv — outputs a .drv file) + # 2. hello.drv (status=0, build the dynamically-produced derivation) + # 3. dyn-drv-wrapper.drv (status=13, resolve wrapper — CA resolution step) + # 4. 
dyn-drv-wrapper.drv (status=0, build the resolved wrapper derivation) + my @steps = $wrapper->buildsteps->search({}, { order_by => 'stepnr' })->all; + is(scalar @steps, 4, "wrapper should have 4 build steps"); + + # producingDrv: builds the .drv file (hello.drv.drv) + my @producing_drv = grep { + my $drv = $_->drvpath // ""; + defined $_->status && $_->status == 0 && $drv =~ m{-hello\.drv\.drv$} + } @steps; + is(scalar @producing_drv, 1, "producingDrv (hello.drv.drv) should have a successful build step"); + + # The dynamically-produced derivation (hello.drv, not hello.drv.drv) + my @dyn_drv = grep { + my $drv = $_->drvpath // ""; + defined $_->status && $_->status == 0 && $drv =~ m{-hello\.drv$} + } @steps; + is(scalar @dyn_drv, 1, "dynamically-produced hello.drv should have a successful build step"); + + # wrapper CA resolution step (status=13 means Resolved) + my @resolved = grep { + my $drv = $_->drvpath // ""; + defined $_->status && $_->status == 13 && $drv =~ m{-dyn-drv-wrapper\.drv$} + } @steps; + is(scalar @resolved, 1, "wrapper should have a resolution step (status=Resolved)"); + + # wrapper final build (status=0) + my @wrapper_built = grep { + my $drv = $_->drvpath // ""; + defined $_->status && $_->status == 0 && $drv =~ m{-dyn-drv-wrapper\.drv$} + } @steps; + is(scalar @wrapper_built, 1, "resolved wrapper should have a successful build step"); } done_testing;