Commit 502276a

implement more required features
1 parent cd5a389 commit 502276a

9 files changed

Lines changed: 685 additions & 402 deletions

plugins/provider-circleci/src/main.rs

Lines changed: 233 additions & 284 deletions
Large diffs are not rendered by default.

proto/plugin.proto

Lines changed: 6 additions & 0 deletions
@@ -128,6 +128,12 @@ message JobDefinition {
   repeated string source_files = 13; // Expanded source file patterns
   repeated PackageSpec package_specs = 14;
   repeated string services = 15; // Declared service containers
+  repeated MatrixRow matrix_rows = 16; // Explicit matrix rows (alternative to dimensions)
+  string stage = 17; // Stage this job belongs to
+}
+
+message MatrixRow {
+  map<string, string> values = 1;
 }
 
 message MatrixValue {
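
Together, the two new fields let a job carry either a dimension map (the existing matrix field) or a pre-expanded list of rows. A sketch of how a provider plugin might consume them, using hand-written stand-ins for the prost-generated types and an expand_matrix helper that is illustrative rather than code from this commit:

use std::collections::HashMap;

// Minimal stand-ins for the prost-generated types from plugin.proto; the real
// generated code lives in the plugin protocol module. Illustrative only.
struct MatrixValue { values: Vec<String> }
struct MatrixRow { values: HashMap<String, String> }
struct JobDefinition {
    matrix: HashMap<String, MatrixValue>, // dimension form (existing field)
    matrix_rows: Vec<MatrixRow>,          // explicit rows (new field 16)
}

// One way a provider could expand either form into concrete rows; this helper
// is a sketch, not part of the commit.
fn expand_matrix(job: &JobDefinition) -> Vec<HashMap<String, String>> {
    if !job.matrix_rows.is_empty() {
        // Explicit rows are taken verbatim.
        return job.matrix_rows.iter().map(|row| row.values.clone()).collect();
    }
    // Otherwise build the cartesian product of the dimension values.
    let mut rows: Vec<HashMap<String, String>> = vec![HashMap::new()];
    for (key, dim) in &job.matrix {
        let mut next = Vec::new();
        for base in &rows {
            for value in &dim.values {
                let mut row = base.clone();
                row.insert(key.clone(), value.clone());
                next.push(row);
            }
        }
        rows = next;
    }
    rows
}

fn main() {
    let job = JobDefinition {
        matrix: HashMap::from([(
            "ruby".to_string(),
            MatrixValue { values: vec!["3.2".into(), "3.3".into()] },
        )]),
        matrix_rows: Vec::new(),
    };
    // With no explicit rows, the single "ruby" dimension expands to two rows.
    assert_eq!(expand_matrix(&job).len(), 2);
}

Prost-generated map fields default to HashMap, so dimension iteration order is not stable; a provider that needs deterministic output would sort the keys before expanding.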

src/loader.rs

Lines changed: 86 additions & 29 deletions
@@ -154,43 +154,66 @@ fn load_jobs_and_workflows(config_dir: &Path, config: &mut CigenConfig) -> Resul
 
             let workflow_id = workflow_name.to_string();
             let workflow_config = load_workflow_config(&workflow_path)?;
-            if !config.workflows.contains_key(&workflow_id) {
-                config
-                    .workflows
-                    .insert(workflow_id.clone(), workflow_config);
-            }
+            // Insert workflow config if not already present (load_workflow_config returns default if not found)
+            // We want to insert it even if default to track the workflow existence.
+            // But we should check if we already loaded it from a file in the loop below?
+            // Actually, the loop handles directories first, then files.
+            // If we have a directory, we load config from inside it.
+            // If we check config.workflows first, we might skip loading/overwriting?
+            // Let's just insert/update.
+            config
+                .workflows
+                .insert(workflow_id.clone(), workflow_config.clone());
 
             let jobs_dir = workflow_path.join("jobs");
             if !jobs_dir.exists() {
                 continue;
             }
 
-            for job_file in fs::read_dir(&jobs_dir)? {
-                let job_file = job_file?;
-                let job_path = job_file.path();
-
-                if !matches!(
-                    job_path.extension().and_then(|s| s.to_str()),
-                    Some("yml" | "yaml")
-                ) {
-                    continue;
+            // Stack for recursive traversal: (current_path, stage_name)
+            // If stage_name is None, we are at root jobs dir.
+            let mut stack = vec![(jobs_dir.clone(), None::<String>)];
+
+            while let Some((dir, current_stage)) = stack.pop() {
+                for entry in fs::read_dir(&dir)? {
+                    let entry = entry?;
+                    let path = entry.path();
+
+                    if path.is_dir() {
+                        let dir_name = path
+                            .file_name()
+                            .and_then(|s| s.to_str())
+                            .unwrap_or_default()
+                            .to_string();
+
+                        let next_stage = current_stage.clone().or(Some(dir_name));
+                        stack.push((path, next_stage));
+                    } else if matches!(
+                        path.extension().and_then(|s| s.to_str()),
+                        Some("yml" | "yaml")
+                    ) {
+                        let stage = current_stage.clone().unwrap_or_else(|| "default".to_string());
+
+                        // Use path relative to jobs_dir as job_id
+                        let relative_path = path.strip_prefix(&jobs_dir).unwrap_or(&path);
+                        let job_id = relative_path.with_extension("").to_string_lossy().to_string();
+
+                        let job_yaml = fs::read_to_string(&path)?;
+                        let mut job: Job = serde_yaml::from_str(&job_yaml)
+                            .with_context(|| format!("Failed to parse {}", path.display()))?;
+
+                        job.workflow = Some(workflow_name.to_string());
+                        job.stage = Some(stage.clone());
+                        migrate_requires_to_needs(&mut job);
+
+                        config.jobs.insert(job_id, job);
+                    }
                 }
-
-                let job_name = job_path
-                    .file_stem()
-                    .and_then(|s| s.to_str())
-                    .context("Invalid job filename")?
-                    .to_string();
-
-                let job_yaml = fs::read_to_string(&job_path)?;
-                let mut job: Job = serde_yaml::from_str(&job_yaml)
-                    .with_context(|| format!("Failed to parse {}", job_path.display()))?;
-
-                job.workflow = Some(workflow_name.to_string());
-                migrate_requires_to_needs(&mut job);
-
-                config.jobs.insert(job_name, job);
             }
+
+            // Resolve dependencies (siblings)
+            resolve_job_dependencies(&mut config.jobs);
+
         } else if matches!(
             workflow_path.extension().and_then(|s| s.to_str()),
             Some("yml" | "yaml")
@@ -212,6 +235,40 @@ fn load_jobs_and_workflows(config_dir: &Path, config: &mut CigenConfig) -> Resul
     Ok(())
 }
 
+fn resolve_job_dependencies(jobs: &mut HashMap<String, Job>) {
+    let job_keys: Vec<String> = jobs.keys().cloned().collect();
+
+    for (job_id, job) in jobs.iter_mut() {
+        for need in &mut job.needs {
+            // If need is already a valid key, skip
+            if job_keys.contains(need) {
+                continue;
+            }
+
+            // Try to resolve as sibling
+            let parent_dir = Path::new(job_id).parent().unwrap_or(Path::new(""));
+            let sibling_path = parent_dir.join(need.as_str());
+            let sibling_key = sibling_path.to_string_lossy().to_string();
+
+            if job_keys.contains(&sibling_key) {
+                *need = sibling_key;
+                continue;
+            }
+
+            // Check if it's a simple name that exists elsewhere (ambiguous but maybe intended?)
+            // For now, only support siblings or full paths.
+        }
+    }
+}
+
+fn should_prefix_job(config: &WorkflowConfig, stage: &str) -> bool {
+    if stage == "default" {
+        config.default_stage_prefix
+    } else {
+        config.stage_prefix
+    }
+}
+
 fn load_workflow_config(workflow_path: &Path) -> Result<WorkflowConfig> {
     for candidate in ["config.yml", "config.yaml"] {
         let candidate_path = workflow_path.join(candidate);
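
The traversal above gives a file directly under jobs/ its bare name and the "default" stage, while a file in a subdirectory gets a path-style id with the first directory level as its stage; resolve_job_dependencies then rewrites bare needs entries relative to the depending job's own directory. A standalone sketch of that id and sibling arithmetic, with hypothetical job names:

use std::path::Path;

// Standalone sketch, not code from this commit; job names below are hypothetical.
// Mirrors the id derivation in the loader: path relative to jobs/, extension stripped.
fn job_id_for(relative_path: &str) -> String {
    Path::new(relative_path)
        .with_extension("")
        .to_string_lossy()
        .to_string()
}

// Mirrors the sibling lookup in resolve_job_dependencies: a bare `needs` entry is
// joined onto the depending job's parent directory.
fn sibling_key(job_id: &str, need: &str) -> String {
    Path::new(job_id)
        .parent()
        .unwrap_or(Path::new(""))
        .join(need)
        .to_string_lossy()
        .to_string()
}

fn main() {
    // jobs/lint.yml       -> id "lint",       stage "default"
    // jobs/test/rspec.yml -> id "test/rspec", stage "test"
    assert_eq!(job_id_for("lint.yml"), "lint");
    assert_eq!(job_id_for("test/rspec.yml"), "test/rspec");

    // A job "test/rspec" with `needs: [setup]` resolves to "test/setup" if that
    // job exists; a root-level job keeps the bare name. (Unix separators assumed.)
    assert_eq!(sibling_key("test/rspec", "setup"), "test/setup");
    assert_eq!(sibling_key("lint", "setup"), "setup");
}

The new should_prefix_job helper (reading stage_prefix and default_stage_prefix from the workflow config) appears intended to control whether that stage name is later prefixed onto generated job names.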

src/orchestrator/convert.rs

Lines changed: 28 additions & 20 deletions
@@ -2,11 +2,11 @@ use std::collections::HashMap;
 
 use crate::plugin::protocol::{
     self, CacheDefinition, CigenSchema, CommandDefinition as ProtoCommandDefinition,
-    CommandParameter as ProtoCommandParameter, CustomStep, JobDefinition, MatrixValue,
+    CommandParameter as ProtoCommandParameter, CustomStep, JobDefinition, MatrixRow, MatrixValue,
     PackageSpec as ProtoPackageSpec, ProjectConfig, RestoreCacheStep, RunStep, RunnerDefinition,
-    SaveCacheStep, SkipConfig, Step, StringList, UsesStep, WorkflowDefinition,
+    SaveCacheStep, SkipConfig, Step, StringList, UsesStep, WorkflowConditionKind as ProtoWorkflowConditionKind, WorkflowDefinition,
 };
-use crate::schema;
+use crate::schema::{self, JobMatrix};
 use serde_yaml::Value;
 
 /// Convert schema::CigenConfig to protobuf CigenSchema
@@ -100,14 +100,27 @@ fn command_parameter_to_proto(parameter: &schema::CommandParameter) -> ProtoComm
 }
 
 fn job_to_proto(id: &str, job: &schema::Job) -> JobDefinition {
+    let (matrix_dimensions_map, matrix_rows_vec) = match &job.matrix {
+        Some(JobMatrix::Dimensions(dims)) => (
+            dims.iter()
+                .map(|(key, value)| (key.clone(), MatrixValue { values: value.clone() }))
+                .collect(),
+            Vec::new(),
+        ),
+        Some(JobMatrix::Explicit(rows)) => (
+            HashMap::new(),
+            rows.iter()
+                .map(|row| MatrixRow { values: row.clone() })
+                .collect(),
+        ),
+        None => (HashMap::new(), Vec::new()),
+    };
+
     JobDefinition {
         id: id.to_string(),
         needs: job.needs.clone(),
-        matrix: job
-            .matrix
-            .iter()
-            .map(|(key, value)| (key.clone(), matrix_value_to_proto(value)))
-            .collect(),
+        matrix: matrix_dimensions_map,
+        matrix_rows: matrix_rows_vec,
         packages: job.packages.iter().map(|pkg| pkg.name.clone()).collect(),
         steps: job.steps.iter().map(step_to_proto).collect(),
         skip_if: job.skip_if.as_ref().map(skip_config_to_proto),
@@ -133,6 +146,7 @@ fn job_to_proto(id: &str, job: &schema::Job) -> JobDefinition {
         source_files: job.source_files.clone(),
         package_specs: job.packages.iter().map(package_to_proto).collect(),
         services: job.services.clone(),
+        stage: job.stage.clone().unwrap_or_default(),
     }
 }
 
@@ -156,12 +170,12 @@ fn workflow_condition_to_proto(
         .unwrap_or(schema::WorkflowConditionKind::Parameter)
     {
         schema::WorkflowConditionKind::Parameter => {
-            protocol::WorkflowConditionKind::Parameter as i32
+            ProtoWorkflowConditionKind::Parameter as i32
        }
-        schema::WorkflowConditionKind::Variable => protocol::WorkflowConditionKind::Variable as i32,
-        schema::WorkflowConditionKind::Env => protocol::WorkflowConditionKind::Env as i32,
+        schema::WorkflowConditionKind::Variable => ProtoWorkflowConditionKind::Variable as i32,
+        schema::WorkflowConditionKind::Env => ProtoWorkflowConditionKind::Env as i32,
         schema::WorkflowConditionKind::Expression => {
-            protocol::WorkflowConditionKind::Expression as i32
+            ProtoWorkflowConditionKind::Expression as i32
        }
     };
 
@@ -188,13 +202,7 @@ fn package_to_proto(package: &schema::PackageSpec) -> ProtoPackageSpec {
     }
 }
 
-fn matrix_value_to_proto(value: &schema::MatrixDimension) -> MatrixValue {
-    match value {
-        schema::MatrixDimension::List(values) => MatrixValue {
-            values: values.clone(),
-        },
-    }
-}
+
 
 fn step_to_proto(step: &schema::Step) -> Step {
     match step {
@@ -345,7 +353,7 @@ mod tests {
             "test".to_string(),
             schema::Job {
                 needs: vec![],
-                matrix: HashMap::new(),
+                matrix: None, // Updated for Option<JobMatrix>
                 packages: vec![schema::PackageSpec::from_name("ruby".to_string())],
                 services: vec![],
                 environment: HashMap::new(),
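
job_to_proto now branches on a JobMatrix enum with Dimensions and Explicit variants; the schema-side definition is not part of this diff. A plausible shape, assuming an untagged serde enum so matrix: can be written either as a map of value lists or as a list of concrete rows:

use std::collections::HashMap;
use serde::Deserialize;

// A plausible shape for the schema-side enum; the real definition in src/schema
// may differ in names, derives, or variants.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum JobMatrix {
    // matrix:
    //   ruby: ["3.2", "3.3"]
    //   db: ["postgres"]
    Dimensions(HashMap<String, Vec<String>>),
    // matrix:
    //   - { ruby: "3.2", db: "postgres" }
    //   - { ruby: "3.3", db: "postgres" }
    Explicit(Vec<HashMap<String, String>>),
}

fn main() {
    let dims: JobMatrix = serde_yaml::from_str("ruby: [\"3.2\", \"3.3\"]").unwrap();
    let rows: JobMatrix = serde_yaml::from_str("- ruby: \"3.2\"\n- ruby: \"3.3\"").unwrap();
    assert!(matches!(dims, JobMatrix::Dimensions(_)));
    assert!(matches!(rows, JobMatrix::Explicit(_)));
}

With an untagged enum the two YAML spellings are told apart purely by shape (mapping vs. sequence), which matches the two match arms above.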
