diff --git a/.github/workflows/deplo-main.yml b/.github/workflows/deplo-main.yml
index 977fc3d..655ebe8 100644
--- a/.github/workflows/deplo-main.yml
+++ b/.github/workflows/deplo-main.yml
@@ -12,12 +12,8 @@ on:
 
 env:
   DEPLO_GHACTION_CI_ID: ${{ github.run_id }}-${{ github.run_attempt }}
-  DEPLO_GHACTION_PR_URL: ${{ github.event.pull_request.url }}
   DEPLO_GHACTION_EVENT_DATA: ${{ toJson(github) }}
-  DEPLO_OVERWRITE_COMMIT: ${{ github.event.client_payload.commit }}
-  DEPLO_OVERWRITE_RELEASE_TARGET: ${{ github.event.client_payload.release_target }}
-  DEPLO_OVERWRITE_VERBOSITY: ${{ github.event.client_payload.verbosity }}
-  DEPLO_OVERWRITE_WORKFLOW: ${{ github.event.client_payload.workflow }}
+  DEPLO_OVERWRITE_VERBOSITY: ${{ github.event.client_payload.exec.verbosity }}
   SUNTOMI_VCS_ACCOUNT: ${{ secrets.SUNTOMI_VCS_ACCOUNT }}
   SUNTOMI_VCS_ACCOUNT_EMAIL: ${{ secrets.SUNTOMI_VCS_ACCOUNT_EMAIL }}
   SUNTOMI_VCS_ACCOUNT_KEY: ${{ secrets.SUNTOMI_VCS_ACCOUNT_KEY }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 9d5f751..a3c4ef3 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -39,12 +39,12 @@
             ]
         },
         {
-            "name": "Start",
+            "name": "Boot",
             "type": "lldb",
             "request": "launch",
             "program": "${workspaceFolder}/target/debug/cli",
             "args": [
-                "-v=3", "start", "-r=nightly", "-w=integrate"
+                "-v=3", "boot", "-p=payload.json"
             ]
         },
         {
diff --git a/Deplo.toml b/Deplo.toml
index ba7932f..9111c21 100644
--- a/Deplo.toml
+++ b/Deplo.toml
@@ -188,8 +188,8 @@
 docker push ghcr.io/suntomi/deplo:${DEPLO_RELEASE_VERSION}
 # deplo provides a unified command for controlling vcs.
 # `deplo vcs release` and `deplo vcs release-assets` are used for creating a release on a vcs like github/gitlab
 # with a unified interface.
-deplo -v=3 vcs release ${DEPLO_RELEASE_TAG} -o name=${DEPLO_RELEASE_NAME}
-deplo -v=3 vcs release-assets ${DEPLO_RELEASE_TAG} tools/docker/bin/cli --replace -o name=deplo-Linux
+deplo vcs release ${DEPLO_RELEASE_TAG} -o name=${DEPLO_RELEASE_NAME}
+deplo vcs release-assets ${DEPLO_RELEASE_TAG} tools/docker/bin/cli --replace -o name=deplo-Linux
 # deplo provides dynamically generated job outputs. you can use the values of 'foo' and 'fuga' in jobs that depend on this job.
 deplo job set-output foo bar
diff --git a/cli/src/command/run.rs b/cli/src/command/run.rs
index 0c02ce2..ff39fb3 100644
--- a/cli/src/command/run.rs
+++ b/cli/src/command/run.rs
@@ -25,7 +25,7 @@ impl command::Command for Run {
         self.config.prepare_workflow()?;
         let workflow = config::runtime::Workflow::new(args, &self.config, true)?;
         let config = self.config.borrow();
-        config.jobs.boot(&config, &workflow, &self.shell)?;
+        config.jobs.run(&config, &workflow, &self.shell)?;
         return Ok(());
     }
 }
\ No newline at end of file
diff --git a/core/res/ci/ghaction/main.yml.tmpl b/core/res/ci/ghaction/main.yml.tmpl
index 0af242b..8c6e31c 100644
--- a/core/res/ci/ghaction/main.yml.tmpl
+++ b/core/res/ci/ghaction/main.yml.tmpl
@@ -8,12 +8,8 @@ on:
 
 env:
   DEPLO_GHACTION_CI_ID: ${{{{ github.run_id }}}}-${{{{ github.run_attempt }}}}
-  DEPLO_GHACTION_PR_URL: ${{{{ github.event.pull_request.url }}}}
   DEPLO_GHACTION_EVENT_DATA: ${{{{ toJson(github) }}}}
-  DEPLO_OVERWRITE_COMMIT: ${{{{ github.event.client_payload.commit }}}}
-  DEPLO_OVERWRITE_RELEASE_TARGET: ${{{{ github.event.client_payload.release_target }}}}
-  DEPLO_OVERWRITE_VERBOSITY: ${{{{ github.event.client_payload.verbosity }}}}
-  DEPLO_OVERWRITE_WORKFLOW: ${{{{ github.event.client_payload.workflow }}}}
+  DEPLO_OVERWRITE_VERBOSITY: ${{{{ github.event.client_payload.exec.verbosity }}}}
 {secrets:>2}
 jobs:
   deplo-main:
diff --git a/core/src/ci.rs b/core/src/ci.rs
index 3292465..eacbc71 100644
--- a/core/src/ci.rs
+++ b/core/src/ci.rs
@@ -47,7 +47,7 @@ pub trait CI {
     fn process_env(&self) -> Result<HashMap<String, String>, Box<dyn Error>>;
     fn filter_workflows(
         &self, trigger: Option<WorkflowTrigger>
-    ) -> Result<Vec<(String, HashMap<String, config::AnyValue>)>, Box<dyn Error>>;
+    ) -> Result<Vec<config::runtime::Workflow>, Box<dyn Error>>;
     fn set_job_output(&self, job_name: &str, kind: OutputKind, outputs: HashMap<&str, &str>) -> Result<(), Box<dyn Error>>;
     fn job_output(&self, job_name: &str, kind: OutputKind, key: &str) -> Result<Option<String>, Box<dyn Error>>;
 }
diff --git a/core/src/ci/circleci.rs b/core/src/ci/circleci.rs
index 3d04f14..3498881 100644
--- a/core/src/ci/circleci.rs
+++ b/core/src/ci/circleci.rs
@@ -155,7 +155,7 @@ impl<'a, S: shell::Shell> ci::CI for CircleCI {
     }
     fn filter_workflows(
         &self, _trigger: Option<ci::WorkflowTrigger>
-    ) -> Result<Vec<(String, HashMap<String, config::AnyValue>)>, Box<dyn Error>> {
+    ) -> Result<Vec<config::runtime::Workflow>, Box<dyn Error>> {
         log::warn!("TODO: implement filter_workflows for circleci");
         Ok(vec![])
     }
diff --git a/core/src/ci/ghaction.rs b/core/src/ci/ghaction.rs
index 401d303..7954368 100644
--- a/core/src/ci/ghaction.rs
+++ b/core/src/ci/ghaction.rs
@@ -10,6 +10,7 @@ use chrono::{Utc, Duration};
 use log;
 use maplit::hashmap;
 use serde::{Deserialize, Serialize};
+use serde_json::{Value as JsonValue};
 
 use crate::config;
 use crate::ci;
@@ -34,7 +35,7 @@ enum EventPayload {
     // to avoid wrongly being matched as 'Repository' variant.
     RepositoryDispatch {
         action: String,
-        client_payload: config::AnyValue
+        client_payload: JsonValue
     },
     Repository {
         action: Option<String>
@@ -569,7 +570,7 @@ impl ci::CI for GhAction {
     }
     fn filter_workflows(
         &self, trigger: Option<ci::WorkflowTrigger>
-    ) -> Result<Vec<(String, HashMap<String, config::AnyValue>)>, Box<dyn Error>> {
+    ) -> Result<Vec<config::runtime::Workflow>, Box<dyn Error>> {
         let resolved_trigger = match trigger {
             Some(t) => t,
             // on github action, the full event payload is stored in the env var 'DEPLO_GHACTION_EVENT_DATA'
@@ -601,28 +602,30 @@ impl ci::CI for GhAction {
                 _ => {}
             },
             // repository_dispatch has a few possibilities.
-            // config::DEPLO_REMOTE_JOB_EVENT_TYPE => should contain workflow_name in client_payload
+            // config::DEPLO_REMOTE_JOB_EVENT_TYPE => should contain the workflow name in client_payload["name"]
             // config::DEPLO_MODULE_EVENT_TYPE => Module workflow invocation
             // others => Repository workflow invocation
             "repository_dispatch" => if let EventPayload::RepositoryDispatch{
                 action, client_payload } = &workflow_event.event {
                 if action == config::DEPLO_REMOTE_JOB_EVENT_TYPE {
-                    match client_payload.index("workflow_name") {
-                        Some(n) => match n.as_str() {
-                            Some(s) => matched_names.push(s.to_string()),
-                            None => panic!(
-                                "{}: event payload invalid {}",
-                                config::DEPLO_REMOTE_JOB_EVENT_TYPE, client_payload
-                            )
+                    match &client_payload["name"] {
+                        JsonValue::String(s) => {
+                            let workflow_name = s.to_string();
+                            if workflow_name == name {
+                                return Ok(vec![config::runtime::Workflow::with_payload(
+                                    &serde_json::to_string(client_payload)?
+                                )?]);
+                            }
                         },
-                        None => panic!(
+                        _ => panic!(
                             "{}: event payload invalid {}",
                             config::DEPLO_REMOTE_JOB_EVENT_TYPE, client_payload
                         )
                     }
                 } else if action == config::DEPLO_MODULE_EVENT_TYPE {
                     if let config::workflow::Workflow::Module(..) = v {
+                        log::warn!("TODO: should check current workflow is matched for the module?");
                         matched_names.push(name);
                     }
                 } else if let config::workflow::Workflow::Repository{..} = v {
@@ -644,18 +647,20 @@ impl ci::CI for GhAction {
             match config.workflows.get(&name).expect(&format!("workflow {} not found", name)) {
                 config::workflow::Workflow::Deploy|config::workflow::Workflow::Integrate => {
                     let target = vcs.release_target();
-                    matches.push((name, if target.is_none() { hashmap!{} } else { hashmap!{
-                        "release_target".to_string() => config::AnyValue::new(&target.unwrap())
-                    }}))
+                    matches.push(config::runtime::Workflow::with_context(
+                        name, if target.is_none() { hashmap!{} } else { hashmap!{
+                            "release_target".to_string() => config::AnyValue::new(&target.unwrap())
+                        }}
+                    ))
                 },
                 config::workflow::Workflow::Cron{schedules} => {
                     if let EventPayload::Schedule{ref schedule} = workflow_event.event {
                         match schedules.iter().find_map(|(k, v)| {
                             if v.resolve().as_str() == schedule.as_str() { Some(k) } else { None }
                         }) {
-                            Some(schedule_name) => matches.push((name, hashmap!{
-                                "schedule".to_string() => config::AnyValue::new(schedule_name)
-                            })),
+                            Some(schedule_name) => matches.push(config::runtime::Workflow::with_context(
+                                name, hashmap!{ "schedule".to_string() => config::AnyValue::new(schedule_name) }
+                            )),
                             None => {}
                         }
                     } else {
@@ -674,16 +679,16 @@ impl ci::CI for GhAction {
                         match events.iter().find_map(|(k, vs)| {
                             if vs.iter().find(|t| t == &key).is_some() { Some(k) } else { None }
                         }) {
-                            Some(event_name) => matches.push((name, hashmap!{
-                                "event".to_string() => config::AnyValue::new(event_name)
-                            })),
+                            Some(event_name) => matches.push(config::runtime::Workflow::with_context(
+                                name, hashmap!{ "event".to_string() => config::AnyValue::new(event_name) }
+                            )),
                             None => {}
                         }
                     } else {
                         panic!("event payload type does not match {}", workflow_event.event);
                     }
                 },
-                config::workflow::Workflow::Module(c) => {
+                config::workflow::Workflow::Module(_c) => {
                     panic!("not implemented yet")
                 }
             }
diff --git a/core/src/config/job.rs b/core/src/config/job.rs
index 5161185..f30a41f 100644
--- a/core/src/config/job.rs
+++ b/core/src/config/job.rs
@@ -790,14 +790,25 @@ impl Jobs {
         }
         log::info!("remote job {} id={} finished", job_name, job_id);
         Ok(())
-    }
+    }
+    pub fn run<S>(
+        &self, config: &config::Config, runtime_workflow_config: &config::runtime::Workflow, shell: &S
+    ) -> Result<(), Box<dyn Error>> where S: shell::Shell {
+        let name = &runtime_workflow_config.job.as_ref().expect("should have job setting").name;
+        let job = config.jobs.find(name).expect(&format!("job '{}' does not exist", name));
+        match job.run(shell, config, runtime_workflow_config)? {
+            Some(job_id) => self.wait_job(&job_id, name, config, runtime_workflow_config)?,
+            None => {}
+        };
+        Ok(())
+    }
     pub fn boot<S>(
         &self, config: &config::Config, runtime_workflow_config: &config::runtime::Workflow, shell: &S
     ) -> Result<(), Box<dyn Error>> where S: shell::Shell {
         let modules = &config.modules;
         let (_, ci) = modules.ci_by_env();
         for (name, job) in self.filter_as_map(config, runtime_workflow_config) {
-            if config::Config::is_running_on_ci() && runtime_workflow_config.job.is_none() {
+            if config::Config::is_running_on_ci() {
                 ci.schedule_job(name)?;
             } else {
                 match job.run(shell, config, runtime_workflow_config)? {
diff --git a/core/src/config/job/runner.rs b/core/src/config/job/runner.rs
index c2e2079..a23b0f0 100644
--- a/core/src/config/job/runner.rs
+++ b/core/src/config/job/runner.rs
@@ -22,10 +22,17 @@ impl<'a> Runner<'a> {
     }
     fn adjust_commit_hash(&self, commit: &Option<&str>) -> Result<(), Box<dyn Error>> {
         let config = self.config;
-        if !config::Config::is_running_on_ci() {
-            if let Some(ref c) = commit {
+        if let Some(ref c) = commit {
+            let vcs = config.modules.vcs();
+            if config::Config::is_running_on_ci() {
+                let target = vcs.commit_hash(Some(c))?;
+                let current = vcs.commit_hash(None)?;
+                if target != current {
+                    panic!("on CI, HEAD should already set to '{}' but '{}'", target, current);
+                }
+            } else {
                 log::debug!("change commit hash to {}", c);
-                config.modules.vcs().checkout(c, Some(config::DEPLO_VCS_TEMPORARY_WORKSPACE_NAME))?;
+                vcs.checkout(c, Some(config::DEPLO_VCS_TEMPORARY_WORKSPACE_NAME))?;
             }
         }
         Ok(())
     }
@@ -116,7 +123,13 @@ impl<'a> Runner<'a> {
         let config = self.config;
         let job = self.job;
         let exec = &runtime_workflow_config.exec;
+        // apply exec settings to the current workspace.
+        // verbosity is set via the env var DEPLO_OVERWRITE_VERBOSITY
+        // revision
+        self.adjust_commit_hash(&exec.revision.as_ref().map(|v| v.as_str()))?;
+        defer!{self.recover_branch().unwrap();};
         let command = runtime_workflow_config.command();
+        // silent
         let shell_settings = &mut match command {
             job::Command::Shell => shell::interactive(),
             _ => if exec.silent {
@@ -126,9 +139,6 @@ impl<'a> Runner<'a> {
             }
         };
         let (steps, main_command) = self.create_steps(&command);
-        // if current commit is modified, rollback after all operations are done.
-        self.adjust_commit_hash(&exec.revision.as_ref().map(|v| v.as_str()))?;
-        defer!{self.recover_branch().unwrap();};
         if exec.remote {
             log::debug!(
                 "force running job {} on remote with steps {} at {}",
diff --git a/core/src/config/runtime.rs b/core/src/config/runtime.rs
index c02e1fc..2012dda 100644
--- a/core/src/config/runtime.rs
+++ b/core/src/config/runtime.rs
@@ -8,6 +8,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::args::{Args};
 use crate::config;
+use crate::util::{merge_hashmap};
 
 /// runtime configuration for single job execution
 #[derive(Serialize, Deserialize, Clone)]
@@ -21,18 +22,45 @@ pub struct ExecOptions {
     pub timeout: Option<u64>,
 }
 impl ExecOptions {
+    pub fn default() -> Self {
+        Self {
+            envs: hashmap!{},
+            revision: None,
+            release_target: None,
+            verbosity: 0,
+            remote: false,
+            silent: false,
+            timeout: None,
+        }
+    }
     pub fn new<A: Args>(args: &A, config: &config::Container) -> Result<Self, Box<dyn Error>> {
-        Ok(Self {
-            envs: args.map_of("env"),
-            revision: args.value_of("revision").map(|v| v.to_string()),
-            release_target: args.value_of("release_target").map(|v| v.to_string()),
-            verbosity: config.borrow().runtime.verbosity,
-            remote: args.occurence_of("remote") > 0,
-            silent: args.occurence_of("silent") > 0,
-            timeout: args.value_of("timeout").map(|v| v.parse().expect(
+        let mut instance = Self::default();
+        instance.verbosity = config.borrow().runtime.verbosity;
+        instance.apply(args);
+        Ok(instance)
+    }
+    pub fn apply<A: Args>(&mut self, args: &A) {
+        self.envs = merge_hashmap(&self.envs, &args.map_of("env"));
+        match args.value_of("revision") {
+            Some(v) => self.revision = Some(v.to_string()),
+            None => {}
+        };
+        match args.value_of("release_target") {
+            Some(v) => self.release_target = Some(v.to_string()),
+            None => {}
+        };
+        match args.value_of("timeout") {
+            Some(v) => self.timeout = Some(v.parse().expect(
                 &format!("value of `timeout` should be a number but {}", v)
             )),
-        })
+            None => {}
+        };
+        // remote always applies the cmdline parameter, to avoid the remote parameter from the event payload
+        // being wrongly used.
+        self.remote = args.occurence_of("remote") > 0;
+        if args.occurence_of("silent") > 0 {
+            self.silent = true;
+        }
     }
 }
 
@@ -70,6 +98,21 @@ impl Job {
         );
         Self { name, command }
     }
+    pub fn apply<A: Args>(
+        &mut self, args: &A, config: &config::Container
+    ) {
+        match args.value_of("job").map(|v| v.to_string()) {
+            Some(v) => self.name = v,
+            None => {},
+        }
+        match config.borrow().jobs.find(&self.name) {
+            Some(j) => match Command::new_or_none(args, j) {
+                Some(c) => self.command = Some(c),
+                None => {}
+            },
+            None => {}
+        };
+    }
 }
 
 /// runtime configuration for workflow execution commands
@@ -88,20 +131,28 @@ pub struct Workflow {
 }
 impl Workflow {
     pub fn new<A: Args>(args: &A, config: &config::Container, has_job_config: bool) -> Result<Self, Box<dyn Error>> {
-        let (workflow_name, context) = match args.value_of("workflow") {
+        match args.value_of("workflow") {
             // directly specify workflow_name and context
-            Some(v) => (
-                v.to_string(),
-                match args.value_of("workflow_context") {
-                    Some(v) => serde_json::from_str(v)?,
+            Some(v) => return Ok(Self {
+                name: v.to_string(),
+                job: if has_job_config { Some(Job::new(args, config)) } else { None },
+                context: match args.value_of("workflow_context") {
+                    Some(v) => match fs::read_to_string(Path::new(v)) {
+                        Ok(s) => {
+                            log::debug!("read context payload from {}", v);
+                            serde_json::from_str(&s)?
+                        },
+                        Err(_) => serde_json::from_str(v)?
+                    },
                     None => hashmap!{}
-                }
-            ),
+                },
+                exec: ExecOptions::new(args, config)?
+            }),
             None => {
                 let trigger = match args.value_of("workflow_event_payload") {
                     Some(v) => match fs::read_to_string(Path::new(v)) {
                         Ok(s) => {
-                            log::debug!("read payload from {}", v);
+                            log::debug!("read event payload from {}", v);
                             Some(crate::ci::WorkflowTrigger::EventPayload(s.to_string()))
                         },
                         Err(_) => Some(crate::ci::WorkflowTrigger::EventPayload(v.to_string()))
@@ -118,18 +169,29 @@
                 } else if matches.len() > 2 {
                     log::warn!(
                         "multiple workflow matches({})",
-                        matches.iter().map(|(n,_)| {n.to_string()}).collect::<Vec<String>>().join(",")
+                        matches.iter().map(|m| {m.name.as_str()}).collect::<Vec<&str>>().join(",")
                     );
                 }
-                matches.remove(0)
+                let mut v = matches.remove(0);
+                v.apply(args, config, has_job_config);
+                Ok(v)
             }
-        };
-        Ok(Self {
-            name: workflow_name,
-            job: if has_job_config { Some(Job::new(args, config)) } else { None },
-            context: context,
-            exec: ExecOptions::new(args, config)?
-        })
+        }
+    }
+    pub fn with_context(name: String, context: HashMap<String, config::AnyValue>) -> Self {
+        Self { name, context, job: None, exec: ExecOptions::default() }
+    }
+    pub fn with_payload(payload: &str) -> Result<Self, Box<dyn Error>> {
+        Ok(serde_json::from_str(payload)?)
+    }
+    pub fn apply<A: Args>(&mut self, args: &A, config: &config::Container, has_job_config: bool) {
+        self.exec.apply(args);
+        if has_job_config {
+            match self.job.as_mut() {
+                Some(j) => j.apply(args, config),
+                None => self.job = Some(Job::new(args, config))
+            };
+        }
     }
     pub fn command(&self) -> config::job::Command {
         match &self.job {
@@ -189,7 +251,7 @@ impl Config {
         };
         Ok(())
     }
-    fn setup_logger(verbosity: u64) {
+    pub fn setup_logger(verbosity: u64) {
         // apply verbosity
         match std::env::var("RUST_LOG") {
             Ok(v) => {
@@ -200,7 +262,7 @@ impl Config {
             },
             Err(_) => {},
         };
-        simple_logger::init_with_level(match
+        match simple_logger::init_with_level(match
             match std::env::var("DEPLO_OVERWRITE_VERBOSITY") {
                 Ok(v) => if !v.is_empty() {
                     println!("overwrite log verbosity from {} to {}", verbosity, v);
@@ -216,7 +278,10 @@ impl Config {
             2 => log::Level::Debug,
             3 => log::Level::Trace,
             _ => log::Level::Trace
-        }).unwrap();
+        }) {
+            Ok(_) => {},
+            Err(e) => panic!("fail to init logger {}", e)
+        }
     }
 }
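
The Deplo.toml comment above describes dynamically generated job outputs (`deplo job set-output foo bar`), which correspond to the `set_job_output`/`job_output` methods on the `CI` trait in core/src/ci.rs. The sketch below is an illustrative, in-memory version of that contract, not the crate's implementation; the real trait methods also take an `OutputKind`, which is omitted here.

    // Illustrative stand-in for the set_job_output / job_output pair on the CI trait:
    // a job publishes key/value outputs, and dependent jobs read them back by job name and key.
    use std::collections::HashMap;

    #[derive(Default)]
    struct Outputs {
        by_job: HashMap<String, HashMap<String, String>>,
    }

    impl Outputs {
        fn set_job_output(&mut self, job_name: &str, outputs: HashMap<&str, &str>) {
            let entry = self.by_job.entry(job_name.to_string()).or_default();
            for (k, v) in outputs {
                entry.insert(k.to_string(), v.to_string());
            }
        }
        fn job_output(&self, job_name: &str, key: &str) -> Option<String> {
            self.by_job.get(job_name).and_then(|m| m.get(key).cloned())
        }
    }

    fn main() {
        // `deplo job set-output foo bar` in a job step roughly corresponds to:
        let mut outputs = Outputs::default();
        outputs.set_job_output("product-release", HashMap::from([("foo", "bar")]));
        // a job that depends on it can then read the value back:
        assert_eq!(outputs.job_output("product-release", "foo"), Some("bar".to_string()));
    }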
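
The workflow templates now read `DEPLO_OVERWRITE_VERBOSITY` from `github.event.client_payload.exec.verbosity`, and `filter_workflows` feeds the whole `client_payload` of a remote-job `repository_dispatch` event into `config::runtime::Workflow::with_payload`. That implies the payload is a serialized runtime workflow carrying `name`, `context`, `job`, and `exec`. The standalone sketch below mirrors that assumed shape with simplified local structs; the field types here are inferences from this diff, not deplo's real definitions.

    // Assumed client_payload shape for the remote-job repository_dispatch event.
    use std::collections::HashMap;
    use serde::{Deserialize, Serialize};
    use serde_json::json;

    #[derive(Serialize, Deserialize, Debug, Default)]
    #[serde(default)]
    struct ExecOptions {
        envs: HashMap<String, String>,
        revision: Option<String>,
        release_target: Option<String>,
        verbosity: u64,
        remote: bool,
        silent: bool,
        timeout: Option<u64>,
    }

    #[derive(Serialize, Deserialize, Debug)]
    struct JobConfig {
        name: String,
    }

    #[derive(Serialize, Deserialize, Debug)]
    struct Workflow {
        name: String,
        #[serde(default)]
        context: HashMap<String, serde_json::Value>,
        job: Option<JobConfig>,
        exec: ExecOptions,
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // client_payload as it might arrive with the dispatch event;
        // exec.verbosity is the field the templates export as DEPLO_OVERWRITE_VERBOSITY.
        let client_payload = json!({
            "name": "integrate",
            "context": {},
            "job": { "name": "build" },
            "exec": { "verbosity": 3, "remote": false, "silent": false }
        });
        // analogous to Workflow::with_payload(&serde_json::to_string(client_payload)?)?
        let workflow: Workflow = serde_json::from_str(&serde_json::to_string(&client_payload)?)?;
        assert_eq!(workflow.name, "integrate");
        assert_eq!(workflow.exec.verbosity, 3);
        println!("{:?}", workflow);
        Ok(())
    }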
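
`ExecOptions::new` now starts from `ExecOptions::default()` and layers command-line arguments on top via `apply`, merging `--env` values with `merge_hashmap` from `crate::util`. That helper is only imported in this diff, not shown; a plausible reading, assumed here purely for illustration, is a last-wins merge in which the overlay (command-line) values take precedence over the base (payload/default) values.

    use std::collections::HashMap;

    // Assumed shape of crate::util::merge_hashmap as used by ExecOptions::apply:
    // entries from `overlay` win over entries from `base`.
    fn merge_hashmap(
        base: &HashMap<String, String>,
        overlay: &HashMap<String, String>,
    ) -> HashMap<String, String> {
        let mut merged = base.clone();
        for (k, v) in overlay {
            merged.insert(k.clone(), v.clone());
        }
        merged
    }

    fn main() {
        let from_payload = HashMap::from([
            ("RUST_LOG".to_string(), "info".to_string()),
            ("REGION".to_string(), "ap-northeast-1".to_string()),
        ]);
        let from_cmdline = HashMap::from([
            ("RUST_LOG".to_string(), "debug".to_string()),
        ]);
        let merged = merge_hashmap(&from_payload, &from_cmdline);
        assert_eq!(merged["RUST_LOG"], "debug");          // cmdline overrides payload
        assert_eq!(merged["REGION"], "ap-northeast-1");   // payload-only values survive
    }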
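
`setup_logger` is now `pub` and reports initialization failures instead of unwrapping, and the verbosity it uses can still be overridden through the `DEPLO_OVERWRITE_VERBOSITY` env var that the templates populate from the event payload. The sketch below shows that override-then-map step in isolation; the `0`/`1` arms are not visible in this hunk, so the `Warn`/`Info` mapping is an assumption, as is the fallback to the configured value on a parse failure.

    // Verbosity resolution sketch: an explicit DEPLO_OVERWRITE_VERBOSITY env var
    // takes precedence over the locally configured verbosity, then maps to a log level.
    fn resolve_level(configured: u64) -> log::Level {
        let verbosity = match std::env::var("DEPLO_OVERWRITE_VERBOSITY") {
            Ok(v) if !v.is_empty() => v.parse::<u64>().unwrap_or(configured),
            _ => configured,
        };
        match verbosity {
            0 => log::Level::Warn,
            1 => log::Level::Info,
            2 => log::Level::Debug,
            _ => log::Level::Trace,
        }
    }

    fn main() {
        // with no env override this simply follows the configured value
        assert_eq!(resolve_level(1), log::Level::Info);
    }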