From d3de4435ce86f3f85a4cc58978b2544af2ac4248 Mon Sep 17 00:00:00 2001 From: Alexandre Gaudreault Date: Wed, 13 Aug 2025 16:08:24 -0400 Subject: [PATCH] feat(health): CronJob health and suspend, resume and terminate Job actions (#23991) Signed-off-by: Alexandre Gaudreault --- controller/health.go | 3 +- controller/health_test.go | 2 +- docs/operator-manual/resource_actions.md | 2 +- .../resource_actions_builtin.md | 5 + .../CronWorkflow/actions/discovery.lua | 4 +- .../WorkflowTemplate/actions/discovery.lua | 4 +- .../batch/CronJob/actions/action_test.yaml | 35 ++++- .../batch/CronJob/actions/discovery.lua | 14 +- .../batch/CronJob/actions/resume/action.lua | 4 + .../batch/CronJob/actions/suspend/action.lua | 2 + .../actions/testdata/cronjob-resumed.yaml | 34 ++++ .../actions/testdata/cronjob-suspended.yaml | 34 ++++ .../batch/CronJob/actions/testdata/job.yaml | 21 ++- .../batch/CronJob/health.lua | 40 +++++ .../batch/CronJob/health_test.yaml | 25 +++ .../batch/CronJob/testdata/active.yaml | 34 ++++ .../batch/CronJob/testdata/degraded.yaml | 27 ++++ .../batch/CronJob/testdata/healthy.yaml | 27 ++++ .../CronJob/testdata/never-scheduled.yaml | 25 +++ .../CronJob/testdata/never-succeeded.yaml | 26 +++ .../batch/CronJob/testdata/suspended.yaml | 35 +++++ .../batch/Job/actions/action_test.yaml | 42 +++++ .../batch/Job/actions/discovery.lua | 30 ++++ .../batch/Job/actions/resume/action.lua | 4 + .../batch/Job/actions/suspend/action.lua | 2 + .../batch/Job/actions/terminate/action.lua | 32 ++++ .../batch/Job/actions/testdata/active.yaml | 21 +++ .../batch/Job/actions/testdata/completed.yaml | 35 +++++ .../Job/actions/testdata/created-output.yaml | 17 ++ .../batch/Job/actions/testdata/created.yaml | 16 ++ .../batch/Job/actions/testdata/failed.yaml | 33 ++++ .../Job/actions/testdata/resumed-output.yaml | 28 ++++ .../testdata/resumed-terminated-output.yaml | 33 ++++ .../batch/Job/actions/testdata/resumed.yaml | 28 ++++ .../actions/testdata/suspended-output.yaml | 28 ++++ .../batch/Job/actions/testdata/suspended.yaml | 28 ++++ .../Job/actions/testdata/terminated.yaml | 27 ++++ util/lua/custom_actions_test.go | 33 +++- util/lua/lua.go | 7 +- util/lua/lua_test.go | 148 ++++++++++++++++-- 40 files changed, 946 insertions(+), 49 deletions(-) create mode 100644 resource_customizations/batch/CronJob/actions/resume/action.lua create mode 100644 resource_customizations/batch/CronJob/actions/suspend/action.lua create mode 100644 resource_customizations/batch/CronJob/actions/testdata/cronjob-resumed.yaml create mode 100644 resource_customizations/batch/CronJob/actions/testdata/cronjob-suspended.yaml create mode 100644 resource_customizations/batch/CronJob/health.lua create mode 100644 resource_customizations/batch/CronJob/health_test.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/active.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/degraded.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/healthy.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/never-scheduled.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/never-succeeded.yaml create mode 100644 resource_customizations/batch/CronJob/testdata/suspended.yaml create mode 100644 resource_customizations/batch/Job/actions/action_test.yaml create mode 100644 resource_customizations/batch/Job/actions/discovery.lua create mode 100644 resource_customizations/batch/Job/actions/resume/action.lua create mode 100644 
resource_customizations/batch/Job/actions/suspend/action.lua create mode 100644 resource_customizations/batch/Job/actions/terminate/action.lua create mode 100644 resource_customizations/batch/Job/actions/testdata/active.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/completed.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/created-output.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/created.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/failed.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/resumed-output.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/resumed-terminated-output.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/resumed.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/suspended-output.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/suspended.yaml create mode 100644 resource_customizations/batch/Job/actions/testdata/terminated.yaml diff --git a/controller/health.go b/controller/health.go index ba2270629d..2fe4cfc0a5 100644 --- a/controller/health.go +++ b/controller/health.go @@ -27,10 +27,9 @@ func setApplicationHealth(resources []managedResource, statuses []appv1.Resource if res.Target != nil && hookutil.Skip(res.Target) { continue } - if res.Target != nil && res.Target.GetAnnotations() != nil && res.Target.GetAnnotations()[common.AnnotationIgnoreHealthCheck] == "true" { + if res.Live != nil && res.Live.GetAnnotations() != nil && res.Live.GetAnnotations()[common.AnnotationIgnoreHealthCheck] == "true" { continue } - if res.Live != nil && (hookutil.IsHook(res.Live) || ignore.Ignore(res.Live)) { continue } diff --git a/controller/health_test.go b/controller/health_test.go index fff2adce73..c09a583940 100644 --- a/controller/health_test.go +++ b/controller/health_test.go @@ -82,7 +82,7 @@ func TestSetApplicationHealth(t *testing.T) { // The app is considered healthy failedJob.SetAnnotations(nil) failedJobIgnoreHealthcheck := resourceFromFile("./testdata/job-failed-ignore-healthcheck.yaml") - resources[1].Target = &failedJobIgnoreHealthcheck + resources[1].Live = &failedJobIgnoreHealthcheck healthStatus, err = setApplicationHealth(resources, resourceStatuses, nil, app, true) require.NoError(t, err) assert.Equal(t, health.HealthStatusHealthy, healthStatus) diff --git a/docs/operator-manual/resource_actions.md b/docs/operator-manual/resource_actions.md index f5532245f9..0ab52926b4 100644 --- a/docs/operator-manual/resource_actions.md +++ b/docs/operator-manual/resource_actions.md @@ -216,7 +216,7 @@ The `fa-fw` class ensures that the icon is displayed with a fixed width, to avoi ```lua local actions = {} actions["create-workflow"] = { - ["iconClass"] = "fa fa-fw fa-play", + ["iconClass"] = "fa fa-fw fa-plus", ["displayName"] = "Create Workflow" } return actions diff --git a/docs/operator-manual/resource_actions_builtin.md b/docs/operator-manual/resource_actions_builtin.md index d4a59380d4..4e07afa894 100644 --- a/docs/operator-manual/resource_actions_builtin.md +++ b/docs/operator-manual/resource_actions_builtin.md @@ -16,6 +16,11 @@ - [argoproj.io/Rollout/skip-current-step](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/argoproj.io/Rollout/actions/skip-current-step/action.lua) - 
[argoproj.io/WorkflowTemplate/create-workflow](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/argoproj.io/WorkflowTemplate/actions/create-workflow/action.lua) - [batch/CronJob/create-job](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/CronJob/actions/create-job/action.lua) +- [batch/CronJob/resume](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/CronJob/actions/resume/action.lua) +- [batch/CronJob/suspend](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/CronJob/actions/suspend/action.lua) +- [batch/Job/resume](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/Job/actions/resume/action.lua) +- [batch/Job/suspend](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/Job/actions/suspend/action.lua) +- [batch/Job/terminate](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/batch/Job/actions/terminate/action.lua) - [external-secrets.io/ExternalSecret/refresh](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/external-secrets.io/ExternalSecret/actions/refresh/action.lua) - [external-secrets.io/PushSecret/push](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/external-secrets.io/PushSecret/actions/push/action.lua) - [helm.toolkit.fluxcd.io/HelmRelease/reconcile](https://github.com/argoproj/argo-cd/blob/master/resource_customizations/helm.toolkit.fluxcd.io/HelmRelease/actions/reconcile/action.lua) diff --git a/resource_customizations/argoproj.io/CronWorkflow/actions/discovery.lua b/resource_customizations/argoproj.io/CronWorkflow/actions/discovery.lua index 9a76d96643..80b897333c 100644 --- a/resource_customizations/argoproj.io/CronWorkflow/actions/discovery.lua +++ b/resource_customizations/argoproj.io/CronWorkflow/actions/discovery.lua @@ -1,6 +1,6 @@ local actions = {} actions["create-workflow"] = { - ["iconClass"] = "fa fa-fw fa-play", + ["iconClass"] = "fa fa-fw fa-plus", ["displayName"] = "Create Workflow" } -return actions \ No newline at end of file +return actions diff --git a/resource_customizations/argoproj.io/WorkflowTemplate/actions/discovery.lua b/resource_customizations/argoproj.io/WorkflowTemplate/actions/discovery.lua index 9a76d96643..80b897333c 100644 --- a/resource_customizations/argoproj.io/WorkflowTemplate/actions/discovery.lua +++ b/resource_customizations/argoproj.io/WorkflowTemplate/actions/discovery.lua @@ -1,6 +1,6 @@ local actions = {} actions["create-workflow"] = { - ["iconClass"] = "fa fa-fw fa-play", + ["iconClass"] = "fa fa-fw fa-plus", ["displayName"] = "Create Workflow" } -return actions \ No newline at end of file +return actions diff --git a/resource_customizations/batch/CronJob/actions/action_test.yaml b/resource_customizations/batch/CronJob/actions/action_test.yaml index a9b5320db5..839fe138c2 100644 --- a/resource_customizations/batch/CronJob/actions/action_test.yaml +++ b/resource_customizations/batch/CronJob/actions/action_test.yaml @@ -1,4 +1,33 @@ +discoveryTests: + - inputPath: testdata/cronjob.yaml + result: + - name: create-job + displayName: 'Create Job' + iconClass: 'fa fa-fw fa-plus' + - name: suspend + iconClass: 'fa fa-fw fa-pause' + - inputPath: testdata/cronjob-resumed.yaml + result: + - name: create-job + displayName: 'Create Job' + iconClass: 'fa fa-fw fa-plus' + - name: suspend + iconClass: 'fa fa-fw fa-pause' + - inputPath: testdata/cronjob-suspended.yaml + result: + - name: create-job + displayName: 'Create 
Job' + iconClass: 'fa fa-fw fa-plus' + - name: resume + iconClass: 'fa fa-fw fa-play' + actionTests: -- action: create-job - inputPath: testdata/cronjob.yaml - expectedOutputPath: testdata/job.yaml + - action: create-job + inputPath: testdata/cronjob.yaml + expectedOutputPath: testdata/job.yaml + - action: suspend + inputPath: testdata/cronjob.yaml + expectedOutputPath: testdata/cronjob-suspended.yaml + - action: resume + inputPath: testdata/cronjob-suspended.yaml + expectedOutputPath: testdata/cronjob-resumed.yaml diff --git a/resource_customizations/batch/CronJob/actions/discovery.lua b/resource_customizations/batch/CronJob/actions/discovery.lua index 61be2c3500..e11003f261 100644 --- a/resource_customizations/batch/CronJob/actions/discovery.lua +++ b/resource_customizations/batch/CronJob/actions/discovery.lua @@ -1,6 +1,10 @@ local actions = {} -actions["create-job"] = { - ["iconClass"] = "fa fa-fw fa-play", - ["displayName"] = "Create Job" -} -return actions \ No newline at end of file +actions["create-job"] = {["iconClass"] = "fa fa-fw fa-plus", ["displayName"] = "Create Job"} + +if obj.spec.suspend ~= nil and obj.spec.suspend then + actions["resume"] = {["iconClass"] = "fa fa-fw fa-play" } +else + actions["suspend"] = {["iconClass"] = "fa fa-fw fa-pause"} +end + +return actions diff --git a/resource_customizations/batch/CronJob/actions/resume/action.lua b/resource_customizations/batch/CronJob/actions/resume/action.lua new file mode 100644 index 0000000000..50808c26a2 --- /dev/null +++ b/resource_customizations/batch/CronJob/actions/resume/action.lua @@ -0,0 +1,4 @@ +if obj.spec.suspend ~= nil and obj.spec.suspend then + obj.spec.suspend = false +end +return obj diff --git a/resource_customizations/batch/CronJob/actions/suspend/action.lua b/resource_customizations/batch/CronJob/actions/suspend/action.lua new file mode 100644 index 0000000000..a9bfa3db9c --- /dev/null +++ b/resource_customizations/batch/CronJob/actions/suspend/action.lua @@ -0,0 +1,2 @@ +obj.spec.suspend = true +return obj diff --git a/resource_customizations/batch/CronJob/actions/testdata/cronjob-resumed.yaml b/resource_customizations/batch/CronJob/actions/testdata/cronjob-resumed.yaml new file mode 100644 index 0000000000..fa7a9afbd0 --- /dev/null +++ b/resource_customizations/batch/CronJob/actions/testdata/cronjob-resumed.yaml @@ -0,0 +1,34 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + name: hello + namespace: test-ns + uid: '123' +spec: + suspend: false + schedule: '* * * * *' + jobTemplate: + metadata: + labels: + my: label + annotations: + my: annotation + spec: + ttlSecondsAfterFinished: 100 + template: + metadata: + labels: + pod: label + annotations: + pod: annotation + spec: + containers: + - name: hello + image: busybox:1.28 + imagePullPolicy: IfNotPresent + command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + resources: {} + restartPolicy: OnFailure diff --git a/resource_customizations/batch/CronJob/actions/testdata/cronjob-suspended.yaml b/resource_customizations/batch/CronJob/actions/testdata/cronjob-suspended.yaml new file mode 100644 index 0000000000..15822d7ccc --- /dev/null +++ b/resource_customizations/batch/CronJob/actions/testdata/cronjob-suspended.yaml @@ -0,0 +1,34 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + name: hello + namespace: test-ns + uid: '123' +spec: + suspend: true + schedule: '* * * * *' + jobTemplate: + metadata: + labels: + my: label + annotations: + my: annotation + spec: + ttlSecondsAfterFinished: 100 + template: + metadata: + labels: + 
pod: label + annotations: + pod: annotation + spec: + containers: + - name: hello + image: busybox:1.28 + imagePullPolicy: IfNotPresent + command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + resources: {} + restartPolicy: OnFailure diff --git a/resource_customizations/batch/CronJob/actions/testdata/job.yaml b/resource_customizations/batch/CronJob/actions/testdata/job.yaml index 322ab0480b..85d1997d7c 100644 --- a/resource_customizations/batch/CronJob/actions/testdata/job.yaml +++ b/resource_customizations/batch/CronJob/actions/testdata/job.yaml @@ -3,6 +3,13 @@ apiVersion: batch/v1 kind: Job metadata: + ownerReferences: + - apiVersion: batch/v1 + blockOwnerDeletion: true + controller: true + kind: CronJob + name: hello + uid: '123' name: hello-00000000000 namespace: test-ns labels: @@ -20,11 +27,11 @@ pod: annotation spec: containers: - - name: hello - image: busybox:1.28 - imagePullPolicy: IfNotPresent - command: - - /bin/sh - - -c - - date; echo Hello from the Kubernetes cluster + - name: hello + image: busybox:1.28 + imagePullPolicy: IfNotPresent + command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster restartPolicy: OnFailure diff --git a/resource_customizations/batch/CronJob/health.lua b/resource_customizations/batch/CronJob/health.lua new file mode 100644 index 0000000000..4e442e4958 --- /dev/null +++ b/resource_customizations/batch/CronJob/health.lua @@ -0,0 +1,40 @@ +hs = {} + +if obj.spec.suspend == true then + hs.status = "Suspended" + hs.message = "CronJob is Suspended" + return hs +end + +if obj.status ~= nil then + if obj.status.active ~= nil and table.getn(obj.status.active) > 0 then + -- We could be Progressing very often, depending on the Cron schedule, which would bubble up + -- to the Application health. If this is undesired, the annotation `argocd.argoproj.io/ignore-healthcheck: "true"` + -- can be added on the CronJob. + hs.status = "Progressing" + hs.message = string.format("Waiting for %d Jobs to complete", table.getn(obj.status.active)) + return hs + end + + -- If the CronJob has no active jobs and the lastSuccessfulTime < lastScheduleTime + -- then we know it failed the last execution + if obj.status.lastScheduleTime ~= nil then + -- No issue comparing time as text + if obj.status.lastSuccessfulTime == nil or obj.status.lastSuccessfulTime < obj.status.lastScheduleTime then + hs.status = "Degraded" + hs.message = "CronJob has not completed its last execution successfully" + return hs + end + hs.message = "CronJob has completed its last execution successfully" + end + + -- There is no way to know if a CronJob missed its execution based on status + so we assume Healthy even if a CronJob is not getting scheduled. 
+ -- https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs/#job-creation + hs.status = "Healthy" + return hs +end + +hs.status = "Progressing" +hs.message = "Waiting for CronJob" +return hs diff --git a/resource_customizations/batch/CronJob/health_test.yaml b/resource_customizations/batch/CronJob/health_test.yaml new file mode 100644 index 0000000000..370ab11e66 --- /dev/null +++ b/resource_customizations/batch/CronJob/health_test.yaml @@ -0,0 +1,25 @@ +tests: + - healthStatus: + status: Healthy + message: CronJob has completed its last execution successfully + inputPath: testdata/healthy.yaml + - healthStatus: + status: Healthy + message: '' + inputPath: testdata/never-scheduled.yaml + - healthStatus: + status: Degraded + message: CronJob has not completed its last execution successfully + inputPath: testdata/degraded.yaml + - healthStatus: + status: Degraded + message: CronJob has not completed its last execution successfully + inputPath: testdata/never-succeeded.yaml + - healthStatus: + status: Progressing + message: Waiting for 1 Jobs to complete + inputPath: testdata/active.yaml + - healthStatus: + status: Suspended + message: CronJob is Suspended + inputPath: testdata/suspended.yaml diff --git a/resource_customizations/batch/CronJob/testdata/active.yaml b/resource_customizations/batch/CronJob/testdata/active.yaml new file mode 100644 index 0000000000..f71ddb6320 --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/active.yaml @@ -0,0 +1,34 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: + active: + - apiVersion: batch/v1 + kind: Job + name: hello-29231490 + namespace: test-cronjob + resourceVersion: '21226' + uid: 996e6ed6-8494-4c9a-9862-93de4af310cb + lastScheduleTime: '2025-07-30T13:46:00Z' + lastSuccessfulTime: '2025-07-30T13:44:19Z' diff --git a/resource_customizations/batch/CronJob/testdata/degraded.yaml b/resource_customizations/batch/CronJob/testdata/degraded.yaml new file mode 100644 index 0000000000..a734cca460 --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/degraded.yaml @@ -0,0 +1,27 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: + lastScheduleTime: '2025-07-30T13:46:00Z' + lastSuccessfulTime: '2025-07-30T13:44:19Z' diff --git a/resource_customizations/batch/CronJob/testdata/healthy.yaml b/resource_customizations/batch/CronJob/testdata/healthy.yaml new file mode 100644 index 0000000000..8347d7f0d4 --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/healthy.yaml @@ -0,0 +1,27 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: 
test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: + lastScheduleTime: '2025-07-30T13:42:00Z' + lastSuccessfulTime: '2025-07-30T13:44:19Z' diff --git a/resource_customizations/batch/CronJob/testdata/never-scheduled.yaml b/resource_customizations/batch/CronJob/testdata/never-scheduled.yaml new file mode 100644 index 0000000000..a2296ad4a6 --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/never-scheduled.yaml @@ -0,0 +1,25 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: {} diff --git a/resource_customizations/batch/CronJob/testdata/never-succeeded.yaml b/resource_customizations/batch/CronJob/testdata/never-succeeded.yaml new file mode 100644 index 0000000000..3416c309ec --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/never-succeeded.yaml @@ -0,0 +1,26 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: + lastScheduleTime: '2025-07-30T13:46:00Z' diff --git a/resource_customizations/batch/CronJob/testdata/suspended.yaml b/resource_customizations/batch/CronJob/testdata/suspended.yaml new file mode 100644 index 0000000000..77b1f73eec --- /dev/null +++ b/resource_customizations/batch/CronJob/testdata/suspended.yaml @@ -0,0 +1,35 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + annotations: + argocd.argoproj.io/tracking-id: test-cronjob:batch/CronJob:test-cronjob/hello + labels: + app.kubernetes.io/instance: test-cronjob + name: hello + namespace: test-cronjob +spec: + suspend: true + jobTemplate: + spec: + template: + spec: + containers: + - command: + - /bin/sh + - '-c' + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure + schedule: '* * * * *' +status: + active: + - apiVersion: batch/v1 + kind: Job + name: hello-29231490 + namespace: test-cronjob + resourceVersion: '21226' + uid: 996e6ed6-8494-4c9a-9862-93de4af310cb + lastScheduleTime: '2025-07-30T13:46:00Z' + lastSuccessfulTime: '2025-07-30T13:44:19Z' diff --git a/resource_customizations/batch/Job/actions/action_test.yaml b/resource_customizations/batch/Job/actions/action_test.yaml new file mode 100644 index 0000000000..4375765e13 --- /dev/null +++ b/resource_customizations/batch/Job/actions/action_test.yaml @@ -0,0 +1,42 @@ +discoveryTests: + - 
inputPath: testdata/suspended.yaml + result: + - name: resume + iconClass: 'fa fa-fw fa-play' + - name: terminate + iconClass: 'fa fa-fw fa-stop' + - inputPath: testdata/active.yaml + result: + - name: suspend + iconClass: 'fa fa-fw fa-pause' + - name: terminate + iconClass: 'fa fa-fw fa-stop' + - inputPath: testdata/completed.yaml + result: [] + - inputPath: testdata/failed.yaml + result: [] + - inputPath: testdata/terminated.yaml + result: [] + +actionTests: + - action: resume + inputPath: testdata/suspended.yaml + expectedOutputPath: testdata/suspended-output.yaml + - action: suspend + inputPath: testdata/resumed.yaml + expectedOutputPath: testdata/resumed-output.yaml + - action: suspend + inputPath: testdata/created.yaml + expectedOutputPath: testdata/created-output.yaml + - action: terminate + inputPath: testdata/active.yaml + expectedOutputPath: testdata/terminated.yaml + - action: terminate + inputPath: testdata/resumed.yaml + expectedOutputPath: testdata/resumed-terminated-output.yaml + - action: terminate + inputPath: testdata/completed.yaml + expectedOutputPath: testdata/completed.yaml + - action: terminate + inputPath: testdata/failed.yaml + expectedOutputPath: testdata/failed.yaml diff --git a/resource_customizations/batch/Job/actions/discovery.lua b/resource_customizations/batch/Job/actions/discovery.lua new file mode 100644 index 0000000000..1541b8e0a3 --- /dev/null +++ b/resource_customizations/batch/Job/actions/discovery.lua @@ -0,0 +1,30 @@ +local actions = {} + +local completed = false +if obj.status ~= nil then + if obj.status.conditions ~= nil then + for i, condition in pairs(obj.status.conditions) do + if condition.type == "Complete" and condition.status == "True" then + completed = true + elseif condition.type == "Failed" and condition.status == "True" then + completed = true + elseif condition.type == "FailureTarget" and condition.status == "True" then + completed = true + elseif condition.type == "SuccessCriteriaMet" and condition.status == "True" then + completed = true + end + end + end +end + +if not(completed) and obj.spec.suspend then + actions["resume"] = {["iconClass"] = "fa fa-fw fa-play" } +elseif not(completed) and (obj.spec.suspend == nil or not(obj.spec.suspend)) then + actions["suspend"] = {["iconClass"] = "fa fa-fw fa-pause" } +end + +if not(completed) and obj.status ~= nil then + actions["terminate"] = {["iconClass"] = "fa fa-fw fa-stop" } +end + +return actions diff --git a/resource_customizations/batch/Job/actions/resume/action.lua b/resource_customizations/batch/Job/actions/resume/action.lua new file mode 100644 index 0000000000..50808c26a2 --- /dev/null +++ b/resource_customizations/batch/Job/actions/resume/action.lua @@ -0,0 +1,4 @@ +if obj.spec.suspend ~= nil and obj.spec.suspend then + obj.spec.suspend = false +end +return obj diff --git a/resource_customizations/batch/Job/actions/suspend/action.lua b/resource_customizations/batch/Job/actions/suspend/action.lua new file mode 100644 index 0000000000..a9bfa3db9c --- /dev/null +++ b/resource_customizations/batch/Job/actions/suspend/action.lua @@ -0,0 +1,2 @@ +obj.spec.suspend = true +return obj diff --git a/resource_customizations/batch/Job/actions/terminate/action.lua b/resource_customizations/batch/Job/actions/terminate/action.lua new file mode 100644 index 0000000000..7cc06261a7 --- /dev/null +++ b/resource_customizations/batch/Job/actions/terminate/action.lua @@ -0,0 +1,32 @@ +local os = require("os") + +local completed = false +if obj.status ~= nil then + + if obj.status.conditions ~= nil 
then + for i, condition in pairs(obj.status.conditions) do + if condition.type == "Complete" and condition.status == "True" then + completed = true + elseif condition.type == "Failed" and condition.status == "True" then + completed = true + elseif condition.type == "FailureTarget" and condition.status == "True" then + completed = true + elseif condition.type == "SuccessCriteriaMet" and condition.status == "True" then + completed = true + end + end + end + + if not(completed) then + obj.status.conditions = obj.status.conditions or {} + table.insert(obj.status.conditions, { + lastTransitionTime = os.date("!%Y-%m-%dT%XZ"), + message = "Job was terminated explicitly through Argo CD", + reason = "ManuallyTerminated", + status = "True", + type = "FailureTarget" + }) + end + +end +return obj diff --git a/resource_customizations/batch/Job/actions/testdata/active.yaml b/resource_customizations/batch/Job/actions/testdata/active.yaml new file mode 100644 index 0000000000..c179fe68b9 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/active.yaml @@ -0,0 +1,21 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + ready: 0 + startTime: '2025-07-28T19:37:00Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/completed.yaml b/resource_customizations/batch/Job/actions/testdata/completed.yaml new file mode 100644 index 0000000000..4983c785c5 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/completed.yaml @@ -0,0 +1,35 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + completionTime: '2025-07-28T22:15:24Z' + conditions: + - lastProbeTime: '2025-07-28T22:15:24Z' + lastTransitionTime: '2025-07-28T22:15:24Z' + message: Reached expected number of succeeded pods + reason: CompletionsReached + status: 'True' + type: SuccessCriteriaMet + - lastProbeTime: '2025-07-28T22:15:24Z' + lastTransitionTime: '2025-07-28T22:15:24Z' + message: Reached expected number of succeeded pods + reason: CompletionsReached + status: 'True' + type: Complete + ready: 0 + startTime: '2025-07-28T19:37:00Z' + succeeded: 1 + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/created-output.yaml b/resource_customizations/batch/Job/actions/testdata/created-output.yaml new file mode 100644 index 0000000000..68de6a93cc --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/created-output.yaml @@ -0,0 +1,17 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: true + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure diff --git a/resource_customizations/batch/Job/actions/testdata/created.yaml b/resource_customizations/batch/Job/actions/testdata/created.yaml new file mode 100644 index 0000000000..9ea5ac5f8c --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/created.yaml @@ -0,0 +1,16 @@ +apiVersion: batch/v1 +kind: Job +metadata: 
+ name: test-29228857 +spec: + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure diff --git a/resource_customizations/batch/Job/actions/testdata/failed.yaml b/resource_customizations/batch/Job/actions/testdata/failed.yaml new file mode 100644 index 0000000000..d138d9c87f --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/failed.yaml @@ -0,0 +1,33 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastTransitionTime: '0001-01-01T00:00:00Z' + message: Job was terminated explicitly through Argo CD + reason: ManuallyTerminated + status: 'True' + type: FailureTarget + - lastProbeTime: '2025-07-28T20:47:17Z' + lastTransitionTime: '2025-07-28T20:47:17Z' + message: Job was terminated explicitly through Argo CD + reason: ManuallyTerminated + status: 'True' + type: Failed + ready: 0 + startTime: '2025-07-28T19:37:00Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/resumed-output.yaml b/resource_customizations/batch/Job/actions/testdata/resumed-output.yaml new file mode 100644 index 0000000000..d1a01c753a --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/resumed-output.yaml @@ -0,0 +1,28 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: true + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastProbeTime: '2025-07-28T22:02:46Z' + lastTransitionTime: '2025-07-28T22:02:46Z' + message: Job resumed + reason: JobResumed + status: 'False' + type: Suspended + ready: 0 + startTime: '2025-07-28T22:02:46Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/resumed-terminated-output.yaml b/resource_customizations/batch/Job/actions/testdata/resumed-terminated-output.yaml new file mode 100644 index 0000000000..c92e422396 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/resumed-terminated-output.yaml @@ -0,0 +1,33 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastProbeTime: '2025-07-28T22:02:46Z' + lastTransitionTime: '2025-07-28T22:02:46Z' + message: Job resumed + reason: JobResumed + status: 'False' + type: Suspended + - lastTransitionTime: '0001-01-01T00:00:00Z' + message: Job was terminated explicitly through Argo CD + reason: ManuallyTerminated + status: 'True' + type: FailureTarget + ready: 0 + startTime: '2025-07-28T22:02:46Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/resumed.yaml b/resource_customizations/batch/Job/actions/testdata/resumed.yaml new file mode 100644 index 0000000000..aaf121ce54 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/resumed.yaml @@ -0,0 +1,28 @@ +apiVersion: batch/v1 
+kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastProbeTime: '2025-07-28T22:02:46Z' + lastTransitionTime: '2025-07-28T22:02:46Z' + message: Job resumed + reason: JobResumed + status: 'False' + type: Suspended + ready: 0 + startTime: '2025-07-28T22:02:46Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/suspended-output.yaml b/resource_customizations/batch/Job/actions/testdata/suspended-output.yaml new file mode 100644 index 0000000000..c486066505 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/suspended-output.yaml @@ -0,0 +1,28 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastProbeTime: '2025-07-28T20:01:53Z' + lastTransitionTime: '2025-07-28T20:01:53Z' + message: Job suspended + reason: JobSuspended + status: 'True' + type: Suspended + ready: 0 + startTime: '2025-07-28T19:37:00Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/suspended.yaml b/resource_customizations/batch/Job/actions/testdata/suspended.yaml new file mode 100644 index 0000000000..a546336ce0 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/suspended.yaml @@ -0,0 +1,28 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: true + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastProbeTime: '2025-07-28T20:01:53Z' + lastTransitionTime: '2025-07-28T20:01:53Z' + message: Job suspended + reason: JobSuspended + status: 'True' + type: Suspended + ready: 0 + startTime: '2025-07-28T19:37:00Z' + terminating: 0 diff --git a/resource_customizations/batch/Job/actions/testdata/terminated.yaml b/resource_customizations/batch/Job/actions/testdata/terminated.yaml new file mode 100644 index 0000000000..e258dab1b9 --- /dev/null +++ b/resource_customizations/batch/Job/actions/testdata/terminated.yaml @@ -0,0 +1,27 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: test-29228857 +spec: + suspend: false + template: + spec: + containers: + - command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + image: busybox:1.28 + imagePullPolicy: IfNotPresent + name: hello + restartPolicy: OnFailure +status: + conditions: + - lastTransitionTime: '0001-01-01T00:00:00Z' + message: Job was terminated explicitly through Argo CD + reason: ManuallyTerminated + status: 'True' + type: FailureTarget + ready: 0 + startTime: '2025-07-28T19:37:00Z' + terminating: 0 diff --git a/util/lua/custom_actions_test.go b/util/lua/custom_actions_test.go index 1e52590662..ff2f4dad31 100644 --- a/util/lua/custom_actions_test.go +++ b/util/lua/custom_actions_test.go @@ -26,13 +26,9 @@ func (t testNormalizer) Normalize(un *unstructured.Unstructured) error { if un == nil { return nil } - if un.GetKind() == "Job" { - err := unstructured.SetNestedField(un.Object, map[string]any{"name": "not sure why 
this works"}, "metadata") - if err != nil { - return fmt.Errorf("failed to normalize Job: %w", err) - } - } switch un.GetKind() { + case "Job": + return t.normalizeJob(un) case "DaemonSet", "Deployment", "StatefulSet": err := unstructured.SetNestedStringMap(un.Object, map[string]string{"kubectl.kubernetes.io/restartedAt": "0001-01-01T00:00:00Z"}, "spec", "template", "metadata", "annotations") if err != nil { @@ -85,6 +81,28 @@ func (t testNormalizer) Normalize(un *unstructured.Unstructured) error { return nil } +func (t testNormalizer) normalizeJob(un *unstructured.Unstructured) error { + if conditions, exist, err := unstructured.NestedSlice(un.Object, "status", "conditions"); err != nil { + return fmt.Errorf("failed to normalize %s: %w", un.GetKind(), err) + } else if exist { + changed := false + for i := range conditions { + condition := conditions[i].(map[string]any) + cType := condition["type"].(string) + if cType == "FailureTarget" { + condition["lastTransitionTime"] = "0001-01-01T00:00:00Z" + changed = true + } + } + if changed { + if err := unstructured.SetNestedSlice(un.Object, conditions, "status", "conditions"); err != nil { + return fmt.Errorf("failed to normalize %s: %w", un.GetKind(), err) + } + } + } + return nil +} + type ActionTestStructure struct { DiscoveryTests []IndividualDiscoveryTest `yaml:"discoveryTests"` ActionTests []IndividualActionTest `yaml:"actionTests"` @@ -208,8 +226,7 @@ func TestLuaResourceActionsScript(t *testing.T) { assert.Equal(t, sourceObj.GetNamespace(), result.GetNamespace()) case CreateOperation: switch result.GetKind() { - case "Job": - case "Workflow": + case "Job", "Workflow": // The name of the created resource is derived from the source object name, so the returned name is not actually equal to the testdata output name result.SetName(expectedObj.GetName()) } diff --git a/util/lua/lua.go b/util/lua/lua.go index abdc88f84a..ddc1aff788 100644 --- a/util/lua/lua.go +++ b/util/lua/lua.go @@ -290,7 +290,8 @@ func cleanReturnedObj(newObj, obj map[string]any) map[string]any { switch oldValue := oldValueInterface.(type) { case map[string]any: if len(newValue) == 0 { - mapToReturn[key] = oldValue + // Lua incorrectly decoded the empty object as an empty array, so set it to an empty object + mapToReturn[key] = map[string]any{} } case []any: newArray := cleanReturnedArray(newValue, oldValue) @@ -307,6 +308,10 @@ func cleanReturnedObj(newObj, obj map[string]any) map[string]any { func cleanReturnedArray(newObj, obj []any) []any { arrayToReturn := newObj for i := range newObj { + if i >= len(obj) { + // If the new object is longer than the old one, we added an item to the array + break + } switch newValue := newObj[i].(type) { case map[string]any: if oldValue, ok := obj[i].(map[string]any); ok { diff --git a/util/lua/lua_test.go b/util/lua/lua_test.go index 7e0a49fc05..3f1f59aace 100644 --- a/util/lua/lua_test.go +++ b/util/lua/lua_test.go @@ -705,7 +705,9 @@ func TestExecuteResourceActionInvalidUnstructured(t *testing.T) { require.Error(t, err) } -const objWithEmptyStruct = ` +func TestCleanPatch(t *testing.T) { + t.Run("Empty Struct preserved", func(t *testing.T) { + const obj = ` apiVersion: argoproj.io/v1alpha1 kind: Test metadata: @@ -717,7 +719,8 @@ metadata: resourceVersion: "123" spec: resources: {} - paused: true + updated: + something: true containers: - name: name1 test: {} @@ -725,8 +728,7 @@ spec: - name: name2 test2: {} ` - -const expectedUpdatedObjWithEmptyStruct = ` + const expected = ` apiVersion: argoproj.io/v1alpha1 kind: Test metadata: @@ 
-738,7 +740,7 @@ metadata: resourceVersion: "123" spec: resources: {} - paused: false + updated: {} containers: - name: name1 test: {} @@ -746,21 +748,133 @@ spec: - name: name2 test2: {} ` - -const pausedToFalseLua = ` -obj.spec.paused = false + const luaAction = ` +obj.spec.updated = {} return obj ` + testObj := StrToUnstructured(obj) + expectedObj := StrToUnstructured(expected) + vm := VM{} + newObjects, err := vm.ExecuteResourceAction(testObj, luaAction, nil) + require.NoError(t, err) + assert.Len(t, newObjects, 1) + assert.Equal(t, newObjects[0].K8SOperation, K8SOperation("patch")) + assert.Equal(t, expectedObj, newObjects[0].UnstructuredObj) + }) -func TestCleanPatch(t *testing.T) { - testObj := StrToUnstructured(objWithEmptyStruct) - expectedObj := StrToUnstructured(expectedUpdatedObjWithEmptyStruct) - vm := VM{} - newObjects, err := vm.ExecuteResourceAction(testObj, pausedToFalseLua, nil) - require.NoError(t, err) - assert.Len(t, newObjects, 1) - assert.Equal(t, newObjects[0].K8SOperation, K8SOperation("patch")) - assert.Equal(t, expectedObj, newObjects[0].UnstructuredObj) + t.Run("New item added to array", func(t *testing.T) { + const obj = ` +apiVersion: argoproj.io/v1alpha1 +kind: Test +metadata: + labels: + app.kubernetes.io/instance: helm-guestbook + test: test + name: helm-guestbook + namespace: default + resourceVersion: "123" +spec: + containers: + - name: name1 + test: {} + anotherList: + - name: name2 + test2: {} +` + const expected = ` +apiVersion: argoproj.io/v1alpha1 +kind: Test +metadata: + labels: + app.kubernetes.io/instance: helm-guestbook + test: test + name: helm-guestbook + namespace: default + resourceVersion: "123" +spec: + containers: + - name: name1 + test: {} + anotherList: + - name: name2 + test2: {} + - name: added + #test: {} ### would be decoded as an empty array and is not supported. The type is unknown + testArray: [] ### works since it is decoded in the correct type + another: + supported: true +` + // `test: {}` in new container would be decoded as an empty array and is not supported. 
The type is unknown + // `testArray: []` works since it is decoded in the correct type + const luaAction = ` +table.insert(obj.spec.containers, {name = "added", testArray = {}, another = {supported = true}}) +return obj +` + testObj := StrToUnstructured(obj) + expectedObj := StrToUnstructured(expected) + vm := VM{} + newObjects, err := vm.ExecuteResourceAction(testObj, luaAction, nil) + require.NoError(t, err) + assert.Len(t, newObjects, 1) + assert.Equal(t, newObjects[0].K8SOperation, K8SOperation("patch")) + assert.Equal(t, expectedObj, newObjects[0].UnstructuredObj) + }) + + t.Run("Last item removed from array", func(t *testing.T) { + const obj = ` +apiVersion: argoproj.io/v1alpha1 +kind: Test +metadata: + labels: + app.kubernetes.io/instance: helm-guestbook + test: test + name: helm-guestbook + namespace: default + resourceVersion: "123" +spec: + containers: + - name: name1 + test: {} + anotherList: + - name: name2 + test2: {} + - name: name3 + test: {} + anotherList: + - name: name4 + test2: {} +` + const expected = ` +apiVersion: argoproj.io/v1alpha1 +kind: Test +metadata: + labels: + app.kubernetes.io/instance: helm-guestbook + test: test + name: helm-guestbook + namespace: default + resourceVersion: "123" +spec: + containers: + - name: name1 + test: {} + anotherList: + - name: name2 + test2: {} +` + const luaAction = ` +table.remove(obj.spec.containers) +return obj +` + testObj := StrToUnstructured(obj) + expectedObj := StrToUnstructured(expected) + vm := VM{} + newObjects, err := vm.ExecuteResourceAction(testObj, luaAction, nil) + require.NoError(t, err) + assert.Len(t, newObjects, 1) + assert.Equal(t, newObjects[0].K8SOperation, K8SOperation("patch")) + assert.Equal(t, expectedObj, newObjects[0].UnstructuredObj) + }) } func TestGetResourceHealth(t *testing.T) {