Mirror of https://github.com/argoproj/argo-cd.git, synced 2026-02-20 17:48:47 +01:00

Compare commits (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | ec77e6105b |  |

@@ -11,17 +11,3 @@ cmd/**/debug
debug.test
coverage.out
ui/node_modules/
test-results/
test/
manifests/
hack/
docs/
examples/
.github/
!test/fixture
!test/container
!hack/installers
!hack/gpg-wrapper.sh
!hack/git-verify-wrapper.sh
!hack/tool-versions.sh
!hack/install.sh

.github/ISSUE_TEMPLATE/release.md (vendored): 32 changed lines
@@ -1,32 +0,0 @@
---
name: Argo CD Release
about: Used by our Release Champion to track progress of a minor release
title: 'Argo CD Release vX.X'
labels: 'release'
assignees: ''
---

Target RC1 date: ___. __, ____
Target GA date: ___. __, ____

- [ ] Create new section in the [Release Planning doc](https://docs.google.com/document/d/1trJIomcgXcfvLw0aYnERrFWfPjQOfYMDJOCh1S8nMBc/edit?usp=sharing)
- [ ] Schedule a Release Planning meeting roughly two weeks before the scheduled Release freeze date by adding it to the community calendar (or delegate this task to someone with write access to the community calendar)
- [ ] Include Zoom link in the invite
- [ ] Post in #argo-cd and #argo-contributors one week before the meeting
- [ ] Post again one hour before the meeting
- [ ] At the meeting, remove issues/PRs from the project's column for that release which have not been “claimed” by at least one Approver (add it to the next column if Approver requests that)
- [ ] 1wk before feature freeze post in #argo-contributors that PRs must be merged by DD-MM-YYYY to be included in the release - ask approvers to drop items from milestone they can’t merge
- [ ] At least two days before RC1 date, draft RC blog post and submit it for review (or delegate this task)
- [ ] Cut RC1 (or delegate this task to an Approver and coordinate timing)
- [ ] Create new release branch
- [ ] Add the release branch to ReadTheDocs
- [ ] Confirm that tweet and blog post are ready
- [ ] Trigger the release
- [ ] After the release is finished, publish tweet and blog post
- [ ] Post in #argo-cd and #argo-announcements with lots of emojis announcing the release and requesting help testing
- [ ] Monitor support channels for issues, cherry-picking bugfixes and docs fixes as appropriate (or delegate this task to an Approver and coordinate timing)
- [ ] At release date, evaluate if any bugs justify delaying the release. If not, cut the release (or delegate this task to an Approver and coordinate timing)
- [ ] If unreleased changes are on the release branch for {current minor version minus 3}, cut a final patch release for that series (or delegate this task to an Approver and coordinate timing)
- [ ] After the release, post in #argo-cd that the {current minor version minus 3} has reached EOL (example: https://cloud-native.slack.com/archives/C01TSERG0KZ/p1667336234059729)
- [ ] (For the next release champion) Review the [items scheduled for the next release](https://github.com/orgs/argoproj/projects/25). If any item does not have an assignee who can commit to finish the feature, move it to the next release.
- [ ] (For the next release champion) Schedule a time mid-way through the release cycle to review items again.

.github/cherry-pick-bot.yml (vendored): 3 changed lines
@@ -1,3 +0,0 @@
enabled: true
preservePullRequestTitle: true

.github/dependabot.yml (vendored): 43 changed lines
@@ -1,43 +0,0 @@
version: 2
updates:
- package-ecosystem: "gomod"
directory: "/"
schedule:
interval: "daily"
ignore:
- dependency-name: k8s.io/*

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

- package-ecosystem: "npm"
directory: "/ui/"
schedule:
interval: "daily"

- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "daily"

- package-ecosystem: "docker"
directory: "/test/container/"
schedule:
interval: "daily"

- package-ecosystem: "docker"
directory: "/test/e2e/multiarch-container/"
schedule:
interval: "daily"

- package-ecosystem: "docker"
directory: "/test/remote/"
schedule:
interval: "daily"

- package-ecosystem: "docker"
directory: "/ui-test/"
schedule:
interval: "daily"

.github/pr-title-checker-config.json (vendored): 15 changed lines
@@ -1,15 +0,0 @@
{
"LABEL": {
"name": "title needs formatting",
"color": "EEEEEE"
},
"CHECKS": {
"prefixes": ["[Bot] docs: "],
"regexp": "^(feat|fix|docs|test|ci|chore)!?(\\(.*\\))?!?:.*"
},
"MESSAGES": {
"success": "PR title is valid",
"failure": "PR title is invalid",
"notice": "PR Title needs to pass regex '^(feat|fix|docs|test|ci|chore)!?(\\(.*\\))?!?:.*"
}
}
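
For reference, the "regexp" in the CHECKS block above accepts conventional-commit style titles: an illustrative title such as "fix(appset): handle nil pointer" or "docs: clarify sync options" would pass, while one without a listed prefix (for example, "Update README") would receive the "title needs formatting" label. Both sample titles are made up for illustration.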

.github/pull_request_template.md (vendored): 10 changed lines
@@ -1,23 +1,17 @@
<!--
Note on DCO:

If the DCO action in the integration test fails, one or more of your commits are not signed off. Please click on the *Details* link next to the DCO action for instructions on how to resolve this.
-->

Checklist:

* [ ] Either (a) I've created an [enhancement proposal](https://github.com/argoproj/argo-cd/issues/new/choose) and discussed it with the community, (b) this is a bug fix, or (c) this does not need to be in the release notes.
* [ ] The title of the PR states what changed and the related issues number (used for the release note).
* [ ] The title of the PR conforms to the [Toolchain Guide](https://argo-cd.readthedocs.io/en/latest/developer-guide/toolchain-guide/#title-of-the-pr)
* [ ] I've included "Closes [ISSUE #]" or "Fixes [ISSUE #]" in the description to automatically close the associated issue.
* [ ] I've updated both the CLI and UI to expose my feature, or I plan to submit a second PR with them.
* [ ] Does this PR require documentation updates?
* [ ] I've updated documentation as required by this PR.
* [ ] Optional. My organization is added to USERS.md.
* [ ] I have signed off all my commits as required by [DCO](https://github.com/argoproj/argoproj/blob/master/community/CONTRIBUTING.md#legal)
* [ ] I have signed off all my commits as required by [DCO](https://github.com/argoproj/argoproj/tree/master/community#contributing-to-argo)
* [ ] I have written unit and/or e2e tests for my change. PRs without these are unlikely to be merged.
* [ ] My build is green ([troubleshooting builds](https://argo-cd.readthedocs.io/en/latest/developer-guide/ci/)).
* [ ] My new feature complies with the [feature status](https://github.com/argoproj/argoproj/blob/master/community/feature-status.md) guidelines.
* [ ] I have added a brief description of why this PR is necessary and/or what this PR solves.
* [ ] My build is green ([troubleshooting builds](https://argo-cd.readthedocs.io/en/latest/developer-guide/ci/)).

<!-- Please see [Contribution FAQs](https://argo-cd.readthedocs.io/en/latest/developer-guide/faq/) if you have questions about your pull-request. -->

.github/workflows/README.md (vendored): 38 changed lines
@@ -1,38 +0,0 @@
# Workflows

| Workflow | Description |
|--------------------|----------------------------------------------------------------|
| ci-build.yaml | Build, lint, test, codegen, build-ui, analyze, e2e-test |
| codeql.yaml | CodeQL analysis |
| image-reuse.yaml | Build, push, and Sign container images |
| image.yaml | Build container image for PR's & publish for push events |
| pr-title-check.yaml| Lint PR for semantic information |
| init-release.yaml | Build manifests and version then create a PR for release branch|
| release.yaml | Build images, cli-binaries, provenances, and post actions |
| update-snyk.yaml | Scheduled snyk reports |

# Reusable workflows

## image-reuse.yaml

- The reusable workflow can be used to publish or build images with multiple container registries (Quay, GHCR, dockerhub), and then sign them with cosign when an image is published.
- A GO version `must` be specified e.g. 1.21
- The image name for each registry *must* contain the tag. Note: multiple tags are allowed for each registry using a CSV type.
- Multiple platforms can be specified e.g. linux/amd64,linux/arm64
- Images are not published by default. A boolean value must be set to `true` to push images.
- An optional target can be specified.

| Inputs | Description | Type | Required | Defaults |
|-------------------|-------------------------------------|-------------|----------|-----------------|
| go-version | Version of Go to be used | string | true | none |
| quay_image_name | Full image name and tag | CSV, string | false | none |
| ghcr_image_name | Full image name and tag | CSV, string | false | none |
| docker_image_name | Full image name and tag | CSV, string | false | none |
| platforms | Platforms to build (linux/amd64) | CSV, string | false | linux/amd64 |
| push | Whether to push image/s to registry | boolean | false | false |
| target | Target build stage | string | false | none |

| Outputs | Description | Type |
|-------------|------------------------------------------|-------|
|image-digest | Image digest of image container created | string|
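
The README bullets and input table above describe how image-reuse.yaml is driven. As a minimal sketch of a caller (inputs taken from the table above; the workflow name and the ghcr image name/tag below are illustrative, not from this diff), it is invoked through `workflow_call`:

```yaml
# Hypothetical caller of the reusable workflow documented above.
# The image name and tag are made-up examples.
name: example-image-build
on: [push]
jobs:
  build-image:
    permissions:
      contents: read
      packages: write   # only needed when pushing to ghcr.io
      id-token: write   # needed for keyless signing when push is true
    uses: ./.github/workflows/image-reuse.yaml
    with:
      go-version: 1.21
      ghcr_image_name: ghcr.io/example-org/example-image:v0.0.1
      platforms: linux/amd64,linux/arm64
      push: false       # build only; set to true to publish and sign
    secrets:
      ghcr_username: ${{ github.actor }}
      ghcr_password: ${{ secrets.GITHUB_TOKEN }}
```

The image.yaml and release.yaml diffs further down show the repository's actual callers, which wire the same inputs to Quay and GHCR.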

.github/workflows/ci-build.yaml (vendored): 92 changed lines
@@ -9,11 +9,10 @@ on:
pull_request:
branches:
- 'master'
- 'release-*'

env:
# Golang version to use across CI steps
GOLANG_VERSION: '1.21'
GOLANG_VERSION: '1.18'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -28,9 +27,9 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Download all Go modules
@@ -46,13 +45,13 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Restore go build cache
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ~/.cache/go-build
key: ${{ runner.os }}-go-build-v1-${{ github.run_id }}
@@ -70,16 +69,16 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Run golangci-lint
uses: golangci/golangci-lint-action@3a919529898de77ec3da873e3063ca4b10e7f5cc # v3.7.0
uses: golangci/golangci-lint-action@v3
with:
version: v1.54.0
args: --enable gofmt --timeout 10m --exclude SA5011 --verbose --max-issues-per-linter 0 --max-same-issues 0
version: v1.46.2
args: --timeout 10m --exclude SA5011 --verbose

test-go:
name: Run unit tests for Go packages
@@ -93,11 +92,11 @@ jobs:
- name: Create checkout directory
run: mkdir -p ~/go/src/github.com/argoproj
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Create symlink in GOPATH
run: ln -s $(pwd) ~/go/src/github.com/argoproj/argo-cd
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Install required packages
@@ -117,17 +116,13 @@ jobs:
run: |
echo "/usr/local/bin" >> $GITHUB_PATH
- name: Restore go build cache
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ~/.cache/go-build
key: ${{ runner.os }}-go-build-v1-${{ github.run_id }}
- name: Install all tools required for building & testing
run: |
make install-test-tools-local
# We install kustomize in the dist directory
- name: Add dist to PATH
run: |
echo "/home/runner/work/argo-cd/argo-cd/dist" >> $GITHUB_PATH
- name: Setup git username and email
run: |
git config --global user.name "John Doe"
@@ -138,18 +133,18 @@ jobs:
- name: Run all unit tests
run: make test-local
- name: Generate code coverage artifacts
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
uses: actions/upload-artifact@v2
with:
name: code-coverage
path: coverage.out
- name: Generate test results artifacts
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
uses: actions/upload-artifact@v2
with:
name: test-results
path: test-results/

test-go-race:
name: Run unit tests with -race for Go packages
name: Run unit tests with -race, for Go packages
runs-on: ubuntu-22.04
needs:
- build-go
@@ -160,11 +155,11 @@ jobs:
- name: Create checkout directory
run: mkdir -p ~/go/src/github.com/argoproj
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Create symlink in GOPATH
run: ln -s $(pwd) ~/go/src/github.com/argoproj/argo-cd
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Install required packages
@@ -184,17 +179,13 @@ jobs:
run: |
echo "/usr/local/bin" >> $GITHUB_PATH
- name: Restore go build cache
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ~/.cache/go-build
key: ${{ runner.os }}-go-build-v1-${{ github.run_id }}
- name: Install all tools required for building & testing
run: |
make install-test-tools-local
# We install kustomize in the dist directory
- name: Add dist to PATH
run: |
echo "/home/runner/work/argo-cd/argo-cd/dist" >> $GITHUB_PATH
- name: Setup git username and email
run: |
git config --global user.name "John Doe"
@@ -205,7 +196,7 @@ jobs:
- name: Run all unit tests
run: make test-race-local
- name: Generate test results artifacts
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
uses: actions/upload-artifact@v2
with:
name: race-results
path: test-results/
@@ -215,9 +206,9 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: Create symlink in GOPATH
@@ -241,10 +232,6 @@ jobs:
make install-codegen-tools-local
make install-go-tools-local
working-directory: /home/runner/go/src/github.com/argoproj/argo-cd
# We install kustomize in the dist directory
- name: Add dist to PATH
run: |
echo "/home/runner/work/argo-cd/argo-cd/dist" >> $GITHUB_PATH
- name: Run codegen
run: |
set -x
@@ -263,14 +250,14 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup NodeJS
uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1
uses: actions/setup-node@v1
with:
node-version: '20.7.0'
node-version: '12.18.4'
- name: Restore node dependency cache
id: cache-dependencies
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ui/node_modules
key: ${{ runner.os }}-node-dep-v2-${{ hashFiles('**/yarn.lock') }}
@@ -300,12 +287,12 @@ jobs:
sonar_secret: ${{ secrets.SONAR_TOKEN }}
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Restore node dependency cache
id: cache-dependencies
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ui/node_modules
key: ${{ runner.os }}-node-dep-v2-${{ hashFiles('**/yarn.lock') }}
@@ -316,16 +303,16 @@ jobs:
run: |
mkdir -p test-results
- name: Get code coverage artifiact
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2
uses: actions/download-artifact@v2
with:
name: code-coverage
- name: Get test result artifact
uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2
uses: actions/download-artifact@v2
with:
name: test-results
path: test-results
- name: Upload code coverage information to codecov.io
uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4
uses: codecov/codecov-action@v1
with:
file: coverage.out
- name: Perform static code analysis using SonarCloud
@@ -361,7 +348,7 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
k3s-version: [v1.28.2, v1.27.6, v1.26.9, v1.25.14]
k3s-version: [v1.24.3, v1.23.3, v1.22.6]
needs:
- build-go
env:
@@ -379,9 +366,9 @@ jobs:
GITLAB_TOKEN: ${{ secrets.E2E_TEST_GITLAB_TOKEN }}
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3
- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- name: GH actions workaround - Kill XSP4 process
@@ -397,10 +384,9 @@ jobs:
sudo mkdir -p $HOME/.kube && sudo chown -R runner $HOME/.kube
sudo k3s kubectl config view --raw > $HOME/.kube/config
sudo chown runner $HOME/.kube/config
sudo chmod go-r $HOME/.kube/config
kubectl version
- name: Restore go build cache
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
uses: actions/cache@v1
with:
path: ~/.cache/go-build
key: ${{ runner.os }}-go-build-v1-${{ github.run_id }}
@@ -426,9 +412,9 @@ jobs:
git config --global user.email "john.doe@example.com"
- name: Pull Docker image required for tests
run: |
docker pull ghcr.io/dexidp/dex:v2.37.0
docker pull ghcr.io/dexidp/dex:v2.35.3-distroless
docker pull argoproj/argo-cd-ci-builder:v1.0.0
docker pull redis:7.0.11-alpine
docker pull redis:7.0.5-alpine
- name: Create target directory for binaries in the build-process
run: |
mkdir -p dist
@@ -456,7 +442,7 @@ jobs:
set -x
make test-e2e-local
- name: Upload e2e-server logs
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
uses: actions/upload-artifact@v2
with:
name: e2e-server-k8s${{ matrix.k3s-version }}.log
path: /tmp/e2e-server.log

.github/workflows/codeql.yml (vendored): 9 changed lines
@@ -5,7 +5,6 @@ on:
# Secrets aren't available for dependabot on push. https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/troubleshooting-the-codeql-workflow#error-403-resource-not-accessible-by-integration-when-using-dependabot
branches-ignore:
- 'dependabot/**'
- 'cherry-pick-*'
pull_request:
schedule:
- cron: '0 19 * * 0'
@@ -30,11 +29,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@8aff97f12c99086bdb92ff62ae06dbbcdf07941b # v2.1.33
uses: github/codeql-action/init@v2
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -42,7 +41,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@8aff97f12c99086bdb92ff62ae06dbbcdf07941b # v2.1.33
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -56,4 +55,4 @@ jobs:
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@8aff97f12c99086bdb92ff62ae06dbbcdf07941b # v2.1.33
uses: github/codeql-action/analyze@v2

.github/workflows/image-reuse.yaml (vendored): 173 changed lines
@@ -1,173 +0,0 @@
name: Publish and Sign Container Image
on:
workflow_call:
inputs:
go-version:
required: true
type: string
quay_image_name:
required: false
type: string
ghcr_image_name:
required: false
type: string
docker_image_name:
required: false
type: string
platforms:
required: true
type: string
default: linux/amd64
push:
required: true
type: boolean
default: false
target:
required: false
type: string

secrets:
quay_username:
required: false
quay_password:
required: false
ghcr_username:
required: false
ghcr_password:
required: false
docker_username:
required: false
docker_password:
required: false

outputs:
image-digest:
description: "sha256 digest of container image"
value: ${{ jobs.publish.outputs.image-digest }}

permissions: {}

jobs:
publish:
permissions:
contents: read
packages: write # Used to push images to `ghcr.io` if used.
id-token: write # Needed to create an OIDC token for keyless signing
runs-on: ubuntu-22.04
outputs:
image-digest: ${{ steps.image.outputs.digest }}
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
if: ${{ github.ref_type == 'tag'}}

- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
if: ${{ github.ref_type != 'tag'}}

- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0
with:
go-version: ${{ inputs.go-version }}

- name: Install cosign
uses: sigstore/cosign-installer@11086d25041f77fe8fe7b9ea4e48e3b9192b8f19 # v3.1.2
with:
cosign-release: 'v2.0.2'

- uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.2.0
- uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0

- name: Setup tags for container image as a CSV type
run: |
IMAGE_TAGS=$(for str in \
${{ inputs.quay_image_name }} \
${{ inputs.ghcr_image_name }} \
${{ inputs.docker_image_name}}; do
echo -n "${str}",;done | sed 's/,$//')

echo $IMAGE_TAGS
echo "TAGS=$IMAGE_TAGS" >> $GITHUB_ENV

- name: Setup image namespace for signing, strip off the tag
run: |
TAGS=$(for tag in \
${{ inputs.quay_image_name }} \
${{ inputs.ghcr_image_name }} \
${{ inputs.docker_image_name}}; do
echo -n "${tag}" | awk -F ":" '{print $1}' -;done)

echo $TAGS
echo 'SIGNING_TAGS<<EOF' >> $GITHUB_ENV
echo $TAGS >> $GITHUB_ENV
echo 'EOF' >> $GITHUB_ENV

- name: Login to Quay.io
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
with:
registry: quay.io
username: ${{ secrets.quay_username }}
password: ${{ secrets.quay_password }}
if: ${{ inputs.quay_image_name && inputs.push }}

- name: Login to GitHub Container Registry
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
with:
registry: ghcr.io
username: ${{ secrets.ghcr_username }}
password: ${{ secrets.ghcr_password }}
if: ${{ inputs.ghcr_image_name && inputs.push }}

- name: Login to dockerhub Container Registry
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
with:
username: ${{ secrets.docker_username }}
password: ${{ secrets.docker_password }}
if: ${{ inputs.docker_image_name && inputs.push }}

- name: Set up build args for container image
run: |
echo "GIT_TAG=$(if [ -z "`git status --porcelain`" ]; then git describe --exact-match --tags HEAD 2>/dev/null; fi)" >> $GITHUB_ENV
echo "GIT_COMMIT=$(git rev-parse HEAD)" >> $GITHUB_ENV
echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
echo "GIT_TREE_STATE=$(if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi)" >> $GITHUB_ENV

- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@4d9e71b726748f254fe64fa44d273194bd18ec91
with:
large-packages: false
docker-images: false
swap-storage: false
tool-cache: false

- name: Build and push container image
id: image
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 #v4.1.1
with:
context: .
platforms: ${{ inputs.platforms }}
push: ${{ inputs.push }}
tags: ${{ env.TAGS }}
target: ${{ inputs.target }}
provenance: false
sbom: false
build-args: |
GIT_TAG=${{env.GIT_TAG}}
GIT_COMMIT=${{env.GIT_COMMIT}}
BUILD_DATE=${{env.BUILD_DATE}}
GIT_TREE_STATE=${{env.GIT_TREE_STATE}}

- name: Sign container images
run: |
for signing_tag in $SIGNING_TAGS; do
cosign sign \
-a "repo=${{ github.repository }}" \
-a "workflow=${{ github.workflow }}" \
-a "sha=${{ github.sha }}" \
-y \
"$signing_tag"@${{ steps.image.outputs.digest }}
done
if: ${{ inputs.push }}

.github/workflows/image.yaml (vendored): 136 changed lines
@@ -9,109 +9,89 @@ on:
- master
types: [ labeled, unlabeled, opened, synchronize, reopened ]

env:
GOLANG_VERSION: '1.18'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}
permissions:
contents: read

jobs:
set-vars:
publish:
permissions:
contents: read
contents: write # for git to push upgrade commit if not already deployed
if: github.repository == 'argoproj/argo-cd'
runs-on: ubuntu-22.04
outputs:
image-tag: ${{ steps.image.outputs.tag}}
platforms: ${{ steps.platforms.outputs.platforms }}
env:
GOPATH: /home/runner/work/argo-cd/argo-cd
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
- uses: actions/setup-go@v3
with:
go-version: ${{ env.GOLANG_VERSION }}
- uses: actions/checkout@master
with:
path: src/github.com/argoproj/argo-cd

- name: Set image tag for ghcr
run: echo "tag=$(cat ./VERSION)-${GITHUB_SHA::8}" >> $GITHUB_OUTPUT
# get image tag
- run: echo ::set-output name=tag::$(cat ./VERSION)-${GITHUB_SHA::8}
working-directory: ./src/github.com/argoproj/argo-cd
id: image

- name: Determine image platforms to use
id: platforms
run: |
# login
- run: |
docker login ghcr.io --username $USERNAME --password $PASSWORD
docker login quay.io --username "${DOCKER_USERNAME}" --password "${DOCKER_TOKEN}"
if: github.event_name == 'push'
env:
USERNAME: ${{ secrets.USERNAME }}
PASSWORD: ${{ secrets.TOKEN }}
DOCKER_USERNAME: ${{ secrets.RELEASE_QUAY_USERNAME }}
DOCKER_TOKEN: ${{ secrets.RELEASE_QUAY_TOKEN }}

# build
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
- run: |
IMAGE_PLATFORMS=linux/amd64
if [[ "${{ github.event_name }}" == "push" || "${{ contains(github.event.pull_request.labels.*.name, 'test-multi-image') }}" == "true" ]]
if [[ "${{ github.event_name }}" == "push" || "${{ contains(github.event.pull_request.labels.*.name, 'test-arm-image') }}" == "true" ]]
then
IMAGE_PLATFORMS=linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
fi
echo "Building image for platforms: $IMAGE_PLATFORMS"
echo "platforms=$IMAGE_PLATFORMS" >> $GITHUB_OUTPUT
docker buildx build --platform $IMAGE_PLATFORMS --push="${{ github.event_name == 'push' }}" \
-t ghcr.io/argoproj/argocd:${{ steps.image.outputs.tag }} \
-t quay.io/argoproj/argocd:latest .
working-directory: ./src/github.com/argoproj/argo-cd

build-only:
needs: [set-vars]
permissions:
contents: read
packages: write # for pushing packages to GHCR, which is used by cd.apps.argoproj.io to avoid polluting Quay with tags
id-token: write # for creating OIDC tokens for signing.
if: ${{ github.repository == 'argoproj/argo-cd' && github.event_name != 'push' }}
uses: ./.github/workflows/image-reuse.yaml
with:
# Note: cannot use env variables to set go-version (https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations)
go-version: 1.21
platforms: ${{ needs.set-vars.outputs.platforms }}
push: false
# sign container images
- name: Install cosign
uses: sigstore/cosign-installer@main
with:
cosign-release: 'v1.13.0'

build-and-publish:
needs: [set-vars]
permissions:
contents: read
packages: write # for pushing packages to GHCR, which is used by cd.apps.argoproj.io to avoid polluting Quay with tags
id-token: write # for creating OIDC tokens for signing.
if: ${{ github.repository == 'argoproj/argo-cd' && github.event_name == 'push' }}
uses: ./.github/workflows/image-reuse.yaml
with:
quay_image_name: quay.io/argoproj/argocd:latest
ghcr_image_name: ghcr.io/argoproj/argo-cd/argocd:${{ needs.set-vars.outputs.image-tag }}
# Note: cannot use env variables to set go-version (https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations)
go-version: 1.21
platforms: ${{ needs.set-vars.outputs.platforms }}
push: true
secrets:
quay_username: ${{ secrets.RELEASE_QUAY_USERNAME }}
quay_password: ${{ secrets.RELEASE_QUAY_TOKEN }}
ghcr_username: ${{ github.actor }}
ghcr_password: ${{ secrets.GITHUB_TOKEN }}
- name: Sign Argo CD latest image
run: |
cosign sign --key env://COSIGN_PRIVATE_KEY quay.io/argoproj/argocd:latest
# Displays the public key to share.
cosign public-key --key env://COSIGN_PRIVATE_KEY
env:
COSIGN_PRIVATE_KEY: ${{secrets.COSIGN_PRIVATE_KEY}}
COSIGN_PASSWORD: ${{secrets.COSIGN_PASSWORD}}
if: ${{ github.event_name == 'push' }}

build-and-publish-provenance: # Push attestations to GHCR, latest image is polluting quay.io
needs:
- build-and-publish
permissions:
actions: read # for detecting the Github Actions environment.
id-token: write # for creating OIDC tokens for signing.
packages: write # for uploading attestations. (https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#known-issues)
if: ${{ github.repository == 'argoproj/argo-cd' && github.event_name == 'push' }}
# Must be refernced by a tag. https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#referencing-the-slsa-generator
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v1.7.0
with:
image: ghcr.io/argoproj/argo-cd/argocd
digest: ${{ needs.build-and-publish.outputs.image-digest }}
registry-username: ${{ github.actor }}
secrets:
registry-password: ${{ secrets.GITHUB_TOKEN }}

Deploy:
needs:
- build-and-publish
- set-vars
permissions:
contents: write # for git to push upgrade commit if not already deployed
packages: write # for pushing packages to GHCR, which is used by cd.apps.argoproj.io to avoid polluting Quay with tags
if: ${{ github.repository == 'argoproj/argo-cd' && github.event_name == 'push' }}
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
# deploy
- run: git clone "https://$TOKEN@github.com/argoproj/argoproj-deployments"
if: github.event_name == 'push'
env:
TOKEN: ${{ secrets.TOKEN }}
- run: |
docker run -u $(id -u):$(id -g) -v $(pwd):/src -w /src --rm -t ghcr.io/argoproj/argo-cd/argocd:${{ needs.set-vars.outputs.image-tag }} kustomize edit set image quay.io/argoproj/argocd=ghcr.io/argoproj/argo-cd/argocd:${{ needs.set-vars.outputs.image-tag }}
docker run -u $(id -u):$(id -g) -v $(pwd):/src -w /src --rm -t ghcr.io/argoproj/argocd:${{ steps.image.outputs.tag }} kustomize edit set image quay.io/argoproj/argocd=ghcr.io/argoproj/argocd:${{ steps.image.outputs.tag }}
git config --global user.email 'ci@argoproj.com'
git config --global user.name 'CI'
git diff --exit-code && echo 'Already deployed' || (git commit -am 'Upgrade argocd to ${{ needs.set-vars.outputs.image-tag }}' && git push)
git diff --exit-code && echo 'Already deployed' || (git commit -am 'Upgrade argocd to ${{ steps.image.outputs.tag }}' && git push)
if: github.event_name == 'push'
working-directory: argoproj-deployments/argocd

# TODO: clean up old images once github supports it: https://github.community/t5/How-to-use-Git-and-GitHub/Deleting-images-from-GitHub-Package-Registry/m-p/41202/thread-id/9811

.github/workflows/init-release.yaml (vendored): 77 changed lines
@@ -1,77 +0,0 @@
name: Init ArgoCD Release
on:
workflow_dispatch:
inputs:
TARGET_BRANCH:
description: 'TARGET_BRANCH to checkout (e.g. release-2.5)'
required: true
type: string

TARGET_VERSION:
description: 'TARGET_VERSION to build manifests (e.g. 2.5.0-rc1) Note: the `v` prefix is not used'
required: true
type: string

permissions: {}

jobs:
prepare-release:
permissions:
contents: write # for peter-evans/create-pull-request to create branch
pull-requests: write # for peter-evans/create-pull-request to create a PR
name: Automatically generate version and manifests on ${{ inputs.TARGET_BRANCH }}
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
ref: ${{ inputs.TARGET_BRANCH }}

- name: Check if TARGET_VERSION is well formed.
run: |
set -xue
# Target version must not contain 'v' prefix
if echo "${{ inputs.TARGET_VERSION }}" | grep -e '^v'; then
echo "::error::Target version '${{ inputs.TARGET_VERSION }}' should not begin with a 'v' prefix, refusing to continue." >&2
exit 1
fi

- name: Create VERSION information
run: |
set -ue
echo "Bumping version from $(cat VERSION) to ${{ inputs.TARGET_VERSION }}"
echo "${{ inputs.TARGET_VERSION }}" > VERSION

# We install kustomize in the dist directory
- name: Add dist to PATH
run: |
echo "/home/runner/work/argo-cd/argo-cd/dist" >> $GITHUB_PATH

- name: Generate new set of manifests
run: |
set -ue
make install-codegen-tools-local
make manifests-local VERSION=${{ inputs.TARGET_VERSION }}
git diff

- name: Generate version compatibility table
run: |
git stash
bash hack/update-supported-versions.sh
git add -u .
git stash pop

- name: Create pull request
uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # v5.0.2
with:
commit-message: "Bump version to ${{ inputs.TARGET_VERSION }}"
title: "Bump version to ${{ inputs.TARGET_VERSION }} on ${{ inputs.TARGET_BRANCH }} branch"
body: Updating VERSION and manifests to ${{ inputs.TARGET_VERSION }}
branch: update-version
branch-suffix: random
signoff: true
labels: release

.github/workflows/pr-title-check.yml (vendored): 29 changed lines
@@ -1,29 +0,0 @@
name: "Lint PR"

on:
pull_request_target:
types: [opened, edited, reopened, synchronize]

# IMPORTANT: No checkout actions, scripts, or builds should be added to this workflow. Permissions should always be used
# with extreme caution. https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target
permissions: {}

# PR updates can happen in quick succession leading to this
# workflow being trigger a number of times. This limits it
# to one run per PR.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}

jobs:
validate:
permissions:
contents: read
pull-requests: read
name: Validate PR Title
runs-on: ubuntu-latest
steps:
- uses: thehanimo/pr-title-checker@0cf5902181e78341bb97bb06646396e5bd354b3f # v1.4.0
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
configuration_path: ".github/pr-title-checker-config.json"
486
.github/workflows/release.yaml
vendored
486
.github/workflows/release.yaml
vendored
@@ -1,153 +1,259 @@
|
||||
name: Publish ArgoCD Release
|
||||
name: Create ArgoCD release
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
- '!v2.4*'
|
||||
- '!v2.5*'
|
||||
- '!v2.6*'
|
||||
|
||||
permissions: {}
|
||||
- "release-v*"
|
||||
- "!release-v1.5*"
|
||||
- "!release-v1.4*"
|
||||
- "!release-v1.3*"
|
||||
- "!release-v1.2*"
|
||||
- "!release-v1.1*"
|
||||
- "!release-v1.0*"
|
||||
- "!release-v0*"
|
||||
|
||||
env:
|
||||
GOLANG_VERSION: '1.21' # Note: go-version must also be set in job argocd-image.with.go-version
|
||||
GOLANG_VERSION: '1.18'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
argocd-image:
|
||||
prepare-release:
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write # for creating OIDC tokens for signing.
|
||||
packages: write # used to push images to `ghcr.io` if used.
|
||||
if: github.repository == 'argoproj/argo-cd'
|
||||
uses: ./.github/workflows/image-reuse.yaml
|
||||
with:
|
||||
quay_image_name: quay.io/argoproj/argocd:${{ github.ref_name }}
|
||||
# Note: cannot use env variables to set go-version (https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations)
|
||||
go-version: 1.21
|
||||
platforms: linux/amd64,linux/arm64,linux/s390x,linux/ppc64le
|
||||
push: true
|
||||
secrets:
|
||||
quay_username: ${{ secrets.RELEASE_QUAY_USERNAME }}
|
||||
quay_password: ${{ secrets.RELEASE_QUAY_TOKEN }}
|
||||
|
||||
argocd-image-provenance:
|
||||
needs: [argocd-image]
|
||||
permissions:
|
||||
actions: read # for detecting the Github Actions environment.
|
||||
id-token: write # for creating OIDC tokens for signing.
|
||||
packages: write # for uploading attestations. (https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#known-issues)
|
||||
# Must be refernced by a tag. https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#referencing-the-slsa-generator
|
||||
if: github.repository == 'argoproj/argo-cd'
|
||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v1.9.0
|
||||
with:
|
||||
image: quay.io/argoproj/argocd
|
||||
digest: ${{ needs.argocd-image.outputs.image-digest }}
|
||||
secrets:
|
||||
registry-username: ${{ secrets.RELEASE_QUAY_USERNAME }}
|
||||
registry-password: ${{ secrets.RELEASE_QUAY_TOKEN }}
|
||||
|
||||
goreleaser:
|
||||
needs:
|
||||
- argocd-image
|
||||
- argocd-image-provenance
|
||||
permissions:
|
||||
contents: write # used for uploading assets
|
||||
contents: write # To push changes to release branch
|
||||
name: Perform automatic release on trigger ${{ github.ref }}
|
||||
if: github.repository == 'argoproj/argo-cd'
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
hashes: ${{ steps.hash.outputs.hashes }}
|
||||
|
||||
env:
|
||||
# The name of the tag as supplied by the GitHub event
|
||||
SOURCE_TAG: ${{ github.ref }}
|
||||
# The image namespace where Docker image will be published to
|
||||
IMAGE_NAMESPACE: quay.io/argoproj
|
||||
# Whether to create & push image and release assets
|
||||
DRY_RUN: false
|
||||
# Whether a draft release should be created, instead of public one
|
||||
DRAFT_RELEASE: false
|
||||
# Whether to update homebrew with this release as well
|
||||
# Set RELEASE_HOMEBREW_TOKEN secret in repository for this to work - needs
|
||||
# access to public repositories
|
||||
UPDATE_HOMEBREW: false
|
||||
# Name of the GitHub user for Git config
|
||||
GIT_USERNAME: argo-bot
|
||||
# E-Mail of the GitHub user for Git config
|
||||
GIT_EMAIL: argoproj@gmail.com
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Fetch all tags
|
||||
run: git fetch --force --tags
|
||||
|
||||
- name: Set GORELEASER_PREVIOUS_TAG # Workaround, GoReleaser uses 'git-describe' to determine a previous tag. Our tags are created in realease branches.
|
||||
- name: Check if the published tag is well formed and setup vars
|
||||
run: |
|
||||
set -xue
|
||||
if echo ${{ github.ref_name }} | grep -E -- '-rc1+$';then
|
||||
echo "GORELEASER_PREVIOUS_TAG=$(git -c 'versionsort.suffix=-rc' tag --list --sort=version:refname | tail -n 2 | head -n 1)" >> $GITHUB_ENV
|
||||
else
|
||||
echo "This is not the first release on the branch, Using GoReleaser defaults"
|
||||
# Target version must match major.minor.patch and optional -rcX suffix
|
||||
# where X must be a number.
|
||||
TARGET_VERSION=${SOURCE_TAG#*release-v}
|
||||
if ! echo "${TARGET_VERSION}" | egrep '^[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)*$'; then
|
||||
echo "::error::Target version '${TARGET_VERSION}' is malformed, refusing to continue." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Target branch is the release branch we're going to operate on
|
||||
# Its name is 'release-<major>.<minor>'
|
||||
TARGET_BRANCH="release-${TARGET_VERSION%\.[0-9]*}"
|
||||
|
||||
# The release tag is the source tag, minus the release- prefix
|
||||
RELEASE_TAG="${SOURCE_TAG#*release-}"
|
||||
|
||||
# Whether this is a pre-release (indicated by -rc suffix)
|
||||
PRE_RELEASE=false
|
||||
if echo "${RELEASE_TAG}" | egrep -- '-rc[0-9]+$'; then
|
||||
PRE_RELEASE=true
|
||||
fi
|
||||
|
||||
# We must not have a release trigger within the same release branch,
|
||||
# because that means a release for this branch is already running.
|
||||
if git tag -l | grep "release-v${TARGET_VERSION%\.[0-9]*}" | grep -v "release-v${TARGET_VERSION}"; then
|
||||
echo "::error::Another release for branch ${TARGET_BRANCH} is currently in progress."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ensure that release do not yet exist
|
||||
if git rev-parse ${RELEASE_TAG}; then
|
||||
echo "::error::Release tag ${RELEASE_TAG} already exists in repository. Refusing to continue."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Make the variables available in follow-up steps
|
||||
echo "TARGET_VERSION=${TARGET_VERSION}" >> $GITHUB_ENV
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> $GITHUB_ENV
|
||||
echo "RELEASE_TAG=${RELEASE_TAG}" >> $GITHUB_ENV
|
||||
echo "PRE_RELEASE=${PRE_RELEASE}" >> $GITHUB_ENV
|
||||
|
||||
- name: Check if our release tag has a correct annotation
|
||||
run: |
|
||||
set -ue
|
||||
# Fetch all tag information as well
|
||||
git fetch --prune --tags --force
|
||||
|
||||
echo "=========== BEGIN COMMIT MESSAGE ============="
|
||||
git show ${SOURCE_TAG}
|
||||
echo "============ END COMMIT MESSAGE =============="
|
||||
|
||||
# Quite dirty hack to get the release notes from the annotated tag
|
||||
# into a temporary file.
|
||||
RELEASE_NOTES=$(mktemp -p /tmp release-notes.XXXXXX)
|
||||
|
||||
prefix=true
|
||||
begin=false
|
||||
git show ${SOURCE_TAG} | while read line; do
|
||||
# Whatever is in commit history for the tag, we only want that
|
||||
# annotation from our tag. We discard everything else.
|
||||
if test "$begin" = "false"; then
|
||||
if echo "$line" | grep -q "tag ${SOURCE_TAG#refs/tags/}"; then begin="true"; fi
|
||||
continue
|
||||
fi
|
||||
if test "$prefix" = "true"; then
|
||||
if test -z "$line"; then prefix=false; fi
|
||||
else
|
||||
if echo "$line" | egrep -q '^commit [0-9a-f]+'; then
|
||||
break
|
||||
fi
|
||||
echo "$line" >> ${RELEASE_NOTES}
|
||||
fi
|
||||
done
|
||||
|
||||
# For debug purposes
|
||||
echo "============BEGIN RELEASE NOTES================="
|
||||
cat ${RELEASE_NOTES}
|
||||
echo "=============END RELEASE NOTES=================="
|
||||
|
||||
# Too short release notes are suspicious. We need at least 100 bytes.
|
||||
relNoteLen=$(stat -c '%s' $RELEASE_NOTES)
|
||||
if test $relNoteLen -lt 100; then
|
||||
echo "::error::No release notes provided in tag annotation (or tag is not annotated)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for magic string '## Quick Start' in head of release notes
|
||||
if ! head -2 ${RELEASE_NOTES} | grep -iq '## Quick Start'; then
|
||||
echo "::error::Release notes seem invalid, quick start section not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# We store path to temporary release notes file for later reading, we
|
||||
# need it when creating release.
|
||||
echo "RELEASE_NOTES=${RELEASE_NOTES}" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup Golang
|
||||
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.0.0
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ${{ env.GOLANG_VERSION }}
|
||||
|
||||
- name: Set environment variables for ldflags
|
||||
id: set_ldflag
|
||||
- name: Setup Git author information
|
||||
run: |
|
||||
echo "KUBECTL_VERSION=$(go list -m k8s.io/client-go | head -n 1 | rev | cut -d' ' -f1 | rev)" >> $GITHUB_ENV
|
||||
echo "GIT_TREE_STATE=$(if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi)" >> $GITHUB_ENV
|
||||
set -ue
|
||||
git config --global user.email "${GIT_EMAIL}"
|
||||
git config --global user.name "${GIT_USERNAME}"
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@7ec5c2b0c6cdda6e8bbb49444bc797dd33d74dd8 # v5.0.0
|
||||
id: run-goreleaser
|
||||
- name: Checkout corresponding release branch
|
||||
run: |
|
||||
set -ue
|
||||
echo "Switching to release branch '${TARGET_BRANCH}'"
|
||||
if ! git checkout ${TARGET_BRANCH}; then
|
||||
echo "::error::Checking out release branch '${TARGET_BRANCH}' for target version '${TARGET_VERSION}' (tagged '${RELEASE_TAG}') failed. Does it exist in repo?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create VERSION information
|
||||
run: |
|
||||
set -ue
|
||||
echo "Bumping version from $(cat VERSION) to ${TARGET_VERSION}"
|
||||
echo "${TARGET_VERSION}" > VERSION
|
||||
git commit -m "Bump version to ${TARGET_VERSION}" VERSION
|
||||
|
||||
- name: Generate new set of manifests
|
||||
run: |
|
||||
set -ue
|
||||
make install-codegen-tools-local
|
||||
make manifests-local VERSION=${TARGET_VERSION}
|
||||
git diff
|
||||
git commit manifests/ -m "Bump version to ${TARGET_VERSION}"
|
||||
|
||||
- name: Create the release tag
|
||||
run: |
|
||||
set -ue
|
||||
echo "Creating release ${RELEASE_TAG}"
|
||||
git tag ${RELEASE_TAG}
|
||||
|
||||
- name: Login to docker repositories
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.RELEASE_DOCKERHUB_USERNAME }}
|
||||
DOCKER_TOKEN: ${{ secrets.RELEASE_DOCKERHUB_TOKEN }}
|
||||
QUAY_USERNAME: ${{ secrets.RELEASE_QUAY_USERNAME }}
|
||||
QUAY_TOKEN: ${{ secrets.RELEASE_QUAY_TOKEN }}
|
||||
run: |
|
||||
set -ue
|
||||
docker login quay.io --username "${QUAY_USERNAME}" --password "${QUAY_TOKEN}"
|
||||
# Remove the following when Docker Hub is gone
|
||||
docker login --username "${DOCKER_USERNAME}" --password "${DOCKER_TOKEN}"
|
||||
if: ${{ env.DRY_RUN != 'true' }}
|
||||
|
||||
- uses: docker/setup-qemu-action@v2
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
- name: Build and push Docker image for release
|
||||
run: |
|
||||
set -ue
|
||||
git clean -fd
|
||||
mkdir -p dist/
|
||||
docker buildx build --platform linux/amd64,linux/arm64,linux/s390x,linux/ppc64le --push -t ${IMAGE_NAMESPACE}/argocd:v${TARGET_VERSION} -t argoproj/argocd:v${TARGET_VERSION} .
|
||||
make release-cli
|
||||
make checksums
|
||||
chmod +x ./dist/argocd-linux-amd64
|
||||
./dist/argocd-linux-amd64 version --client
|
||||
if: ${{ env.DRY_RUN != 'true' }}
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@main
|
||||
with:
|
||||
version: latest
|
||||
args: release --clean --timeout 55m
|
||||
cosign-release: 'v1.13.0'
|
||||
|
||||
- name: Sign Argo CD container images and assets
|
||||
run: |
|
||||
cosign sign --key env://COSIGN_PRIVATE_KEY ${IMAGE_NAMESPACE}/argocd:v${TARGET_VERSION}
|
||||
cosign sign-blob --key env://COSIGN_PRIVATE_KEY ./dist/argocd-${TARGET_VERSION}-checksums.txt > ./dist/argocd-${TARGET_VERSION}-checksums.sig
|
||||
# Retrieves the public key to release as an asset
|
||||
cosign public-key --key env://COSIGN_PRIVATE_KEY > ./dist/argocd-cosign.pub
|
||||
env:
|
||||
COSIGN_PRIVATE_KEY: ${{secrets.COSIGN_PRIVATE_KEY}}
|
||||
COSIGN_PASSWORD: ${{secrets.COSIGN_PASSWORD}}
|
||||
if: ${{ env.DRY_RUN != 'true' }}
|
||||
|
||||
- name: Read release notes file
|
||||
id: release-notes
|
||||
uses: juliangruber/read-file-action@v1
|
||||
with:
|
||||
path: ${{ env.RELEASE_NOTES }}
|
||||
|
||||
- name: Push changes to release branch
|
||||
run: |
|
||||
set -ue
|
||||
git push origin ${TARGET_BRANCH}
|
||||
git push origin ${RELEASE_TAG}
|
||||
|
||||
- name: Dry run GitHub release
|
||||
uses: actions/create-release@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
KUBECTL_VERSION: ${{ env.KUBECTL_VERSION }}
|
||||
GIT_TREE_STATE: ${{ env.GIT_TREE_STATE }}
|
||||
|
||||
- name: Generate subject for provenance
|
||||
id: hash
|
||||
env:
|
||||
ARTIFACTS: "${{ steps.run-goreleaser.outputs.artifacts }}"
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
hashes=$(echo $ARTIFACTS | jq --raw-output '.[] | {name, "digest": (.extra.Digest // .extra.Checksum)} | select(.digest) | {digest} + {name} | join(" ") | sub("^sha256:";"")' | base64 -w0)
|
||||
if test "$hashes" = ""; then # goreleaser < v1.13.0
|
||||
checksum_file=$(echo "$ARTIFACTS" | jq -r '.[] | select (.type=="Checksum") | .path')
|
||||
hashes=$(cat $checksum_file | base64 -w0)
|
||||
fi
|
||||
echo "hashes=$hashes" >> $GITHUB_OUTPUT
|
||||
|
||||
goreleaser-provenance:
|
||||
needs: [goreleaser]
|
||||
permissions:
|
||||
actions: read # for detecting the Github Actions environment
|
||||
id-token: write # Needed for provenance signing and ID
|
||||
contents: write # Needed for release uploads
|
||||
if: github.repository == 'argoproj/argo-cd'
|
||||
# Must be refernced by a tag. https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#referencing-the-slsa-generator
|
||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0
|
||||
with:
|
||||
base64-subjects: "${{ needs.goreleaser.outputs.hashes }}"
|
||||
provenance-name: "argocd-cli.intoto.jsonl"
|
||||
upload-assets: true

generate-sbom:
name: Create SBOM and generate hash
needs:
- argocd-image
- goreleaser
permissions:
contents: write # Needed for release uploads
outputs:
hashes: ${{ steps.sbom-hash.outputs.hashes}}
if: github.repository == 'argoproj/argo-cd'
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
id: create_release
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}

- name: Setup Golang
uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0
with:
go-version: ${{ env.GOLANG_VERSION }}
tag_name: ${{ env.RELEASE_TAG }}
release_name: ${{ env.RELEASE_TAG }}
draft: ${{ env.DRAFT_RELEASE }}
prerelease: ${{ env.PRE_RELEASE }}
body: ${{ steps.release-notes.outputs.content }}
if: ${{ env.DRY_RUN == 'true' }}

- name: Generate SBOM (spdx)
id: spdx-builder
@@ -158,9 +264,9 @@ jobs:
SIGS_BOM_VERSION: v0.2.1
# comma delimited list of project relative folders to inspect for package
# managers (gomod, yarn, npm).
PROJECT_FOLDERS: ".,./ui"
PROJECT_FOLDERS: ".,./ui"
# fully qualified name of the docker image to be inspected
DOCKER_IMAGE: quay.io/argoproj/argocd:${{ github.ref_name }}
DOCKER_IMAGE: ${{env.IMAGE_NAMESPACE}}/argocd:v${{env.TARGET_VERSION}}
run: |
yarn install --cwd ./ui
go install github.com/spdx/spdx-sbom-generator/cmd/generator@$SPDX_GEN_VERSION
@@ -178,116 +284,44 @@ jobs:
fi

cd /tmp && tar -zcf sbom.tar.gz *.spdx

- name: Generate SBOM hash
shell: bash
id: sbom-hash
if: ${{ env.DRY_RUN != 'true' }}

- name: Sign sbom
run: |
# sha256sum generates sha256 hash for sbom.
# base64 -w0 encodes to base64 and outputs on a single line.
# sha256sum /tmp/sbom.tar.gz ... | base64 -w0
echo "hashes=$(sha256sum /tmp/sbom.tar.gz | base64 -w0)" >> "$GITHUB_OUTPUT"

- name: Upload SBOM
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15
cosign sign-blob --key env://COSIGN_PRIVATE_KEY /tmp/sbom.tar.gz > /tmp/sbom.tar.gz.sig
env:
COSIGN_PRIVATE_KEY: ${{secrets.COSIGN_PRIVATE_KEY}}
COSIGN_PASSWORD: ${{secrets.COSIGN_PASSWORD}}
if: ${{ env.DRY_RUN != 'true' }}

- name: Create GitHub release
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
name: ${{ env.RELEASE_TAG }}
tag_name: ${{ env.RELEASE_TAG }}
draft: ${{ env.DRAFT_RELEASE }}
prerelease: ${{ env.PRE_RELEASE }}
generate_release_notes: true
body: ${{ steps.release-notes.outputs.content }} # Pre-pended to the generated notes
files: |
dist/argocd-*
/tmp/sbom.tar.gz
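The SBOM archive uploaded here pairs with the `sbom.tar.gz.sig` signature produced above, so it can be checked and inspected the same way as the CLI checksums. A short sketch, again assuming the release assets and `argocd-cosign.pub` sit in the current directory:

```shell
# Verify the SBOM archive against its cosign signature
cosign verify-blob --key argocd-cosign.pub --signature sbom.tar.gz.sig sbom.tar.gz

# List the SPDX documents bundled in the archive
tar -tzf sbom.tar.gz
```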

sbom-provenance:
needs: [generate-sbom]
permissions:
actions: read # for detecting the Github Actions environment
id-token: write # Needed for provenance signing and ID
contents: write # Needed for release uploads
if: github.repository == 'argoproj/argo-cd'
# Must be referenced by a tag. https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/container/README.md#referencing-the-slsa-generator
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0
with:
base64-subjects: "${{ needs.generate-sbom.outputs.hashes }}"
provenance-name: "argocd-sbom.intoto.jsonl"
upload-assets: true

post-release:
needs:
- argocd-image
- goreleaser
- generate-sbom
permissions:
contents: write # Needed to push commit to update stable tag
pull-requests: write # Needed to create PR for VERSION update.
if: github.repository == 'argoproj/argo-cd'
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
/tmp/sbom.tar.gz.sig
if: ${{ env.DRY_RUN != 'true' }}

- name: Setup Git author information
- name: Update homebrew formula
env:
HOMEBREW_TOKEN: ${{ secrets.RELEASE_HOMEBREW_TOKEN }}
uses: dawidd6/action-homebrew-bump-formula@v3
with:
token: ${{env.HOMEBREW_TOKEN}}
formula: argocd
if: ${{ env.HOMEBREW_TOKEN != '' && env.UPDATE_HOMEBREW == 'true' && env.PRE_RELEASE != 'true' }}

- name: Delete original request tag from repository
run: |
set -ue
git config --global user.email 'ci@argoproj.com'
git config --global user.name 'CI'

- name: Check if tag is the latest version and not a pre-release
run: |
set -xue
# Fetch all tag information
git fetch --prune --tags --force

LATEST_TAG=$(git -c 'versionsort.suffix=-rc' tag --list --sort=version:refname | tail -n1)

PRE_RELEASE=false
# Check if latest tag is a pre-release
if echo $LATEST_TAG | grep -E -- '-rc[0-9]+$';then
PRE_RELEASE=true
fi

# Ensure latest tag matches github.ref_name & not a pre-release
if [[ $LATEST_TAG == ${{ github.ref_name }} ]] && [[ $PRE_RELEASE != 'true' ]];then
echo "TAG_STABLE=true" >> $GITHUB_ENV
else
echo "TAG_STABLE=false" >> $GITHUB_ENV
fi
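The `versionsort.suffix=-rc` setting is what makes release candidates sort below their final release, so `tail -n1` really does pick the newest tag. A throwaway illustration in an empty repository; the path and tag values are placeholders:

```shell
# -rc tags sort before the final release of the same version
git init -q /tmp/versionsort-demo && cd /tmp/versionsort-demo
git -c user.name=demo -c user.email=demo@example.com commit --allow-empty -qm demo
git tag v2.8.4 && git tag v2.9.0-rc1 && git tag v2.9.0
git -c 'versionsort.suffix=-rc' tag --list --sort=version:refname
# prints: v2.8.4, v2.9.0-rc1, v2.9.0 (newest last)
```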

- name: Update stable tag to latest version
run: |
git tag -f stable ${{ github.ref_name }}
git push -f origin stable
if: ${{ env.TAG_STABLE == 'true' }}

- name: Check to see if VERSION should be updated on master branch
run: |
set -xue
SOURCE_TAG=${{ github.ref_name }}
VERSION_REF="${SOURCE_TAG#*v}"
if echo "$VERSION_REF" | grep -E -- '^[0-9]+\.[0-9]+\.0-rc1';then
VERSION=$(awk 'BEGIN {FS=OFS="."} {$2++; print}' <<< "${VERSION_REF%-rc1}")
echo "Updating VERSION to: $VERSION"
echo "UPDATE_VERSION=true" >> $GITHUB_ENV
echo "NEW_VERSION=$VERSION" >> $GITHUB_ENV
else
echo "Not updating VERSION"
echo "UPDATE_VERSION=false" >> $GITHUB_ENV
fi
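In other words, cutting the first RC of a minor release bumps the VERSION file on master to the next minor. The awk one-liner splits on dots and increments the second field, as this standalone bash sketch shows (the version string is a placeholder):

```shell
# Strip the -rc1 suffix, then bump the minor component
VERSION_REF="2.9.0-rc1"
awk 'BEGIN {FS=OFS="."} {$2++; print}' <<< "${VERSION_REF%-rc1}"
# prints: 2.10.0
```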

- name: Update VERSION on master branch
run: |
echo ${{ env.NEW_VERSION }} > VERSION
if: ${{ env.UPDATE_VERSION == 'true' }}

- name: Create PR to update VERSION on master branch
uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # v5.0.2
with:
commit-message: Bump version in master
title: "chore: Bump version in master"
body: All images built from master should indicate which version we are on track for.
signoff: true
branch: update-version
branch-suffix: random
base: master
if: ${{ env.UPDATE_VERSION == 'true' }}
git push --delete origin ${SOURCE_TAG}
if: ${{ always() }}

67
.github/workflows/scorecard.yaml
vendored
@@ -1,67 +0,0 @@
|
||||
name: Scorecards supply-chain security
|
||||
on:
|
||||
# Only the default branch is supported.
|
||||
branch_protection_rule:
|
||||
schedule:
|
||||
- cron: "39 9 * * 2"
|
||||
push:
|
||||
branches: ["master"]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecards analysis
|
||||
runs-on: ubuntu-22.04
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Used to receive a badge. (Upcoming feature)
|
||||
id-token: write
|
||||
# Needs for private repositories.
|
||||
contents: read
|
||||
actions: read
|
||||
if: github.repository == 'argoproj/argo-cd'
|
||||
|
||||
steps:
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@08b4669551908b1024bb425080c797723083c031 # v2.2.0
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) Read-only PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecards on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
|
||||
# repo_token: ${{ secrets.SCORECARD_READ_TOKEN }}
|
||||
|
||||
# Publish the results for public repositories to enable scorecard badges. For more details, see
|
||||
# https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories, `publish_results` will automatically be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
|
||||
with:
|
||||
name: SARIF file
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard.
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@3ebbd71c74ef574dbc558c82f70e52732c8b44fe # v2.2.1
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
4
.github/workflows/update-snyk.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build reports
|
||||
@@ -31,6 +31,6 @@ jobs:
|
||||
git config --global user.email 'ci@argoproj.com'
|
||||
git config --global user.name 'CI'
|
||||
git add docs/snyk
|
||||
git commit -m "[Bot] docs: Update Snyk reports" --signoff
|
||||
git commit -m "[Bot] Update Snyk reports" --signoff
|
||||
git push --set-upstream origin "$pr_branch"
|
||||
gh pr create -B master -H "$pr_branch" --title '[Bot] docs: Update Snyk report' --body ''
|
||||
|
||||
1
.gitignore
vendored
@@ -18,7 +18,6 @@ node_modules/
|
||||
.kube/
|
||||
./test/cmp/*.sock
|
||||
.envrc.remote
|
||||
.*.swp
|
||||
|
||||
# ignore built binaries
|
||||
cmd/argocd/argocd
|
||||
|
||||
4
.gitpod.Dockerfile
vendored
@@ -1,4 +1,4 @@
|
||||
FROM gitpod/workspace-full@sha256:511cecde4dc129ca9eb4cc4c479d61f95e5485ebe320a07f5b902f11899956a3
|
||||
FROM gitpod/workspace-full
|
||||
|
||||
USER root
|
||||
|
||||
@@ -13,8 +13,6 @@ ENV GOCACHE=/go-build-cache
|
||||
RUN apt-get install redis-server -y
|
||||
RUN go install github.com/mattn/goreman@latest
|
||||
|
||||
RUN chown -R gitpod:gitpod /go-build-cache
|
||||
|
||||
USER gitpod
|
||||
|
||||
ENV ARGOCD_REDIS_LOCAL=true
|
||||
|
||||
121
.goreleaser.yaml
@@ -1,121 +0,0 @@
|
||||
project_name: argocd
|
||||
|
||||
before:
|
||||
hooks:
|
||||
- go mod download
|
||||
- make build-ui
|
||||
|
||||
builds:
|
||||
- id: argocd-cli
|
||||
main: ./cmd
|
||||
binary: argocd-{{ .Os}}-{{ .Arch}}
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
flags:
|
||||
- -v
|
||||
ldflags:
|
||||
- -X github.com/argoproj/argo-cd/v2/common.version={{ .Version }}
|
||||
- -X github.com/argoproj/argo-cd/v2/common.buildDate={{ .Date }}
|
||||
- -X github.com/argoproj/argo-cd/v2/common.gitCommit={{ .FullCommit }}
|
||||
- -X github.com/argoproj/argo-cd/v2/common.gitTreeState={{ .Env.GIT_TREE_STATE }}
|
||||
- -X github.com/argoproj/argo-cd/v2/common.kubectlVersion={{ .Env.KUBECTL_VERSION }}
|
||||
- -extldflags="-static"
|
||||
goos:
|
||||
- linux
|
||||
- darwin
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
- arm64
|
||||
- s390x
|
||||
- ppc64le
|
||||
ignore:
|
||||
- goos: darwin
|
||||
goarch: s390x
|
||||
- goos: darwin
|
||||
goarch: ppc64le
|
||||
- goos: windows
|
||||
goarch: s390x
|
||||
- goos: windows
|
||||
goarch: ppc64le
|
||||
- goos: windows
|
||||
goarch: arm64
|
||||
|
||||
archives:
|
||||
- id: argocd-archive
|
||||
builds:
|
||||
- argocd-cli
|
||||
name_template: |-
|
||||
{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}
|
||||
format: binary
|
||||
|
||||
checksum:
|
||||
name_template: 'cli_checksums.txt'
|
||||
algorithm: sha256
|
||||
|
||||
release:
|
||||
prerelease: auto
|
||||
draft: false
|
||||
header: |
|
||||
## Quick Start
|
||||
|
||||
### Non-HA:
|
||||
|
||||
```shell
|
||||
kubectl create namespace argocd
|
||||
kubectl apply -n argocd -f https://raw.githubusercontent.com/argoproj/argo-cd/{{.Tag}}/manifests/install.yaml
|
||||
```
|
||||
|
||||
### HA:
|
||||
|
||||
```shell
|
||||
kubectl create namespace argocd
|
||||
kubectl apply -n argocd -f https://raw.githubusercontent.com/argoproj/argo-cd/{{.Tag}}/manifests/ha/install.yaml
|
||||
```
|
||||
|
||||
## Release Signatures and Provenance
|
||||
|
||||
All Argo CD container images are signed by cosign. A Provenance is generated for container images and CLI binaries which meet the SLSA Level 3 specifications. See the [documentation](https://argo-cd.readthedocs.io/en/stable/operator-manual/signed-release-assets) on how to verify.
|
||||
|
||||
|
||||
## Upgrading
|
||||
|
||||
If upgrading from a different minor version, be sure to read the [upgrading](https://argo-cd.readthedocs.io/en/stable/operator-manual/upgrading/overview/) documentation.
|
||||
footer: |
|
||||
**Full Changelog**: https://github.com/argoproj/argo-cd/compare/{{ .PreviousTag }}...{{ .Tag }}
|
||||
|
||||
<a href="https://argoproj.github.io/cd/"><img src="https://raw.githubusercontent.com/argoproj/argo-site/master/content/pages/cd/gitops-cd.png" width="25%" ></a>
|
||||
|
||||
|
||||
snapshot: #### To be removed for PR
|
||||
name_template: "2.6.0"
|
||||
|
||||
changelog:
|
||||
use:
|
||||
github
|
||||
sort: asc
|
||||
abbrev: 0
|
||||
groups: # Regex use RE2 syntax as defined here: https://github.com/google/re2/wiki/Syntax.
|
||||
- title: 'Features'
|
||||
regexp: '^.*?feat(\([[:word:]]+\))??!?:.+$'
|
||||
order: 100
|
||||
- title: 'Bug fixes'
|
||||
regexp: '^.*?fix(\([[:word:]]+\))??!?:.+$'
|
||||
order: 200
|
||||
- title: 'Documentation'
|
||||
regexp: '^.*?docs(\([[:word:]]+\))??!?:.+$'
|
||||
order: 300
|
||||
- title: 'Dependency updates'
|
||||
regexp: '^.*?(feat|fix|chore)\(deps?.+\)!?:.+$'
|
||||
order: 400
|
||||
- title: 'Other work'
|
||||
order: 999
|
||||
filters:
|
||||
exclude:
|
||||
- '^test:'
|
||||
- '^.*?Bump(\([[:word:]]+\))?.+$'
|
||||
- '^.*?[Bot](\([[:word:]]+\))?.+$'
|
||||
|
||||
|
||||
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
|
||||
|
||||
@@ -4,8 +4,4 @@ mkdocs:
|
||||
fail_on_warning: false
|
||||
python:
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
tools:
|
||||
python: "3.7"
|
||||
- requirements: docs/requirements.txt
|
||||
@@ -1,9 +0,0 @@
|
||||
# All
|
||||
** @argoproj/argocd-approvers
|
||||
|
||||
# Docs
|
||||
/docs/** @argoproj/argocd-approvers @argoproj/argocd-approvers-docs
|
||||
|
||||
# CI
|
||||
/.github/** @argoproj/argocd-approvers @argoproj/argocd-approvers-ci
|
||||
/.goreleaser.yaml @argoproj/argocd-approvers @argoproj/argocd-approvers-ci
|
||||
24
Dockerfile
@@ -1,10 +1,10 @@
|
||||
ARG BASE_IMAGE=docker.io/library/ubuntu:22.04@sha256:0bced47fffa3361afa981854fcabcd4577cd43cebbb808cea2b1f33a3dd7f508
|
||||
ARG BASE_IMAGE=docker.io/library/ubuntu:22.04
|
||||
####################################################################################################
|
||||
# Builder image
|
||||
# Initial stage which pulls prepares build dependencies and CLI tooling we need for our final image
|
||||
# Also used as the image in CI jobs so needs all dependencies
|
||||
####################################################################################################
|
||||
FROM docker.io/library/golang:1.21.3@sha256:02d7116222536a5cf0fcf631f90b507758b669648e0f20186d2dc94a9b419a9b AS builder
|
||||
FROM docker.io/library/golang:1.18 AS builder
|
||||
|
||||
RUN echo 'deb http://deb.debian.org/debian buster-backports main' >> /etc/apt/sources.list
|
||||
|
||||
@@ -36,8 +36,6 @@ RUN ./install.sh helm-linux && \
|
||||
####################################################################################################
|
||||
FROM $BASE_IMAGE AS argocd-base
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/argoproj/argo-cd"
|
||||
|
||||
USER root
|
||||
|
||||
ENV ARGOCD_USER_ID=999
|
||||
@@ -83,7 +81,7 @@ WORKDIR /home/argocd
|
||||
####################################################################################################
|
||||
# Argo CD UI stage
|
||||
####################################################################################################
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/node:20.6.1@sha256:14bd39208dbc0eb171cbfb26ccb9ac09fa1b2eba04ccd528ab5d12983fd9ee24 AS argocd-ui
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/node:12.18.4 AS argocd-ui
|
||||
|
||||
WORKDIR /src
|
||||
COPY ["ui/package.json", "ui/yarn.lock", "./"]
|
||||
@@ -101,7 +99,7 @@ RUN HOST_ARCH=$TARGETARCH NODE_ENV='production' NODE_ONLINE_ENV='online' NODE_OP
|
||||
####################################################################################################
|
||||
# Argo CD Build stage which performs the actual build of Argo CD binaries
|
||||
####################################################################################################
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.21.3@sha256:02d7116222536a5cf0fcf631f90b507758b669648e0f20186d2dc94a9b419a9b AS argocd-build
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.18 AS argocd-build
|
||||
|
||||
WORKDIR /go/src/github.com/argoproj/argo-cd
|
||||
|
||||
@@ -113,18 +111,7 @@ COPY . .
|
||||
COPY --from=argocd-ui /src/dist/app /go/src/github.com/argoproj/argo-cd/ui/dist/app
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
# These build args are optional; if not specified the defaults will be taken from the Makefile
|
||||
ARG GIT_TAG
|
||||
ARG BUILD_DATE
|
||||
ARG GIT_TREE_STATE
|
||||
ARG GIT_COMMIT
|
||||
RUN GIT_COMMIT=$GIT_COMMIT \
|
||||
GIT_TREE_STATE=$GIT_TREE_STATE \
|
||||
GIT_TAG=$GIT_TAG \
|
||||
BUILD_DATE=$BUILD_DATE \
|
||||
GOOS=$TARGETOS \
|
||||
GOARCH=$TARGETARCH \
|
||||
make argocd-all
|
||||
RUN GOOS=$TARGETOS GOARCH=$TARGETARCH make argocd-all
|
||||
|
||||
####################################################################################################
|
||||
# Final image
|
||||
@@ -143,4 +130,3 @@ RUN ln -s /usr/local/bin/argocd /usr/local/bin/argocd-server && \
|
||||
ln -s /usr/local/bin/argocd /usr/local/bin/argocd-k8s-auth
|
||||
|
||||
USER $ARGOCD_USER_ID
|
||||
ENTRYPOINT ["/usr/bin/tini", "--"]
|
||||
|
||||
126
Makefile
@@ -9,13 +9,11 @@ GEN_RESOURCES_CLI_NAME=argocd-resources-gen
|
||||
HOST_OS:=$(shell go env GOOS)
|
||||
HOST_ARCH:=$(shell go env GOARCH)
|
||||
|
||||
TARGET_ARCH?=linux/amd64
|
||||
|
||||
VERSION=$(shell cat ${CURRENT_DIR}/VERSION)
|
||||
BUILD_DATE:=$(if $(BUILD_DATE),$(BUILD_DATE),$(shell date -u +'%Y-%m-%dT%H:%M:%SZ'))
|
||||
GIT_COMMIT:=$(if $(GIT_COMMIT),$(GIT_COMMIT),$(shell git rev-parse HEAD))
|
||||
GIT_TAG:=$(if $(GIT_TAG),$(GIT_TAG),$(shell if [ -z "`git status --porcelain`" ]; then git describe --exact-match --tags HEAD 2>/dev/null; fi))
|
||||
GIT_TREE_STATE:=$(if $(GIT_TREE_STATE),$(GIT_TREE_STATE),$(shell if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi))
|
||||
BUILD_DATE=$(shell date -u +'%Y-%m-%dT%H:%M:%SZ')
|
||||
GIT_COMMIT=$(shell git rev-parse HEAD)
|
||||
GIT_TAG=$(shell if [ -z "`git status --porcelain`" ]; then git describe --exact-match --tags HEAD 2>/dev/null; fi)
|
||||
GIT_TREE_STATE=$(shell if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi)
|
||||
VOLUME_MOUNT=$(shell if test "$(go env GOOS)" = "darwin"; then echo ":delegated"; elif test selinuxenabled; then echo ":delegated"; else echo ""; fi)
|
||||
KUBECTL_VERSION=$(shell go list -m k8s.io/client-go | head -n 1 | rev | cut -d' ' -f1 | rev)
|
||||
|
||||
@@ -66,20 +64,13 @@ else
|
||||
DOCKER_SRC_MOUNT="$(PWD):/go/src/github.com/argoproj/argo-cd$(VOLUME_MOUNT)"
|
||||
endif
|
||||
|
||||
# User and group IDs to map to the test container
|
||||
CONTAINER_UID=$(shell id -u)
|
||||
CONTAINER_GID=$(shell id -g)
|
||||
|
||||
# Set SUDO to sudo to run privileged commands with sudo
|
||||
SUDO?=
|
||||
|
||||
# Runs any command in the argocd-test-utils container in server mode
|
||||
# Server mode container will start with uid 0 and drop privileges during runtime
|
||||
define run-in-test-server
|
||||
$(SUDO) docker run --rm -it \
|
||||
docker run --rm -it \
|
||||
--name argocd-test-server \
|
||||
-u $(CONTAINER_UID):$(CONTAINER_GID) \
|
||||
-e USER_ID=$(CONTAINER_UID) \
|
||||
-u $(shell id -u):$(shell id -g) \
|
||||
-e USER_ID=$(shell id -u) \
|
||||
-e HOME=/home/user \
|
||||
-e GOPATH=/go \
|
||||
-e GOCACHE=/tmp/go-build-cache \
|
||||
@@ -107,9 +98,9 @@ endef
|
||||
|
||||
# Runs any command in the argocd-test-utils container in client mode
|
||||
define run-in-test-client
|
||||
$(SUDO) docker run --rm -it \
|
||||
docker run --rm -it \
|
||||
--name argocd-test-client \
|
||||
-u $(CONTAINER_UID):$(CONTAINER_GID) \
|
||||
-u $(shell id -u):$(shell id -g) \
|
||||
-e HOME=/home/user \
|
||||
-e GOPATH=/go \
|
||||
-e ARGOCD_E2E_K3S=$(ARGOCD_E2E_K3S) \
|
||||
@@ -128,7 +119,7 @@ endef
|
||||
|
||||
#
|
||||
define exec-in-test-server
|
||||
$(SUDO) docker exec -it -u $(CONTAINER_UID):$(CONTAINER_GID) -e ARGOCD_E2E_RECORD=$(ARGOCD_E2E_RECORD) -e ARGOCD_E2E_K3S=$(ARGOCD_E2E_K3S) argocd-test-server $(1)
|
||||
docker exec -it -u $(shell id -u):$(shell id -g) -e ARGOCD_E2E_RECORD=$(ARGOCD_E2E_RECORD) -e ARGOCD_E2E_K3S=$(ARGOCD_E2E_K3S) argocd-test-server $(1)
|
||||
endef
|
||||
|
||||
PATH:=$(PATH):$(PWD)/hack
|
||||
@@ -148,8 +139,7 @@ override LDFLAGS += \
|
||||
-X ${PACKAGE}.buildDate=${BUILD_DATE} \
|
||||
-X ${PACKAGE}.gitCommit=${GIT_COMMIT} \
|
||||
-X ${PACKAGE}.gitTreeState=${GIT_TREE_STATE}\
|
||||
-X ${PACKAGE}.kubectlVersion=${KUBECTL_VERSION}\
|
||||
-X "${PACKAGE}.extraBuildInfo=${EXTRA_BUILD_INFO}"
|
||||
-X ${PACKAGE}.kubectlVersion=${KUBECTL_VERSION}
|
||||
|
||||
ifeq (${STATIC_BUILD}, true)
|
||||
override LDFLAGS += -extldflags "-static"
|
||||
@@ -222,7 +212,7 @@ clidocsgen: ensure-gopath
|
||||
|
||||
|
||||
.PHONY: codegen-local
|
||||
codegen-local: ensure-gopath mod-vendor-local gogen protogen clientgen openapigen clidocsgen manifests-local notification-docs notification-catalog
|
||||
codegen-local: ensure-gopath mod-vendor-local notification-docs notification-catalog gogen protogen clientgen openapigen clidocsgen manifests-local
|
||||
rm -rf vendor/
|
||||
|
||||
.PHONY: codegen
|
||||
@@ -235,11 +225,11 @@ cli: test-tools-image
|
||||
|
||||
.PHONY: cli-local
|
||||
cli-local: clean-debug
|
||||
CGO_ENABLED=0 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${CLI_NAME} ./cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${CLI_NAME} ./cmd
|
||||
|
||||
.PHONY: gen-resources-cli-local
|
||||
gen-resources-cli-local: clean-debug
|
||||
CGO_ENABLED=0 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${GEN_RESOURCES_CLI_NAME} ./hack/gen-resources/cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${GEN_RESOURCES_CLI_NAME} ./hack/gen-resources/cmd
|
||||
|
||||
.PHONY: release-cli
|
||||
release-cli: clean-debug build-ui
|
||||
@@ -254,8 +244,8 @@ release-cli: clean-debug build-ui
|
||||
.PHONY: test-tools-image
|
||||
test-tools-image:
|
||||
ifndef SKIP_TEST_TOOLS_IMAGE
|
||||
$(SUDO) docker build --build-arg UID=$(CONTAINER_UID) -t $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE) -f test/container/Dockerfile .
|
||||
$(SUDO) docker tag $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE) $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE):$(TEST_TOOLS_TAG)
|
||||
docker build --build-arg UID=$(shell id -u) -t $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE) -f test/container/Dockerfile .
|
||||
docker tag $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE) $(TEST_TOOLS_PREFIX)$(TEST_TOOLS_IMAGE):$(TEST_TOOLS_TAG)
|
||||
endif
|
||||
|
||||
.PHONY: manifests-local
|
||||
@@ -269,23 +259,23 @@ manifests: test-tools-image
|
||||
# consolidated binary for cli, util, server, repo-server, controller
|
||||
.PHONY: argocd-all
|
||||
argocd-all: clean-debug
|
||||
CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${BIN_NAME} ./cmd
|
||||
CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/${BIN_NAME} ./cmd
|
||||
|
||||
.PHONY: server
|
||||
server: clean-debug
|
||||
CGO_ENABLED=0 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-server ./cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-server ./cmd
|
||||
|
||||
.PHONY: repo-server
|
||||
repo-server:
|
||||
CGO_ENABLED=0 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-repo-server ./cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-repo-server ./cmd
|
||||
|
||||
.PHONY: controller
|
||||
controller:
|
||||
CGO_ENABLED=0 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-application-controller ./cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-application-controller ./cmd
|
||||
|
||||
.PHONY: build-ui
|
||||
build-ui:
|
||||
DOCKER_BUILDKIT=1 docker build -t argocd-ui --platform=$(TARGET_ARCH) --target argocd-ui .
|
||||
DOCKER_BUILDKIT=1 docker build -t argocd-ui --target argocd-ui .
|
||||
find ./ui/dist -type f -not -name gitkeep -delete
|
||||
docker run -v ${CURRENT_DIR}/ui/dist/app:/tmp/app --rm -t argocd-ui sh -c 'cp -r ./dist/app/* /tmp/app/'
|
||||
|
||||
@@ -296,18 +286,18 @@ ifeq ($(DEV_IMAGE), true)
|
||||
# the dist directory is under .dockerignore.
|
||||
IMAGE_TAG="dev-$(shell git describe --always --dirty)"
|
||||
image: build-ui
|
||||
DOCKER_BUILDKIT=1 docker build --platform=$(TARGET_ARCH) -t argocd-base --target argocd-base .
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd ./cmd
|
||||
DOCKER_BUILDKIT=1 docker build --platform=linux/amd64 -t argocd-base --target argocd-base .
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd ./cmd
|
||||
ln -sfn ${DIST_DIR}/argocd ${DIST_DIR}/argocd-server
|
||||
ln -sfn ${DIST_DIR}/argocd ${DIST_DIR}/argocd-application-controller
|
||||
ln -sfn ${DIST_DIR}/argocd ${DIST_DIR}/argocd-repo-server
|
||||
ln -sfn ${DIST_DIR}/argocd ${DIST_DIR}/argocd-cmp-server
|
||||
ln -sfn ${DIST_DIR}/argocd ${DIST_DIR}/argocd-dex
|
||||
cp Dockerfile.dev dist
|
||||
DOCKER_BUILDKIT=1 docker build --platform=$(TARGET_ARCH) -t $(IMAGE_PREFIX)argocd:$(IMAGE_TAG) -f dist/Dockerfile.dev dist
|
||||
DOCKER_BUILDKIT=1 docker build --platform=linux/amd64 -t $(IMAGE_PREFIX)argocd:$(IMAGE_TAG) -f dist/Dockerfile.dev dist
|
||||
else
|
||||
image:
|
||||
DOCKER_BUILDKIT=1 docker build -t $(IMAGE_PREFIX)argocd:$(IMAGE_TAG) --platform=$(TARGET_ARCH) .
|
||||
DOCKER_BUILDKIT=1 docker build -t $(IMAGE_PREFIX)argocd:$(IMAGE_TAG) .
|
||||
endif
|
||||
@if [ "$(DOCKER_PUSH)" = "true" ] ; then docker push $(IMAGE_PREFIX)argocd:$(IMAGE_TAG) ; fi
|
||||
|
||||
@@ -336,7 +326,7 @@ mod-vendor: test-tools-image
|
||||
mod-vendor-local: mod-download-local
|
||||
go mod vendor
|
||||
|
||||
# Deprecated - replace by install-tools-local
|
||||
# Deprecated - replace by install-local-tools
|
||||
.PHONY: install-lint-tools
|
||||
install-lint-tools:
|
||||
./hack/install.sh lint-tools
|
||||
@@ -352,7 +342,7 @@ lint-local:
|
||||
golangci-lint --version
|
||||
# NOTE: If you get a "Killed" OOM message, try reducing the value of GOGC
|
||||
# See https://github.com/golangci/golangci-lint#memory-usage-of-golangci-lint
|
||||
GOGC=$(ARGOCD_LINT_GOGC) GOMAXPROCS=2 golangci-lint run --enable gofmt --fix --verbose --timeout 3000s --max-issues-per-linter 0 --max-same-issues 0
|
||||
GOGC=$(ARGOCD_LINT_GOGC) GOMAXPROCS=2 golangci-lint run --fix --verbose --timeout 3000s
|
||||
|
||||
.PHONY: lint-ui
|
||||
lint-ui: test-tools-image
|
||||
@@ -371,7 +361,7 @@ build: test-tools-image
|
||||
# Build all Go code (local version)
|
||||
.PHONY: build-local
|
||||
build-local:
|
||||
GODEBUG="tarinsecurepath=0,zipinsecurepath=0" go build -v `go list ./... | grep -v 'resource_customizations\|test/e2e'`
|
||||
go build -v `go list ./... | grep -v 'resource_customizations\|test/e2e'`
|
||||
|
||||
# Run all unit tests
|
||||
#
|
||||
@@ -459,8 +449,6 @@ start-e2e-local: mod-vendor-local dep-ui-local cli-local
|
||||
ARGOCD_IN_CI=$(ARGOCD_IN_CI) \
|
||||
BIN_MODE=$(ARGOCD_BIN_MODE) \
|
||||
ARGOCD_APPLICATION_NAMESPACES=argocd-e2e-external \
|
||||
ARGOCD_APPLICATIONSET_CONTROLLER_NAMESPACES=argocd-e2e-external \
|
||||
ARGOCD_APPLICATIONSET_CONTROLLER_ALLOWED_SCM_PROVIDERS=http://127.0.0.1:8341,http://127.0.0.1:8342,http://127.0.0.1:8343,http://127.0.0.1:8344 \
|
||||
ARGOCD_E2E_TEST=true \
|
||||
goreman -f $(ARGOCD_PROCFILE) start ${ARGOCD_START}
|
||||
|
||||
@@ -524,7 +512,7 @@ build-docs-local:
|
||||
|
||||
.PHONY: build-docs
|
||||
build-docs:
|
||||
docker run ${MKDOCS_RUN_ARGS} --rm -it -v ${CURRENT_DIR}:/docs --entrypoint "" ${MKDOCS_DOCKER_IMAGE} sh -c 'pip install -r docs/requirements.txt; mkdocs build'
|
||||
docker run ${MKDOCS_RUN_ARGS} --rm -it -p 8000:8000 -v ${CURRENT_DIR}:/docs ${MKDOCS_DOCKER_IMAGE} build
|
||||
|
||||
.PHONY: serve-docs-local
|
||||
serve-docs-local:
|
||||
@@ -532,7 +520,7 @@ serve-docs-local:
|
||||
|
||||
.PHONY: serve-docs
|
||||
serve-docs:
|
||||
docker run ${MKDOCS_RUN_ARGS} --rm -it -p 8000:8000 -v ${CURRENT_DIR}/site:/site -w /site --entrypoint "" ${MKDOCS_DOCKER_IMAGE} python3 -m http.server --bind 0.0.0.0 8000
|
||||
docker run ${MKDOCS_RUN_ARGS} --rm -it -p 8000:8000 -v ${CURRENT_DIR}:/docs ${MKDOCS_DOCKER_IMAGE} serve -a 0.0.0.0:8000
|
||||
|
||||
|
||||
# Verify that kubectl can connect to your K8s cluster from Docker
|
||||
@@ -584,7 +572,7 @@ list:
|
||||
|
||||
.PHONY: applicationset-controller
|
||||
applicationset-controller:
|
||||
GODEBUG="tarinsecurepath=0,zipinsecurepath=0" CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-applicationset-controller ./cmd
|
||||
CGO_ENABLED=0 go build -v -ldflags '${LDFLAGS}' -o ${DIST_DIR}/argocd-applicationset-controller ./cmd
|
||||
|
||||
.PHONY: checksums
|
||||
checksums:
|
||||
@@ -601,55 +589,3 @@ snyk-non-container-tests:
|
||||
.PHONY: snyk-report
|
||||
snyk-report:
|
||||
./hack/snyk-report.sh $(target_branch)
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo 'Note: Generally an item w/ (-local) will run inside docker unless you use the -local variant'
|
||||
@echo
|
||||
@echo 'Common targets'
|
||||
@echo
|
||||
@echo 'all -- make cli and image'
|
||||
@echo
|
||||
@echo 'components:'
|
||||
@echo ' applicationset-controller -- applicationset controller'
|
||||
@echo ' cli(-local) -- argocd cli program'
|
||||
@echo ' controller -- controller (orchestrator)'
|
||||
@echo ' repo-server -- repo server (manage repository instances)'
|
||||
@echo ' server -- argocd web application'
|
||||
@echo
|
||||
@echo 'build:'
|
||||
@echo ' image -- make image of the following items'
|
||||
@echo ' build(-local) -- compile go'
|
||||
@echo ' build-docs(-local) -- build docs'
|
||||
@echo ' build-ui -- compile typescript'
|
||||
@echo
|
||||
@echo 'run:'
|
||||
@echo ' run -- run the components locally'
|
||||
@echo ' serve-docs(-local) -- expose the documents for viewing in a browser'
|
||||
@echo
|
||||
@echo 'release:'
|
||||
@echo ' release-cli'
|
||||
@echo ' release-precheck'
|
||||
@echo ' checksums'
|
||||
@echo
|
||||
@echo 'docs:'
|
||||
@echo ' build-docs(-local)'
|
||||
@echo ' serve-docs(-local)'
|
||||
@echo ' notification-docs'
|
||||
@echo ' clidocsgen'
|
||||
@echo
|
||||
@echo 'testing:'
|
||||
@echo ' test(-local)'
|
||||
@echo ' start-e2e(-local)'
|
||||
@echo ' test-e2e(-local)'
|
||||
@echo ' test-race(-local)'
|
||||
@echo
|
||||
@echo 'debug:'
|
||||
@echo ' list -- list all make targets'
|
||||
@echo ' install-tools-local -- install all the tools below'
|
||||
@echo ' install-lint-tools(-local)'
|
||||
@echo
|
||||
@echo 'codegen:'
|
||||
@echo ' codegen(-local) -- if using -local, run the following targets first'
|
||||
@echo ' install-codegen-tools-local -- run this to install the codegen tools'
|
||||
@echo ' install-go-tools-local -- run this to install go libraries for codegen'
|
||||
|
||||
4
OWNERS
@@ -27,7 +27,3 @@ reviewers:
|
||||
- wanghong230
|
||||
- ciiay
|
||||
- saumeya
|
||||
- zachaller
|
||||
- 34fathombelow
|
||||
- alexef
|
||||
- gdsoumya
|
||||
|
||||
16
Procfile
@@ -1,12 +1,12 @@
|
||||
controller: [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-application-controller $COMMAND --loglevel debug --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081} --otlp-address=${ARGOCD_OTLP_ADDRESS} --application-namespaces=${ARGOCD_APPLICATION_NAMESPACES:-''}"
|
||||
api-server: [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-server $COMMAND --loglevel debug --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --disable-auth=${ARGOCD_E2E_DISABLE_AUTH:-'true'} --insecure --dex-server http://localhost:${ARGOCD_E2E_DEX_PORT:-5556} --repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081} --port ${ARGOCD_E2E_APISERVER_PORT:-8080} --otlp-address=${ARGOCD_OTLP_ADDRESS} --application-namespaces=${ARGOCD_APPLICATION_NAMESPACES:-''}"
|
||||
dex: sh -c "ARGOCD_BINARY_NAME=argocd-dex go run github.com/argoproj/argo-cd/v2/cmd gendexcfg -o `pwd`/dist/dex.yaml && (test -f dist/dex.yaml || { echo 'Failed to generate dex configuration'; exit 1; }) && docker run --rm -p ${ARGOCD_E2E_DEX_PORT:-5556}:${ARGOCD_E2E_DEX_PORT:-5556} -v `pwd`/dist/dex.yaml:/dex.yaml ghcr.io/dexidp/dex:$(grep "image: ghcr.io/dexidp/dex" manifests/base/dex/argocd-dex-server-deployment.yaml | cut -d':' -f3) dex serve /dex.yaml"
|
||||
redis: bash -c "if [ \"$ARGOCD_REDIS_LOCAL\" = 'true' ]; then redis-server --save '' --appendonly no --port ${ARGOCD_E2E_REDIS_PORT:-6379}; else docker run --rm --name argocd-redis -i -p ${ARGOCD_E2E_REDIS_PORT:-6379}:${ARGOCD_E2E_REDIS_PORT:-6379} docker.io/library/redis:$(grep "image: redis" manifests/base/redis/argocd-redis-deployment.yaml | cut -d':' -f3) --save '' --appendonly no --port ${ARGOCD_E2E_REDIS_PORT:-6379}; fi"
|
||||
repo-server: [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_GNUPGHOME=${ARGOCD_GNUPGHOME:-/tmp/argocd-local/gpg/keys} ARGOCD_PLUGINSOCKFILEPATH=${ARGOCD_PLUGINSOCKFILEPATH:-./test/cmp} ARGOCD_GPG_DATA_PATH=${ARGOCD_GPG_DATA_PATH:-/tmp/argocd-local/gpg/source} ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-repo-server ARGOCD_GPG_ENABLED=${ARGOCD_GPG_ENABLED:-false} $COMMAND --loglevel debug --port ${ARGOCD_E2E_REPOSERVER_PORT:-8081} --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --otlp-address=${ARGOCD_OTLP_ADDRESS}"
|
||||
cmp-server: [ "$ARGOCD_E2E_TEST" = 'true' ] && exit 0 || [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_BINARY_NAME=argocd-cmp-server ARGOCD_PLUGINSOCKFILEPATH=${ARGOCD_PLUGINSOCKFILEPATH:-./test/cmp} $COMMAND --config-dir-path ./test/cmp --loglevel debug --otlp-address=${ARGOCD_OTLP_ADDRESS}"
|
||||
controller: [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-application-controller $COMMAND --loglevel debug --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081} --otlp-address=${ARGOCD_OTLP_ADDRESS} --application-namespaces=${ARGOCD_APPLICATION_NAMESPACES:-''}"
|
||||
api-server: [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-server $COMMAND --loglevel debug --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --disable-auth=${ARGOCD_E2E_DISABLE_AUTH:-'true'} --insecure --dex-server http://localhost:${ARGOCD_E2E_DEX_PORT:-5556} --repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081} --port ${ARGOCD_E2E_APISERVER_PORT:-8080} --otlp-address=${ARGOCD_OTLP_ADDRESS} --application-namespaces=${ARGOCD_APPLICATION_NAMESPACES:-''}"
|
||||
dex: sh -c "ARGOCD_BINARY_NAME=argocd-dex go run github.com/argoproj/argo-cd/v2/cmd gendexcfg -o `pwd`/dist/dex.yaml && docker run --rm -p ${ARGOCD_E2E_DEX_PORT:-5556}:${ARGOCD_E2E_DEX_PORT:-5556} -v `pwd`/dist/dex.yaml:/dex.yaml ghcr.io/dexidp/dex:$(grep "image: ghcr.io/dexidp/dex" manifests/base/dex/argocd-dex-server-deployment.yaml | cut -d':' -f3) dex serve /dex.yaml"
|
||||
redis: bash -c "if [ \"$ARGOCD_REDIS_LOCAL\" == 'true' ]; then redis-server --save '' --appendonly no --port ${ARGOCD_E2E_REDIS_PORT:-6379}; else docker run --rm --name argocd-redis -i -p ${ARGOCD_E2E_REDIS_PORT:-6379}:${ARGOCD_E2E_REDIS_PORT:-6379} redis:$(grep "image: redis" manifests/base/redis/argocd-redis-deployment.yaml | cut -d':' -f3) --save '' --appendonly no --port ${ARGOCD_E2E_REDIS_PORT:-6379}; fi"
|
||||
repo-server: [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_GNUPGHOME=${ARGOCD_GNUPGHOME:-/tmp/argocd-local/gpg/keys} ARGOCD_PLUGINSOCKFILEPATH=${ARGOCD_PLUGINSOCKFILEPATH:-./test/cmp} ARGOCD_GPG_DATA_PATH=${ARGOCD_GPG_DATA_PATH:-/tmp/argocd-local/gpg/source} ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-repo-server ARGOCD_GPG_ENABLED=${ARGOCD_GPG_ENABLED:-false} $COMMAND --loglevel debug --port ${ARGOCD_E2E_REPOSERVER_PORT:-8081} --redis localhost:${ARGOCD_E2E_REDIS_PORT:-6379} --otlp-address=${ARGOCD_OTLP_ADDRESS}"
|
||||
cmp-server: [ "$ARGOCD_E2E_TEST" == 'true' ] && exit 0 || [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=1 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_BINARY_NAME=argocd-cmp-server ARGOCD_PLUGINSOCKFILEPATH=${ARGOCD_PLUGINSOCKFILEPATH:-./test/cmp} $COMMAND --config-dir-path ./test/cmp --loglevel debug --otlp-address=${ARGOCD_OTLP_ADDRESS}"
|
||||
ui: sh -c 'cd ui && ${ARGOCD_E2E_YARN_CMD:-yarn} start'
|
||||
git-server: test/fixture/testrepos/start-git.sh
|
||||
helm-registry: test/fixture/testrepos/start-helm-registry.sh
|
||||
dev-mounter: [[ "$ARGOCD_E2E_TEST" != "true" ]] && go run hack/dev-mounter/main.go --configmap argocd-ssh-known-hosts-cm=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} --configmap argocd-tls-certs-cm=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} --configmap argocd-gpg-keys-cm=${ARGOCD_GPG_DATA_PATH:-/tmp/argocd-local/gpg/source}
|
||||
applicationset-controller: [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=4 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-applicationset-controller $COMMAND --loglevel debug --metrics-addr localhost:12345 --probe-addr localhost:12346 --argocd-repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081}"
|
||||
notification: [ "$BIN_MODE" = 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=4 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_BINARY_NAME=argocd-notifications $COMMAND --loglevel debug"
|
||||
applicationset-controller: [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=4 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_ASK_PASS_SOCK=/tmp/applicationset-ask-pass.sock ARGOCD_SSH_DATA_PATH=${ARGOCD_SSH_DATA_PATH:-/tmp/argocd-local/ssh} ARGOCD_BINARY_NAME=argocd-applicationset-controller $COMMAND --loglevel debug --metrics-addr localhost:12345 --probe-addr localhost:12346 --argocd-repo-server localhost:${ARGOCD_E2E_REPOSERVER_PORT:-8081}"
|
||||
notification: [ "$BIN_MODE" == 'true' ] && COMMAND=./dist/argocd || COMMAND='go run ./cmd/main.go' && sh -c "FORCE_LOG_COLORS=4 ARGOCD_FAKE_IN_CLUSTER=true ARGOCD_TLS_DATA_PATH=${ARGOCD_TLS_DATA_PATH:-/tmp/argocd-local/tls} ARGOCD_BINARY_NAME=argocd-notifications $COMMAND --loglevel debug"
|
||||
|
||||
19
README.md
@@ -1,18 +1,5 @@
|
||||
**Releases:**
|
||||
[](https://github.com/argoproj/argo-cd/releases/latest)
|
||||
[](https://github.com/argoproj/argo-cd/actions?query=workflow%3A%22Integration+tests%22) [](https://argoproj.github.io/community/join-slack) [](https://codecov.io/gh/argoproj/argo-cd) [](https://github.com/argoproj/argo-cd/releases/latest) [](https://bestpractices.coreinfrastructure.org/projects/4486) [](https://twitter.com/argoproj)
|
||||
[](https://artifacthub.io/packages/helm/argo/argo-cd)
|
||||
[](https://slsa.dev)
|
||||
|
||||
**Code:**
|
||||
[](https://github.com/argoproj/argo-cd/actions?query=workflow%3A%22Integration+tests%22)
|
||||
[](https://codecov.io/gh/argoproj/argo-cd)
|
||||
[](https://bestpractices.coreinfrastructure.org/projects/4486)
|
||||
[](https://api.securityscorecards.dev/projects/github.com/argoproj/argo-cd)
|
||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fargoproj%2Fargo-cd?ref=badge_shield)
|
||||
|
||||
**Social:**
|
||||
[](https://twitter.com/argoproj)
|
||||
[](https://argoproj.github.io/community/join-slack)
|
||||
|
||||
# Argo CD - Declarative Continuous Delivery for Kubernetes
|
||||
|
||||
@@ -56,7 +43,7 @@ Participation in the Argo CD project is governed by the [CNCF Code of Conduct](h
|
||||
### Blogs and Presentations
|
||||
|
||||
1. [Awesome-Argo: A Curated List of Awesome Projects and Resources Related to Argo](https://github.com/terrytangyuan/awesome-argo)
|
||||
1. [Unveil the Secret Ingredients of Continuous Delivery at Enterprise Scale with Argo CD](https://akuity.io/blog/unveil-the-secret-ingredients-of-continuous-delivery-at-enterprise-scale-with-argocd-kubecon-china-2021/)
|
||||
1. [Unveil the Secret Ingredients of Continuous Delivery at Enterprise Scale with Argo CD](https://blog.akuity.io/unveil-the-secret-ingredients-of-continuous-delivery-at-enterprise-scale-with-argo-cd-7c5b4057ee49)
|
||||
1. [GitOps Without Pipelines With ArgoCD Image Updater](https://youtu.be/avPUQin9kzU)
|
||||
1. [Combining Argo CD (GitOps), Crossplane (Control Plane), And KubeVela (OAM)](https://youtu.be/eEcgn_gU3SM)
|
||||
1. [How to Apply GitOps to Everything - Combining Argo CD and Crossplane](https://youtu.be/yrj4lmScKHQ)
|
||||
@@ -82,7 +69,7 @@ Participation in the Argo CD project is governed by the [CNCF Code of Conduct](h
|
||||
1. [Applied GitOps with Argo CD](https://thenewstack.io/applied-gitops-with-argocd/)
|
||||
1. [Solving configuration drift using GitOps with Argo CD](https://www.cncf.io/blog/2020/12/17/solving-configuration-drift-using-gitops-with-argo-cd/)
|
||||
1. [Decentralized GitOps over environments](https://blogs.sap.com/2021/05/06/decentralized-gitops-over-environments/)
|
||||
1. [How GitOps and Operators mark the rise of Infrastructure-As-Software](https://paytmlabs.com/blog/2021/10/how-to-improve-operational-work-with-operators-and-gitops/)
|
||||
1. [Getting Started with ArgoCD for GitOps Deployments](https://youtu.be/AvLuplh1skA)
|
||||
1. [Using Argo CD & Datree for Stable Kubernetes CI/CD Deployments](https://youtu.be/17894DTru2Y)
|
||||
1. [How to create Argo CD Applications Automatically using ApplicationSet? "Automation of GitOps"](https://amralaayassen.medium.com/how-to-create-argocd-applications-automatically-using-applicationset-automation-of-the-gitops-59455eaf4f72)
|
||||
|
||||
|
||||
33
SECURITY.md
@@ -1,6 +1,6 @@
|
||||
# Security Policy for Argo CD
|
||||
|
||||
Version: **v1.5 (2023-03-06)**
|
||||
Version: **v1.4 (2022-01-23)**
|
||||
|
||||
## Preface
|
||||
|
||||
@@ -35,11 +35,13 @@ impact on Argo CD before opening an issue at least roughly.
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We currently support the last 3 minor versions of Argo CD with security and bug fixes.
|
||||
We currently support the most recent release (`N`, e.g. `1.8`) and the release
|
||||
previous to the most recent one (`N-1`, e.g. `1.7`). With the release of
|
||||
`N+1`, `N-1` drops out of support and `N` becomes `N-1`.
|
||||
|
||||
We regularly perform patch releases (e.g. `1.8.5` and `1.7.12`) for the
|
||||
supported versions, which will contain fixes for security vulnerabilities and
|
||||
important bugs. Prior releases might receive critical security fixes on best
|
||||
important bugs. Prior releases might receive critical security fixes on a best
|
||||
effort basis, however, it cannot be guaranteed that security fixes get
|
||||
back-ported to these unsupported versions.
|
||||
|
||||
@@ -50,7 +52,7 @@ of releasing it within a patch branch for the currently supported releases.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you find a security related bug in Argo CD, we kindly ask you for responsible
|
||||
If you find a security related bug in ArgoCD, we kindly ask you for responsible
|
||||
disclosure and for giving us appropriate time to react, analyze and develop a
|
||||
fix to mitigate the found security vulnerability.
|
||||
|
||||
@@ -59,28 +61,13 @@ and disclosure with you. Sometimes, it might take a little longer for us to
|
||||
react (e.g. out of office conditions), so please bear with us in these cases.
|
||||
|
||||
We will publish security advisories using the
|
||||
[GitHub Security Advisories](https://github.com/argoproj/argo-cd/security/advisories)
|
||||
feature to keep our community well-informed, and will credit you for your
|
||||
[Git Hub Security Advisories](https://github.com/argoproj/argo-cd/security/advisories)
|
||||
feature to keep our community well informed, and will credit you for your
|
||||
findings (unless you prefer to stay anonymous, of course).
|
||||
|
||||
There are two ways to report a vulnerability to the Argo CD team:
|
||||
Please report vulnerabilities by e-mail to the following address:
|
||||
|
||||
* By opening a draft GitHub security advisory: https://github.com/argoproj/argo-cd/security/advisories/new
|
||||
* By e-mail to the following address: cncf-argo-security@lists.cncf.io
|
||||
|
||||
## Internet Bug Bounty collaboration
|
||||
|
||||
We're happy to announce that the Argo project is collaborating with the great
|
||||
folks over at
|
||||
[Hacker One](https://hackerone.com/) and their
|
||||
[Internet Bug Bounty program](https://hackerone.com/ibb)
|
||||
to reward the awesome people who find security vulnerabilities in the four
|
||||
main Argo projects (CD, Events, Rollouts and Workflows) and then work with
|
||||
us to fix and disclose them in a responsible manner.
|
||||
|
||||
If you report a vulnerability to us as outlined in this security policy, we
|
||||
will work together with you to find out whether your finding is eligible for
|
||||
claiming a bounty, and also on how to claim it.
|
||||
* cncf-argo-security@lists.cncf.io
|
||||
|
||||
## Securing your Argo CD Instance
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Defined below are the security contacts for this repo.
|
||||
#
|
||||
# DO NOT REPORT SECURITY VULNERABILITIES DIRECTLY TO THESE NAMES, FOLLOW THE
|
||||
# INSTRUCTIONS AT https://github.com/argoproj/argo-cd/security/policy
|
||||
# INSTRUCTIONS AT https://argo-cd.readthedocs.io/en/latest/security_considerations/#reporting-vulnerabilities
|
||||
|
||||
alexmt
|
||||
edlee2121
|
||||
|
||||
87
USERS.md
@@ -7,27 +7,20 @@ Currently, the following organizations are **officially** using Argo CD:
|
||||
|
||||
1. [127Labs](https://127labs.com/)
|
||||
1. [3Rein](https://www.3rein.com/)
|
||||
1. [4data](https://4data.ch/)
|
||||
1. [7shifts](https://www.7shifts.com/)
|
||||
1. [Adevinta](https://www.adevinta.com/)
|
||||
1. [Adfinis](https://adfinis.com)
|
||||
1. [Adventure](https://jp.adventurekk.com/)
|
||||
1. [Adyen](https://www.adyen.com)
|
||||
1. [AirQo](https://airqo.net/)
|
||||
1. [Akuity](https://akuity.io/)
|
||||
1. [Albert Heijn](https://ah.nl/)
|
||||
1. [Alibaba Group](https://www.alibabagroup.com/)
|
||||
1. [Allianz Direct](https://www.allianzdirect.de/)
|
||||
1. [Amadeus IT Group](https://amadeus.com/)
|
||||
1. [Ambassador Labs](https://www.getambassador.io/)
|
||||
1. [ANSTO - Australian Synchrotron](https://www.synchrotron.org.au/)
|
||||
1. [Ant Group](https://www.antgroup.com/)
|
||||
1. [AppDirect](https://www.appdirect.com)
|
||||
1. [Arctiq Inc.](https://www.arctiq.ca)
|
||||
1. [ARZ Allgemeines Rechenzentrum GmbH](https://www.arz.at/)
|
||||
2. [Autodesk](https://www.autodesk.com)
|
||||
1. [Axual B.V.](https://axual.com)
|
||||
1. [Back Market](https://www.backmarket.com)
|
||||
1. [Baloise](https://www.baloise.com)
|
||||
1. [BCDevExchange DevOps Platform](https://bcdevexchange.org/DevOpsPlatform)
|
||||
1. [Beat](https://thebeat.co/en/)
|
||||
@@ -40,42 +33,29 @@ Currently, the following organizations are **officially** using Argo CD:
|
||||
1. [Boticario](https://www.boticario.com.br/)
|
||||
1. [Bulder Bank](https://bulderbank.no)
|
||||
1. [Camptocamp](https://camptocamp.com)
|
||||
1. [Candis](https://www.candis.io)
|
||||
1. [Capital One](https://www.capitalone.com)
|
||||
1. [CARFAX](https://www.carfax.com)
|
||||
1. [CARFAX Europe](https://www.carfax.eu)
|
||||
1. [Carrefour Group](https://www.carrefour.com)
|
||||
1. [Casavo](https://casavo.com)
|
||||
1. [Celonis](https://www.celonis.com/)
|
||||
1. [CERN](https://home.cern/)
|
||||
1. [Chargetrip](https://chargetrip.com)
|
||||
1. [Chainnodes](https://chainnodes.org)
|
||||
1. [Chime](https://www.chime.com)
|
||||
1. [Cisco ET&I](https://eti.cisco.com/)
|
||||
1. [Cloud Posse](https://www.cloudposse.com/)
|
||||
1. [Cloud Scale](https://cloudscaleinc.com/)
|
||||
1. [Cloudmate](https://cloudmt.co.kr/)
|
||||
1. [Cloudogu](https://cloudogu.com/)
|
||||
1. [Cobalt](https://www.cobalt.io/)
|
||||
1. [Codefresh](https://www.codefresh.io/)
|
||||
1. [Codility](https://www.codility.com/)
|
||||
1. [Commonbond](https://commonbond.co/)
|
||||
1. [Coralogix](https://coralogix.com/)
|
||||
1. [Crédit Agricole CIB](https://www.ca-cib.com)
|
||||
1. [CROZ d.o.o.](https://croz.net/)
|
||||
1. [Crédit Agricole CIB](https://www.ca-cib.com)
|
||||
1. [CyberAgent](https://www.cyberagent.co.jp/en/)
|
||||
1. [Cybozu](https://cybozu-global.com)
|
||||
1. [D2iQ](https://www.d2iq.com)
|
||||
1. [DaoCloud](https://daocloud.io/)
|
||||
1. [Datarisk](https://www.datarisk.io/)
|
||||
1. [Deloitte](https://www.deloitte.com/)
|
||||
1. [Deutsche Telekom AG](https://telekom.com)
|
||||
1. [Devopsi - Poland Software/DevOps Consulting](https://devopsi.pl/)
|
||||
1. [Devtron Labs](https://github.com/devtron-labs/devtron)
|
||||
1. [DigitalOcean](https://www.digitalocean.com)
|
||||
1. [Divistant](https://divistant.com)
|
||||
1. [Dott](https://ridedott.com)
|
||||
1. [Doximity](https://www.doximity.com/)
|
||||
1. [EDF Renewables](https://www.edf-re.com/)
|
||||
1. [edX](https://edx.org)
|
||||
1. [Elastic](https://elastic.co/)
|
||||
@@ -85,12 +65,8 @@ Currently, the following organizations are **officially** using Argo CD:
|
||||
1. [END.](https://www.endclothing.com/)
|
||||
1. [Energisme](https://energisme.com/)
|
||||
1. [enigmo](https://enigmo.co.jp/)
|
||||
1. [Envoy](https://envoy.com/)
|
||||
1. [Factorial](https://factorialhr.com/)
|
||||
1. [Farfetch](https://www.farfetch.com)
|
||||
1. [Faro](https://www.faro.com/)
|
||||
1. [Fave](https://myfave.com)
|
||||
1. [Flexport](https://www.flexport.com/)
|
||||
1. [Flip](https://flip.id)
|
||||
1. [Fonoa](https://www.fonoa.com/)
|
||||
1. [freee](https://corp.freee.co.jp/en/company/)
|
||||
@@ -99,21 +75,16 @@ Currently, the following organizations are **officially** using Argo CD:
|
||||
1. [G DATA CyberDefense AG](https://www.gdata-software.com/)
|
||||
1. [Garner](https://www.garnercorp.com)
|
1. [Generali Deutschland AG](https://www.generali.de/)
1. [Gepardec](https://gepardec.com/)
1. [GetYourGuide](https://www.getyourguide.com/)
1. [Gitpod](https://www.gitpod.io)
1. [Gllue](https://gllue.com)
1. [gloat](https://gloat.com/)
1. [GLOBIS](https://globis.com)
1. [Glovo](https://www.glovoapp.com)
1. [GlueOps](https://glueops.dev)
1. [GMETRI](https://gmetri.com/)
1. [Gojek](https://www.gojek.io/)
1. [GoTo](https://www.goto.com/)
1. [GoTo Financial](https://gotofinancial.com/)
1. [Greenpass](https://www.greenpass.com.br/)
1. [Gridfuse](https://gridfuse.com/)
1. [Groww](https://groww.in)
1. [Grupo MasMovil](https://grupomasmovil.com/en/)
1. [Handelsbanken](https://www.handelsbanken.se)
1. [Healy](https://www.healyworld.net)
@@ -122,41 +93,31 @@ Currently, the following organizations are **officially** using Argo CD:
1. [hipages](https://hipages.com.au/)
1. [Hiya](https://hiya.com)
1. [Honestbank](https://honestbank.com)
1. [Hostinger](https://www.hostinger.com)
1. [IBM](https://www.ibm.com/)
1. [Ibotta](https://home.ibotta.com)
1. [IITS-Consulting](https://iits-consulting.de)
1. [imaware](https://imaware.health)
1. [Indeed](https://indeed.com)
1. [Index Exchange](https://www.indexexchange.com/)
1. [Info Support](https://www.infosupport.com/)
1. [InsideBoard](https://www.insideboard.com)
1. [Intuit](https://www.intuit.com/)
1. [Jellysmack](https://www.jellysmack.com)
1. [Joblift](https://joblift.com/)
1. [JovianX](https://www.jovianx.com/)
1. [Kaltura](https://corp.kaltura.com/)
1. [Kandji](https://www.kandji.io/)
1. [Karrot](https://www.daangn.com/)
1. [KarrotPay](https://www.daangnpay.com/)
1. [Kasa](https://kasa.co.kr/)
1. [Keeeb](https://www.keeeb.com/)
1. [KelkooGroup](https://www.kelkoogroup.com)
1. [Keptn](https://keptn.sh)
1. [Kinguin](https://www.kinguin.net/)
1. [KintoHub](https://www.kintohub.com/)
1. [KompiTech GmbH](https://www.kompitech.com/)
1. [KPMG](https://kpmg.com/uk)
1. [KubeSphere](https://github.com/kubesphere)
1. [Kurly](https://www.kurly.com/)
1. [Kvist](https://kvistsolutions.com)
1. [LexisNexis](https://www.lexisnexis.com/)
1. [Lian Chu Securities](https://lczq.com)
1. [Liatrio](https://www.liatrio.com)
1. [Lightricks](https://www.lightricks.com/)
1. [LINE](https://linecorp.com/en/)
1. [Loom](https://www.loom.com/)
1. [Lucid Motors](https://www.lucidmotors.com/)
1. [Lytt](https://www.lytt.co/)
1. [Magic Leap](https://www.magicleap.com/)
1. [Majid Al Futtaim](https://www.majidalfuttaim.com/)
@@ -167,12 +128,9 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Max Kelsen](https://www.maxkelsen.com/)
1. [MeDirect](https://medirect.com.mt/)
1. [Meican](https://meican.com/)
1. [Meilleurs Agents](https://www.meilleursagents.com/)
1. [Mercedes-Benz Tech Innovation](https://www.mercedes-benz-techinnovation.com/)
1. [Metanet](http://www.metanet.co.kr/en/)
1. [MindSpore](https://mindspore.cn)
1. [Mirantis](https://mirantis.com/)
1. [Mission Lane](https://missionlane.com)
1. [mixi Group](https://mixi.co.jp/)
1. [Moengage](https://www.moengage.com/)
1. [Money Forward](https://corp.moneyforward.com/en/)
@@ -184,54 +142,36 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Nextdoor](https://nextdoor.com/)
1. [Nikkei](https://www.nikkei.co.jp/nikkeiinfo/en/)
1. [Nitro](https://gonitro.com)
1. [NYCU, CS IT Center](https://it.cs.nycu.edu.tw)
1. [Objective](https://www.objective.com.br/)
1. [OCCMundial](https://occ.com.mx)
1. [Octadesk](https://octadesk.com)
1. [Olfeo](https://www.olfeo.com/)
1. [omegaUp](https://omegaUp.com)
1. [Omni](https://omni.se/)
1. [openEuler](https://openeuler.org)
1. [openGauss](https://opengauss.org/)
1. [OpenGov](https://opengov.com)
1. [openLooKeng](https://openlookeng.io)
1. [OpenSaaS Studio](https://opensaas.studio)
1. [Opensurvey](https://www.opensurvey.co.kr/)
1. [OpsMx](https://opsmx.io)
1. [OpsVerse](https://opsverse.io)
1. [Optoro](https://www.optoro.com/)
1. [Orbital Insight](https://orbitalinsight.com/)
1. [Oscar Health Insurance](https://hioscar.com/)
1. [p3r](https://www.p3r.one/)
1. [Packlink](https://www.packlink.com/)
1. [PagerDuty](https://www.pagerduty.com/)
1. [Pandosearch](https://www.pandosearch.com/en/home)
1. [Patreon](https://www.patreon.com/)
1. [PayPay](https://paypay.ne.jp/)
1. [Peloton Interactive](https://www.onepeloton.com/)
1. [PGS](https://www.pgs.com)
1. [Pigment](https://www.gopigment.com/)
1. [Pipefy](https://www.pipefy.com/)
1. [Pismo](https://pismo.io/)
1. [Platform9 Systems](https://platform9.com/)
1. [Polarpoint.io](https://polarpoint.io)
1. [PostFinance](https://github.com/postfinance)
1. [Preferred Networks](https://preferred.jp/en/)
1. [Previder BV](https://previder.nl)
1. [Procore](https://www.procore.com)
1. [Productboard](https://www.productboard.com/)
1. [Prudential](https://prudential.com.sg)
1. [PT Boer Technology (Btech)](https://btech.id/)
1. [PUBG](https://www.pubg.com)
1. [Puzzle ITC](https://www.puzzle.ch/)
1. [Qonto](https://qonto.com)
1. [QuintoAndar](https://quintoandar.com.br)
1. [Quipper](https://www.quipper.com/)
1. [RapidAPI](https://www.rapidapi.com/)
1. [Recreation.gov](https://www.recreation.gov/)
1. [Red Hat](https://www.redhat.com/)
1. [Redpill Linpro](https://www.redpill-linpro.com/)
1. [Reenigne Cloud](https://reenigne.ca)
1. [reev.com](https://www.reev.com/)
1. [RightRev](https://rightrev.com/)
1. [Rise](https://www.risecard.eu/)
@@ -241,15 +181,10 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Saildrone](https://www.saildrone.com/)
1. [Saloodo! GmbH](https://www.saloodo.com)
1. [Sap Labs](http://sap.com)
1. [Sauce Labs](https://saucelabs.com/)
1. [Schwarz IT](https://jobs.schwarz/it-mission)
1. [SEEK](https://seek.com.au)
1. [SI Analytics](https://si-analytics.ai)
1. [Skit](https://skit.ai/)
1. [Skyscanner](https://www.skyscanner.net/)
1. [Smart Pension](https://www.smartpension.co.uk/)
1. [Smilee.io](https://smilee.io)
1. [Smood.ch](https://www.smood.ch/)
1. [Snapp](https://snapp.ir/)
1. [Snyk](https://snyk.io/)
1. [Softway Medical](https://www.softwaymedical.fr/)
@@ -258,7 +193,6 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Spendesk](https://spendesk.com/)
1. [Splunk](https://splunk.com/)
1. [Spores Labs](https://spores.app)
1. [StreamNative](https://streamnative.io)
1. [Stuart](https://stuart.com/)
1. [Sumo Logic](https://sumologic.com/)
1. [Sutpc](http://www.sutpc.com/)
@@ -272,7 +206,6 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Techcombank](https://www.techcombank.com.vn/trang-chu)
1. [Technacy](https://www.technacy.it/)
1. [Tesla](https://tesla.com/)
1. [The Scale Factory](https://www.scalefactory.com/)
1. [ThousandEyes](https://www.thousandeyes.com/)
1. [Ticketmaster](https://ticketmaster.com)
1. [Tiger Analytics](https://www.tigeranalytics.com/)
@@ -280,27 +213,17 @@ Currently, the following organizations are **officially** using Argo CD:
1. [Toss](https://toss.im/en)
1. [Trendyol](https://www.trendyol.com/)
1. [tru.ID](https://tru.id)
1. [Trusting Social](https://trustingsocial.com/)
1. [Twilio Segment](https://segment.com/)
1. [Twilio SendGrid](https://sendgrid.com)
1. [tZERO](https://www.tzero.com/)
1. [U.S. Veterans Affairs Department](https://www.va.gov/)
1. [UBIO](https://ub.io/)
1. [UFirstGroup](https://www.ufirstgroup.com/en/)
1. [ungleich.ch](https://ungleich.ch/)
1. [Unifonic Inc](https://www.unifonic.com/)
1. [Universidad Mesoamericana](https://www.umes.edu.gt/)
1. [Upsider Inc.](https://up-sider.com/lp/)
1. [Urbantz](https://urbantz.com/)
1. [Vectra](https://www.vectra.ai)
1. [Veepee](https://www.veepee.com)
1. [Viaduct](https://www.viaduct.ai/)
1. [VietMoney](https://vietmoney.vn/)
1. [Vinted](https://vinted.com/)
1. [Virtuo](https://www.govirtuo.com/)
1. [VISITS Technologies](https://visits.world/en)
1. [Volvo Cars](https://www.volvocars.com/)
1. [Voyager Digital](https://www.investvoyager.com/)
1. [VSHN - The DevOps Company](https://vshn.ch/)
1. [Walkbase](https://www.walkbase.com/)
1. [Webstores](https://www.webstores.nl)
@@ -309,13 +232,11 @@ Currently, the following organizations are **officially** using Argo CD:
1. [WeMo Scooter](https://www.wemoscooter.com/)
1. [Whitehat Berlin](https://whitehat.berlin) by Guido Maria Serra +Fenaroli
1. [Witick](https://witick.io/)
1. [Wolffun Game](https://www.wolffungame.com/)
1. [WooliesX](https://wooliesx.com.au/)
1. [Woolworths Group](https://www.woolworthsgroup.com.au/)
1. [WSpot](https://www.wspot.com.br/)
1. [Yieldlab](https://www.yieldlab.de/)
1. [Youverify](https://youverify.co/)
1. [Yubo](https://www.yubo.live/)
1. [ZDF](https://www.zdf.de/)
1. [Zimpler](https://www.zimpler.com/)
1. [ZOZO](https://corp.zozo.com/)
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -2,7 +2,6 @@ package controllers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
@@ -47,6 +46,7 @@ type addRateLimitingInterface interface {
|
||||
}
|
||||
|
||||
func (h *clusterSecretEventHandler) queueRelatedAppGenerators(q addRateLimitingInterface, object client.Object) {
|
||||
|
||||
// Check for label, lookup all ApplicationSets that might match the cluster, queue them all
|
||||
if object.GetLabels()[generators.ArgoCDSecretTypeLabel] != generators.ArgoCDSecretTypeCluster {
|
||||
return
|
||||
@@ -73,40 +73,6 @@ func (h *clusterSecretEventHandler) queueRelatedAppGenerators(q addRateLimitingI
|
||||
foundClusterGenerator = true
|
||||
break
|
||||
}
|
||||
|
||||
if generator.Matrix != nil {
|
||||
ok, err := nestedGeneratorsHaveClusterGenerator(generator.Matrix.Generators)
|
||||
if err != nil {
|
||||
h.Log.
|
||||
WithFields(log.Fields{
|
||||
"namespace": appSet.GetNamespace(),
|
||||
"name": appSet.GetName(),
|
||||
}).
|
||||
WithError(err).
|
||||
Error("Unable to check if ApplicationSet matrix generators have cluster generator")
|
||||
}
|
||||
if ok {
|
||||
foundClusterGenerator = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if generator.Merge != nil {
|
||||
ok, err := nestedGeneratorsHaveClusterGenerator(generator.Merge.Generators)
|
||||
if err != nil {
|
||||
h.Log.
|
||||
WithFields(log.Fields{
|
||||
"namespace": appSet.GetNamespace(),
|
||||
"name": appSet.GetName(),
|
||||
}).
|
||||
WithError(err).
|
||||
Error("Unable to check if ApplicationSet merge generators have cluster generator")
|
||||
}
|
||||
if ok {
|
||||
foundClusterGenerator = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if foundClusterGenerator {
|
||||
|
||||
@@ -116,50 +82,3 @@ func (h *clusterSecretEventHandler) queueRelatedAppGenerators(q addRateLimitingI
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// nestedGeneratorsHaveClusterGenerator iterates over the provided nested generators to check if they have a cluster generator.
|
||||
func nestedGeneratorsHaveClusterGenerator(generators []argoprojiov1alpha1.ApplicationSetNestedGenerator) (bool, error) {
|
||||
for _, generator := range generators {
|
||||
if ok, err := nestedGeneratorHasClusterGenerator(generator); ok || err != nil {
|
||||
return ok, err
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// nestedGeneratorHasClusterGenerator checks if the provided generator has a cluster generator.
|
||||
func nestedGeneratorHasClusterGenerator(nested argoprojiov1alpha1.ApplicationSetNestedGenerator) (bool, error) {
|
||||
if nested.Clusters != nil {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if nested.Matrix != nil {
|
||||
nestedMatrix, err := argoprojiov1alpha1.ToNestedMatrixGenerator(nested.Matrix)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("unable to get nested matrix generator: %w", err)
|
||||
}
|
||||
if nestedMatrix != nil {
|
||||
hasClusterGenerator, err := nestedGeneratorsHaveClusterGenerator(nestedMatrix.ToMatrixGenerator().Generators)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("error evaluating nested matrix generator: %w", err)
|
||||
}
|
||||
return hasClusterGenerator, nil
|
||||
}
|
||||
}
|
||||
|
||||
if nested.Merge != nil {
|
||||
nestedMerge, err := argoprojiov1alpha1.ToNestedMergeGenerator(nested.Merge)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("unable to get nested merge generator: %w", err)
|
||||
}
|
||||
if nestedMerge != nil {
|
||||
hasClusterGenerator, err := nestedGeneratorsHaveClusterGenerator(nestedMerge.ToMergeGenerator().Generators)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("error evaluating nested merge generator: %w", err)
|
||||
}
|
||||
return hasClusterGenerator, nil
|
||||
}
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/stretchr/testify/assert"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
@@ -164,6 +163,7 @@ func TestClusterEventHandler(t *testing.T) {
|
||||
{NamespacedName: types.NamespacedName{Namespace: "another-namespace", Name: "my-app-set"}},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "non-argo cd secret should not match",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
@@ -189,348 +189,6 @@ func TestClusterEventHandler(t *testing.T) {
|
||||
},
|
||||
expectedRequests: []reconcile.Request{},
|
||||
},
|
||||
{
|
||||
name: "a matrix generator with a cluster generator should produce a request",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Matrix: &argov1alpha1.MatrixGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{{
|
||||
NamespacedName: types.NamespacedName{Namespace: "argocd", Name: "my-app-set"},
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "a matrix generator with non cluster generator should not match",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Matrix: &argov1alpha1.MatrixGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
List: &argov1alpha1.ListGenerator{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{},
|
||||
},
|
||||
{
|
||||
name: "a matrix generator with a nested matrix generator containing a cluster generator should produce a request",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Matrix: &argov1alpha1.MatrixGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Matrix: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"clusters": {
|
||||
"selector": {
|
||||
"matchLabels": {
|
||||
"argocd.argoproj.io/secret-type": "cluster"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{{
|
||||
NamespacedName: types.NamespacedName{Namespace: "argocd", Name: "my-app-set"},
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "a matrix generator with a nested matrix generator containing non cluster generator should not match",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Matrix: &argov1alpha1.MatrixGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Matrix: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"list": {
|
||||
"elements": [
|
||||
"a",
|
||||
"b"
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{},
|
||||
},
|
||||
{
|
||||
name: "a merge generator with a cluster generator should produce a request",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Merge: &argov1alpha1.MergeGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{{
|
||||
NamespacedName: types.NamespacedName{Namespace: "argocd", Name: "my-app-set"},
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "a matrix generator with non cluster generator should not match",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Merge: &argov1alpha1.MergeGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
List: &argov1alpha1.ListGenerator{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{},
|
||||
},
|
||||
{
|
||||
name: "a merge generator with a nested merge generator containing a cluster generator should produce a request",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Merge: &argov1alpha1.MergeGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Merge: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"clusters": {
|
||||
"selector": {
|
||||
"matchLabels": {
|
||||
"argocd.argoproj.io/secret-type": "cluster"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{{
|
||||
NamespacedName: types.NamespacedName{Namespace: "argocd", Name: "my-app-set"},
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "a merge generator with a nested merge generator containing non cluster generator should not match",
|
||||
items: []argov1alpha1.ApplicationSet{
|
||||
{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Name: "my-app-set",
|
||||
Namespace: "argocd",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{
|
||||
Merge: &argov1alpha1.MergeGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Merge: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"list": {
|
||||
"elements": [
|
||||
"a",
|
||||
"b"
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
secret: corev1.Secret{
|
||||
ObjectMeta: v1.ObjectMeta{
|
||||
Namespace: "argocd",
|
||||
Name: "my-secret",
|
||||
Labels: map[string]string{
|
||||
generators.ArgoCDSecretTypeLabel: generators.ArgoCDSecretTypeCluster,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRequests: []reconcile.Request{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
@@ -573,68 +231,3 @@ type mockAddRateLimitingInterface struct {
|
||||
errorOccurred bool
|
||||
addedItems []ctrl.Request
|
||||
}
|
||||
|
||||
func TestNestedGeneratorHasClusterGenerator_NestedClusterGenerator(t *testing.T) {
|
||||
nested := argov1alpha1.ApplicationSetNestedGenerator{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{},
|
||||
}
|
||||
|
||||
hasClusterGenerator, err := nestedGeneratorHasClusterGenerator(nested)
|
||||
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, hasClusterGenerator)
|
||||
}
|
||||
|
||||
func TestNestedGeneratorHasClusterGenerator_NestedMergeGenerator(t *testing.T) {
|
||||
nested := argov1alpha1.ApplicationSetNestedGenerator{
|
||||
Merge: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"clusters": {
|
||||
"selector": {
|
||||
"matchLabels": {
|
||||
"argocd.argoproj.io/secret-type": "cluster"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
hasClusterGenerator, err := nestedGeneratorHasClusterGenerator(nested)
|
||||
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, hasClusterGenerator)
|
||||
}
|
||||
|
||||
func TestNestedGeneratorHasClusterGenerator_NestedMergeGeneratorWithInvalidJSON(t *testing.T) {
|
||||
nested := argov1alpha1.ApplicationSetNestedGenerator{
|
||||
Merge: &apiextensionsv1.JSON{
|
||||
Raw: []byte(
|
||||
`{
|
||||
"generators": [
|
||||
{
|
||||
"clusters": {
|
||||
"selector": {
|
||||
"matchLabels": {
|
||||
"argocd.argoproj.io/secret-type": "cluster"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
`,
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
hasClusterGenerator, err := nestedGeneratorHasClusterGenerator(nested)
|
||||
|
||||
assert.NotNil(t, err)
|
||||
assert.False(t, hasClusterGenerator)
|
||||
}
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
dynfake "k8s.io/client-go/dynamic/fake"
|
||||
kubefake "k8s.io/client-go/kubernetes/fake"
|
||||
"k8s.io/client-go/tools/record"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client/fake"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/generators"
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/mocks"
|
||||
argov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
func TestRequeueAfter(t *testing.T) {
|
||||
mockServer := &mocks.Repos{}
|
||||
ctx := context.Background()
|
||||
scheme := runtime.NewScheme()
|
||||
err := argov1alpha1.AddToScheme(scheme)
|
||||
assert.Nil(t, err)
|
||||
gvrToListKind := map[schema.GroupVersionResource]string{{
|
||||
Group: "mallard.io",
|
||||
Version: "v1",
|
||||
Resource: "ducks",
|
||||
}: "DuckList"}
|
||||
appClientset := kubefake.NewSimpleClientset()
|
||||
k8sClient := fake.NewClientBuilder().Build()
|
||||
duckType := &unstructured.Unstructured{
|
||||
Object: map[string]interface{}{
|
||||
"apiVersion": "v2quack",
|
||||
"kind": "Duck",
|
||||
"metadata": map[string]interface{}{
|
||||
"name": "mightyduck",
|
||||
"namespace": "namespace",
|
||||
"labels": map[string]interface{}{"duck": "all-species"},
|
||||
},
|
||||
"status": map[string]interface{}{
|
||||
"decisions": []interface{}{
|
||||
map[string]interface{}{
|
||||
"clusterName": "staging-01",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"clusterName": "production-01",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
fakeDynClient := dynfake.NewSimpleDynamicClientWithCustomListKinds(runtime.NewScheme(), gvrToListKind, duckType)
|
||||
|
||||
terminalGenerators := map[string]generators.Generator{
|
||||
"List": generators.NewListGenerator(),
|
||||
"Clusters": generators.NewClusterGenerator(k8sClient, ctx, appClientset, "argocd"),
|
||||
"Git": generators.NewGitGenerator(mockServer),
|
||||
"SCMProvider": generators.NewSCMProviderGenerator(fake.NewClientBuilder().WithObjects(&corev1.Secret{}).Build(), generators.SCMAuthProviders{}, "", []string{""}),
|
||||
"ClusterDecisionResource": generators.NewDuckTypeGenerator(ctx, fakeDynClient, appClientset, "argocd"),
|
||||
"PullRequest": generators.NewPullRequestGenerator(k8sClient, generators.SCMAuthProviders{}, "", []string{""}),
|
||||
}
|
||||
|
||||
nestedGenerators := map[string]generators.Generator{
|
||||
"List": terminalGenerators["List"],
|
||||
"Clusters": terminalGenerators["Clusters"],
|
||||
"Git": terminalGenerators["Git"],
|
||||
"SCMProvider": terminalGenerators["SCMProvider"],
|
||||
"ClusterDecisionResource": terminalGenerators["ClusterDecisionResource"],
|
||||
"PullRequest": terminalGenerators["PullRequest"],
|
||||
"Matrix": generators.NewMatrixGenerator(terminalGenerators),
|
||||
"Merge": generators.NewMergeGenerator(terminalGenerators),
|
||||
}
|
||||
|
||||
topLevelGenerators := map[string]generators.Generator{
|
||||
"List": terminalGenerators["List"],
|
||||
"Clusters": terminalGenerators["Clusters"],
|
||||
"Git": terminalGenerators["Git"],
|
||||
"SCMProvider": terminalGenerators["SCMProvider"],
|
||||
"ClusterDecisionResource": terminalGenerators["ClusterDecisionResource"],
|
||||
"PullRequest": terminalGenerators["PullRequest"],
|
||||
"Matrix": generators.NewMatrixGenerator(nestedGenerators),
|
||||
"Merge": generators.NewMergeGenerator(nestedGenerators),
|
||||
}
|
||||
|
||||
client := fake.NewClientBuilder().WithScheme(scheme).Build()
|
||||
r := ApplicationSetReconciler{
|
||||
Client: client,
|
||||
Scheme: scheme,
|
||||
Recorder: record.NewFakeRecorder(0),
|
||||
Generators: topLevelGenerators,
|
||||
}
|
||||
|
||||
type args struct {
|
||||
appset *argov1alpha1.ApplicationSet
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want time.Duration
|
||||
wantErr assert.ErrorAssertionFunc
|
||||
}{
|
||||
{name: "Cluster", args: args{appset: &argov1alpha1.ApplicationSet{
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{{Clusters: &argov1alpha1.ClusterGenerator{}}},
|
||||
},
|
||||
}}, want: generators.NoRequeueAfter, wantErr: assert.NoError},
|
||||
{name: "ClusterMergeNested", args: args{&argov1alpha1.ApplicationSet{
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{Clusters: &argov1alpha1.ClusterGenerator{}},
|
||||
{Merge: &argov1alpha1.MergeGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{},
|
||||
Git: &argov1alpha1.GitGenerator{},
|
||||
},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
}}, want: generators.DefaultRequeueAfterSeconds, wantErr: assert.NoError},
|
||||
{name: "ClusterMatrixNested", args: args{&argov1alpha1.ApplicationSet{
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{
|
||||
{Clusters: &argov1alpha1.ClusterGenerator{}},
|
||||
{Matrix: &argov1alpha1.MatrixGenerator{
|
||||
Generators: []argov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{},
|
||||
Git: &argov1alpha1.GitGenerator{},
|
||||
},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
}}, want: generators.DefaultRequeueAfterSeconds, wantErr: assert.NoError},
|
||||
{name: "ListGenerator", args: args{appset: &argov1alpha1.ApplicationSet{
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argov1alpha1.ApplicationSetGenerator{{List: &argov1alpha1.ListGenerator{}}},
|
||||
},
|
||||
}}, want: generators.NoRequeueAfter, wantErr: assert.NoError},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
assert.Equalf(t, tt.want, r.getMinRequeueAfter(tt.args.appset), "getMinRequeueAfter(%v)", tt.args.appset)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: ApplicationSet
|
||||
metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
- cluster: engineering-dev
|
||||
url: https://kubernetes.default.svc
|
||||
foo: bar
|
||||
# Update foo value with foo: bar
|
||||
# Application engineering-prod-guestbook labels will still be baz
|
||||
# Delete this element
|
||||
# Application engineering-prod-guestbook will be kept
|
||||
- cluster: engineering-prod
|
||||
url: https://kubernetes.default.svc
|
||||
foo: baz
|
||||
template:
|
||||
metadata:
|
||||
name: '{{.cluster}}-guestbook'
|
||||
labels:
|
||||
foo: '{{.foo}}'
|
||||
spec:
|
||||
project: default
|
||||
source:
|
||||
repoURL: https://github.com/argoproj/argo-cd.git
|
||||
targetRevision: HEAD
|
||||
path: applicationset/examples/list-generator/guestbook/{{.cluster}}
|
||||
destination:
|
||||
server: '{{.url}}'
|
||||
namespace: guestbook
|
||||
syncPolicy:
|
||||
applicationsSync: create-only
|
||||
@@ -1,35 +0,0 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: ApplicationSet
|
||||
metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
- cluster: engineering-dev
|
||||
url: https://kubernetes.default.svc
|
||||
foo: bar
|
||||
# Update foo value with foo: bar
|
||||
# Application engineering-prod-guestbook labels will change to foo: bar
|
||||
# Delete this element
|
||||
# Application engineering-prod-guestbook will be kept
|
||||
- cluster: engineering-prod
|
||||
url: https://kubernetes.default.svc
|
||||
foo: baz
|
||||
template:
|
||||
metadata:
|
||||
name: '{{.cluster}}-guestbook'
|
||||
labels:
|
||||
foo: '{{.foo}}'
|
||||
spec:
|
||||
project: default
|
||||
source:
|
||||
repoURL: https://github.com/argoproj/argo-cd.git
|
||||
targetRevision: HEAD
|
||||
path: applicationset/examples/list-generator/guestbook/{{.cluster}}
|
||||
destination:
|
||||
server: '{{.url}}'
|
||||
namespace: guestbook
|
||||
syncPolicy:
|
||||
applicationsSync: create-update
|
||||
@@ -1,20 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: guestbook-ui
|
||||
spec:
|
||||
replicas: 1
|
||||
revisionHistoryLimit: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: guestbook-ui
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: guestbook-ui
|
||||
spec:
|
||||
containers:
|
||||
- image: gcr.io/heptio-images/ks-guestbook-demo:0.2
|
||||
name: guestbook-ui
|
||||
ports:
|
||||
- containerPort: 80
|
||||
@@ -1,10 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: guestbook-ui
|
||||
spec:
|
||||
ports:
|
||||
- port: 80
|
||||
targetPort: 80
|
||||
selector:
|
||||
app: guestbook-ui
|
||||
@@ -1,20 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: guestbook-ui
|
||||
spec:
|
||||
replicas: 1
|
||||
revisionHistoryLimit: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: guestbook-ui
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: guestbook-ui
|
||||
spec:
|
||||
containers:
|
||||
- image: gcr.io/heptio-images/ks-guestbook-demo:0.2
|
||||
name: guestbook-ui
|
||||
ports:
|
||||
- containerPort: 80
|
||||
@@ -1,10 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: guestbook-ui
|
||||
spec:
|
||||
ports:
|
||||
- port: 80
|
||||
targetPort: 80
|
||||
selector:
|
||||
app: guestbook-ui
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- clusters: {}
|
||||
template:
|
||||
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: book-import
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- clusterDecisionResource:
|
||||
configMapRef: ocm-placement
|
||||
|
||||
@@ -8,7 +8,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- clusters: {}
|
||||
template:
|
||||
|
||||
@@ -27,7 +27,6 @@ metadata:
|
||||
name: cluster-addons
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/infra-team/cluster-deployments.git
|
||||
|
||||
@@ -38,7 +38,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/infra-team/cluster-deployments.git
|
||||
|
||||
@@ -51,7 +51,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/infra-team/cluster-deployments.git
|
||||
|
||||
@@ -5,7 +5,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
|
||||
@@ -8,7 +8,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
|
||||
@@ -5,7 +5,6 @@ metadata:
|
||||
namespace: argocd
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/argoproj/argo-cd.git
|
||||
|
||||
@@ -5,7 +5,6 @@ metadata:
|
||||
namespace: argocd
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/argoproj/argo-cd.git
|
||||
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- git:
|
||||
repoURL: https://github.com/argoproj/argo-cd.git
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
key:
|
||||
components:
|
||||
- name: component1
|
||||
chart: podinfo
|
||||
version: "6.3.2"
|
||||
releaseName: component1
|
||||
repoUrl: "https://stefanprodan.github.io/podinfo"
|
||||
namespace: component1
|
||||
- name: component2
|
||||
chart: podinfo
|
||||
version: "6.3.3"
|
||||
releaseName: component2
|
||||
repoUrl: "ghcr.io/stefanprodan/charts"
|
||||
namespace: component2
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
|
||||
@@ -8,7 +8,6 @@ metadata:
|
||||
name: cluster-git
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- matrix:
|
||||
generators:
|
||||
|
||||
@@ -8,7 +8,6 @@ metadata:
|
||||
name: list-git
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- matrix:
|
||||
generators:
|
||||
|
||||
@@ -5,7 +5,6 @@ metadata:
|
||||
namespace: argocd
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- matrix:
|
||||
generators:
|
||||
|
||||
@@ -13,7 +13,6 @@ metadata:
|
||||
name: matrix-and-union-in-matrix
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- matrix:
|
||||
generators:
|
||||
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: merge-clusters-and-list
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- merge:
|
||||
mergeKeys:
|
||||
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: merge-two-matrixes
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- merge:
|
||||
mergeKeys:
|
||||
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: myapp
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- pullRequest:
|
||||
github:
|
||||
@@ -24,8 +23,6 @@ spec:
|
||||
template:
|
||||
metadata:
|
||||
name: 'myapp-{{ .branch }}-{{ .number }}'
|
||||
labels:
|
||||
key1: '{{ index .labels 0 }}'
|
||||
spec:
|
||||
source:
|
||||
repoURL: 'https://github.com/myorg/myrepo.git'
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: ApplicationSet
|
||||
metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
generators:
|
||||
- scmProvider:
|
||||
gitlab:
|
||||
api: https://gitlab.com
|
||||
group: test-argocd-proton
|
||||
includeSubgroups: true
|
||||
cloneProtocol: https
|
||||
filters:
|
||||
- repositoryMatch: test-app
|
||||
template:
|
||||
metadata:
|
||||
name: '{{ repository }}-guestbook'
|
||||
spec:
|
||||
project: "default"
|
||||
source:
|
||||
repoURL: '{{ url }}'
|
||||
targetRevision: '{{ branch }}'
|
||||
path: guestbook
|
||||
destination:
|
||||
server: https://kubernetes.default.svc
|
||||
namespace: guestbook
|
||||
@@ -4,7 +4,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- scmProvider:
|
||||
github:
|
||||
|
||||
@@ -8,7 +8,6 @@ metadata:
|
||||
name: guestbook
|
||||
spec:
|
||||
goTemplate: true
|
||||
goTemplateOptions: ["missingkey=error"]
|
||||
generators:
|
||||
- list:
|
||||
elements:
|
||||
|
||||
@@ -51,8 +51,6 @@ func NewClusterGenerator(c client.Client, ctx context.Context, clientset kuberne
|
||||
return g
|
||||
}
|
||||
|
||||
// GetRequeueAfter never requeues the cluster generator because the `clusterSecretEventHandler` will requeue the appsets
|
||||
// when the cluster secrets change
|
||||
func (g *ClusterGenerator) GetRequeueAfter(appSetGenerator *argoappsetv1alpha1.ApplicationSetGenerator) time.Duration {
|
||||
return NoRequeueAfter
|
||||
}
|
||||
@@ -61,7 +59,8 @@ func (g *ClusterGenerator) GetTemplate(appSetGenerator *argoappsetv1alpha1.Appli
|
||||
return &appSetGenerator.Clusters.Template
|
||||
}
|
||||
|
||||
func (g *ClusterGenerator) GenerateParams(appSetGenerator *argoappsetv1alpha1.ApplicationSetGenerator, appSet *argoappsetv1alpha1.ApplicationSet) ([]map[string]interface{}, error) {
|
||||
func (g *ClusterGenerator) GenerateParams(
|
||||
appSetGenerator *argoappsetv1alpha1.ApplicationSetGenerator, appSet *argoappsetv1alpha1.ApplicationSet) ([]map[string]interface{}, error) {
|
||||
|
||||
if appSetGenerator == nil {
|
||||
return nil, EmptyAppSetGeneratorError
|
||||
@@ -78,7 +77,7 @@ func (g *ClusterGenerator) GenerateParams(appSetGenerator *argoappsetv1alpha1.Ap
|
||||
// ListCluster from Argo CD's util/db package will include the local cluster in the list of clusters
|
||||
clustersFromArgoCD, err := utils.ListClusters(g.ctx, g.clientset, g.namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error listing clusters: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if clustersFromArgoCD == nil {
|
||||
@@ -108,7 +107,7 @@ func (g *ClusterGenerator) GenerateParams(appSetGenerator *argoappsetv1alpha1.Ap
|
||||
params["nameNormalized"] = cluster.Name
|
||||
params["server"] = cluster.Server
|
||||
|
||||
err = appendTemplatedValues(appSetGenerator.Clusters.Values, params, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
|
||||
err = appendTemplatedValues(appSetGenerator.Clusters.Values, params, appSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -148,7 +147,7 @@ func (g *ClusterGenerator) GenerateParams(appSetGenerator *argoappsetv1alpha1.Ap
|
||||
}
|
||||
}
|
||||
|
||||
err = appendTemplatedValues(appSetGenerator.Clusters.Values, params, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
|
||||
err = appendTemplatedValues(appSetGenerator.Clusters.Values, params, appSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -161,6 +160,44 @@ func (g *ClusterGenerator) GenerateParams(appSetGenerator *argoappsetv1alpha1.Ap
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func appendTemplatedValues(clusterValues map[string]string, params map[string]interface{}, appSet *argoappsetv1alpha1.ApplicationSet) error {
|
||||
// We create a local map to ensure that we do not fall victim to a billion-laughs attack. We iterate through the
|
||||
// cluster values map and only replace values in said map if it has already been whitelisted in the params map.
|
||||
// Once we iterate through all the cluster values we can then safely merge the `tmp` map into the main params map.
|
||||
tmp := map[string]interface{}{}
|
||||
|
||||
for key, value := range clusterValues {
|
||||
result, err := replaceTemplatedString(value, params, appSet)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("error replacing templated String: %w", err)
|
||||
}
|
||||
|
||||
if appSet.Spec.GoTemplate {
|
||||
if tmp["values"] == nil {
|
||||
tmp["values"] = map[string]string{}
|
||||
}
|
||||
tmp["values"].(map[string]string)[key] = result
|
||||
} else {
|
||||
tmp[fmt.Sprintf("values.%s", key)] = result
|
||||
}
|
||||
}
|
||||
|
||||
for key, value := range tmp {
|
||||
params[key] = value
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func replaceTemplatedString(value string, params map[string]interface{}, appSet *argoappsetv1alpha1.ApplicationSet) (string, error) {
|
||||
replacedTmplStr, err := render.Replace(value, params, appSet.Spec.GoTemplate)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return replacedTmplStr, nil
|
||||
}
|
||||
|
||||
func (g *ClusterGenerator) getSecretsByClusterName(appSetGenerator *argoappsetv1alpha1.ApplicationSetGenerator) (map[string]corev1.Secret, error) {
|
||||
// List all Clusters:
|
||||
clusterSecretList := &corev1.SecretList{}
|
||||
|
||||
@@ -74,7 +74,7 @@ func (g *DuckTypeGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.A
|
||||
// ListCluster from Argo CD's util/db package will include the local cluster in the list of clusters
|
||||
clustersFromArgoCD, err := utils.ListClusters(g.ctx, g.clientset, g.namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error listing clusters: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if clustersFromArgoCD == nil {
|
||||
@@ -85,7 +85,7 @@ func (g *DuckTypeGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.A
|
||||
cm, err := g.clientset.CoreV1().ConfigMaps(g.namespace).Get(g.ctx, appSetGenerator.ClusterDecisionResource.ConfigMapRef, metav1.GetOptions{})
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error reading configMapRef: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Extract GVK data for the dynamic client to use
|
||||
|
||||
@@ -3,7 +3,6 @@ package generators
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
@@ -16,6 +15,8 @@ import (
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
const resourceApiVersion = "mallard.io/v1"
|
||||
|
||||
@@ -2,12 +2,12 @@ package generators
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
|
||||
"github.com/jeremywohl/flatten"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/utils"
|
||||
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/labels"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
@@ -25,12 +25,9 @@ type TransformResult struct {
|
||||
Template argoprojiov1alpha1.ApplicationSetTemplate
|
||||
}
|
||||
|
||||
// Transform a spec generator to list of paramSets and a template
|
||||
//Transform a spec generator to list of paramSets and a template
|
||||
func Transform(requestedGenerator argoprojiov1alpha1.ApplicationSetGenerator, allGenerators map[string]Generator, baseTemplate argoprojiov1alpha1.ApplicationSetTemplate, appSet *argoprojiov1alpha1.ApplicationSet, genParams map[string]interface{}) ([]TransformResult, error) {
|
||||
// This is a custom version of the `LabelSelectorAsSelector` that is in k8s.io/apimachinery. This has been copied
|
||||
// verbatim from that package, with the difference that we do not have any restrictions on label values. This is done
|
||||
// so that, among other things, we can match on cluster urls.
|
||||
selector, err := utils.LabelSelectorAsSelector(requestedGenerator.Selector)
|
||||
selector, err := metav1.LabelSelectorAsSelector(requestedGenerator.Selector)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing label selector: %w", err)
|
||||
}
|
||||
@@ -53,7 +50,7 @@ func Transform(requestedGenerator argoprojiov1alpha1.ApplicationSetGenerator, al
|
||||
}
|
||||
var params []map[string]interface{}
|
||||
if len(genParams) != 0 {
|
||||
tempInterpolatedGenerator, err := InterpolateGenerator(&requestedGenerator, genParams, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
|
||||
tempInterpolatedGenerator, err := InterpolateGenerator(&requestedGenerator, genParams, appSet.Spec.GoTemplate)
|
||||
interpolatedGenerator = &tempInterpolatedGenerator
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("genParams", genParams).
|
||||
@@ -75,17 +72,8 @@ func Transform(requestedGenerator argoprojiov1alpha1.ApplicationSetGenerator, al
|
||||
}
|
||||
var filterParams []map[string]interface{}
|
||||
for _, param := range params {
|
||||
flatParam, err := flattenParameters(param)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("generator", g).
|
||||
Error("error flattening params")
|
||||
if firstError == nil {
|
||||
firstError = err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if requestedGenerator.Selector != nil && !selector.Matches(labels.Set(flatParam)) {
|
||||
if requestedGenerator.Selector != nil && !selector.Matches(labels.Set(keepOnlyStringValues(param))) {
|
||||
continue
|
||||
}
|
||||
filterParams = append(filterParams, param)
|
||||
@@ -100,6 +88,18 @@ func Transform(requestedGenerator argoprojiov1alpha1.ApplicationSetGenerator, al
|
||||
return res, firstError
|
||||
}
|
||||
|
||||
func keepOnlyStringValues(in map[string]interface{}) map[string]string {
|
||||
var out map[string]string = map[string]string{}
|
||||
|
||||
for key, value := range in {
|
||||
if _, ok := value.(string); ok {
|
||||
out[key] = value.(string)
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func GetRelevantGenerators(requestedGenerator *argoprojiov1alpha1.ApplicationSetGenerator, generators map[string]Generator) []Generator {
|
||||
var res []Generator
|
||||
|
||||
@@ -122,20 +122,6 @@ func GetRelevantGenerators(requestedGenerator *argoprojiov1alpha1.ApplicationSet
|
||||
return res
|
||||
}
|
||||
|
||||
func flattenParameters(in map[string]interface{}) (map[string]string, error) {
|
||||
flat, err := flatten.Flatten(in, "", flatten.DotStyle)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error flatenning parameters: %w", err)
|
||||
}
|
||||
|
||||
out := make(map[string]string, len(flat))
|
||||
for k, v := range flat {
|
||||
out[k] = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func mergeGeneratorTemplate(g Generator, requestedGenerator *argoprojiov1alpha1.ApplicationSetGenerator, applicationSetTemplate argoprojiov1alpha1.ApplicationSetTemplate) (argoprojiov1alpha1.ApplicationSetTemplate, error) {
|
||||
// Make a copy of the value from `GetTemplate()` before merge, rather than copying directly into
|
||||
// the provided parameter (which will touch the original resource object returned by client-go)
|
||||
@@ -146,28 +132,27 @@ func mergeGeneratorTemplate(g Generator, requestedGenerator *argoprojiov1alpha1.
|
||||
return *dest, err
|
||||
}
|
||||
|
||||
// InterpolateGenerator allows interpolating the matrix's 2nd child generator with values from the 1st child generator
|
||||
// Currently for Matrix Generator. Allows interpolating the matrix's 2nd child generator with values from the 1st child generator
|
||||
// "params" parameter is an array, where each index corresponds to a generator. Each index contains a map w/ that generator's parameters.
|
||||
func InterpolateGenerator(requestedGenerator *argoprojiov1alpha1.ApplicationSetGenerator, params map[string]interface{}, useGoTemplate bool, goTemplateOptions []string) (argoprojiov1alpha1.ApplicationSetGenerator, error) {
|
||||
render := utils.Render{}
|
||||
interpolatedGenerator, err := render.RenderGeneratorParams(requestedGenerator, params, useGoTemplate, goTemplateOptions)
|
||||
func InterpolateGenerator(requestedGenerator *argoprojiov1alpha1.ApplicationSetGenerator, params map[string]interface{}, useGoTemplate bool) (argoprojiov1alpha1.ApplicationSetGenerator, error) {
|
||||
interpolatedGenerator := requestedGenerator.DeepCopy()
|
||||
tmplBytes, err := json.Marshal(interpolatedGenerator)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("interpolatedGenerator", interpolatedGenerator).Error("error interpolating generator with other generator's parameter")
|
||||
return argoprojiov1alpha1.ApplicationSetGenerator{}, err
|
||||
log.WithError(err).WithField("requestedGenerator", interpolatedGenerator).Error("error marshalling requested generator for interpolation")
|
||||
return *interpolatedGenerator, err
|
||||
}
|
||||
|
||||
render := utils.Render{}
|
||||
replacedTmplStr, err := render.Replace(string(tmplBytes), params, useGoTemplate)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("interpolatedGeneratorString", replacedTmplStr).Error("error interpolating generator with other generator's parameter")
|
||||
return *interpolatedGenerator, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal([]byte(replacedTmplStr), interpolatedGenerator)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("requestedGenerator", interpolatedGenerator).Error("error unmarshalling requested generator for interpolation")
|
||||
return *interpolatedGenerator, err
|
||||
}
|
||||
return *interpolatedGenerator, nil
|
||||
}
|
||||
|
||||
// Fixes https://github.com/argoproj/argo-cd/issues/11982 while ensuring backwards compatibility.
|
||||
// This is only a short-term solution and should be removed in a future major version.
|
||||
func dropDisabledNestedSelectors(generators []argoprojiov1alpha1.ApplicationSetNestedGenerator) bool {
|
||||
var foundSelector bool
|
||||
for i := range generators {
|
||||
if generators[i].Selector != nil {
|
||||
foundSelector = true
|
||||
generators[i].Selector = nil
|
||||
}
|
||||
}
|
||||
return foundSelector
|
||||
}
|
||||
|
||||
@@ -6,12 +6,9 @@ import (
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/mocks"
|
||||
|
||||
argov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
|
||||
"github.com/stretchr/testify/mock"
|
||||
@@ -20,6 +17,8 @@ import (
|
||||
kubefake "k8s.io/client-go/kubernetes/fake"
|
||||
crtclient "sigs.k8s.io/controller-runtime/pkg/client"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client/fake"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
func TestMatchValues(t *testing.T) {
|
||||
@@ -70,18 +69,16 @@ func TestMatchValues(t *testing.T) {
|
||||
"List": listGenerator,
|
||||
}
|
||||
|
||||
applicationSetInfo := argov1alpha1.ApplicationSet{
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
GoTemplate: false,
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{},
|
||||
}
|
||||
|
||||
results, err := Transform(argov1alpha1.ApplicationSetGenerator{
|
||||
results, err := Transform(argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Selector: testCase.selector,
|
||||
List: &argov1alpha1.ListGenerator{
|
||||
List: &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: testCase.elements,
|
||||
Template: emptyTemplate(),
|
||||
}},
|
||||
@@ -95,160 +92,8 @@ func TestMatchValues(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestMatchValuesGoTemplate(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
elements []apiextensionsv1.JSON
|
||||
selector *metav1.LabelSelector
|
||||
expected []map[string]interface{}
|
||||
}{
|
||||
{
|
||||
name: "no filter",
|
||||
elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url"}`)}},
|
||||
selector: &metav1.LabelSelector{},
|
||||
expected: []map[string]interface{}{{"cluster": "cluster", "url": "url"}},
|
||||
},
|
||||
{
|
||||
name: "nil",
|
||||
elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url"}`)}},
|
||||
selector: nil,
|
||||
expected: []map[string]interface{}{{"cluster": "cluster", "url": "url"}},
|
||||
},
|
||||
{
|
||||
name: "values.foo should be foo but is ignore element",
|
||||
elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url","values":{"foo":"bar"}}`)}},
|
||||
selector: &metav1.LabelSelector{
|
||||
MatchLabels: map[string]string{
|
||||
"values.foo": "foo",
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{},
|
||||
},
|
||||
{
|
||||
name: "values.foo should be bar",
|
||||
elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url","values":{"foo":"bar"}}`)}},
|
||||
selector: &metav1.LabelSelector{
|
||||
MatchLabels: map[string]string{
|
||||
"values.foo": "bar",
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{{"cluster": "cluster", "url": "url", "values": map[string]interface{}{"foo": "bar"}}},
|
||||
},
|
||||
{
|
||||
name: "values.0 should be bar",
|
||||
elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url","values":["bar"]}`)}},
|
||||
selector: &metav1.LabelSelector{
|
||||
MatchLabels: map[string]string{
|
||||
"values.0": "bar",
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{{"cluster": "cluster", "url": "url", "values": []interface{}{"bar"}}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
var listGenerator = NewListGenerator()
|
||||
var data = map[string]Generator{
|
||||
"List": listGenerator,
|
||||
}
|
||||
|
||||
applicationSetInfo := argov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argov1alpha1.ApplicationSetSpec{
|
||||
GoTemplate: true,
|
||||
},
|
||||
}
|
||||
|
||||
results, err := Transform(argov1alpha1.ApplicationSetGenerator{
|
||||
Selector: testCase.selector,
|
||||
List: &argov1alpha1.ListGenerator{
|
||||
Elements: testCase.elements,
|
||||
Template: emptyTemplate(),
|
||||
}},
|
||||
data,
|
||||
emptyTemplate(),
|
||||
&applicationSetInfo, nil)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.ElementsMatch(t, testCase.expected, results[0].Params)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTransForm(t *testing.T) {
	testCases := []struct {
		name     string
		selector *metav1.LabelSelector
		expected []map[string]interface{}
	}{
		{
			name: "server filter",
			selector: &metav1.LabelSelector{
				MatchLabels: map[string]string{"server": "https://production-01.example.com"},
			},
			expected: []map[string]interface{}{{
				"metadata.annotations.foo.argoproj.io": "production",
				"metadata.labels.argocd.argoproj.io/secret-type": "cluster",
				"metadata.labels.environment": "production",
				"metadata.labels.org": "bar",
				"name": "production_01/west",
				"nameNormalized": "production-01-west",
				"server": "https://production-01.example.com",
			}},
		},
		{
			name: "server filter with long url",
			selector: &metav1.LabelSelector{
				MatchLabels: map[string]string{"server": "https://some-really-long-url-that-will-exceed-63-characters.com"},
			},
			expected: []map[string]interface{}{{
				"metadata.annotations.foo.argoproj.io": "production",
				"metadata.labels.argocd.argoproj.io/secret-type": "cluster",
				"metadata.labels.environment": "production",
				"metadata.labels.org": "bar",
				"name": "some-really-long-server-url",
				"nameNormalized": "some-really-long-server-url",
				"server": "https://some-really-long-url-that-will-exceed-63-characters.com",
			}},
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			testGenerators := map[string]Generator{
				"Clusters": getMockClusterGenerator(),
			}

			applicationSetInfo := argov1alpha1.ApplicationSet{
				ObjectMeta: metav1.ObjectMeta{
					Name: "set",
				},
				Spec: argov1alpha1.ApplicationSetSpec{},
			}

			results, err := Transform(
				argov1alpha1.ApplicationSetGenerator{
					Selector: testCase.selector,
					Clusters: &argov1alpha1.ClusterGenerator{
						Selector: metav1.LabelSelector{},
						Template: argov1alpha1.ApplicationSetTemplate{},
						Values:   nil,
					}},
				testGenerators,
				emptyTemplate(),
				&applicationSetInfo, nil)

			assert.NoError(t, err)
			assert.ElementsMatch(t, testCase.expected, results[0].Params)
		})
	}
}

func emptyTemplate() argov1alpha1.ApplicationSetTemplate {
	return argov1alpha1.ApplicationSetTemplate{
func emptyTemplate() argoprojiov1alpha1.ApplicationSetTemplate {
	return argoprojiov1alpha1.ApplicationSetTemplate{
		Spec: argov1alpha1.ApplicationSpec{
			Project: "project",
		},
@@ -305,35 +150,8 @@ func getMockClusterGenerator() Generator {
|
||||
},
|
||||
Type: corev1.SecretType("Opaque"),
|
||||
},
|
||||
&corev1.Secret{
|
||||
TypeMeta: metav1.TypeMeta{
|
||||
Kind: "Secret",
|
||||
APIVersion: "v1",
|
||||
},
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "some-really-long-server-url",
|
||||
Namespace: "namespace",
|
||||
Labels: map[string]string{
|
||||
"argocd.argoproj.io/secret-type": "cluster",
|
||||
"environment": "production",
|
||||
"org": "bar",
|
||||
},
|
||||
Annotations: map[string]string{
|
||||
"foo.argoproj.io": "production",
|
||||
},
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"config": []byte("{}"),
|
||||
"name": []byte("some-really-long-server-url"),
|
||||
"server": []byte("https://some-really-long-url-that-will-exceed-63-characters.com"),
|
||||
},
|
||||
Type: corev1.SecretType("Opaque"),
|
||||
},
|
||||
}
|
||||
runtimeClusters := []runtime.Object{}
|
||||
for _, clientCluster := range clusters {
|
||||
runtimeClusters = append(runtimeClusters, clientCluster)
|
||||
}
|
||||
appClientset := kubefake.NewSimpleClientset(runtimeClusters...)
|
||||
|
||||
fakeClient := fake.NewClientBuilder().WithObjects(clusters...).Build()
|
||||
@@ -341,9 +159,9 @@ func getMockClusterGenerator() Generator {
|
||||
}
|
||||
|
||||
func getMockGitGenerator() Generator {
|
||||
argoCDServiceMock := mocks.Repos{}
|
||||
argoCDServiceMock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything).Return([]string{"app1", "app2", "app_3", "p1/app4"}, nil)
|
||||
var gitGenerator = NewGitGenerator(&argoCDServiceMock)
|
||||
argoCDServiceMock := argoCDServiceMock{mock: &mock.Mock{}}
|
||||
argoCDServiceMock.mock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything).Return([]string{"app1", "app2", "app_3", "p1/app4"}, nil)
|
||||
var gitGenerator = NewGitGenerator(argoCDServiceMock)
|
||||
return gitGenerator
|
||||
}
|
||||
|
||||
@@ -358,8 +176,8 @@ func TestGetRelevantGenerators(t *testing.T) {
|
||||
testGenerators["Merge"] = NewMergeGenerator(testGenerators)
|
||||
testGenerators["List"] = NewListGenerator()
|
||||
|
||||
requestedGenerator := &argov1alpha1.ApplicationSetGenerator{
|
||||
List: &argov1alpha1.ListGenerator{
|
||||
requestedGenerator := &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
List: &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "cluster","url": "url","values":{"foo":"bar"}}`)}},
|
||||
}}
|
||||
|
||||
@@ -367,10 +185,10 @@ func TestGetRelevantGenerators(t *testing.T) {
|
||||
assert.Len(t, relevantGenerators, 1)
|
||||
assert.IsType(t, &ListGenerator{}, relevantGenerators[0])
|
||||
|
||||
requestedGenerator = &argov1alpha1.ApplicationSetGenerator{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{
|
||||
requestedGenerator = &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Clusters: &argoprojiov1alpha1.ClusterGenerator{
|
||||
Selector: metav1.LabelSelector{},
|
||||
Template: argov1alpha1.ApplicationSetTemplate{},
|
||||
Template: argoprojiov1alpha1.ApplicationSetTemplate{},
|
||||
Values: nil,
|
||||
},
|
||||
}
|
||||
@@ -379,14 +197,14 @@ func TestGetRelevantGenerators(t *testing.T) {
|
||||
assert.Len(t, relevantGenerators, 1)
|
||||
assert.IsType(t, &ClusterGenerator{}, relevantGenerators[0])
|
||||
|
||||
requestedGenerator = &argov1alpha1.ApplicationSetGenerator{
|
||||
Git: &argov1alpha1.GitGenerator{
|
||||
requestedGenerator = &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Git: &argoprojiov1alpha1.GitGenerator{
|
||||
RepoURL: "",
|
||||
Directories: nil,
|
||||
Files: nil,
|
||||
Revision: "",
|
||||
RequeueAfterSeconds: nil,
|
||||
Template: argov1alpha1.ApplicationSetTemplate{},
|
||||
Template: argoprojiov1alpha1.ApplicationSetTemplate{},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -396,8 +214,8 @@ func TestGetRelevantGenerators(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestInterpolateGenerator(t *testing.T) {
|
||||
requestedGenerator := &argov1alpha1.ApplicationSetGenerator{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{
|
||||
requestedGenerator := &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Clusters: &argoprojiov1alpha1.ClusterGenerator{
|
||||
Selector: metav1.LabelSelector{
|
||||
MatchLabels: map[string]string{
|
||||
"argocd.argoproj.io/secret-type": "cluster",
|
||||
@@ -414,7 +232,7 @@ func TestInterpolateGenerator(t *testing.T) {
|
||||
"path[1]": "p2",
|
||||
"path.basenameNormalized": "app3",
|
||||
}
|
||||
interpolatedGenerator, err := InterpolateGenerator(requestedGenerator, gitGeneratorParams, false, nil)
|
||||
interpolatedGenerator, err := InterpolateGenerator(requestedGenerator, gitGeneratorParams, false)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("requestedGenerator", requestedGenerator).Error("error interpolating Generator")
|
||||
return
|
||||
@@ -423,23 +241,23 @@ func TestInterpolateGenerator(t *testing.T) {
|
||||
assert.Equal(t, "p1", interpolatedGenerator.Clusters.Selector.MatchLabels["path-zero"])
|
||||
assert.Equal(t, "p1/p2/app3", interpolatedGenerator.Clusters.Selector.MatchLabels["path-full"])
|
||||
|
||||
fileNamePath := argov1alpha1.GitFileGeneratorItem{
|
||||
fileNamePath := argoprojiov1alpha1.GitFileGeneratorItem{
|
||||
Path: "{{name}}",
|
||||
}
|
||||
fileServerPath := argov1alpha1.GitFileGeneratorItem{
|
||||
fileServerPath := argoprojiov1alpha1.GitFileGeneratorItem{
|
||||
Path: "{{server}}",
|
||||
}
|
||||
|
||||
requestedGenerator = &argov1alpha1.ApplicationSetGenerator{
|
||||
Git: &argov1alpha1.GitGenerator{
|
||||
Files: append([]argov1alpha1.GitFileGeneratorItem{}, fileNamePath, fileServerPath),
|
||||
Template: argov1alpha1.ApplicationSetTemplate{},
|
||||
requestedGenerator = &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Git: &argoprojiov1alpha1.GitGenerator{
|
||||
Files: append([]argoprojiov1alpha1.GitFileGeneratorItem{}, fileNamePath, fileServerPath),
|
||||
Template: argoprojiov1alpha1.ApplicationSetTemplate{},
|
||||
},
|
||||
}
|
||||
clusterGeneratorParams := map[string]interface{}{
|
||||
"name": "production_01/west", "server": "https://production-01.example.com",
|
||||
}
|
||||
interpolatedGenerator, err = InterpolateGenerator(requestedGenerator, clusterGeneratorParams, false, nil)
|
||||
interpolatedGenerator, err = InterpolateGenerator(requestedGenerator, clusterGeneratorParams, true)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("requestedGenerator", requestedGenerator).Error("error interpolating Generator")
|
||||
return
|
||||
@@ -447,114 +265,3 @@ func TestInterpolateGenerator(t *testing.T) {
|
||||
assert.Equal(t, "production_01/west", interpolatedGenerator.Git.Files[0].Path)
|
||||
assert.Equal(t, "https://production-01.example.com", interpolatedGenerator.Git.Files[1].Path)
|
||||
}
|
||||
|
||||
func TestInterpolateGenerator_go(t *testing.T) {
|
||||
requestedGenerator := &argov1alpha1.ApplicationSetGenerator{
|
||||
Clusters: &argov1alpha1.ClusterGenerator{
|
||||
Selector: metav1.LabelSelector{
|
||||
MatchLabels: map[string]string{
|
||||
"argocd.argoproj.io/secret-type": "cluster",
|
||||
"path-basename": "{{base .path.path}}",
|
||||
"path-zero": "{{index .path.segments 0}}",
|
||||
"path-full": "{{.path.path}}",
|
||||
"kubernetes.io/environment": `{{default "foo" .my_label}}`,
|
||||
}},
|
||||
},
|
||||
}
|
||||
gitGeneratorParams := map[string]interface{}{
|
||||
"path": map[string]interface{}{
|
||||
"path": "p1/p2/app3",
|
||||
"segments": []string{"p1", "p2", "app3"},
|
||||
},
|
||||
}
|
||||
interpolatedGenerator, err := InterpolateGenerator(requestedGenerator, gitGeneratorParams, true, nil)
|
||||
require.NoError(t, err)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("requestedGenerator", requestedGenerator).Error("error interpolating Generator")
|
||||
return
|
||||
}
|
||||
assert.Equal(t, "app3", interpolatedGenerator.Clusters.Selector.MatchLabels["path-basename"])
|
||||
assert.Equal(t, "p1", interpolatedGenerator.Clusters.Selector.MatchLabels["path-zero"])
|
||||
assert.Equal(t, "p1/p2/app3", interpolatedGenerator.Clusters.Selector.MatchLabels["path-full"])
|
||||
|
||||
fileNamePath := argov1alpha1.GitFileGeneratorItem{
|
||||
Path: "{{.name}}",
|
||||
}
|
||||
fileServerPath := argov1alpha1.GitFileGeneratorItem{
|
||||
Path: "{{.server}}",
|
||||
}
|
||||
|
||||
requestedGenerator = &argov1alpha1.ApplicationSetGenerator{
|
||||
Git: &argov1alpha1.GitGenerator{
|
||||
Files: append([]argov1alpha1.GitFileGeneratorItem{}, fileNamePath, fileServerPath),
|
||||
Template: argov1alpha1.ApplicationSetTemplate{},
|
||||
},
|
||||
}
|
||||
clusterGeneratorParams := map[string]interface{}{
|
||||
"name": "production_01/west", "server": "https://production-01.example.com",
|
||||
}
|
||||
interpolatedGenerator, err = InterpolateGenerator(requestedGenerator, clusterGeneratorParams, true, nil)
|
||||
if err != nil {
|
||||
log.WithError(err).WithField("requestedGenerator", requestedGenerator).Error("error interpolating Generator")
|
||||
return
|
||||
}
|
||||
assert.Equal(t, "production_01/west", interpolatedGenerator.Git.Files[0].Path)
|
||||
assert.Equal(t, "https://production-01.example.com", interpolatedGenerator.Git.Files[1].Path)
|
||||
}
|
||||
|
||||
func TestInterpolateGeneratorError(t *testing.T) {
|
||||
type args struct {
|
||||
requestedGenerator *argov1alpha1.ApplicationSetGenerator
|
||||
params map[string]interface{}
|
||||
useGoTemplate bool
|
||||
goTemplateOptions []string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want argov1alpha1.ApplicationSetGenerator
|
||||
expectedErrStr string
|
||||
}{
|
||||
{name: "Empty Gen", args: args{
|
||||
requestedGenerator: nil,
|
||||
params: nil,
|
||||
useGoTemplate: false,
|
||||
goTemplateOptions: nil,
|
||||
}, want: argov1alpha1.ApplicationSetGenerator{}, expectedErrStr: "generator is empty"},
|
||||
{name: "No Params", args: args{
|
||||
requestedGenerator: &argov1alpha1.ApplicationSetGenerator{},
|
||||
params: map[string]interface{}{},
|
||||
useGoTemplate: false,
|
||||
goTemplateOptions: nil,
|
||||
}, want: argov1alpha1.ApplicationSetGenerator{}, expectedErrStr: ""},
|
||||
{name: "Error templating", args: args{
|
||||
requestedGenerator: &argov1alpha1.ApplicationSetGenerator{Git: &argov1alpha1.GitGenerator{
|
||||
RepoURL: "foo",
|
||||
Files: []argov1alpha1.GitFileGeneratorItem{{Path: "bar/"}},
|
||||
Revision: "main",
|
||||
Values: map[string]string{
|
||||
"git_test": "{{ toPrettyJson . }}",
|
||||
"selection": "{{ default .override .test }}",
|
||||
"resolved": "{{ index .rmap (default .override .test) }}",
|
||||
},
|
||||
}},
|
||||
params: map[string]interface{}{
|
||||
"name": "in-cluster",
|
||||
"override": "foo",
|
||||
},
|
||||
useGoTemplate: true,
|
||||
goTemplateOptions: []string{},
|
||||
}, want: argov1alpha1.ApplicationSetGenerator{}, expectedErrStr: "failed to replace parameters in generator: failed to execute go template {{ index .rmap (default .override .test) }}: template: :1:3: executing \"\" at <index .rmap (default .override .test)>: error calling index: index of untyped nil"},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := InterpolateGenerator(tt.args.requestedGenerator, tt.args.params, tt.args.useGoTemplate, tt.args.goTemplateOptions)
|
||||
if tt.expectedErrStr != "" {
|
||||
assert.EqualError(t, err, tt.expectedErrStr)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
assert.Equalf(t, tt.want, got, "InterpolateGenerator(%v, %v, %v, %v)", tt.args.requestedGenerator, tt.args.params, tt.args.useGoTemplate, tt.args.goTemplateOptions)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,56 +56,50 @@ func (g *GitGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.Applic
		return nil, EmptyAppSetGeneratorError
	}

	noRevisionCache := appSet.RefreshRequired()

	var err error
	var res []map[string]interface{}
	if len(appSetGenerator.Git.Directories) != 0 {
		res, err = g.generateParamsForGitDirectories(appSetGenerator, noRevisionCache, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
	} else if len(appSetGenerator.Git.Files) != 0 {
		res, err = g.generateParamsForGitFiles(appSetGenerator, noRevisionCache, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
	if appSetGenerator.Git.Directories != nil {
		res, err = g.generateParamsForGitDirectories(appSetGenerator, appSet.Spec.GoTemplate)
	} else if appSetGenerator.Git.Files != nil {
		res, err = g.generateParamsForGitFiles(appSetGenerator, appSet.Spec.GoTemplate)
	} else {
		return nil, EmptyAppSetGeneratorError
	}
	if err != nil {
		return nil, fmt.Errorf("error generating params from git: %w", err)
		return nil, err
	}

	return res, nil
}

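The two variants of the dispatch above differ in how they decide whether the directories or files generator is configured: one checks slice length, the other checks for nil. The distinction only matters for a field that is present but empty, as this small standalone sketch (not Argo CD code) illustrates.

```go
package main

import "fmt"

func main() {
	var unset []string     // field left out entirely
	declared := []string{} // field present but with zero entries

	fmt.Println(unset == nil, len(unset) != 0)       // true false
	fmt.Println(declared == nil, len(declared) != 0) // false false

	// With a `Directories != nil` check, `declared` would still select the
	// directories branch even though it is empty; with `len(Directories) != 0`
	// it would fall through to EmptyAppSetGeneratorError instead.
}
```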
func (g *GitGenerator) generateParamsForGitDirectories(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, noRevisionCache bool, useGoTemplate bool, goTemplateOptions []string) ([]map[string]interface{}, error) {
func (g *GitGenerator) generateParamsForGitDirectories(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, useGoTemplate bool) ([]map[string]interface{}, error) {

	// Directories, not files
	allPaths, err := g.repos.GetDirectories(context.TODO(), appSetGenerator.Git.RepoURL, appSetGenerator.Git.Revision, noRevisionCache)
	allPaths, err := g.repos.GetDirectories(context.TODO(), appSetGenerator.Git.RepoURL, appSetGenerator.Git.Revision)
	if err != nil {
		return nil, fmt.Errorf("error getting directories from repo: %w", err)
		return nil, err
	}

	log.WithFields(log.Fields{
		"allPaths":        allPaths,
		"total":           len(allPaths),
		"repoURL":         appSetGenerator.Git.RepoURL,
		"revision":        appSetGenerator.Git.Revision,
		"pathParamPrefix": appSetGenerator.Git.PathParamPrefix,
		"allPaths": allPaths,
		"total":    len(allPaths),
		"repoURL":  appSetGenerator.Git.RepoURL,
		"revision": appSetGenerator.Git.Revision,
	}).Info("applications result from the repo service")

	requestedApps := g.filterApps(appSetGenerator.Git.Directories, allPaths)

	res, err := g.generateParamsFromApps(requestedApps, appSetGenerator, useGoTemplate, goTemplateOptions)
	if err != nil {
		return nil, fmt.Errorf("error generating params from apps: %w", err)
	}
	res := g.generateParamsFromApps(requestedApps, appSetGenerator, useGoTemplate)

	return res, nil
}

func (g *GitGenerator) generateParamsForGitFiles(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, noRevisionCache bool, useGoTemplate bool, goTemplateOptions []string) ([]map[string]interface{}, error) {
func (g *GitGenerator) generateParamsForGitFiles(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, useGoTemplate bool) ([]map[string]interface{}, error) {

	// Get all files that match the requested path string, removing duplicates
	allFiles := make(map[string][]byte)
	for _, requestedPath := range appSetGenerator.Git.Files {
		files, err := g.repos.GetFiles(context.TODO(), appSetGenerator.Git.RepoURL, appSetGenerator.Git.Revision, requestedPath.Path, noRevisionCache)
		files, err := g.repos.GetFiles(context.TODO(), appSetGenerator.Git.RepoURL, appSetGenerator.Git.Revision, requestedPath.Path)
		if err != nil {
			return nil, err
		}
@@ -127,17 +121,19 @@ func (g *GitGenerator) generateParamsForGitFiles(appSetGenerator *argoprojiov1al
	for _, path := range allPaths {

		// A JSON / YAML file path can contain multiple sets of parameters (ie it is an array)
		paramsArray, err := g.generateParamsFromGitFile(path, allFiles[path], appSetGenerator.Git.Values, useGoTemplate, goTemplateOptions, appSetGenerator.Git.PathParamPrefix)
		paramsArray, err := g.generateParamsFromGitFile(path, allFiles[path], useGoTemplate)
		if err != nil {
			return nil, fmt.Errorf("unable to process file '%s': %v", path, err)
		}

		res = append(res, paramsArray...)
		for index := range paramsArray {
			res = append(res, paramsArray[index])
		}
	}
	return res, nil
}

func (g *GitGenerator) generateParamsFromGitFile(filePath string, fileContent []byte, values map[string]string, useGoTemplate bool, goTemplateOptions []string, pathParamPrefix string) ([]map[string]interface{}, error) {
func (g *GitGenerator) generateParamsFromGitFile(filePath string, fileContent []byte, useGoTemplate bool) ([]map[string]interface{}, error) {
	objectsFound := []map[string]interface{}{}

	// First, we attempt to parse as an array
@@ -171,44 +167,32 @@ func (g *GitGenerator) generateParamsFromGitFile(filePath string, fileContent []
			paramPath["basenameNormalized"] = utils.SanitizeName(path.Base(paramPath["path"].(string)))
			paramPath["filenameNormalized"] = utils.SanitizeName(path.Base(paramPath["filename"].(string)))
			paramPath["segments"] = strings.Split(paramPath["path"].(string), "/")
			if pathParamPrefix != "" {
				params[pathParamPrefix] = map[string]interface{}{"path": paramPath}
			} else {
				params["path"] = paramPath
			}
			params["path"] = paramPath
		} else {
			flat, err := flatten.Flatten(objectFound, "", flatten.DotStyle)
			if err != nil {
				return nil, fmt.Errorf("error flattening object: %w", err)
				return nil, err
			}
			for k, v := range flat {
				params[k] = fmt.Sprintf("%v", v)
			}
			pathParamName := "path"
			if pathParamPrefix != "" {
				pathParamName = pathParamPrefix + "." + pathParamName
			}
			params[pathParamName] = path.Dir(filePath)
			params[pathParamName+".basename"] = path.Base(params[pathParamName].(string))
			params[pathParamName+".filename"] = path.Base(filePath)
			params[pathParamName+".basenameNormalized"] = utils.SanitizeName(path.Base(params[pathParamName].(string)))
			params[pathParamName+".filenameNormalized"] = utils.SanitizeName(path.Base(params[pathParamName+".filename"].(string)))
			for k, v := range strings.Split(params[pathParamName].(string), "/") {
			params["path"] = path.Dir(filePath)
			params["path.basename"] = path.Base(params["path"].(string))
			params["path.filename"] = path.Base(filePath)
			params["path.basenameNormalized"] = utils.SanitizeName(path.Base(params["path"].(string)))
			params["path.filenameNormalized"] = utils.SanitizeName(path.Base(params["path.filename"].(string)))
			for k, v := range strings.Split(params["path"].(string), "/") {
				if len(v) > 0 {
					params[pathParamName+"["+strconv.Itoa(k)+"]"] = v
					params["path["+strconv.Itoa(k)+"]"] = v
				}
			}
		}

		err := appendTemplatedValues(values, params, useGoTemplate, goTemplateOptions)
		if err != nil {
			return nil, fmt.Errorf("failed to append templated values: %w", err)
		}

		res = append(res, params)
	}

	return res, nil

}

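The non-Go-template branch above relies on `flatten.Flatten` with `flatten.DotStyle` to turn a nested object into dotted parameter keys. A minimal standalone sketch of that call follows; the map contents are illustrative, borrowed loosely from the config.json fixtures in the tests further down.

```go
package main

import (
	"fmt"

	"github.com/jeremywohl/flatten"
)

func main() {
	// A nested object such as one parsed from a config.json in the repo.
	nested := map[string]interface{}{
		"cluster": map[string]interface{}{
			"owner": "john.doe@example.com",
			"name":  "production",
		},
		"key3": 123,
	}

	// DotStyle produces keys like "cluster.owner", matching the flat
	// parameter names asserted in the tests below.
	flat, err := flatten.Flatten(nested, "", flatten.DotStyle)
	if err != nil {
		panic(err)
	}
	fmt.Println(flat["cluster.owner"]) // john.doe@example.com
	fmt.Println(flat["key3"])          // 123
}
```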
func (g *GitGenerator) filterApps(Directories []argoprojiov1alpha1.GitDirectoryGeneratorItem, allPaths []string) []string {
|
||||
@@ -239,7 +223,9 @@ func (g *GitGenerator) filterApps(Directories []argoprojiov1alpha1.GitDirectoryG
|
||||
return res
|
||||
}
|
||||
|
||||
func (g *GitGenerator) generateParamsFromApps(requestedApps []string, appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, useGoTemplate bool, goTemplateOptions []string) ([]map[string]interface{}, error) {
|
||||
func (g *GitGenerator) generateParamsFromApps(requestedApps []string, _ *argoprojiov1alpha1.ApplicationSetGenerator, useGoTemplate bool) []map[string]interface{} {
|
||||
// TODO: At some point, the applicationSetGenerator param should be used
|
||||
|
||||
res := make([]map[string]interface{}, len(requestedApps))
|
||||
for i, a := range requestedApps {
|
||||
|
||||
@@ -251,33 +237,20 @@ func (g *GitGenerator) generateParamsFromApps(requestedApps []string, appSetGene
|
||||
paramPath["basename"] = path.Base(a)
|
||||
paramPath["basenameNormalized"] = utils.SanitizeName(path.Base(a))
|
||||
paramPath["segments"] = strings.Split(paramPath["path"].(string), "/")
|
||||
if appSetGenerator.Git.PathParamPrefix != "" {
|
||||
params[appSetGenerator.Git.PathParamPrefix] = map[string]interface{}{"path": paramPath}
|
||||
} else {
|
||||
params["path"] = paramPath
|
||||
}
|
||||
params["path"] = paramPath
|
||||
} else {
|
||||
pathParamName := "path"
|
||||
if appSetGenerator.Git.PathParamPrefix != "" {
|
||||
pathParamName = appSetGenerator.Git.PathParamPrefix + "." + pathParamName
|
||||
}
|
||||
params[pathParamName] = a
|
||||
params[pathParamName+".basename"] = path.Base(a)
|
||||
params[pathParamName+".basenameNormalized"] = utils.SanitizeName(path.Base(a))
|
||||
for k, v := range strings.Split(params[pathParamName].(string), "/") {
|
||||
params["path"] = a
|
||||
params["path.basename"] = path.Base(a)
|
||||
params["path.basenameNormalized"] = utils.SanitizeName(path.Base(a))
|
||||
for k, v := range strings.Split(params["path"].(string), "/") {
|
||||
if len(v) > 0 {
|
||||
params[pathParamName+"["+strconv.Itoa(k)+"]"] = v
|
||||
params["path["+strconv.Itoa(k)+"]"] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
err := appendTemplatedValues(appSetGenerator.Git.Values, params, useGoTemplate, goTemplateOptions)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to append templated values: %w", err)
|
||||
}
|
||||
|
||||
res[i] = params
|
||||
}
|
||||
|
||||
return res, nil
|
||||
return res
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package generators
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
@@ -8,17 +9,49 @@ import (
|
||||
"github.com/stretchr/testify/mock"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/mocks"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
// type clientSet struct {
|
||||
// RepoServerServiceClient apiclient.RepoServerServiceClient
|
||||
// }
|
||||
|
||||
// func (c *clientSet) NewRepoServerClient() (io.Closer, apiclient.RepoServerServiceClient, error) {
|
||||
// return io.NewCloser(func() error { return nil }), c.RepoServerServiceClient, nil
|
||||
// }
|
||||
|
||||
type argoCDServiceMock struct {
|
||||
mock *mock.Mock
|
||||
}
|
||||
|
||||
func (a argoCDServiceMock) GetApps(ctx context.Context, repoURL string, revision string) ([]string, error) {
|
||||
args := a.mock.Called(ctx, repoURL, revision)
|
||||
|
||||
return args.Get(0).([]string), args.Error(1)
|
||||
}
|
||||
|
||||
func (a argoCDServiceMock) GetFiles(ctx context.Context, repoURL string, revision string, pattern string) (map[string][]byte, error) {
|
||||
args := a.mock.Called(ctx, repoURL, revision, pattern)
|
||||
|
||||
return args.Get(0).(map[string][]byte), args.Error(1)
|
||||
}
|
||||
|
||||
func (a argoCDServiceMock) GetFileContent(ctx context.Context, repoURL string, revision string, path string) ([]byte, error) {
|
||||
args := a.mock.Called(ctx, repoURL, revision, path)
|
||||
|
||||
return args.Get(0).([]byte), args.Error(1)
|
||||
}
|
||||
|
||||
func (a argoCDServiceMock) GetDirectories(ctx context.Context, repoURL string, revision string) ([]string, error) {
|
||||
args := a.mock.Called(ctx, repoURL, revision)
|
||||
return args.Get(0).([]string), args.Error(1)
|
||||
}
|
||||
|
||||
func Test_generateParamsFromGitFile(t *testing.T) {
|
||||
values := map[string]string{}
|
||||
params, err := (*GitGenerator)(nil).generateParamsFromGitFile("path/dir/file_name.yaml", []byte(`
|
||||
foo:
|
||||
bar: baz
|
||||
`), values, false, nil, "")
|
||||
`), false)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@@ -36,35 +69,11 @@ foo:
|
||||
}, params)
|
||||
}
|
||||
|
||||
func Test_generatePrefixedParamsFromGitFile(t *testing.T) {
|
||||
values := map[string]string{}
|
||||
params, err := (*GitGenerator)(nil).generateParamsFromGitFile("path/dir/file_name.yaml", []byte(`
|
||||
foo:
|
||||
bar: baz
|
||||
`), values, false, nil, "myRepo")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
assert.Equal(t, []map[string]interface{}{
|
||||
{
|
||||
"foo.bar": "baz",
|
||||
"myRepo.path": "path/dir",
|
||||
"myRepo.path.basename": "dir",
|
||||
"myRepo.path.filename": "file_name.yaml",
|
||||
"myRepo.path.basenameNormalized": "dir",
|
||||
"myRepo.path.filenameNormalized": "file-name.yaml",
|
||||
"myRepo.path[0]": "path",
|
||||
"myRepo.path[1]": "dir",
|
||||
},
|
||||
}, params)
|
||||
}
|
||||
|
||||
func Test_generateParamsFromGitFileGoTemplate(t *testing.T) {
|
||||
values := map[string]string{}
|
||||
params, err := (*GitGenerator)(nil).generateParamsFromGitFile("path/dir/file_name.yaml", []byte(`
|
||||
foo:
|
||||
bar: baz
|
||||
`), values, true, nil, "")
|
||||
`), true)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@@ -88,48 +97,15 @@ foo:
|
||||
}, params)
|
||||
}
|
||||
|
||||
func Test_generatePrefixedParamsFromGitFileGoTemplate(t *testing.T) {
|
||||
values := map[string]string{}
|
||||
params, err := (*GitGenerator)(nil).generateParamsFromGitFile("path/dir/file_name.yaml", []byte(`
|
||||
foo:
|
||||
bar: baz
|
||||
`), values, true, nil, "myRepo")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
assert.Equal(t, []map[string]interface{}{
|
||||
{
|
||||
"foo": map[string]interface{}{
|
||||
"bar": "baz",
|
||||
},
|
||||
"myRepo": map[string]interface{}{
|
||||
"path": map[string]interface{}{
|
||||
"path": "path/dir",
|
||||
"basename": "dir",
|
||||
"filename": "file_name.yaml",
|
||||
"basenameNormalized": "dir",
|
||||
"filenameNormalized": "file-name.yaml",
|
||||
"segments": []string{
|
||||
"path",
|
||||
"dir",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}, params)
|
||||
}
|
||||
|
||||
func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
directories []argoprojiov1alpha1.GitDirectoryGeneratorItem
|
||||
pathParamPrefix string
|
||||
repoApps []string
|
||||
repoError error
|
||||
values map[string]string
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
name string
|
||||
directories []argoprojiov1alpha1.GitDirectoryGeneratorItem
|
||||
repoApps []string
|
||||
repoError error
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "happy flow - created apps",
|
||||
@@ -148,24 +124,6 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "It prefixes path parameters with PathParamPrefix",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "*"}},
|
||||
pathParamPrefix: "myRepo",
|
||||
repoApps: []string{
|
||||
"app1",
|
||||
"app2",
|
||||
"app_3",
|
||||
"p1/app4",
|
||||
},
|
||||
repoError: nil,
|
||||
expected: []map[string]interface{}{
|
||||
{"myRepo.path": "app1", "myRepo.path.basename": "app1", "myRepo.path.basenameNormalized": "app1", "myRepo.path[0]": "app1"},
|
||||
{"myRepo.path": "app2", "myRepo.path.basename": "app2", "myRepo.path.basenameNormalized": "app2", "myRepo.path[0]": "app2"},
|
||||
{"myRepo.path": "app_3", "myRepo.path.basename": "app_3", "myRepo.path.basenameNormalized": "app-3", "myRepo.path[0]": "app_3"},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "It filters application according to the paths",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "p1/*"}, {Path: "p1/*/*"}},
|
||||
@@ -218,25 +176,6 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Value variable interpolation",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "*"}, {Path: "*/*"}},
|
||||
repoApps: []string{
|
||||
"app1",
|
||||
"p1/app2",
|
||||
},
|
||||
repoError: nil,
|
||||
values: map[string]string{
|
||||
"foo": "bar",
|
||||
"aaa": "{{ path[0] }}",
|
||||
"no-op": "{{ this-does-not-exist }}",
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{"values.foo": "bar", "values.no-op": "{{ this-does-not-exist }}", "values.aaa": "app1", "path": "app1", "path.basename": "app1", "path[0]": "app1", "path.basenameNormalized": "app1"},
|
||||
{"values.foo": "bar", "values.no-op": "{{ this-does-not-exist }}", "values.aaa": "p1", "path": "p1/app2", "path.basename": "app2", "path[0]": "p1", "path[1]": "app2", "path.basenameNormalized": "app2"},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "handles empty response from repo server",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "*"}},
|
||||
@@ -251,7 +190,7 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
repoApps: []string{},
|
||||
repoError: fmt.Errorf("error"),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: error getting directories from repo: error"),
|
||||
expectedError: fmt.Errorf("error"),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -261,11 +200,11 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
argoCDServiceMock := mocks.Repos{}
|
||||
argoCDServiceMock := argoCDServiceMock{mock: &mock.Mock{}}
|
||||
|
||||
argoCDServiceMock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testCaseCopy.repoApps, testCaseCopy.repoError)
|
||||
argoCDServiceMock.mock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything).Return(testCaseCopy.repoApps, testCaseCopy.repoError)
|
||||
|
||||
var gitGenerator = NewGitGenerator(&argoCDServiceMock)
|
||||
var gitGenerator = NewGitGenerator(argoCDServiceMock)
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
@@ -273,11 +212,9 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetGenerator{{
|
||||
Git: &argoprojiov1alpha1.GitGenerator{
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Directories: testCaseCopy.directories,
|
||||
PathParamPrefix: testCaseCopy.pathParamPrefix,
|
||||
Values: testCaseCopy.values,
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Directories: testCaseCopy.directories,
|
||||
},
|
||||
}},
|
||||
},
|
||||
@@ -292,7 +229,7 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
assert.Equal(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
argoCDServiceMock.AssertExpectations(t)
|
||||
argoCDServiceMock.mock.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -300,13 +237,12 @@ func TestGitGenerateParamsFromDirectories(t *testing.T) {
|
||||
func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
directories []argoprojiov1alpha1.GitDirectoryGeneratorItem
|
||||
pathParamPrefix string
|
||||
repoApps []string
|
||||
repoError error
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
name string
|
||||
directories []argoprojiov1alpha1.GitDirectoryGeneratorItem
|
||||
repoApps []string
|
||||
repoError error
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "happy flow - created apps",
|
||||
@@ -352,57 +288,6 @@ func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "It prefixes path parameters with PathParamPrefix",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "*"}},
|
||||
pathParamPrefix: "myRepo",
|
||||
repoApps: []string{
|
||||
"app1",
|
||||
"app2",
|
||||
"app_3",
|
||||
"p1/app4",
|
||||
},
|
||||
repoError: nil,
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"myRepo": map[string]interface{}{
|
||||
"path": map[string]interface{}{
|
||||
"path": "app1",
|
||||
"basename": "app1",
|
||||
"basenameNormalized": "app1",
|
||||
"segments": []string{
|
||||
"app1",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
"myRepo": map[string]interface{}{
|
||||
"path": map[string]interface{}{
|
||||
"path": "app2",
|
||||
"basename": "app2",
|
||||
"basenameNormalized": "app2",
|
||||
"segments": []string{
|
||||
"app2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
"myRepo": map[string]interface{}{
|
||||
"path": map[string]interface{}{
|
||||
"path": "app_3",
|
||||
"basename": "app_3",
|
||||
"basenameNormalized": "app-3",
|
||||
"segments": []string{
|
||||
"app_3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "It filters application according to the paths",
|
||||
directories: []argoprojiov1alpha1.GitDirectoryGeneratorItem{{Path: "p1/*"}, {Path: "p1/*/*"}},
|
||||
@@ -547,7 +432,7 @@ func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
repoApps: []string{},
|
||||
repoError: fmt.Errorf("error"),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: error getting directories from repo: error"),
|
||||
expectedError: fmt.Errorf("error"),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -557,11 +442,11 @@ func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
argoCDServiceMock := mocks.Repos{}
|
||||
argoCDServiceMock := argoCDServiceMock{mock: &mock.Mock{}}
|
||||
|
||||
argoCDServiceMock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testCaseCopy.repoApps, testCaseCopy.repoError)
|
||||
argoCDServiceMock.mock.On("GetDirectories", mock.Anything, mock.Anything, mock.Anything).Return(testCaseCopy.repoApps, testCaseCopy.repoError)
|
||||
|
||||
var gitGenerator = NewGitGenerator(&argoCDServiceMock)
|
||||
var gitGenerator = NewGitGenerator(argoCDServiceMock)
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
@@ -570,10 +455,9 @@ func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
GoTemplate: true,
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetGenerator{{
|
||||
Git: &argoprojiov1alpha1.GitGenerator{
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Directories: testCaseCopy.directories,
|
||||
PathParamPrefix: testCaseCopy.pathParamPrefix,
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Directories: testCaseCopy.directories,
|
||||
},
|
||||
}},
|
||||
},
|
||||
@@ -588,7 +472,7 @@ func TestGitGenerateParamsFromDirectoriesGoTemplate(t *testing.T) {
|
||||
assert.Equal(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
argoCDServiceMock.AssertExpectations(t)
|
||||
argoCDServiceMock.mock.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -604,7 +488,6 @@ func TestGitGenerateParamsFromFiles(t *testing.T) {
|
||||
repoFileContents map[string][]byte
|
||||
// if repoPathsError is non-nil, the call to GetPaths(...) will return this error value
|
||||
repoPathsError error
|
||||
values map[string]string
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
}{
|
||||
@@ -668,81 +551,13 @@ func TestGitGenerateParamsFromFiles(t *testing.T) {
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Value variable interpolation",
|
||||
files: []argoprojiov1alpha1.GitFileGeneratorItem{{Path: "**/config.json"}},
|
||||
repoFileContents: map[string][]byte{
|
||||
"cluster-config/production/config.json": []byte(`{
|
||||
"cluster": {
|
||||
"owner": "john.doe@example.com",
|
||||
"name": "production",
|
||||
"address": "https://kubernetes.default.svc"
|
||||
},
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}`),
|
||||
"cluster-config/staging/config.json": []byte(`{
|
||||
"cluster": {
|
||||
"owner": "foo.bar@example.com",
|
||||
"name": "staging",
|
||||
"address": "https://kubernetes.default.svc"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
repoPathsError: nil,
|
||||
values: map[string]string{
|
||||
"aaa": "{{ cluster.owner }}",
|
||||
"no-op": "{{ this-does-not-exist }}",
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"cluster.owner": "john.doe@example.com",
|
||||
"cluster.name": "production",
|
||||
"cluster.address": "https://kubernetes.default.svc",
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"path": "cluster-config/production",
|
||||
"path.basename": "production",
|
||||
"path[0]": "cluster-config",
|
||||
"path[1]": "production",
|
||||
"path.basenameNormalized": "production",
|
||||
"path.filename": "config.json",
|
||||
"path.filenameNormalized": "config.json",
|
||||
"values.aaa": "john.doe@example.com",
|
||||
"values.no-op": "{{ this-does-not-exist }}",
|
||||
},
|
||||
{
|
||||
"cluster.owner": "foo.bar@example.com",
|
||||
"cluster.name": "staging",
|
||||
"cluster.address": "https://kubernetes.default.svc",
|
||||
"path": "cluster-config/staging",
|
||||
"path.basename": "staging",
|
||||
"path[0]": "cluster-config",
|
||||
"path[1]": "staging",
|
||||
"path.basenameNormalized": "staging",
|
||||
"path.filename": "config.json",
|
||||
"path.filenameNormalized": "config.json",
|
||||
"values.aaa": "foo.bar@example.com",
|
||||
"values.no-op": "{{ this-does-not-exist }}",
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "handles error during getting repo paths",
|
||||
files: []argoprojiov1alpha1.GitFileGeneratorItem{{Path: "**/config.json"}},
|
||||
repoFileContents: map[string][]byte{},
|
||||
repoPathsError: fmt.Errorf("paths error"),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: paths error"),
|
||||
expectedError: fmt.Errorf("paths error"),
|
||||
},
|
||||
{
|
||||
name: "test invalid JSON file returns error",
|
||||
@@ -752,7 +567,7 @@ func TestGitGenerateParamsFromFiles(t *testing.T) {
|
||||
},
|
||||
repoPathsError: nil,
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: unable to process file 'cluster-config/production/config.json': unable to parse file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type map[string]interface {}"),
|
||||
expectedError: fmt.Errorf("unable to process file 'cluster-config/production/config.json': unable to parse file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type map[string]interface {}"),
|
||||
},
|
||||
{
|
||||
name: "test JSON array",
|
||||
@@ -917,11 +732,11 @@ cluster:
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
argoCDServiceMock := mocks.Repos{}
|
||||
argoCDServiceMock.On("GetFiles", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).
|
||||
argoCDServiceMock := argoCDServiceMock{mock: &mock.Mock{}}
|
||||
argoCDServiceMock.mock.On("GetFiles", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
|
||||
Return(testCaseCopy.repoFileContents, testCaseCopy.repoPathsError)
|
||||
|
||||
var gitGenerator = NewGitGenerator(&argoCDServiceMock)
|
||||
var gitGenerator = NewGitGenerator(argoCDServiceMock)
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
@@ -932,7 +747,6 @@ cluster:
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Files: testCaseCopy.files,
|
||||
Values: testCaseCopy.values,
|
||||
},
|
||||
}},
|
||||
},
|
||||
@@ -948,7 +762,7 @@ cluster:
|
||||
assert.ElementsMatch(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
argoCDServiceMock.AssertExpectations(t)
|
||||
argoCDServiceMock.mock.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1048,7 +862,7 @@ func TestGitGenerateParamsFromFilesGoTemplate(t *testing.T) {
|
||||
repoFileContents: map[string][]byte{},
|
||||
repoPathsError: fmt.Errorf("paths error"),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: paths error"),
|
||||
expectedError: fmt.Errorf("paths error"),
|
||||
},
|
||||
{
|
||||
name: "test invalid JSON file returns error",
|
||||
@@ -1058,7 +872,7 @@ func TestGitGenerateParamsFromFilesGoTemplate(t *testing.T) {
|
||||
},
|
||||
repoPathsError: nil,
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error generating params from git: unable to process file 'cluster-config/production/config.json': unable to parse file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type map[string]interface {}"),
|
||||
expectedError: fmt.Errorf("unable to process file 'cluster-config/production/config.json': unable to parse file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type map[string]interface {}"),
|
||||
},
|
||||
{
|
||||
name: "test JSON array",
|
||||
@@ -1267,11 +1081,11 @@ cluster:
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
argoCDServiceMock := mocks.Repos{}
|
||||
argoCDServiceMock.On("GetFiles", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).
|
||||
argoCDServiceMock := argoCDServiceMock{mock: &mock.Mock{}}
|
||||
argoCDServiceMock.mock.On("GetFiles", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
|
||||
Return(testCaseCopy.repoFileContents, testCaseCopy.repoPathsError)
|
||||
|
||||
var gitGenerator = NewGitGenerator(&argoCDServiceMock)
|
||||
var gitGenerator = NewGitGenerator(argoCDServiceMock)
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
@@ -1298,7 +1112,7 @@ cluster:
|
||||
assert.ElementsMatch(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
argoCDServiceMock.AssertExpectations(t)
|
||||
argoCDServiceMock.mock.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,8 +5,6 @@ import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"sigs.k8s.io/yaml"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
@@ -75,16 +73,5 @@ func (g *ListGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.Appli
|
||||
}
|
||||
}
|
||||
|
||||
// Append elements from ElementsYaml to the response
|
||||
if len(appSetGenerator.List.ElementsYaml) > 0 {
|
||||
|
||||
var yamlElements []map[string]interface{}
|
||||
err := yaml.Unmarshal([]byte(appSetGenerator.List.ElementsYaml), &yamlElements)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error unmarshling decoded ElementsYaml %v", err)
|
||||
}
|
||||
res = append(res, yamlElements...)
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
@@ -8,8 +8,6 @@ import (
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/utils"
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var _ Generator = (*MatrixGenerator)(nil)
|
||||
@@ -50,7 +48,7 @@ func (m *MatrixGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.App

	g0, err := m.getParams(appSetGenerator.Matrix.Generators[0], appSet, nil)
	if err != nil {
		return nil, fmt.Errorf("error failed to get params for first generator in matrix generator: %w", err)
		return nil, err
	}
	for _, a := range g0 {
		g1, err := m.getParams(appSetGenerator.Matrix.Generators[1], appSet, a)
@@ -61,11 +59,11 @@ func (m *MatrixGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.App

		if appSet.Spec.GoTemplate {
			tmp := map[string]interface{}{}
			if err := mergo.Merge(&tmp, b, mergo.WithOverride); err != nil {
				return nil, fmt.Errorf("failed to merge params from the second generator in the matrix generator with temp map: %w", err)
			if err := mergo.Merge(&tmp, a); err != nil {
				return nil, fmt.Errorf("failed to merge params from the first generator in the matrix generator with temp map: %w", err)
			}
			if err := mergo.Merge(&tmp, a, mergo.WithOverride); err != nil {
				return nil, fmt.Errorf("failed to merge params from the second generator in the matrix generator with the first: %w", err)
			if err := mergo.Merge(&tmp, b); err != nil {
				return nil, fmt.Errorf("failed to merge params from the first generator in the matrix generator with the second: %w", err)
			}
			res = append(res, tmp)
		} else {
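The two variants in the hunk above differ in merge order and in whether `mergo.WithOverride` is passed. That option controls whether keys already present in the destination map are overwritten by a later merge, which in turn decides whose parameters win when both nested generators emit the same key. A small standalone sketch (not Argo CD code; the keys and import path `github.com/imdario/mergo` are assumptions for illustration):

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

func main() {
	a := map[string]interface{}{"cluster": "Cluster", "env": "prod"}
	b := map[string]interface{}{"env": "staging", "url": "Url"}

	// Without WithOverride, keys already present in dst keep their value.
	first := map[string]interface{}{}
	_ = mergo.Merge(&first, b) // env=staging, url=Url
	_ = mergo.Merge(&first, a) // cluster added; env stays "staging"
	fmt.Println(first["env"])  // staging

	// With WithOverride, the most recently merged source wins on conflicts.
	second := map[string]interface{}{}
	_ = mergo.Merge(&second, b, mergo.WithOverride)
	_ = mergo.Merge(&second, a, mergo.WithOverride) // env overridden to "prod"
	fmt.Println(second["env"])                      // prod
}
```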
@@ -82,24 +80,27 @@ func (m *MatrixGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.App
|
||||
}
|
||||
|
||||
func (m *MatrixGenerator) getParams(appSetBaseGenerator argoprojiov1alpha1.ApplicationSetNestedGenerator, appSet *argoprojiov1alpha1.ApplicationSet, params map[string]interface{}) ([]map[string]interface{}, error) {
|
||||
matrixGen, err := getMatrixGenerator(appSetBaseGenerator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if matrixGen != nil && !appSet.Spec.ApplyNestedSelectors {
|
||||
foundSelector := dropDisabledNestedSelectors(matrixGen.Generators)
|
||||
if foundSelector {
|
||||
log.Warnf("AppSet '%v' defines selector on nested matrix generator's generator without enabling them via 'spec.applyNestedSelectors', ignoring nested selectors", appSet.Name)
|
||||
var matrix *argoprojiov1alpha1.MatrixGenerator
|
||||
if appSetBaseGenerator.Matrix != nil {
|
||||
// Since nested matrix generator is represented as a JSON object in the CRD, we unmarshall it back to a Go struct here.
|
||||
nestedMatrix, err := argoprojiov1alpha1.ToNestedMatrixGenerator(appSetBaseGenerator.Matrix)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to unmarshall nested matrix generator: %v", err)
|
||||
}
|
||||
if nestedMatrix != nil {
|
||||
matrix = nestedMatrix.ToMatrixGenerator()
|
||||
}
|
||||
}
|
||||
mergeGen, err := getMergeGenerator(appSetBaseGenerator)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error retrieving merge generator: %w", err)
|
||||
}
|
||||
if mergeGen != nil && !appSet.Spec.ApplyNestedSelectors {
|
||||
foundSelector := dropDisabledNestedSelectors(mergeGen.Generators)
|
||||
if foundSelector {
|
||||
log.Warnf("AppSet '%v' defines selector on nested merge generator's generator without enabling them via 'spec.applyNestedSelectors', ignoring nested selectors", appSet.Name)
|
||||
|
||||
var mergeGenerator *argoprojiov1alpha1.MergeGenerator
|
||||
if appSetBaseGenerator.Merge != nil {
|
||||
// Since nested merge generator is represented as a JSON object in the CRD, we unmarshall it back to a Go struct here.
|
||||
nestedMerge, err := argoprojiov1alpha1.ToNestedMergeGenerator(appSetBaseGenerator.Merge)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to unmarshall nested merge generator: %v", err)
|
||||
}
|
||||
if nestedMerge != nil {
|
||||
mergeGenerator = nestedMerge.ToMergeGenerator()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -111,9 +112,8 @@ func (m *MatrixGenerator) getParams(appSetBaseGenerator argoprojiov1alpha1.Appli
|
||||
SCMProvider: appSetBaseGenerator.SCMProvider,
|
||||
ClusterDecisionResource: appSetBaseGenerator.ClusterDecisionResource,
|
||||
PullRequest: appSetBaseGenerator.PullRequest,
|
||||
Plugin: appSetBaseGenerator.Plugin,
|
||||
Matrix: matrixGen,
|
||||
Merge: mergeGen,
|
||||
Matrix: matrix,
|
||||
Merge: mergeGenerator,
|
||||
Selector: appSetBaseGenerator.Selector,
|
||||
},
|
||||
m.supportedGenerators,
|
||||
@@ -143,18 +143,10 @@ func (m *MatrixGenerator) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.Ap
|
||||
var found bool
|
||||
|
||||
for _, r := range appSetGenerator.Matrix.Generators {
|
||||
matrixGen, _ := getMatrixGenerator(r)
|
||||
mergeGen, _ := getMergeGenerator(r)
|
||||
base := &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
List: r.List,
|
||||
Clusters: r.Clusters,
|
||||
Git: r.Git,
|
||||
PullRequest: r.PullRequest,
|
||||
Plugin: r.Plugin,
|
||||
SCMProvider: r.SCMProvider,
|
||||
ClusterDecisionResource: r.ClusterDecisionResource,
|
||||
Matrix: matrixGen,
|
||||
Merge: mergeGen,
|
||||
List: r.List,
|
||||
Clusters: r.Clusters,
|
||||
Git: r.Git,
|
||||
}
|
||||
generators := GetRelevantGenerators(base, m.supportedGenerators)
|
||||
|
||||
@@ -175,17 +167,6 @@ func (m *MatrixGenerator) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.Ap
|
||||
|
||||
}
|
||||
|
||||
func getMatrixGenerator(r argoprojiov1alpha1.ApplicationSetNestedGenerator) (*argoprojiov1alpha1.MatrixGenerator, error) {
|
||||
if r.Matrix == nil {
|
||||
return nil, nil
|
||||
}
|
||||
matrix, err := argoprojiov1alpha1.ToNestedMatrixGenerator(r.Matrix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return matrix.ToMatrixGenerator(), nil
|
||||
}
|
||||
|
||||
func (m *MatrixGenerator) GetTemplate(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator) *argoprojiov1alpha1.ApplicationSetTemplate {
|
||||
return &appSetGenerator.Matrix.Template
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
@@ -13,8 +12,6 @@ import (
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client/fake"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/mocks"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
|
||||
@@ -31,7 +28,7 @@ func TestMatrixGenerate(t *testing.T) {
|
||||
}
|
||||
|
||||
listGenerator := &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "Cluster","url": "Url", "templated": "test-{{path.basenameNormalized}}"}`)}},
|
||||
Elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "Cluster","url": "Url"}`)}},
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
@@ -51,8 +48,8 @@ func TestMatrixGenerate(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{"path": "app1", "path.basename": "app1", "path.basenameNormalized": "app1", "cluster": "Cluster", "url": "Url", "templated": "test-app1"},
|
||||
{"path": "app2", "path.basename": "app2", "path.basenameNormalized": "app2", "cluster": "Cluster", "url": "Url", "templated": "test-app2"},
|
||||
{"path": "app1", "path.basename": "app1", "path.basenameNormalized": "app1", "cluster": "Cluster", "url": "Url"},
|
||||
{"path": "app2", "path.basename": "app2", "path.basenameNormalized": "app2", "cluster": "Cluster", "url": "Url"},
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -271,28 +268,6 @@ func TestMatrixGenerateGoTemplate(t *testing.T) {
|
||||
{"a": "2", "b": "2"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "parameter override: first list elements take precedence",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
List: &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{
|
||||
{Raw: []byte(`{"booleanFalse": false, "booleanTrue": true, "stringFalse": "false", "stringTrue": "true"}`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
List: &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{
|
||||
{Raw: []byte(`{"booleanFalse": true, "booleanTrue": false, "stringFalse": "true", "stringTrue": "false"}`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{"booleanFalse": false, "booleanTrue": true, "stringFalse": "false", "stringTrue": "true"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "returns error if there is less than two base generators",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
@@ -424,12 +399,6 @@ func TestMatrixGetRequeueAfter(t *testing.T) {
|
||||
Elements: []apiextensionsv1.JSON{{Raw: []byte(`{"cluster": "Cluster","url": "Url"}`)}},
|
||||
}
|
||||
|
||||
pullRequestGenerator := &argoprojiov1alpha1.PullRequestGenerator{}
|
||||
|
||||
scmGenerator := &argoprojiov1alpha1.SCMProviderGenerator{}
|
||||
|
||||
duckTypeGenerator := &argoprojiov1alpha1.DuckTypeGenerator{}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
baseGenerators []argoprojiov1alpha1.ApplicationSetNestedGenerator
|
||||
@@ -462,55 +431,6 @@ func TestMatrixGetRequeueAfter(t *testing.T) {
|
||||
gitGetRequeueAfter: time.Duration(1),
|
||||
expected: time.Duration(1),
|
||||
},
|
||||
{
|
||||
name: "returns the minimal time for pull request",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Git: gitGenerator,
|
||||
},
|
||||
{
|
||||
PullRequest: pullRequestGenerator,
|
||||
},
|
||||
},
|
||||
gitGetRequeueAfter: time.Duration(15 * time.Second),
|
||||
expected: time.Duration(15 * time.Second),
|
||||
},
|
||||
{
|
||||
name: "returns the default time if no requeueAfterSeconds is provided",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Git: gitGenerator,
|
||||
},
|
||||
{
|
||||
PullRequest: pullRequestGenerator,
|
||||
},
|
||||
},
|
||||
expected: time.Duration(30 * time.Minute),
|
||||
},
|
||||
{
|
||||
name: "returns the default time for duck type generator",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Git: gitGenerator,
|
||||
},
|
||||
{
|
||||
ClusterDecisionResource: duckTypeGenerator,
|
||||
},
|
||||
},
|
||||
expected: time.Duration(3 * time.Minute),
|
||||
},
|
||||
{
|
||||
name: "returns the default time for scm generator",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Git: gitGenerator,
|
||||
},
|
||||
{
|
||||
SCMProvider: scmGenerator,
|
||||
},
|
||||
},
|
||||
expected: time.Duration(30 * time.Minute),
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
@@ -521,22 +441,16 @@ func TestMatrixGetRequeueAfter(t *testing.T) {
|
||||
|
||||
for _, g := range testCaseCopy.baseGenerators {
|
||||
gitGeneratorSpec := argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Git: g.Git,
|
||||
List: g.List,
|
||||
PullRequest: g.PullRequest,
|
||||
SCMProvider: g.SCMProvider,
|
||||
ClusterDecisionResource: g.ClusterDecisionResource,
|
||||
Git: g.Git,
|
||||
List: g.List,
|
||||
}
|
||||
mock.On("GetRequeueAfter", &gitGeneratorSpec).Return(testCaseCopy.gitGetRequeueAfter, nil)
|
||||
}
|
||||
|
||||
var matrixGenerator = NewMatrixGenerator(
|
||||
map[string]Generator{
|
||||
"Git": mock,
|
||||
"List": &ListGenerator{},
|
||||
"PullRequest": &PullRequestGenerator{},
|
||||
"SCMProvider": &SCMProviderGenerator{},
|
||||
"ClusterDecisionResource": &DuckTypeGenerator{},
|
||||
"Git": mock,
|
||||
"List": &ListGenerator{},
|
||||
},
|
||||
)
|
||||
|
||||
@@ -892,172 +806,6 @@ func TestInterpolatedMatrixGenerateGoTemplate(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestMatrixGenerateListElementsYaml(t *testing.T) {
|
||||
|
||||
gitGenerator := &argoprojiov1alpha1.GitGenerator{
|
||||
RepoURL: "RepoURL",
|
||||
Revision: "Revision",
|
||||
Files: []argoprojiov1alpha1.GitFileGeneratorItem{
|
||||
{Path: "config.yaml"},
|
||||
},
|
||||
}
|
||||
|
||||
listGenerator := &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{},
|
||||
ElementsYaml: "{{ .foo.bar | toJson }}",
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
baseGenerators []argoprojiov1alpha1.ApplicationSetNestedGenerator
|
||||
expectedErr error
|
||||
expected []map[string]interface{}
|
||||
}{
|
||||
{
|
||||
name: "happy flow - generate params",
|
||||
baseGenerators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
Git: gitGenerator,
|
||||
},
|
||||
{
|
||||
List: listGenerator,
|
||||
},
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"chart": "a",
|
||||
"version": "1",
|
||||
"foo": map[string]interface{}{
|
||||
"bar": []interface{}{
|
||||
map[string]interface{}{
|
||||
"chart": "a",
|
||||
"version": "1",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"chart": "b",
|
||||
"version": "2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"path": map[string]interface{}{
|
||||
"basename": "dir",
|
||||
"basenameNormalized": "dir",
|
||||
"filename": "file_name.yaml",
|
||||
"filenameNormalized": "file-name.yaml",
|
||||
"path": "path/dir",
|
||||
"segments": []string{
|
||||
"path",
|
||||
"dir",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
"chart": "b",
|
||||
"version": "2",
|
||||
"foo": map[string]interface{}{
|
||||
"bar": []interface{}{
|
||||
map[string]interface{}{
|
||||
"chart": "a",
|
||||
"version": "1",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"chart": "b",
|
||||
"version": "2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"path": map[string]interface{}{
|
||||
"basename": "dir",
|
||||
"basenameNormalized": "dir",
|
||||
"filename": "file_name.yaml",
|
||||
"filenameNormalized": "file-name.yaml",
|
||||
"path": "path/dir",
|
||||
"segments": []string{
|
||||
"path",
|
||||
"dir",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
testCaseCopy := testCase // Since tests may run in parallel
|
||||
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
genMock := &generatorMock{}
|
||||
appSet := &argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
GoTemplate: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, g := range testCaseCopy.baseGenerators {
|
||||
|
||||
gitGeneratorSpec := argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Git: g.Git,
|
||||
List: g.List,
|
||||
}
|
||||
genMock.On("GenerateParams", mock.AnythingOfType("*v1alpha1.ApplicationSetGenerator"), appSet).Return([]map[string]any{{
|
||||
"foo": map[string]interface{}{
|
||||
"bar": []interface{}{
|
||||
map[string]interface{}{
|
||||
"chart": "a",
|
||||
"version": "1",
|
||||
},
|
||||
map[string]interface{}{
|
||||
"chart": "b",
|
||||
"version": "2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"path": map[string]interface{}{
|
||||
"basename": "dir",
|
||||
"basenameNormalized": "dir",
|
||||
"filename": "file_name.yaml",
|
||||
"filenameNormalized": "file-name.yaml",
|
||||
"path": "path/dir",
|
||||
"segments": []string{
|
||||
"path",
|
||||
"dir",
|
||||
},
|
||||
},
|
||||
}}, nil)
|
||||
genMock.On("GetTemplate", &gitGeneratorSpec).
|
||||
Return(&argoprojiov1alpha1.ApplicationSetTemplate{})
|
||||
|
||||
}
|
||||
|
||||
var matrixGenerator = NewMatrixGenerator(
|
||||
map[string]Generator{
|
||||
"Git": genMock,
|
||||
"List": &ListGenerator{},
|
||||
},
|
||||
)
|
||||
|
||||
got, err := matrixGenerator.GenerateParams(&argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Matrix: &argoprojiov1alpha1.MatrixGenerator{
|
||||
Generators: testCaseCopy.baseGenerators,
|
||||
Template: argoprojiov1alpha1.ApplicationSetTemplate{},
|
||||
},
|
||||
}, appSet)
|
||||
|
||||
if testCaseCopy.expectedErr != nil {
|
||||
assert.ErrorIs(t, err, testCaseCopy.expectedErr)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
type generatorMock struct {
|
||||
mock.Mock
|
||||
}
|
||||
@@ -1080,72 +828,3 @@ func (g *generatorMock) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.Appl
return args.Get(0).(time.Duration)

}

func TestGitGenerator_GenerateParams_list_x_git_matrix_generator(t *testing.T) {
// Given a matrix generator over a list generator and a git files generator, the nested git files generator should
// be treated as a files generator, and it should produce parameters.

// This tests for a specific bug where a nested git files generator was being treated as a directory generator. This
// happened because, when the matrix generator was being processed, the nested git files generator was being
// interpolated by the deeplyReplace function. That function cannot differentiate between a nil slice and an empty
// slice. So it was replacing the `Directories` field with an empty slice, which the ApplicationSet controller
// interpreted as meaning this was a directory generator, not a files generator.

// Now instead of checking for nil, we check whether the field is a non-empty slice. This test prevents a regression
// of that bug.

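// Illustrative sketch only (not part of the upstream test): the check the fix relies on can be thought of as
//
//	isFilesGenerator := len(gitGen.Files) > 0
//
// rather than `gitGen.Files != nil`, so an interpolation step that turns a nil Directories slice into an
// empty one no longer changes how the nested git generator is classified.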
listGeneratorMock := &generatorMock{}
|
||||
listGeneratorMock.On("GenerateParams", mock.AnythingOfType("*v1alpha1.ApplicationSetGenerator"), mock.AnythingOfType("*v1alpha1.ApplicationSet")).Return([]map[string]interface{}{
|
||||
{"some": "value"},
|
||||
}, nil)
|
||||
listGeneratorMock.On("GetTemplate", mock.AnythingOfType("*v1alpha1.ApplicationSetGenerator")).Return(&argoprojiov1alpha1.ApplicationSetTemplate{})
|
||||
|
||||
gitGeneratorSpec := &argoprojiov1alpha1.GitGenerator{
|
||||
RepoURL: "https://git.example.com",
|
||||
Files: []argoprojiov1alpha1.GitFileGeneratorItem{
|
||||
{Path: "some/path.json"},
|
||||
},
|
||||
}
|
||||
|
||||
repoServiceMock := &mocks.Repos{}
|
||||
repoServiceMock.On("GetFiles", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(map[string][]byte{
|
||||
"some/path.json": []byte("test: content"),
|
||||
}, nil)
|
||||
gitGenerator := NewGitGenerator(repoServiceMock)
|
||||
|
||||
matrixGenerator := NewMatrixGenerator(map[string]Generator{
|
||||
"List": listGeneratorMock,
|
||||
"Git": gitGenerator,
|
||||
})
|
||||
|
||||
matrixGeneratorSpec := &argoprojiov1alpha1.MatrixGenerator{
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetNestedGenerator{
|
||||
{
|
||||
List: &argoprojiov1alpha1.ListGenerator{
|
||||
Elements: []apiextensionsv1.JSON{
|
||||
{
|
||||
Raw: []byte(`{"some": "value"}`),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Git: gitGeneratorSpec,
|
||||
},
|
||||
},
|
||||
}
|
||||
params, err := matrixGenerator.GenerateParams(&argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Matrix: matrixGeneratorSpec,
|
||||
}, &argoprojiov1alpha1.ApplicationSet{})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, []map[string]interface{}{{
|
||||
"path": "some",
|
||||
"path.basename": "some",
|
||||
"path.basenameNormalized": "some",
|
||||
"path.filename": "path.json",
|
||||
"path.filenameNormalized": "path.json",
|
||||
"path[0]": "some",
|
||||
"some": "value",
|
||||
"test": "content",
|
||||
}}, params)
|
||||
}
|
||||
|
||||
@@ -9,8 +9,6 @@ import (
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/utils"
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var _ Generator = (*MergeGenerator)(nil)
|
||||
@@ -38,10 +36,10 @@ func NewMergeGenerator(supportedGenerators map[string]Generator) Generator {
|
||||
// in slices ordered according to the order of the given generators.
|
||||
func (m *MergeGenerator) getParamSetsForAllGenerators(generators []argoprojiov1alpha1.ApplicationSetNestedGenerator, appSet *argoprojiov1alpha1.ApplicationSet) ([][]map[string]interface{}, error) {
|
||||
var paramSets [][]map[string]interface{}
|
||||
for i, generator := range generators {
|
||||
for _, generator := range generators {
|
||||
generatorParamSets, err := m.getParams(generator, appSet)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting params from generator %d of %d: %w", i+1, len(generators), err)
|
||||
return nil, err
|
||||
}
|
||||
// concatenate param lists produced by each generator
|
||||
paramSets = append(paramSets, generatorParamSets)
|
||||
@@ -61,18 +59,18 @@ func (m *MergeGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.Appl
|
||||
|
||||
paramSetsFromGenerators, err := m.getParamSetsForAllGenerators(appSetGenerator.Merge.Generators, appSet)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting param sets from generators: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
baseParamSetsByMergeKey, err := getParamSetsByMergeKey(appSetGenerator.Merge.MergeKeys, paramSetsFromGenerators[0])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting param sets by merge key: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, paramSets := range paramSetsFromGenerators[1:] {
|
||||
paramSetsByMergeKey, err := getParamSetsByMergeKey(appSetGenerator.Merge.MergeKeys, paramSets)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting param sets by merge key: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for mergeKeyValue, baseParamSet := range baseParamSetsByMergeKey {
|
||||
@@ -80,13 +78,13 @@ func (m *MergeGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.Appl
|
||||
|
||||
if appSet.Spec.GoTemplate {
|
||||
if err := mergo.Merge(&baseParamSet, overrideParamSet, mergo.WithOverride); err != nil {
|
||||
return nil, fmt.Errorf("error merging base param set with override param set: %w", err)
|
||||
return nil, fmt.Errorf("failed to merge base param set with override param set: %w", err)
|
||||
}
|
||||
baseParamSetsByMergeKey[mergeKeyValue] = baseParamSet
|
||||
} else {
|
||||
overriddenParamSet, err := utils.CombineStringMapsAllowDuplicates(baseParamSet, overrideParamSet)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error combining string maps: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
baseParamSetsByMergeKey[mergeKeyValue] = utils.ConvertToMapStringInterface(overriddenParamSet)
|
||||
}
|
||||
@@ -125,7 +123,7 @@ func getParamSetsByMergeKey(mergeKeys []string, paramSets []map[string]interface
|
||||
}
|
||||
paramSetKeyJson, err := json.Marshal(paramSetKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error marshalling param set key json: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
paramSetKeyString := string(paramSetKeyJson)
|
||||
if _, exists := paramSetsByMergeKey[paramSetKeyString]; exists {
|
||||
@@ -139,24 +137,26 @@ func getParamSetsByMergeKey(mergeKeys []string, paramSets []map[string]interface
|
||||
|
||||
// getParams gets the parameters generated by this generator.
|
||||
func (m *MergeGenerator) getParams(appSetBaseGenerator argoprojiov1alpha1.ApplicationSetNestedGenerator, appSet *argoprojiov1alpha1.ApplicationSet) ([]map[string]interface{}, error) {
|
||||
matrixGen, err := getMatrixGenerator(appSetBaseGenerator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if matrixGen != nil && !appSet.Spec.ApplyNestedSelectors {
|
||||
foundSelector := dropDisabledNestedSelectors(matrixGen.Generators)
|
||||
if foundSelector {
|
||||
log.Warnf("AppSet '%v' defines selector on nested matrix generator's generator without enabling them via 'spec.applyNestedSelectors', ignoring nested selector", appSet.Name)
|
||||
|
||||
var matrix *argoprojiov1alpha1.MatrixGenerator
|
||||
if appSetBaseGenerator.Matrix != nil {
|
||||
nestedMatrix, err := argoprojiov1alpha1.ToNestedMatrixGenerator(appSetBaseGenerator.Matrix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if nestedMatrix != nil {
|
||||
matrix = nestedMatrix.ToMatrixGenerator()
|
||||
}
|
||||
}
|
||||
mergeGen, err := getMergeGenerator(appSetBaseGenerator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if mergeGen != nil && !appSet.Spec.ApplyNestedSelectors {
|
||||
foundSelector := dropDisabledNestedSelectors(mergeGen.Generators)
|
||||
if foundSelector {
|
||||
log.Warnf("AppSet '%v' defines selector on nested merge generator's generator without enabling them via 'spec.applyNestedSelectors', ignoring nested selector", appSet.Name)
|
||||
|
||||
var mergeGenerator *argoprojiov1alpha1.MergeGenerator
|
||||
if appSetBaseGenerator.Merge != nil {
|
||||
nestedMerge, err := argoprojiov1alpha1.ToNestedMergeGenerator(appSetBaseGenerator.Merge)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if nestedMerge != nil {
|
||||
mergeGenerator = nestedMerge.ToMergeGenerator()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,9 +168,8 @@ func (m *MergeGenerator) getParams(appSetBaseGenerator argoprojiov1alpha1.Applic
|
||||
SCMProvider: appSetBaseGenerator.SCMProvider,
|
||||
ClusterDecisionResource: appSetBaseGenerator.ClusterDecisionResource,
|
||||
PullRequest: appSetBaseGenerator.PullRequest,
|
||||
Plugin: appSetBaseGenerator.Plugin,
|
||||
Matrix: matrixGen,
|
||||
Merge: mergeGen,
|
||||
Matrix: matrix,
|
||||
Merge: mergeGenerator,
|
||||
Selector: appSetBaseGenerator.Selector,
|
||||
},
|
||||
m.supportedGenerators,
|
||||
@@ -198,18 +197,10 @@ func (m *MergeGenerator) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.App
|
||||
var found bool
|
||||
|
||||
for _, r := range appSetGenerator.Merge.Generators {
|
||||
matrixGen, _ := getMatrixGenerator(r)
|
||||
mergeGen, _ := getMergeGenerator(r)
|
||||
base := &argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
List: r.List,
|
||||
Clusters: r.Clusters,
|
||||
Git: r.Git,
|
||||
PullRequest: r.PullRequest,
|
||||
Plugin: r.Plugin,
|
||||
SCMProvider: r.SCMProvider,
|
||||
ClusterDecisionResource: r.ClusterDecisionResource,
|
||||
Matrix: matrixGen,
|
||||
Merge: mergeGen,
|
||||
List: r.List,
|
||||
Clusters: r.Clusters,
|
||||
Git: r.Git,
|
||||
}
|
||||
generators := GetRelevantGenerators(base, m.supportedGenerators)
|
||||
|
||||
@@ -230,17 +221,6 @@ func (m *MergeGenerator) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.App
|
||||
|
||||
}
|
||||
|
||||
func getMergeGenerator(r argoprojiov1alpha1.ApplicationSetNestedGenerator) (*argoprojiov1alpha1.MergeGenerator, error) {
|
||||
if r.Merge == nil {
|
||||
return nil, nil
|
||||
}
|
||||
merge, err := argoprojiov1alpha1.ToNestedMergeGenerator(r.Merge)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error converting to nested merge generator: %w", err)
|
||||
}
|
||||
return merge.ToMergeGenerator(), nil
|
||||
}
|
||||
|
||||
// GetTemplate gets the Template field for the MergeGenerator.
|
||||
func (m *MergeGenerator) GetTemplate(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator) *argoprojiov1alpha1.ApplicationSetTemplate {
|
||||
return &appSetGenerator.Merge.Template
|
||||
|
||||
@@ -1,211 +0,0 @@
|
||||
package generators
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jeremywohl/flatten"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
"github.com/argoproj/argo-cd/v2/util/settings"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/plugin"
|
||||
)
|
||||
|
||||
const (
|
||||
DefaultPluginRequeueAfterSeconds = 30 * time.Minute
|
||||
)
|
||||
|
||||
var _ Generator = (*PluginGenerator)(nil)
|
||||
|
||||
type PluginGenerator struct {
|
||||
client client.Client
|
||||
ctx context.Context
|
||||
clientset kubernetes.Interface
|
||||
namespace string
|
||||
}
|
||||
|
||||
func NewPluginGenerator(client client.Client, ctx context.Context, clientset kubernetes.Interface, namespace string) Generator {
|
||||
g := &PluginGenerator{
|
||||
client: client,
|
||||
ctx: ctx,
|
||||
clientset: clientset,
|
||||
namespace: namespace,
|
||||
}
|
||||
return g
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) GetRequeueAfter(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator) time.Duration {
|
||||
// Return a requeue default of 30 minutes, if no default is specified.
|
||||
|
||||
if appSetGenerator.Plugin.RequeueAfterSeconds != nil {
|
||||
return time.Duration(*appSetGenerator.Plugin.RequeueAfterSeconds) * time.Second
|
||||
}
|
||||
|
||||
return DefaultPluginRequeueAfterSeconds
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) GetTemplate(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator) *argoprojiov1alpha1.ApplicationSetTemplate {
|
||||
return &appSetGenerator.Plugin.Template
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, applicationSetInfo *argoprojiov1alpha1.ApplicationSet) ([]map[string]interface{}, error) {
|
||||
|
||||
if appSetGenerator == nil {
|
||||
return nil, EmptyAppSetGeneratorError
|
||||
}
|
||||
|
||||
if appSetGenerator.Plugin == nil {
|
||||
return nil, EmptyAppSetGeneratorError
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
providerConfig := appSetGenerator.Plugin
|
||||
|
||||
pluginClient, err := g.getPluginFromGenerator(ctx, applicationSetInfo.Name, providerConfig)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting plugin from generator: %w", err)
|
||||
}
|
||||
|
||||
list, err := pluginClient.List(ctx, providerConfig.Input.Parameters)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error listing params: %w", err)
|
||||
}
|
||||
|
||||
res, err := g.generateParams(appSetGenerator, applicationSetInfo, list.Output.Parameters, appSetGenerator.Plugin.Input.Parameters, applicationSetInfo.Spec.GoTemplate)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error generating params: %w", err)
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) getPluginFromGenerator(ctx context.Context, appSetName string, generatorConfig *argoprojiov1alpha1.PluginGenerator) (*plugin.Service, error) {
|
||||
cm, err := g.getConfigMap(ctx, generatorConfig.ConfigMapRef.Name)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching ConfigMap: %w", err)
|
||||
}
|
||||
token, err := g.getToken(ctx, cm["token"])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret token: %v", err)
|
||||
}
|
||||
|
||||
var requestTimeout int
|
||||
requestTimeoutStr, ok := cm["requestTimeout"]
|
||||
if ok {
|
||||
requestTimeout, err = strconv.Atoi(requestTimeoutStr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error set requestTimeout : %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
pluginClient, err := plugin.NewPluginService(ctx, appSetName, cm["baseUrl"], token, requestTimeout)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error initializing plugin client: %w", err)
|
||||
}
|
||||
return pluginClient, nil
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) generateParams(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, appSet *argoprojiov1alpha1.ApplicationSet, objectsFound []map[string]interface{}, pluginParams argoprojiov1alpha1.PluginParameters, useGoTemplate bool) ([]map[string]interface{}, error) {
res := []map[string]interface{}{}

for _, objectFound := range objectsFound {

params := map[string]interface{}{}

if useGoTemplate {
for k, v := range objectFound {
params[k] = v
}
} else {
flat, err := flatten.Flatten(objectFound, "", flatten.DotStyle)
if err != nil {
return nil, err
}
for k, v := range flat {
params[k] = fmt.Sprintf("%v", v)
}
}
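// For example (mirroring the expectations in TestPluginGenerateParams below), with useGoTemplate=false a
// nested result such as {"key2": {"key2_1": "val2_1"}} is flattened to the string parameter
// "key2.key2_1" = "val2_1", and a number like 123 becomes the string "123".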
|
||||
|
||||
params["generator"] = map[string]interface{}{
|
||||
"input": map[string]argoprojiov1alpha1.PluginParameters{
|
||||
"parameters": pluginParams,
|
||||
},
|
||||
}
|
||||
|
||||
err := appendTemplatedValues(appSetGenerator.Plugin.Values, params, appSet.Spec.GoTemplate, appSet.Spec.GoTemplateOptions)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res = append(res, params)
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) getToken(ctx context.Context, tokenRef string) (string, error) {

if tokenRef == "" || !strings.HasPrefix(tokenRef, "$") {
return "", fmt.Errorf("token is empty, or does not reference a secret key starting with '$': %v", tokenRef)
}

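// For reference, valid token references look like "$plugin.token" (a key in the default argocd-secret) or
// "$plugin-secret:plugin.token" (a key in an explicitly named Secret); both forms are exercised by
// TestPluginGenerateParams in this change.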
||||
secretName, tokenKey := plugin.ParseSecretKey(tokenRef)
|
||||
|
||||
secret := &corev1.Secret{}
|
||||
err := g.client.Get(
|
||||
ctx,
|
||||
client.ObjectKey{
|
||||
Name: secretName,
|
||||
Namespace: g.namespace,
|
||||
},
|
||||
secret)
|
||||
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error fetching secret %s/%s: %v", g.namespace, secretName, err)
|
||||
}
|
||||
|
||||
secretValues := make(map[string]string, len(secret.Data))
|
||||
|
||||
for k, v := range secret.Data {
|
||||
secretValues[k] = string(v)
|
||||
}
|
||||
|
||||
token := settings.ReplaceStringSecret(tokenKey, secretValues)
|
||||
|
||||
return token, err
|
||||
}
|
||||
|
||||
func (g *PluginGenerator) getConfigMap(ctx context.Context, configMapRef string) (map[string]string, error) {
|
||||
cm := &corev1.ConfigMap{}
|
||||
err := g.client.Get(
|
||||
ctx,
|
||||
client.ObjectKey{
|
||||
Name: configMapRef,
|
||||
Namespace: g.namespace,
|
||||
},
|
||||
cm)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
baseUrl, ok := cm.Data["baseUrl"]
|
||||
if !ok || baseUrl == "" {
|
||||
return nil, fmt.Errorf("baseUrl not found in ConfigMap")
|
||||
}
|
||||
|
||||
token, ok := cm.Data["token"]
|
||||
if !ok || token == "" {
|
||||
return nil, fmt.Errorf("token not found in ConfigMap")
|
||||
}
|
||||
|
||||
return cm.Data, nil
|
||||
}
|
||||
@@ -1,705 +0,0 @@
|
||||
package generators
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
v1 "k8s.io/api/core/v1"
|
||||
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
kubefake "k8s.io/client-go/kubernetes/fake"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client/fake"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/plugin"
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
func TestPluginGenerateParams(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
configmap *v1.ConfigMap
|
||||
secret *v1.Secret
|
||||
inputParameters map[string]apiextensionsv1.JSON
|
||||
values map[string]string
|
||||
gotemplate bool
|
||||
expected []map[string]interface{}
|
||||
content []byte
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "simple case",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "simple case with values",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
values: map[string]string{
|
||||
"valuekey1": "valuevalue1",
|
||||
"valuekey2": "templated-{{key1}}",
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"values.valuekey1": "valuevalue1",
|
||||
"values.valuekey2": "templated-val1",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "simple case with gotemplate",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: true,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2": map[string]interface{}{
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": map[string]interface{}{
|
||||
"key2_2_1": "val2_2_1",
|
||||
},
|
||||
},
|
||||
"key3": float64(123),
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "simple case with appended params",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123,
|
||||
"pkey2": "valplugin"
|
||||
}]}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"pkey2": "valplugin",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "no params",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: argoprojiov1alpha1.PluginParameters{},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": map[string]map[string]interface{}{
|
||||
"parameters": {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "empty return",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"input": {"parameters": []}}`),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "wrong return",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{},
|
||||
gotemplate: false,
|
||||
content: []byte(`wrong body ...`),
|
||||
expected: []map[string]interface{}{},
|
||||
expectedError: fmt.Errorf("error listing params: error get api 'set': invalid character 'w' looking for beginning of value: wrong body ..."),
|
||||
},
|
||||
{
|
||||
name: "external secret",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin-secret:plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "plugin-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123,
|
||||
"pkey2": "valplugin"
|
||||
}]}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"pkey2": "valplugin",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "no secret",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: fmt.Errorf("error getting plugin from generator: error fetching Secret token: error fetching secret default/argocd-secret: secrets \"argocd-secret\" not found"),
|
||||
},
|
||||
{
|
||||
name: "no configmap",
|
||||
configmap: &v1.ConfigMap{},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: fmt.Errorf("error getting plugin from generator: error fetching ConfigMap: configmaps \"\" not found"),
|
||||
},
|
||||
{
|
||||
name: "no baseUrl",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"token": "$plugin.token",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "argocd-secret",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string][]byte{
|
||||
"plugin.token": []byte("my-secret"),
|
||||
},
|
||||
},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: fmt.Errorf("error getting plugin from generator: error fetching ConfigMap: baseUrl not found in ConfigMap"),
|
||||
},
|
||||
{
|
||||
name: "no token",
|
||||
configmap: &v1.ConfigMap{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "first-plugin-cm",
|
||||
Namespace: "default",
|
||||
},
|
||||
Data: map[string]string{
|
||||
"baseUrl": "http://127.0.0.1",
|
||||
},
|
||||
},
|
||||
secret: &v1.Secret{},
|
||||
inputParameters: map[string]apiextensionsv1.JSON{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
gotemplate: false,
|
||||
content: []byte(`{"output": {
|
||||
"parameters": [{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]
|
||||
}}`),
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2.key2_1": "val2_1",
|
||||
"key2.key2_2.key2_2_1": "val2_2_1",
|
||||
"key3": "123",
|
||||
"generator": map[string]interface{}{
|
||||
"input": argoprojiov1alpha1.PluginInput{
|
||||
Parameters: argoprojiov1alpha1.PluginParameters{
|
||||
"pkey1": {Raw: []byte(`"val1"`)},
|
||||
"pkey2": {Raw: []byte(`"val2"`)},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: fmt.Errorf("error getting plugin from generator: error fetching ConfigMap: token not found in ConfigMap"),
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
for _, testCase := range testCases {
|
||||
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
|
||||
generatorConfig := argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
Plugin: &argoprojiov1alpha1.PluginGenerator{
|
||||
ConfigMapRef: argoprojiov1alpha1.PluginConfigMapRef{Name: testCase.configmap.Name},
|
||||
Input: argoprojiov1alpha1.PluginInput{
|
||||
Parameters: testCase.inputParameters,
|
||||
},
|
||||
Values: testCase.values,
|
||||
},
|
||||
}
|
||||
|
||||
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
authHeader := r.Header.Get("Authorization")
|
||||
_, tokenKey := plugin.ParseSecretKey(testCase.configmap.Data["token"])
|
||||
expectedToken := testCase.secret.Data[strings.Replace(tokenKey, "$", "", -1)]
|
||||
if authHeader != "Bearer "+string(expectedToken) {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_, err := w.Write(testCase.content)
|
||||
if err != nil {
|
||||
assert.NoError(t, fmt.Errorf("Error Write %v", err))
|
||||
}
|
||||
})
|
||||
|
||||
fakeServer := httptest.NewServer(handler)
|
||||
|
||||
defer fakeServer.Close()
|
||||
|
||||
if _, ok := testCase.configmap.Data["baseUrl"]; ok {
|
||||
testCase.configmap.Data["baseUrl"] = fakeServer.URL
|
||||
}
|
||||
|
||||
fakeClient := kubefake.NewSimpleClientset(append([]runtime.Object{}, testCase.configmap, testCase.secret)...)
|
||||
|
||||
fakeClientWithCache := fake.NewClientBuilder().WithObjects([]client.Object{testCase.configmap, testCase.secret}...).Build()
|
||||
|
||||
var pluginGenerator = NewPluginGenerator(fakeClientWithCache, ctx, fakeClient, "default")
|
||||
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
GoTemplate: testCase.gotemplate,
|
||||
},
|
||||
}
|
||||
|
||||
got, err := pluginGenerator.GenerateParams(&generatorConfig, &applicationSetInfo)
|
||||
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
|
||||
if testCase.expectedError != nil {
|
||||
assert.EqualError(t, err, testCase.expectedError.Error())
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
expectedJson, err := json.Marshal(testCase.expected)
|
||||
require.NoError(t, err)
|
||||
gotJson, err := json.Marshal(got)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, string(expectedJson), string(gotJson))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
|
||||
"github.com/gosimple/slug"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/pull_request"
|
||||
pullrequest "github.com/argoproj/argo-cd/v2/applicationset/services/pull_request"
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
@@ -25,16 +26,12 @@ type PullRequestGenerator struct {
|
||||
client client.Client
|
||||
selectServiceProviderFunc func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error)
|
||||
auth SCMAuthProviders
|
||||
scmRootCAPath string
|
||||
allowedSCMProviders []string
|
||||
}
|
||||
|
||||
func NewPullRequestGenerator(client client.Client, auth SCMAuthProviders, scmRootCAPath string, allowedScmProviders []string) Generator {
|
||||
func NewPullRequestGenerator(client client.Client, auth SCMAuthProviders) Generator {
|
||||
g := &PullRequestGenerator{
|
||||
client: client,
|
||||
auth: auth,
|
||||
scmRootCAPath: scmRootCAPath,
|
||||
allowedSCMProviders: allowedScmProviders,
|
||||
client: client,
|
||||
auth: auth,
|
||||
}
|
||||
g.selectServiceProviderFunc = g.selectServiceProvider
|
||||
return g
|
||||
@@ -69,7 +66,7 @@ func (g *PullRequestGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
return nil, fmt.Errorf("failed to select pull request service provider: %v", err)
|
||||
}
|
||||
|
||||
pulls, err := pullrequest.ListPullRequests(ctx, svc, appSetGenerator.PullRequest.Filters)
|
||||
pulls, err := pull_request.ListPullRequests(ctx, svc, appSetGenerator.PullRequest.Filters)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error listing repos: %v", err)
|
||||
}
|
||||
@@ -87,34 +84,19 @@ func (g *PullRequestGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
}
|
||||
|
||||
var shortSHALength int
|
||||
var shortSHALength7 int
|
||||
for _, pull := range pulls {
|
||||
shortSHALength = 8
|
||||
if len(pull.HeadSHA) < 8 {
|
||||
shortSHALength = len(pull.HeadSHA)
|
||||
}
|
||||
|
||||
shortSHALength7 = 7
|
||||
if len(pull.HeadSHA) < 7 {
|
||||
shortSHALength7 = len(pull.HeadSHA)
|
||||
}
|
||||
|
||||
paramMap := map[string]interface{}{
|
||||
"number": strconv.Itoa(pull.Number),
|
||||
"branch": pull.Branch,
|
||||
"branch_slug": slug.Make(pull.Branch),
|
||||
"target_branch": pull.TargetBranch,
|
||||
"target_branch_slug": slug.Make(pull.TargetBranch),
|
||||
"head_sha": pull.HeadSHA,
|
||||
"head_short_sha": pull.HeadSHA[:shortSHALength],
|
||||
"head_short_sha_7": pull.HeadSHA[:shortSHALength7],
|
||||
}
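// For example, HeadSHA "089d92cbf9ff857a39e6feccd32798ca700fb958" yields head_short_sha "089d92cb" (first 8
// characters) and head_short_sha_7 "089d92c" (first 7), while a SHA shorter than the cutoff such as "abcd" is
// used as-is, as asserted in TestPullRequestGithubGenerateParams.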

// PR labels will only be supported for Go Template appsets, since fasttemplate will be deprecated.
if applicationSetInfo != nil && applicationSetInfo.Spec.GoTemplate {
paramMap["labels"] = pull.Labels
}
|
||||
params = append(params, paramMap)
|
||||
params = append(params, map[string]interface{}{
|
||||
"number": strconv.Itoa(pull.Number),
|
||||
"branch": pull.Branch,
|
||||
"branch_slug": slug.Make(pull.Branch),
|
||||
"head_sha": pull.HeadSHA,
|
||||
"head_short_sha": pull.HeadSHA[:shortSHALength],
|
||||
})
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
@@ -122,27 +104,18 @@ func (g *PullRequestGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
// selectServiceProvider selects the provider to get pull requests from the configuration
|
||||
func (g *PullRequestGenerator) selectServiceProvider(ctx context.Context, generatorConfig *argoprojiov1alpha1.PullRequestGenerator, applicationSetInfo *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error) {
|
||||
if generatorConfig.Github != nil {
|
||||
if !ScmProviderAllowed(applicationSetInfo, generatorConfig.Github.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", generatorConfig.Github.API)
|
||||
}
|
||||
return g.github(ctx, generatorConfig.Github, applicationSetInfo)
|
||||
}
|
||||
if generatorConfig.GitLab != nil {
|
||||
providerConfig := generatorConfig.GitLab
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.API)
|
||||
}
|
||||
token, err := g.getSecretRef(ctx, providerConfig.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret token: %v", err)
|
||||
}
|
||||
return pullrequest.NewGitLabService(ctx, token, providerConfig.API, providerConfig.Project, providerConfig.Labels, providerConfig.PullRequestState, g.scmRootCAPath, providerConfig.Insecure)
|
||||
return pullrequest.NewGitLabService(ctx, token, providerConfig.API, providerConfig.Project, providerConfig.Labels, providerConfig.PullRequestState)
|
||||
}
|
||||
if generatorConfig.Gitea != nil {
|
||||
providerConfig := generatorConfig.Gitea
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", generatorConfig.Gitea.API)
|
||||
}
|
||||
token, err := g.getSecretRef(ctx, providerConfig.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret token: %v", err)
|
||||
@@ -151,9 +124,6 @@ func (g *PullRequestGenerator) selectServiceProvider(ctx context.Context, genera
|
||||
}
|
||||
if generatorConfig.BitbucketServer != nil {
|
||||
providerConfig := generatorConfig.BitbucketServer
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.API)
|
||||
}
|
||||
if providerConfig.BasicAuth != nil {
|
||||
password, err := g.getSecretRef(ctx, providerConfig.BasicAuth.PasswordRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
@@ -164,32 +134,6 @@ func (g *PullRequestGenerator) selectServiceProvider(ctx context.Context, genera
|
||||
return pullrequest.NewBitbucketServiceNoAuth(ctx, providerConfig.API, providerConfig.Project, providerConfig.Repo)
|
||||
}
|
||||
}
|
||||
if generatorConfig.Bitbucket != nil {
|
||||
providerConfig := generatorConfig.Bitbucket
|
||||
if providerConfig.BearerToken != nil {
|
||||
appToken, err := g.getSecretRef(ctx, providerConfig.BearerToken.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret Bearer token: %v", err)
|
||||
}
|
||||
return pullrequest.NewBitbucketCloudServiceBearerToken(providerConfig.API, appToken, providerConfig.Owner, providerConfig.Repo)
|
||||
} else if providerConfig.BasicAuth != nil {
|
||||
password, err := g.getSecretRef(ctx, providerConfig.BasicAuth.PasswordRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret token: %v", err)
|
||||
}
|
||||
return pullrequest.NewBitbucketCloudServiceBasicAuth(providerConfig.API, providerConfig.BasicAuth.Username, password, providerConfig.Owner, providerConfig.Repo)
|
||||
} else {
|
||||
return pullrequest.NewBitbucketCloudServiceNoAuth(providerConfig.API, providerConfig.Owner, providerConfig.Repo)
|
||||
}
|
||||
}
|
||||
if generatorConfig.AzureDevOps != nil {
|
||||
providerConfig := generatorConfig.AzureDevOps
|
||||
token, err := g.getSecretRef(ctx, providerConfig.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Secret token: %v", err)
|
||||
}
|
||||
return pullrequest.NewAzureDevOpsService(ctx, token, providerConfig.API, providerConfig.Organization, providerConfig.Project, providerConfig.Repo, providerConfig.Labels)
|
||||
}
|
||||
return nil, fmt.Errorf("no Pull Request provider implementation configured")
|
||||
}
|
||||
|
||||
|
||||
@@ -17,21 +17,19 @@ import (
|
||||
func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
cases := []struct {
|
||||
selectFunc func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error)
|
||||
expected []map[string]interface{}
|
||||
expectedErr error
|
||||
applicationSet argoprojiov1alpha1.ApplicationSet
|
||||
selectFunc func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error)
|
||||
expected []map[string]interface{}
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
selectFunc: func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error) {
|
||||
return pullrequest.NewFakeService(
|
||||
ctx,
|
||||
[]*pullrequest.PullRequest{
|
||||
{
|
||||
Number: 1,
|
||||
Branch: "branch1",
|
||||
TargetBranch: "master",
|
||||
HeadSHA: "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
&pullrequest.PullRequest{
|
||||
Number: 1,
|
||||
Branch: "branch1",
|
||||
HeadSHA: "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
},
|
||||
},
|
||||
nil,
|
||||
@@ -39,14 +37,11 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"number": "1",
|
||||
"branch": "branch1",
|
||||
"branch_slug": "branch1",
|
||||
"target_branch": "master",
|
||||
"target_branch_slug": "master",
|
||||
"head_sha": "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
"head_short_sha": "089d92cb",
|
||||
"head_short_sha_7": "089d92c",
|
||||
"number": "1",
|
||||
"branch": "branch1",
|
||||
"branch_slug": "branch1",
|
||||
"head_sha": "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
"head_short_sha": "089d92cb",
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
@@ -56,11 +51,10 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
return pullrequest.NewFakeService(
|
||||
ctx,
|
||||
[]*pullrequest.PullRequest{
|
||||
{
|
||||
Number: 2,
|
||||
Branch: "feat/areally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
TargetBranch: "feat/anotherreally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
HeadSHA: "9b34ff5bd418e57d58891eb0aa0728043ca1e8be",
|
||||
&pullrequest.PullRequest{
|
||||
Number: 2,
|
||||
Branch: "feat/areally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
HeadSHA: "9b34ff5bd418e57d58891eb0aa0728043ca1e8be",
|
||||
},
|
||||
},
|
||||
nil,
|
||||
@@ -68,14 +62,11 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"number": "2",
|
||||
"branch": "feat/areally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
"branch_slug": "feat-areally-long-pull-request-name-to-test-argo",
|
||||
"target_branch": "feat/anotherreally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
"target_branch_slug": "feat-anotherreally-long-pull-request-name-to-test",
|
||||
"head_sha": "9b34ff5bd418e57d58891eb0aa0728043ca1e8be",
|
||||
"head_short_sha": "9b34ff5b",
|
||||
"head_short_sha_7": "9b34ff5",
|
||||
"number": "2",
|
||||
"branch": "feat/areally+long_pull_request_name_to_test_argo_slugification_and_branch_name_shortening_feature",
|
||||
"branch_slug": "feat-areally-long-pull-request-name-to-test-argo",
|
||||
"head_sha": "9b34ff5bd418e57d58891eb0aa0728043ca1e8be",
|
||||
"head_short_sha": "9b34ff5b",
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
@@ -85,11 +76,10 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
return pullrequest.NewFakeService(
|
||||
ctx,
|
||||
[]*pullrequest.PullRequest{
|
||||
{
|
||||
Number: 1,
|
||||
Branch: "a-very-short-sha",
|
||||
TargetBranch: "master",
|
||||
HeadSHA: "abcd",
|
||||
&pullrequest.PullRequest{
|
||||
Number: 1,
|
||||
Branch: "a-very-short-sha",
|
||||
HeadSHA: "abcd",
|
||||
},
|
||||
},
|
||||
nil,
|
||||
@@ -97,14 +87,11 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"number": "1",
|
||||
"branch": "a-very-short-sha",
|
||||
"branch_slug": "a-very-short-sha",
|
||||
"target_branch": "master",
|
||||
"target_branch_slug": "master",
|
||||
"head_sha": "abcd",
|
||||
"head_short_sha": "abcd",
|
||||
"head_short_sha_7": "abcd",
|
||||
"number": "1",
|
||||
"branch": "a-very-short-sha",
|
||||
"branch_slug": "a-very-short-sha",
|
||||
"head_sha": "abcd",
|
||||
"head_short_sha": "abcd",
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
@@ -120,79 +107,6 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
expected: nil,
|
||||
expectedErr: fmt.Errorf("error listing repos: fake error"),
|
||||
},
|
||||
{
|
||||
selectFunc: func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error) {
|
||||
return pullrequest.NewFakeService(
|
||||
ctx,
|
||||
[]*pullrequest.PullRequest{
|
||||
{
|
||||
Number: 1,
|
||||
Branch: "branch1",
|
||||
TargetBranch: "master",
|
||||
HeadSHA: "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
Labels: []string{"preview"},
|
||||
},
|
||||
},
|
||||
nil,
|
||||
)
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"number": "1",
|
||||
"branch": "branch1",
|
||||
"branch_slug": "branch1",
|
||||
"target_branch": "master",
|
||||
"target_branch_slug": "master",
|
||||
"head_sha": "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
"head_short_sha": "089d92cb",
|
||||
"head_short_sha_7": "089d92c",
|
||||
"labels": []string{"preview"},
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
applicationSet: argoprojiov1alpha1.ApplicationSet{
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
// Application set is using Go Template.
|
||||
GoTemplate: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
selectFunc: func(context.Context, *argoprojiov1alpha1.PullRequestGenerator, *argoprojiov1alpha1.ApplicationSet) (pullrequest.PullRequestService, error) {
|
||||
return pullrequest.NewFakeService(
|
||||
ctx,
|
||||
[]*pullrequest.PullRequest{
|
||||
{
|
||||
Number: 1,
|
||||
Branch: "branch1",
|
||||
TargetBranch: "master",
|
||||
HeadSHA: "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
Labels: []string{"preview"},
|
||||
},
|
||||
},
|
||||
nil,
|
||||
)
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"number": "1",
|
||||
"branch": "branch1",
|
||||
"branch_slug": "branch1",
|
||||
"target_branch": "master",
|
||||
"target_branch_slug": "master",
|
||||
"head_sha": "089d92cbf9ff857a39e6feccd32798ca700fb958",
|
||||
"head_short_sha": "089d92cb",
|
||||
"head_short_sha_7": "089d92c",
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
applicationSet: argoprojiov1alpha1.ApplicationSet{
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
// Application set is using fasttemplate.
|
||||
GoTemplate: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
@@ -203,7 +117,7 @@ func TestPullRequestGithubGenerateParams(t *testing.T) {
|
||||
PullRequest: &argoprojiov1alpha1.PullRequestGenerator{},
|
||||
}
|
||||
|
||||
got, gotErr := gen.GenerateParams(&generatorConfig, &c.applicationSet)
|
||||
got, gotErr := gen.GenerateParams(&generatorConfig, nil)
|
||||
assert.Equal(t, c.expectedErr, gotErr)
|
||||
assert.ElementsMatch(t, c.expected, got)
|
||||
}
|
||||
@@ -273,80 +187,3 @@ func TestPullRequestGetSecretRef(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAllowedSCMProviderPullRequest(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
providerConfig *argoprojiov1alpha1.PullRequestGenerator
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "Error Github",
|
||||
providerConfig: &argoprojiov1alpha1.PullRequestGenerator{
|
||||
Github: &argoprojiov1alpha1.PullRequestGeneratorGithub{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "failed to select pull request service provider: scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Gitlab",
|
||||
providerConfig: &argoprojiov1alpha1.PullRequestGenerator{
|
||||
GitLab: &argoprojiov1alpha1.PullRequestGeneratorGitLab{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "failed to select pull request service provider: scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Gitea",
|
||||
providerConfig: &argoprojiov1alpha1.PullRequestGenerator{
|
||||
Gitea: &argoprojiov1alpha1.PullRequestGeneratorGitea{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "failed to select pull request service provider: scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Bitbucket",
|
||||
providerConfig: &argoprojiov1alpha1.PullRequestGenerator{
|
||||
BitbucketServer: &argoprojiov1alpha1.PullRequestGeneratorBitbucketServer{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "failed to select pull request service provider: scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range cases {
|
||||
testCaseCopy := testCase
|
||||
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
pullRequestGenerator := NewPullRequestGenerator(nil, SCMAuthProviders{}, "", []string{
|
||||
"github.myorg.com",
|
||||
"gitlab.myorg.com",
|
||||
"gitea.myorg.com",
|
||||
"bitbucket.myorg.com",
|
||||
"azuredevops.myorg.com",
|
||||
})
|
||||
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetGenerator{{
|
||||
PullRequest: testCaseCopy.providerConfig,
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := pullRequestGenerator.GenerateParams(&applicationSetInfo.Spec.Generators[0], &applicationSetInfo)
|
||||
|
||||
assert.Error(t, err, "Must return an error")
|
||||
assert.Equal(t, testCaseCopy.expectedError, err.Error())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,12 +9,9 @@ import (
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/github_app_auth"
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/services/scm_provider"
|
||||
"github.com/argoproj/argo-cd/v2/applicationset/utils"
|
||||
"github.com/argoproj/argo-cd/v2/common"
|
||||
argoprojiov1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
@@ -29,20 +26,16 @@ type SCMProviderGenerator struct {
|
||||
// Testing hooks.
|
||||
overrideProvider scm_provider.SCMProviderService
|
||||
SCMAuthProviders
|
||||
scmRootCAPath string
|
||||
allowedSCMProviders []string
|
||||
}
|
||||
|
||||
type SCMAuthProviders struct {
|
||||
GitHubApps github_app_auth.Credentials
|
||||
}
|
||||
|
||||
func NewSCMProviderGenerator(client client.Client, providers SCMAuthProviders, scmRootCAPath string, allowedSCMProviders []string) Generator {
|
||||
func NewSCMProviderGenerator(client client.Client, providers SCMAuthProviders) Generator {
|
||||
return &SCMProviderGenerator{
|
||||
client: client,
|
||||
SCMAuthProviders: providers,
|
||||
scmRootCAPath: scmRootCAPath,
|
||||
allowedSCMProviders: allowedSCMProviders,
|
||||
client: client,
|
||||
SCMAuthProviders: providers,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,26 +58,6 @@ func (g *SCMProviderGenerator) GetTemplate(appSetGenerator *argoprojiov1alpha1.A
|
||||
return &appSetGenerator.SCMProvider.Template
|
||||
}
|
||||
|
||||
func ScmProviderAllowed(applicationSetInfo *argoprojiov1alpha1.ApplicationSet, url string, allowedScmProviders []string) bool {
    if url == "" || len(allowedScmProviders) == 0 {
        return true
    }

    for _, allowedScmProvider := range allowedScmProviders {
        if url == allowedScmProvider {
            return true
        }
    }

    log.WithFields(log.Fields{
        common.SecurityField: common.SecurityMedium,
        "applicationset":     applicationSetInfo.Name,
        "appSetNamespace":    applicationSetInfo.Namespace,
    }).Debugf("attempted to use disallowed SCM %q", url)

    return false
}

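A minimal sketch of the allow-list semantics above (illustrative values, not part of this diff): the check is an exact string match against the configured API URLs, and an empty URL or an empty allow-list permits the request.

// Sketch only: exact-match behaviour of ScmProviderAllowed.
appSet := &argoprojiov1alpha1.ApplicationSet{}
allowed := []string{"https://github.myorg.com", "https://gitlab.myorg.com"}

_ = ScmProviderAllowed(appSet, "https://github.myorg.com", allowed)    // true: exact match
_ = ScmProviderAllowed(appSet, "https://github.example.org", allowed)  // false: not listed; logged at SecurityMedium
_ = ScmProviderAllowed(appSet, "", allowed)                            // true: an empty URL short-circuits the check
_ = ScmProviderAllowed(appSet, "https://anything.example", nil)        // true: no allow-list configured means allow all
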
func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha1.ApplicationSetGenerator, applicationSetInfo *argoprojiov1alpha1.ApplicationSet) ([]map[string]interface{}, error) {
|
||||
if appSetGenerator == nil {
|
||||
return nil, EmptyAppSetGeneratorError
|
||||
@@ -102,30 +75,21 @@ func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
if g.overrideProvider != nil {
|
||||
provider = g.overrideProvider
|
||||
} else if providerConfig.Github != nil {
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.Github.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.Github.API)
|
||||
}
|
||||
var err error
|
||||
provider, err = g.githubProvider(ctx, providerConfig.Github, applicationSetInfo)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("scm provider: %w", err)
|
||||
}
|
||||
} else if providerConfig.Gitlab != nil {
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.Gitlab.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.Gitlab.API)
|
||||
}
|
||||
token, err := g.getSecretRef(ctx, providerConfig.Gitlab.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Gitlab token: %v", err)
|
||||
}
|
||||
provider, err = scm_provider.NewGitlabProvider(ctx, providerConfig.Gitlab.Group, token, providerConfig.Gitlab.API, providerConfig.Gitlab.AllBranches, providerConfig.Gitlab.IncludeSubgroups, providerConfig.Gitlab.WillIncludeSharedProjects(), providerConfig.Gitlab.Insecure, g.scmRootCAPath, providerConfig.Gitlab.Topic)
|
||||
provider, err = scm_provider.NewGitlabProvider(ctx, providerConfig.Gitlab.Group, token, providerConfig.Gitlab.API, providerConfig.Gitlab.AllBranches, providerConfig.Gitlab.IncludeSubgroups)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error initializing Gitlab service: %v", err)
|
||||
}
|
||||
} else if providerConfig.Gitea != nil {
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.Gitea.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.Gitea.API)
|
||||
}
|
||||
token, err := g.getSecretRef(ctx, providerConfig.Gitea.TokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Gitea token: %v", err)
|
||||
@@ -136,9 +100,6 @@ func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
}
|
||||
} else if providerConfig.BitbucketServer != nil {
|
||||
providerConfig := providerConfig.BitbucketServer
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.API)
|
||||
}
|
||||
var scmError error
|
||||
if providerConfig.BasicAuth != nil {
|
||||
password, err := g.getSecretRef(ctx, providerConfig.BasicAuth.PasswordRef, applicationSetInfo.Namespace)
|
||||
@@ -153,9 +114,6 @@ func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
return nil, fmt.Errorf("error initializing Bitbucket Server service: %v", scmError)
|
||||
}
|
||||
} else if providerConfig.AzureDevOps != nil {
|
||||
if !ScmProviderAllowed(applicationSetInfo, providerConfig.AzureDevOps.API, g.allowedSCMProviders) {
|
||||
return nil, fmt.Errorf("scm provider not allowed: %s", providerConfig.AzureDevOps.API)
|
||||
}
|
||||
token, err := g.getSecretRef(ctx, providerConfig.AzureDevOps.AccessTokenRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Azure Devops access token: %v", err)
|
||||
@@ -164,21 +122,6 @@ func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error initializing Azure Devops service: %v", err)
|
||||
}
|
||||
} else if providerConfig.Bitbucket != nil {
|
||||
appPassword, err := g.getSecretRef(ctx, providerConfig.Bitbucket.AppPasswordRef, applicationSetInfo.Namespace)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error fetching Bitbucket cloud appPassword: %v", err)
|
||||
}
|
||||
provider, err = scm_provider.NewBitBucketCloudProvider(ctx, providerConfig.Bitbucket.Owner, providerConfig.Bitbucket.User, appPassword, providerConfig.Bitbucket.AllBranches)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error initializing Bitbucket cloud service: %v", err)
|
||||
}
|
||||
} else if providerConfig.AWSCodeCommit != nil {
|
||||
var awsErr error
|
||||
provider, awsErr = scm_provider.NewAWSCodeCommitProvider(ctx, providerConfig.AWSCodeCommit.TagFilters, providerConfig.AWSCodeCommit.Role, providerConfig.AWSCodeCommit.Region, providerConfig.AWSCodeCommit.AllBranches)
|
||||
if awsErr != nil {
|
||||
return nil, fmt.Errorf("error initializing AWS codecommit service: %v", awsErr)
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("no SCM provider implementation configured")
|
||||
}
|
||||
@@ -188,40 +131,26 @@ func (g *SCMProviderGenerator) GenerateParams(appSetGenerator *argoprojiov1alpha
    if err != nil {
        return nil, fmt.Errorf("error listing repos: %v", err)
    }
    paramsArray := make([]map[string]interface{}, 0, len(repos))
    params := make([]map[string]interface{}, 0, len(repos))
    var shortSHALength int
    var shortSHALength7 int
    for _, repo := range repos {
        shortSHALength = 8
        if len(repo.SHA) < 8 {
            shortSHALength = len(repo.SHA)
        }

        shortSHALength7 = 7
        if len(repo.SHA) < 7 {
            shortSHALength7 = len(repo.SHA)
        }

        params := map[string]interface{}{
        params = append(params, map[string]interface{}{
            "organization":     repo.Organization,
            "repository":       repo.Repository,
            "url":              repo.URL,
            "branch":           repo.Branch,
            "sha":              repo.SHA,
            "short_sha":        repo.SHA[:shortSHALength],
            "short_sha_7":      repo.SHA[:shortSHALength7],
            "labels":           strings.Join(repo.Labels, ","),
            "branchNormalized": utils.SanitizeName(repo.Branch),
        }

        err := appendTemplatedValues(appSetGenerator.SCMProvider.Values, params, applicationSetInfo.Spec.GoTemplate, applicationSetInfo.Spec.GoTemplateOptions)
        if err != nil {
            return nil, fmt.Errorf("failed to append templated values: %w", err)
        }

        paramsArray = append(paramsArray, params)
        })
    }
    return paramsArray, nil
    return params, nil
}

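The short-SHA handling in the loop above exists to avoid slicing past the end of unusually short revisions (see the "59d0" case in the tests further down). A condensed sketch of the same rule, written as a hypothetical helper that is not part of this diff:

// shortSHA truncates a revision to at most n characters, mirroring how
// short_sha (n=8) and short_sha_7 (n=7) are computed above.
func shortSHA(sha string, n int) string {
    if len(sha) < n {
        return sha
    }
    return sha[:n]
}

// shortSHA("0bc57212c3cbbec69d20b34c507284bd300def5b", 8) == "0bc57212"
// shortSHA("59d0", 7) == "59d0"
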
func (g *SCMProviderGenerator) getSecretRef(ctx context.Context, ref *argoprojiov1alpha1.SecretRef, namespace string) (string, error) {
|
||||
|
||||
@@ -80,209 +80,38 @@ func TestSCMProviderGetSecretRef(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestSCMProviderGenerateParams(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
repos []*scm_provider.Repository
|
||||
values map[string]string
|
||||
expected []map[string]interface{}
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "Multiple repos with labels",
|
||||
repos: []*scm_provider.Repository{
|
||||
{
|
||||
Organization: "myorg",
|
||||
Repository: "repo1",
|
||||
URL: "git@github.com:myorg/repo1.git",
|
||||
Branch: "main",
|
||||
SHA: "0bc57212c3cbbec69d20b34c507284bd300def5b",
|
||||
Labels: []string{"prod", "staging"},
|
||||
},
|
||||
{
|
||||
Organization: "myorg",
|
||||
Repository: "repo2",
|
||||
URL: "git@github.com:myorg/repo2.git",
|
||||
Branch: "main",
|
||||
SHA: "59d0",
|
||||
},
|
||||
mockProvider := &scm_provider.MockProvider{
|
||||
Repos: []*scm_provider.Repository{
|
||||
{
|
||||
Organization: "myorg",
|
||||
Repository: "repo1",
|
||||
URL: "git@github.com:myorg/repo1.git",
|
||||
Branch: "main",
|
||||
SHA: "0bc57212c3cbbec69d20b34c507284bd300def5b",
|
||||
Labels: []string{"prod", "staging"},
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"organization": "myorg",
|
||||
"repository": "repo1",
|
||||
"url": "git@github.com:myorg/repo1.git",
|
||||
"branch": "main",
|
||||
"branchNormalized": "main",
|
||||
"sha": "0bc57212c3cbbec69d20b34c507284bd300def5b",
|
||||
"short_sha": "0bc57212",
|
||||
"short_sha_7": "0bc5721",
|
||||
"labels": "prod,staging",
|
||||
},
|
||||
{
|
||||
"organization": "myorg",
|
||||
"repository": "repo2",
|
||||
"url": "git@github.com:myorg/repo2.git",
|
||||
"branch": "main",
|
||||
"branchNormalized": "main",
|
||||
"sha": "59d0",
|
||||
"short_sha": "59d0",
|
||||
"short_sha_7": "59d0",
|
||||
"labels": "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Value interpolation",
|
||||
repos: []*scm_provider.Repository{
|
||||
{
|
||||
Organization: "myorg",
|
||||
Repository: "repo3",
|
||||
URL: "git@github.com:myorg/repo3.git",
|
||||
Branch: "main",
|
||||
SHA: "0bc57212c3cbbec69d20b34c507284bd300def5b",
|
||||
Labels: []string{"prod", "staging"},
|
||||
},
|
||||
},
|
||||
values: map[string]string{
|
||||
"foo": "bar",
|
||||
"should_i_force_push_to": "{{ branch }}?",
|
||||
},
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"organization": "myorg",
|
||||
"repository": "repo3",
|
||||
"url": "git@github.com:myorg/repo3.git",
|
||||
"branch": "main",
|
||||
"branchNormalized": "main",
|
||||
"sha": "0bc57212c3cbbec69d20b34c507284bd300def5b",
|
||||
"short_sha": "0bc57212",
|
||||
"short_sha_7": "0bc5721",
|
||||
"labels": "prod,staging",
|
||||
"values.foo": "bar",
|
||||
"values.should_i_force_push_to": "main?",
|
||||
},
|
||||
{
|
||||
Organization: "myorg",
|
||||
Repository: "repo2",
|
||||
URL: "git@github.com:myorg/repo2.git",
|
||||
Branch: "main",
|
||||
SHA: "59d0",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range cases {
|
||||
testCaseCopy := testCase
|
||||
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
mockProvider := &scm_provider.MockProvider{
|
||||
Repos: testCaseCopy.repos,
|
||||
}
|
||||
scmGenerator := &SCMProviderGenerator{overrideProvider: mockProvider}
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetGenerator{{
|
||||
SCMProvider: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
Values: testCaseCopy.values,
|
||||
},
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
got, err := scmGenerator.GenerateParams(&applicationSetInfo.Spec.Generators[0], &applicationSetInfo)
|
||||
|
||||
if testCaseCopy.expectedError != nil {
|
||||
assert.EqualError(t, err, testCaseCopy.expectedError.Error())
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, testCaseCopy.expected, got)
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAllowedSCMProvider(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
providerConfig *argoprojiov1alpha1.SCMProviderGenerator
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "Error Github",
|
||||
providerConfig: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
Github: &argoprojiov1alpha1.SCMProviderGeneratorGithub{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Gitlab",
|
||||
providerConfig: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
Gitlab: &argoprojiov1alpha1.SCMProviderGeneratorGitlab{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Gitea",
|
||||
providerConfig: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
Gitea: &argoprojiov1alpha1.SCMProviderGeneratorGitea{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error Bitbucket",
|
||||
providerConfig: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
BitbucketServer: &argoprojiov1alpha1.SCMProviderGeneratorBitbucketServer{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
{
|
||||
name: "Error AzureDevops",
|
||||
providerConfig: &argoprojiov1alpha1.SCMProviderGenerator{
|
||||
AzureDevOps: &argoprojiov1alpha1.SCMProviderGeneratorAzureDevOps{
|
||||
API: "https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
},
|
||||
expectedError: "scm provider not allowed: https://myservice.mynamespace.svc.cluster.local",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range cases {
|
||||
testCaseCopy := testCase
|
||||
|
||||
t.Run(testCaseCopy.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
scmGenerator := &SCMProviderGenerator{allowedSCMProviders: []string{
|
||||
"github.myorg.com",
|
||||
"gitlab.myorg.com",
|
||||
"gitea.myorg.com",
|
||||
"bitbucket.myorg.com",
|
||||
"azuredevops.myorg.com",
|
||||
}}
|
||||
|
||||
applicationSetInfo := argoprojiov1alpha1.ApplicationSet{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "set",
|
||||
},
|
||||
Spec: argoprojiov1alpha1.ApplicationSetSpec{
|
||||
Generators: []argoprojiov1alpha1.ApplicationSetGenerator{{
|
||||
SCMProvider: testCaseCopy.providerConfig,
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := scmGenerator.GenerateParams(&applicationSetInfo.Spec.Generators[0], &applicationSetInfo)
|
||||
|
||||
assert.Error(t, err, "Must return an error")
|
||||
assert.Equal(t, testCaseCopy.expectedError, err.Error())
|
||||
})
|
||||
}
|
||||
gen := &SCMProviderGenerator{overrideProvider: mockProvider}
|
||||
params, err := gen.GenerateParams(&argoprojiov1alpha1.ApplicationSetGenerator{
|
||||
SCMProvider: &argoprojiov1alpha1.SCMProviderGenerator{},
|
||||
}, nil)
|
||||
assert.Nil(t, err)
|
||||
assert.Len(t, params, 2)
|
||||
assert.Equal(t, "myorg", params[0]["organization"])
|
||||
assert.Equal(t, "repo1", params[0]["repository"])
|
||||
assert.Equal(t, "git@github.com:myorg/repo1.git", params[0]["url"])
|
||||
assert.Equal(t, "main", params[0]["branch"])
|
||||
assert.Equal(t, "0bc57212c3cbbec69d20b34c507284bd300def5b", params[0]["sha"])
|
||||
assert.Equal(t, "0bc57212", params[0]["short_sha"])
|
||||
assert.Equal(t, "59d0", params[1]["short_sha"])
|
||||
assert.Equal(t, "prod,staging", params[0]["labels"])
|
||||
assert.Equal(t, "repo2", params[1]["repository"])
|
||||
}
|
||||
|
||||
@@ -1,43 +0,0 @@
package generators

import (
    "fmt"
)

func appendTemplatedValues(values map[string]string, params map[string]interface{}, useGoTemplate bool, goTemplateOptions []string) error {
    // We create a local map to ensure that we do not fall victim to a billion-laughs attack. We iterate through the
    // cluster values map and only replace values in said map if it has already been allowlisted in the params map.
    // Once we iterate through all the cluster values we can then safely merge the `tmp` map into the main params map.
    tmp := map[string]interface{}{}

    for key, value := range values {
        result, err := replaceTemplatedString(value, params, useGoTemplate, goTemplateOptions)

        if err != nil {
            return fmt.Errorf("failed to replace templated string: %w", err)
        }

        if useGoTemplate {
            if tmp["values"] == nil {
                tmp["values"] = map[string]string{}
            }
            tmp["values"].(map[string]string)[key] = result
        } else {
            tmp[fmt.Sprintf("values.%s", key)] = result
        }
    }

    for key, value := range tmp {
        params[key] = value
    }

    return nil
}

func replaceTemplatedString(value string, params map[string]interface{}, useGoTemplate bool, goTemplateOptions []string) (string, error) {
    replacedTmplStr, err := render.Replace(value, params, useGoTemplate, goTemplateOptions)
    if err != nil {
        return "", fmt.Errorf("failed to replace templated string with rendered values: %w", err)
    }
    return replacedTmplStr, nil
}
@@ -1,125 +0,0 @@
|
||||
package generators
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestValueInterpolation(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
values map[string]string
|
||||
params map[string]interface{}
|
||||
expected map[string]interface{}
|
||||
}{
|
||||
{
|
||||
name: "Simple interpolation",
|
||||
values: map[string]string{
|
||||
"hello": "{{ world }}",
|
||||
},
|
||||
params: map[string]interface{}{
|
||||
"world": "world!",
|
||||
},
|
||||
expected: map[string]interface{}{
|
||||
"world": "world!",
|
||||
"values.hello": "world!",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Non-existent",
|
||||
values: map[string]string{
|
||||
"non-existent": "{{ non-existent }}",
|
||||
},
|
||||
params: map[string]interface{}{},
|
||||
expected: map[string]interface{}{
|
||||
"values.non-existent": "{{ non-existent }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Billion laughs",
|
||||
values: map[string]string{
|
||||
"lol1": "lol",
|
||||
"lol2": "{{values.lol1}}{{values.lol1}}",
|
||||
"lol3": "{{values.lol2}}{{values.lol2}}{{values.lol2}}",
|
||||
},
|
||||
params: map[string]interface{}{},
|
||||
expected: map[string]interface{}{
|
||||
"values.lol1": "lol",
|
||||
"values.lol2": "{{values.lol1}}{{values.lol1}}",
|
||||
"values.lol3": "{{values.lol2}}{{values.lol2}}{{values.lol2}}",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
err := appendTemplatedValues(testCase.values, testCase.params, false, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testCase.expected, testCase.params)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueInterpolationWithGoTemplating(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
values map[string]string
|
||||
params map[string]interface{}
|
||||
expected map[string]interface{}
|
||||
}{
|
||||
{
|
||||
name: "Simple interpolation",
|
||||
values: map[string]string{
|
||||
"hello": "{{ .world }}",
|
||||
},
|
||||
params: map[string]interface{}{
|
||||
"world": "world!",
|
||||
},
|
||||
expected: map[string]interface{}{
|
||||
"world": "world!",
|
||||
"values": map[string]string{
|
||||
"hello": "world!",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Non-existent to default",
|
||||
values: map[string]string{
|
||||
"non_existent": "{{ default \"bar\" .non_existent }}",
|
||||
},
|
||||
params: map[string]interface{}{},
|
||||
expected: map[string]interface{}{
|
||||
"values": map[string]string{
|
||||
"non_existent": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Billion laughs",
|
||||
values: map[string]string{
|
||||
"lol1": "lol",
|
||||
"lol2": "{{.values.lol1}}{{.values.lol1}}",
|
||||
"lol3": "{{.values.lol2}}{{.values.lol2}}{{.values.lol2}}",
|
||||
},
|
||||
params: map[string]interface{}{},
|
||||
expected: map[string]interface{}{
|
||||
"values": map[string]string{
|
||||
"lol1": "lol",
|
||||
"lol2": "<no value><no value>",
|
||||
"lol3": "<no value><no value><no value>",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
err := appendTemplatedValues(testCase.values, testCase.params, true, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testCase.expected, testCase.params)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -20,12 +20,10 @@ func Client(g github_app_auth.Authentication, url string) (*github.Client, error
|
||||
url = g.EnterpriseBaseURL
|
||||
}
|
||||
var client *github.Client
|
||||
httpClient := http.Client{Transport: rt}
|
||||
if url == "" {
|
||||
httpClient := http.Client{Transport: rt}
|
||||
client = github.NewClient(&httpClient)
|
||||
} else {
|
||||
rt.BaseURL = url
|
||||
httpClient := http.Client{Transport: rt}
|
||||
client, err = github.NewEnterpriseClient(url, url, &httpClient)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create github enterprise client: %w", err)
|
||||
|
||||
@@ -1,161 +0,0 @@
|
||||
package http
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
userAgent = "argocd-applicationset"
|
||||
defaultTimeout = 30
|
||||
)
|
||||
|
||||
type Client struct {
|
||||
// URL is the URL used for API requests.
|
||||
baseURL string
|
||||
|
||||
// UserAgent is the user agent to include in HTTP requests.
|
||||
UserAgent string
|
||||
|
||||
// Token is used to make authenticated API calls.
|
||||
token string
|
||||
|
||||
// Client is an HTTP client used to communicate with the API.
|
||||
client *http.Client
|
||||
}
|
||||
|
||||
type ErrorResponse struct {
|
||||
Body []byte
|
||||
Response *http.Response
|
||||
Message string
|
||||
}
|
||||
|
||||
func NewClient(baseURL string, options ...ClientOptionFunc) (*Client, error) {
|
||||
client, err := newClient(baseURL, options...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func newClient(baseURL string, options ...ClientOptionFunc) (*Client, error) {
|
||||
c := &Client{baseURL: baseURL, UserAgent: userAgent}
|
||||
|
||||
// Configure the HTTP client.
|
||||
c.client = &http.Client{
|
||||
Timeout: time.Duration(defaultTimeout) * time.Second,
|
||||
}
|
||||
|
||||
// Apply any given client options.
|
||||
for _, fn := range options {
|
||||
if fn == nil {
|
||||
continue
|
||||
}
|
||||
if err := fn(c); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func (c *Client) NewRequest(method, path string, body interface{}, options []ClientOptionFunc) (*http.Request, error) {
|
||||
|
||||
// Make sure the given URL ends with a slash
|
||||
if !strings.HasSuffix(c.baseURL, "/") {
|
||||
c.baseURL += "/"
|
||||
}
|
||||
|
||||
var buf io.ReadWriter
|
||||
if body != nil {
|
||||
buf = &bytes.Buffer{}
|
||||
enc := json.NewEncoder(buf)
|
||||
enc.SetEscapeHTML(false)
|
||||
err := enc.Encode(body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(method, c.baseURL+path, buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if body != nil {
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
if len(c.token) != 0 {
|
||||
req.Header.Set("Authorization", "Bearer "+c.token)
|
||||
}
|
||||
|
||||
if c.UserAgent != "" {
|
||||
req.Header.Set("User-Agent", c.UserAgent)
|
||||
}
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*http.Response, error) {
|
||||
resp, err := c.client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
if err := CheckResponse(resp); err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case nil:
|
||||
case io.Writer:
|
||||
_, err = io.Copy(v, resp.Body)
|
||||
default:
|
||||
buf := new(bytes.Buffer)
|
||||
teeReader := io.TeeReader(resp.Body, buf)
|
||||
decErr := json.NewDecoder(teeReader).Decode(v)
|
||||
if decErr == io.EOF {
|
||||
decErr = nil // ignore EOF errors caused by empty response body
|
||||
}
|
||||
if decErr != nil {
|
||||
err = fmt.Errorf("%s: %s", decErr.Error(), buf.String())
|
||||
}
|
||||
}
|
||||
return resp, err
|
||||
}
|
||||
|
||||
// CheckResponse checks the API response for errors, and returns them if present.
|
||||
func CheckResponse(resp *http.Response) error {
|
||||
|
||||
if c := resp.StatusCode; 200 <= c && c <= 299 {
|
||||
return nil
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("API error with status code %d: %v", resp.StatusCode, err)
|
||||
}
|
||||
|
||||
var raw map[string]interface{}
|
||||
if err := json.Unmarshal(data, &raw); err != nil {
|
||||
return fmt.Errorf("API error with status code %d: %s", resp.StatusCode, string(data))
|
||||
}
|
||||
|
||||
message := ""
|
||||
if value, ok := raw["message"].(string); ok {
|
||||
message = value
|
||||
} else if value, ok := raw["error"].(string); ok {
|
||||
message = value
|
||||
}
|
||||
|
||||
return fmt.Errorf("API error with status code %d: %s", resp.StatusCode, message)
|
||||
}
|
||||
@@ -1,22 +0,0 @@
package http

import "time"

// ClientOptionFunc can be used to customize a new Restful API client.
type ClientOptionFunc func(*Client) error

// WithToken is an option for NewClient to set token
func WithToken(token string) ClientOptionFunc {
    return func(c *Client) error {
        c.token = token
        return nil
    }
}

// WithTimeout can be used to configure a custom timeout for requests.
func WithTimeout(timeout int) ClientOptionFunc {
    return func(c *Client) error {
        c.client.Timeout = time.Duration(timeout) * time.Second
        return nil
    }
}
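A hedged usage sketch for the two options above, using the package-local NewClient shown earlier; the URL and token values are placeholders.

// Sketch only: bearer-token client with a 10-second timeout.
client, err := NewClient(
    "https://plugin.example.com/",
    WithToken("example-token"), // later sent as "Authorization: Bearer example-token"
    WithTimeout(10),            // seconds; overrides the 30-second default
)
if err != nil {
    // handle construction error
}
_ = client
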
@@ -1,163 +0,0 @@
|
||||
package http
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestClient(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, err := w.Write([]byte("Hello, World!"))
|
||||
if err != nil {
|
||||
assert.NoError(t, fmt.Errorf("Error Write %v", err))
|
||||
}
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
var clientOptionFns []ClientOptionFunc
|
||||
_, err := NewClient(server.URL, clientOptionFns...)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create client: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClientDo(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
for _, c := range []struct {
|
||||
name string
|
||||
params map[string]string
|
||||
content []byte
|
||||
fakeServer *httptest.Server
|
||||
clientOptionFns []ClientOptionFunc
|
||||
expected []map[string]interface{}
|
||||
expectedCode int
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "Simple",
|
||||
params: map[string]string{
|
||||
"pkey1": "val1",
|
||||
"pkey2": "val2",
|
||||
},
|
||||
fakeServer: httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, err := w.Write([]byte(`[{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]`))
|
||||
if err != nil {
|
||||
assert.NoError(t, fmt.Errorf("Error Write %v", err))
|
||||
}
|
||||
})),
|
||||
clientOptionFns: nil,
|
||||
expected: []map[string]interface{}{
|
||||
{
|
||||
"key1": "val1",
|
||||
"key2": map[string]interface{}{
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": map[string]interface{}{
|
||||
"key2_2_1": "val2_2_1",
|
||||
},
|
||||
},
|
||||
"key3": float64(123),
|
||||
},
|
||||
},
|
||||
expectedCode: 200,
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "With Token",
|
||||
params: map[string]string{
|
||||
"pkey1": "val1",
|
||||
"pkey2": "val2",
|
||||
},
|
||||
fakeServer: httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
authHeader := r.Header.Get("Authorization")
|
||||
if authHeader != "Bearer "+string("test-token") {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, err := w.Write([]byte(`[{
|
||||
"key1": "val1",
|
||||
"key2": {
|
||||
"key2_1": "val2_1",
|
||||
"key2_2": {
|
||||
"key2_2_1": "val2_2_1"
|
||||
}
|
||||
},
|
||||
"key3": 123
|
||||
}]`))
|
||||
if err != nil {
|
||||
assert.NoError(t, fmt.Errorf("Error Write %v", err))
|
||||
}
|
||||
})),
|
||||
clientOptionFns: nil,
|
||||
expected: []map[string]interface{}(nil),
|
||||
expectedCode: 401,
|
||||
expectedError: fmt.Errorf("API error with status code 401: "),
|
||||
},
|
||||
} {
|
||||
cc := c
|
||||
t.Run(cc.name, func(t *testing.T) {
|
||||
defer cc.fakeServer.Close()
|
||||
|
||||
client, err := NewClient(cc.fakeServer.URL, cc.clientOptionFns...)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("NewClient returned unexpected error: %v", err)
|
||||
}
|
||||
|
||||
req, err := client.NewRequest("POST", "", cc.params, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("NewRequest returned unexpected error: %v", err)
|
||||
}
|
||||
|
||||
var data []map[string]interface{}
|
||||
|
||||
resp, err := client.Do(ctx, req, &data)
|
||||
|
||||
if cc.expectedError != nil {
|
||||
assert.EqualError(t, err, cc.expectedError.Error())
|
||||
} else {
|
||||
assert.Equal(t, resp.StatusCode, cc.expectedCode)
|
||||
assert.Equal(t, data, cc.expected)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCheckResponse(t *testing.T) {
|
||||
resp := &http.Response{
|
||||
StatusCode: http.StatusBadRequest,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":"invalid_request","description":"Invalid token"}`)),
|
||||
}
|
||||
|
||||
err := CheckResponse(resp)
|
||||
if err == nil {
|
||||
t.Error("Expected an error, got nil")
|
||||
}
|
||||
|
||||
expected := "API error with status code 400: invalid_request"
|
||||
if err.Error() != expected {
|
||||
t.Errorf("Expected error '%s', got '%s'", expected, err.Error())
|
||||
}
|
||||
}
|
||||
@@ -1,81 +0,0 @@
|
||||
// Code generated by mockery v2.25.1. DO NOT EDIT.
|
||||
|
||||
package mocks
|
||||
|
||||
import (
|
||||
context "context"
|
||||
|
||||
mock "github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// Repos is an autogenerated mock type for the Repos type
|
||||
type Repos struct {
|
||||
mock.Mock
|
||||
}
|
||||
|
||||
// GetDirectories provides a mock function with given fields: ctx, repoURL, revision, noRevisionCache
|
||||
func (_m *Repos) GetDirectories(ctx context.Context, repoURL string, revision string, noRevisionCache bool) ([]string, error) {
|
||||
ret := _m.Called(ctx, repoURL, revision, noRevisionCache)
|
||||
|
||||
var r0 []string
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string, string, bool) ([]string, error)); ok {
|
||||
return rf(ctx, repoURL, revision, noRevisionCache)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string, string, bool) []string); ok {
|
||||
r0 = rf(ctx, repoURL, revision, noRevisionCache)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]string)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, string, string, bool) error); ok {
|
||||
r1 = rf(ctx, repoURL, revision, noRevisionCache)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetFiles provides a mock function with given fields: ctx, repoURL, revision, pattern, noRevisionCache
|
||||
func (_m *Repos) GetFiles(ctx context.Context, repoURL string, revision string, pattern string, noRevisionCache bool) (map[string][]byte, error) {
|
||||
ret := _m.Called(ctx, repoURL, revision, pattern, noRevisionCache)
|
||||
|
||||
var r0 map[string][]byte
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, bool) (map[string][]byte, error)); ok {
|
||||
return rf(ctx, repoURL, revision, pattern, noRevisionCache)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, bool) map[string][]byte); ok {
|
||||
r0 = rf(ctx, repoURL, revision, pattern, noRevisionCache)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(map[string][]byte)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, string, string, string, bool) error); ok {
|
||||
r1 = rf(ctx, repoURL, revision, pattern, noRevisionCache)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
type mockConstructorTestingTNewRepos interface {
|
||||
mock.TestingT
|
||||
Cleanup(func())
|
||||
}
|
||||
|
||||
// NewRepos creates a new instance of Repos. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
|
||||
func NewRepos(t mockConstructorTestingTNewRepos) *Repos {
|
||||
mock := &Repos{}
|
||||
mock.Mock.Test(t)
|
||||
|
||||
t.Cleanup(func() { mock.AssertExpectations(t) })
|
||||
|
||||
return mock
|
||||
}
|
||||
@@ -1,57 +0,0 @@
|
||||
// Code generated by mockery v2.21.1. DO NOT EDIT.
|
||||
|
||||
package mocks
|
||||
|
||||
import (
|
||||
context "context"
|
||||
|
||||
mock "github.com/stretchr/testify/mock"
|
||||
|
||||
v1alpha1 "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
|
||||
)
|
||||
|
||||
// RepositoryDB is an autogenerated mock type for the RepositoryDB type
|
||||
type RepositoryDB struct {
|
||||
mock.Mock
|
||||
}
|
||||
|
||||
// GetRepository provides a mock function with given fields: ctx, url
|
||||
func (_m *RepositoryDB) GetRepository(ctx context.Context, url string) (*v1alpha1.Repository, error) {
|
||||
ret := _m.Called(ctx, url)
|
||||
|
||||
var r0 *v1alpha1.Repository
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string) (*v1alpha1.Repository, error)); ok {
|
||||
return rf(ctx, url)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string) *v1alpha1.Repository); ok {
|
||||
r0 = rf(ctx, url)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*v1alpha1.Repository)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
|
||||
r1 = rf(ctx, url)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
type mockConstructorTestingTNewRepositoryDB interface {
|
||||
mock.TestingT
|
||||
Cleanup(func())
|
||||
}
|
||||
|
||||
// NewRepositoryDB creates a new instance of RepositoryDB. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
|
||||
func NewRepositoryDB(t mockConstructorTestingTNewRepositoryDB) *RepositoryDB {
|
||||
mock := &RepositoryDB{}
|
||||
mock.Mock.Test(t)
|
||||
|
||||
t.Cleanup(func() { mock.AssertExpectations(t) })
|
||||
|
||||
return mock
|
||||
}
|
||||
@@ -1,73 +0,0 @@
package plugin

import (
    "context"
    "fmt"
    "net/http"

    internalhttp "github.com/argoproj/argo-cd/v2/applicationset/services/internal/http"
    "github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
)

// ServiceRequest is the request object sent to the plugin service.
type ServiceRequest struct {
    // ApplicationSetName is the appSetName of the ApplicationSet for which we're requesting parameters. Useful for logging in
    // the plugin service.
    ApplicationSetName string `json:"applicationSetName"`
    // Input is the map of parameters set in the ApplicationSet spec for this generator.
    Input v1alpha1.PluginInput `json:"input"`
}

type Output struct {
    // Parameters is the list of parameter sets returned by the plugin.
    Parameters []map[string]interface{} `json:"parameters"`
}

// ServiceResponse is the response object returned by the plugin service.
type ServiceResponse struct {
    // Output is the map of outputs returned by the plugin.
    Output Output `json:"output"`
}

type Service struct {
    client     *internalhttp.Client
    appSetName string
}

func NewPluginService(ctx context.Context, appSetName string, baseURL string, token string, requestTimeout int) (*Service, error) {
    var clientOptionFns []internalhttp.ClientOptionFunc

    clientOptionFns = append(clientOptionFns, internalhttp.WithToken(token))

    if requestTimeout != 0 {
        clientOptionFns = append(clientOptionFns, internalhttp.WithTimeout(requestTimeout))
    }

    client, err := internalhttp.NewClient(baseURL, clientOptionFns...)
    if err != nil {
        return nil, fmt.Errorf("error creating plugin client: %v", err)
    }

    return &Service{
        client:     client,
        appSetName: appSetName,
    }, nil
}

func (p *Service) List(ctx context.Context, parameters v1alpha1.PluginParameters) (*ServiceResponse, error) {
    req, err := p.client.NewRequest(http.MethodPost, "api/v1/getparams.execute", ServiceRequest{ApplicationSetName: p.appSetName, Input: v1alpha1.PluginInput{Parameters: parameters}}, nil)

    if err != nil {
        return nil, fmt.Errorf("NewRequest returned unexpected error: %v", err)
    }

    var data ServiceResponse

    _, err = p.client.Do(ctx, req, &data)

    if err != nil {
        return nil, fmt.Errorf("error get api '%s': %v", p.appSetName, err)
    }

    return &data, err
}
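For reference, a sketch of the request body that List sends to the endpoint above. The applicationSetName and input keys come from the struct tags earlier in this file; the nested parameters key is assumed from v1alpha1.PluginInput, and the appSetName value is a placeholder.

// Sketch (assumed wire format, not part of this diff):
//
// POST <baseURL>/api/v1/getparams.execute
// {
//   "applicationSetName": "my-appset",
//   "input": { "parameters": { ... } }
// }
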
@@ -1,52 +0,0 @@
|
||||
package plugin
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPlugin(t *testing.T) {
|
||||
expectedJSON := `{"parameters": [{"number":123,"digest":"sha256:942ae2dfd73088b54d7151a3c3fd5af038a51c50029bfcfd21f1e650d9579967"},{"number":456,"digest":"sha256:224e68cc69566e5cbbb76034b3c42cd2ed57c1a66720396e1c257794cb7d68c1"}]}`
|
||||
token := "0bc57212c3cbbec69d20b34c507284bd300def5b"
|
||||
|
||||
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
authHeader := r.Header.Get("Authorization")
|
||||
if authHeader != "Bearer "+token {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
_, err := w.Write([]byte(expectedJSON))
|
||||
|
||||
if err != nil {
|
||||
assert.NoError(t, fmt.Errorf("Error Write %v", err))
|
||||
}
|
||||
})
|
||||
ts := httptest.NewServer(handler)
|
||||
defer ts.Close()
|
||||
|
||||
client, err := NewPluginService(context.Background(), "plugin-test", ts.URL, token, 0)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
data, err := client.List(context.Background(), nil)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
var expectedData ServiceResponse
|
||||
err = json.Unmarshal([]byte(expectedJSON), &expectedData)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
assert.Equal(t, &expectedData, data)
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
package plugin
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/argoproj/argo-cd/v2/common"
|
||||
)
|
||||
|
||||
// ParseSecretKey retrieves the secret name referenced by key, falling back to the common ArgoCDSecretName when no explicit name is given.
|
||||
func ParseSecretKey(key string) (secretName string, tokenKey string) {
|
||||
if strings.Contains(key, ":") {
|
||||
parts := strings.Split(key, ":")
|
||||
secretName = parts[0][1:]
|
||||
tokenKey = fmt.Sprintf("$%s", parts[1])
|
||||
} else {
|
||||
secretName = common.ArgoCDSecretName
|
||||
tokenKey = key
|
||||
}
|
||||
return secretName, tokenKey
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
package plugin
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestParseSecretKey(t *testing.T) {
|
||||
secretName, tokenKey := ParseSecretKey("#my-secret:my-token")
|
||||
assert.Equal(t, "my-secret", secretName)
|
||||
assert.Equal(t, "$my-token", tokenKey)
|
||||
|
||||
secretName, tokenKey = ParseSecretKey("#my-secret")
|
||||
assert.Equal(t, "argocd-secret", secretName)
|
||||
assert.Equal(t, "#my-secret", tokenKey)
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
package pull_request
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/microsoft/azure-devops-go-api/azuredevops"
|
||||
core "github.com/microsoft/azure-devops-go-api/azuredevops/core"
|
||||
git "github.com/microsoft/azure-devops-go-api/azuredevops/git"
|
||||
)
|
||||
|
||||
const AZURE_DEVOPS_DEFAULT_URL = "https://dev.azure.com"
|
||||
|
||||
type AzureDevOpsClientFactory interface {
|
||||
// Returns an Azure Devops Client interface.
|
||||
GetClient(ctx context.Context) (git.Client, error)
|
||||
}
|
||||
|
||||
type devopsFactoryImpl struct {
|
||||
connection *azuredevops.Connection
|
||||
}
|
||||
|
||||
func (factory *devopsFactoryImpl) GetClient(ctx context.Context) (git.Client, error) {
|
||||
gitClient, err := git.NewClient(ctx, factory.connection)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get new Azure DevOps git client for pull request generator: %w", err)
|
||||
}
|
||||
return gitClient, nil
|
||||
}
|
||||
|
||||
type AzureDevOpsService struct {
|
||||
clientFactory AzureDevOpsClientFactory
|
||||
project string
|
||||
repo string
|
||||
labels []string
|
||||
}
|
||||
|
||||
var _ PullRequestService = (*AzureDevOpsService)(nil)
|
||||
var _ AzureDevOpsClientFactory = &devopsFactoryImpl{}
|
||||
|
||||
func NewAzureDevOpsService(ctx context.Context, token, url, organization, project, repo string, labels []string) (PullRequestService, error) {
    organizationUrl := buildURL(url, organization)

    var connection *azuredevops.Connection
    if token == "" {
        connection = azuredevops.NewAnonymousConnection(organizationUrl)
    } else {
        connection = azuredevops.NewPatConnection(organizationUrl, token)
    }

    return &AzureDevOpsService{
        clientFactory: &devopsFactoryImpl{connection: connection},
        project:       project,
        repo:          repo,
        labels:        labels,
    }, nil
}

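A hedged construction sketch for the service above; the organization, project, repository and token values are placeholders. An empty token yields an anonymous connection, and an empty url falls back to the default https://dev.azure.com via buildURL.

// Sketch only (placeholder values): PAT-authenticated listing of one repository's PRs.
svc, err := NewAzureDevOpsService(
    context.Background(),
    "my-pat",             // "" would create an anonymous connection instead
    "",                   // "" resolves to https://dev.azure.com
    "myorganization",
    "myorg_project",
    "myorg_project_repo",
    nil,                  // no required labels
)
if err != nil {
    // handle error
}
prs, err := svc.List(context.Background())
_ = prs
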
func (a *AzureDevOpsService) List(ctx context.Context) ([]*PullRequest, error) {
|
||||
client, err := a.clientFactory.GetClient(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get Azure DevOps client: %w", err)
|
||||
}
|
||||
|
||||
args := git.GetPullRequestsByProjectArgs{
|
||||
Project: &a.project,
|
||||
SearchCriteria: &git.GitPullRequestSearchCriteria{},
|
||||
}
|
||||
|
||||
azurePullRequests, err := client.GetPullRequestsByProject(ctx, args)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get pull requests by project: %w", err)
|
||||
}
|
||||
|
||||
pullRequests := []*PullRequest{}
|
||||
|
||||
for _, pr := range *azurePullRequests {
|
||||
if pr.Repository == nil ||
|
||||
pr.Repository.Name == nil ||
|
||||
pr.PullRequestId == nil ||
|
||||
pr.SourceRefName == nil ||
|
||||
pr.LastMergeSourceCommit == nil ||
|
||||
pr.LastMergeSourceCommit.CommitId == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
azureDevOpsLabels := convertLabels(pr.Labels)
|
||||
if !containAzureDevOpsLabels(a.labels, azureDevOpsLabels) {
|
||||
continue
|
||||
}
|
||||
|
||||
if *pr.Repository.Name == a.repo {
|
||||
pullRequests = append(pullRequests, &PullRequest{
|
||||
Number: *pr.PullRequestId,
|
||||
Branch: strings.Replace(*pr.SourceRefName, "refs/heads/", "", 1),
|
||||
HeadSHA: *pr.LastMergeSourceCommit.CommitId,
|
||||
Labels: azureDevOpsLabels,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return pullRequests, nil
|
||||
}
|
||||
|
||||
// convertLabels converts WebApiTagDefinitions to strings
|
||||
func convertLabels(tags *[]core.WebApiTagDefinition) []string {
|
||||
if tags == nil {
|
||||
return []string{}
|
||||
}
|
||||
labelStrings := make([]string, len(*tags))
|
||||
for i, label := range *tags {
|
||||
labelStrings[i] = *label.Name
|
||||
}
|
||||
return labelStrings
|
||||
}
|
||||
|
||||
// containAzureDevOpsLabels returns true if gotLabels contains expectedLabels
|
||||
func containAzureDevOpsLabels(expectedLabels []string, gotLabels []string) bool {
|
||||
for _, expected := range expectedLabels {
|
||||
found := false
|
||||
for _, got := range gotLabels {
|
||||
if expected == got {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func buildURL(url, organization string) string {
|
||||
if url == "" {
|
||||
url = AZURE_DEVOPS_DEFAULT_URL
|
||||
}
|
||||
separator := ""
|
||||
if !strings.HasSuffix(url, "/") {
|
||||
separator = "/"
|
||||
}
|
||||
devOpsURL := fmt.Sprintf("%s%s%s", url, separator, organization)
|
||||
return devOpsURL
|
||||
}
|
||||
@@ -1,221 +0,0 @@
|
||||
package pull_request
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/microsoft/azure-devops-go-api/azuredevops/core"
|
||||
git "github.com/microsoft/azure-devops-go-api/azuredevops/git"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
|
||||
azureMock "github.com/argoproj/argo-cd/v2/applicationset/services/scm_provider/azure_devops/git/mocks"
|
||||
)
|
||||
|
||||
func createBoolPtr(x bool) *bool {
|
||||
return &x
|
||||
}
|
||||
|
||||
func createStringPtr(x string) *string {
|
||||
return &x
|
||||
}
|
||||
|
||||
func createIntPtr(x int) *int {
|
||||
return &x
|
||||
}
|
||||
|
||||
func createLabelsPtr(x []core.WebApiTagDefinition) *[]core.WebApiTagDefinition {
|
||||
return &x
|
||||
}
|
||||
|
||||
type AzureClientFactoryMock struct {
|
||||
mock *mock.Mock
|
||||
}
|
||||
|
||||
func (m *AzureClientFactoryMock) GetClient(ctx context.Context) (git.Client, error) {
|
||||
args := m.mock.Called(ctx)
|
||||
|
||||
var client git.Client
|
||||
c := args.Get(0)
|
||||
if c != nil {
|
||||
client = c.(git.Client)
|
||||
}
|
||||
|
||||
var err error
|
||||
if len(args) > 1 {
|
||||
if e, ok := args.Get(1).(error); ok {
|
||||
err = e
|
||||
}
|
||||
}
|
||||
|
||||
return client, err
|
||||
}
|
||||
|
||||
func TestListPullRequest(t *testing.T) {
|
||||
teamProject := "myorg_project"
|
||||
repoName := "myorg_project_repo"
|
||||
pr_id := 123
|
||||
pr_head_sha := "cd4973d9d14a08ffe6b641a89a68891d6aac8056"
|
||||
ctx := context.Background()
|
||||
|
||||
pullRequestMock := []git.GitPullRequest{
|
||||
{
|
||||
PullRequestId: createIntPtr(pr_id),
|
||||
SourceRefName: createStringPtr("refs/heads/feature-branch"),
|
||||
LastMergeSourceCommit: &git.GitCommitRef{
|
||||
CommitId: createStringPtr(pr_head_sha),
|
||||
},
|
||||
Labels: &[]core.WebApiTagDefinition{},
|
||||
Repository: &git.GitRepository{
|
||||
Name: createStringPtr(repoName),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
args := git.GetPullRequestsByProjectArgs{
|
||||
Project: &teamProject,
|
||||
SearchCriteria: &git.GitPullRequestSearchCriteria{},
|
||||
}
|
||||
|
||||
gitClientMock := azureMock.Client{}
|
||||
clientFactoryMock := &AzureClientFactoryMock{mock: &mock.Mock{}}
|
||||
clientFactoryMock.mock.On("GetClient", mock.Anything).Return(&gitClientMock, nil)
|
||||
gitClientMock.On("GetPullRequestsByProject", ctx, args).Return(&pullRequestMock, nil)
|
||||
|
||||
provider := AzureDevOpsService{
|
||||
clientFactory: clientFactoryMock,
|
||||
project: teamProject,
|
||||
repo: repoName,
|
||||
labels: nil,
|
||||
}
|
||||
|
||||
list, err := provider.List(ctx)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1, len(list))
|
||||
assert.Equal(t, "feature-branch", list[0].Branch)
|
||||
assert.Equal(t, pr_head_sha, list[0].HeadSHA)
|
||||
assert.Equal(t, pr_id, list[0].Number)
|
||||
}
|
||||
|
||||
func TestConvertLabes(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
gotLabels *[]core.WebApiTagDefinition
|
||||
expectedLabels []string
|
||||
}{
|
||||
{
|
||||
name: "empty labels",
|
||||
gotLabels: createLabelsPtr([]core.WebApiTagDefinition{}),
|
||||
expectedLabels: []string{},
|
||||
},
|
||||
{
|
||||
name: "nil labels",
|
||||
gotLabels: createLabelsPtr(nil),
|
||||
expectedLabels: []string{},
|
||||
},
|
||||
{
|
||||
name: "one label",
|
||||
gotLabels: createLabelsPtr([]core.WebApiTagDefinition{
|
||||
{Name: createStringPtr("label1"), Active: createBoolPtr(true)},
|
||||
}),
|
||||
expectedLabels: []string{"label1"},
|
||||
},
|
||||
{
|
||||
name: "two label",
|
||||
gotLabels: createLabelsPtr([]core.WebApiTagDefinition{
|
||||
{Name: createStringPtr("label1"), Active: createBoolPtr(true)},
|
||||
{Name: createStringPtr("label2"), Active: createBoolPtr(true)},
|
||||
}),
|
||||
expectedLabels: []string{"label1", "label2"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got := convertLabels(tc.gotLabels)
|
||||
assert.Equal(t, tc.expectedLabels, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestContainAzureDevOpsLabels(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedLabels []string
|
||||
gotLabels []string
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "empty labels",
|
||||
expectedLabels: []string{},
|
||||
gotLabels: []string{},
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "no matching labels",
|
||||
expectedLabels: []string{"label1", "label2"},
|
||||
gotLabels: []string{"label3", "label4"},
|
||||
expectedResult: false,
|
||||
},
|
||||
{
|
||||
name: "some matching labels",
|
||||
expectedLabels: []string{"label1", "label2"},
|
||||
gotLabels: []string{"label1", "label3"},
|
||||
expectedResult: false,
|
||||
},
|
||||
{
|
||||
name: "all matching labels",
|
||||
expectedLabels: []string{"label1", "label2"},
|
||||
gotLabels: []string{"label1", "label2"},
|
||||
expectedResult: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got := containAzureDevOpsLabels(tc.expectedLabels, tc.gotLabels)
|
||||
assert.Equal(t, tc.expectedResult, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildURL(t *testing.T) {
	testCases := []struct {
		name         string
		url          string
		organization string
		expected     string
	}{
		{
			name:         "Provided default URL and organization",
			url:          "https://dev.azure.com/",
			organization: "myorganization",
			expected:     "https://dev.azure.com/myorganization",
		},
		{
			name:         "Provided default URL and organization without trailing slash",
			url:          "https://dev.azure.com",
			organization: "myorganization",
			expected:     "https://dev.azure.com/myorganization",
		},
		{
			name:         "Provided no URL and organization",
			url:          "",
			organization: "myorganization",
			expected:     "https://dev.azure.com/myorganization",
		},
		{
			name:         "Provided custom URL and organization",
			url:          "https://azuredevops.mycompany.com/",
			organization: "myorganization",
			expected:     "https://azuredevops.mycompany.com/myorganization",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			result := buildURL(tc.url, tc.organization)
			assert.Equal(t, tc.expected, result)
		})
	}
}

@@ -1,138 +0,0 @@
package pull_request

import (
	"context"
	"encoding/json"
	"fmt"
	"net/url"

	"github.com/ktrysmt/go-bitbucket"
)

type BitbucketCloudService struct {
	client         *bitbucket.Client
	owner          string
	repositorySlug string
}

type BitbucketCloudPullRequest struct {
	ID     int                             `json:"id"`
	Source BitbucketCloudPullRequestSource `json:"source"`
}

type BitbucketCloudPullRequestSource struct {
	Branch BitbucketCloudPullRequestSourceBranch `json:"branch"`
	Commit BitbucketCloudPullRequestSourceCommit `json:"commit"`
}

type BitbucketCloudPullRequestSourceBranch struct {
	Name string `json:"name"`
}

type BitbucketCloudPullRequestSourceCommit struct {
	Hash string `json:"hash"`
}

type PullRequestResponse struct {
	Page     int32         `json:"page"`
	Size     int32         `json:"size"`
	Pagelen  int32         `json:"pagelen"`
	Next     string        `json:"next"`
	Previous string        `json:"previous"`
	Items    []PullRequest `json:"values"`
}

var _ PullRequestService = (*BitbucketCloudService)(nil)

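// parseUrl parses the configured API base URL, falling back to the public Bitbucket Cloud endpoint ("https://api.bitbucket.org/2.0") when the value is empty.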
func parseUrl(uri string) (*url.URL, error) {
	if uri == "" {
		uri = "https://api.bitbucket.org/2.0"
	}

	url, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}

	return url, nil
}

func NewBitbucketCloudServiceBasicAuth(baseUrl, username, password, owner, repositorySlug string) (PullRequestService, error) {
	url, err := parseUrl(baseUrl)
	if err != nil {
		return nil, fmt.Errorf("error parsing base url of %s for %s/%s: %v", baseUrl, owner, repositorySlug, err)
	}

	bitbucketClient := bitbucket.NewBasicAuth(username, password)
	bitbucketClient.SetApiBaseURL(*url)

	return &BitbucketCloudService{
		client:         bitbucketClient,
		owner:          owner,
		repositorySlug: repositorySlug,
	}, nil
}

func NewBitbucketCloudServiceBearerToken(baseUrl, bearerToken, owner, repositorySlug string) (PullRequestService, error) {
	url, err := parseUrl(baseUrl)
	if err != nil {
		return nil, fmt.Errorf("error parsing base url of %s for %s/%s: %v", baseUrl, owner, repositorySlug, err)
	}

	bitbucketClient := bitbucket.NewOAuthbearerToken(bearerToken)
	bitbucketClient.SetApiBaseURL(*url)

	return &BitbucketCloudService{
		client:         bitbucketClient,
		owner:          owner,
		repositorySlug: repositorySlug,
	}, nil
}

func NewBitbucketCloudServiceNoAuth(baseUrl, owner, repositorySlug string) (PullRequestService, error) {
	// There is currently no method to explicitly not require auth
	return NewBitbucketCloudServiceBearerToken(baseUrl, "", owner, repositorySlug)
}

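// List returns the pull requests for the configured owner and repository slug. The go-bitbucket client returns an untyped response, so the "values" array is re-marshalled into []BitbucketCloudPullRequest before being mapped onto the generator's PullRequest type.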
func (b *BitbucketCloudService) List(_ context.Context) ([]*PullRequest, error) {
	opts := &bitbucket.PullRequestsOptions{
		Owner:    b.owner,
		RepoSlug: b.repositorySlug,
	}

	response, err := b.client.Repositories.PullRequests.Gets(opts)
	if err != nil {
		return nil, fmt.Errorf("error listing pull requests for %s/%s: %v", b.owner, b.repositorySlug, err)
	}

	resp, ok := response.(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("unknown type returned from bitbucket pull requests")
	}

	repoArray, ok := resp["values"].([]interface{})
	if !ok {
		return nil, fmt.Errorf("unknown type returned from response values")
	}

	jsonStr, err := json.Marshal(repoArray)
	if err != nil {
		return nil, fmt.Errorf("error marshalling response body to json: %v", err)
	}

	var pulls []BitbucketCloudPullRequest
	if err := json.Unmarshal(jsonStr, &pulls); err != nil {
		return nil, fmt.Errorf("error unmarshalling json to type '[]BitbucketCloudPullRequest': %v", err)
	}

	pullRequests := []*PullRequest{}
	for _, pull := range pulls {
		pullRequests = append(pullRequests, &PullRequest{
			Number:  pull.ID,
			Branch:  pull.Source.Branch.Name,
			HeadSHA: pull.Source.Commit.Hash,
		})
	}

	return pullRequests, nil
}

@@ -1,410 +0,0 @@
package pull_request

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/argoproj/argo-cd/v2/pkg/apis/application/v1alpha1"
)

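// defaultHandlerCloud returns an HTTP handler that serves one canned pull request for "/repositories/OWNER/REPO/pullrequests/" and fails the test for any other request path.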
func defaultHandlerCloud(t *testing.T) func(http.ResponseWriter, *http.Request) {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		var err error
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err = io.WriteString(w, `{
				"size": 1,
				"pagelen": 10,
				"page": 1,
				"values": [
					{
						"id": 101,
						"source": {
							"branch": {
								"name": "feature/foo-bar"
							},
							"commit": {
								"type": "commit",
								"hash": "1a8dd249c04a"
							}
						}
					}
				]
			}`)
		default:
			t.Fail()
		}
		if err != nil {
			t.Fail()
		}
	}
}

func TestParseUrlEmptyUrl(t *testing.T) {
	url, err := parseUrl("")
	bitbucketUrl, _ := url.Parse("https://api.bitbucket.org/2.0")

	assert.NoError(t, err)
	assert.Equal(t, bitbucketUrl, url)
}

func TestInvalidBaseUrlBasicAuthCloud(t *testing.T) {
	_, err := NewBitbucketCloudServiceBasicAuth("http:// example.org", "user", "password", "OWNER", "REPO")

	assert.Error(t, err)
}

func TestInvalidBaseUrlBearerTokenCloud(t *testing.T) {
	_, err := NewBitbucketCloudServiceBearerToken("http:// example.org", "TOKEN", "OWNER", "REPO")

	assert.Error(t, err)
}

func TestInvalidBaseUrlNoAuthCloud(t *testing.T) {
	_, err := NewBitbucketCloudServiceNoAuth("http:// example.org", "OWNER", "REPO")

	assert.Error(t, err)
}

func TestListPullRequestBearerTokenCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, "Bearer TOKEN", r.Header.Get("Authorization"))
		defaultHandlerCloud(t)(w, r)
	}))
	defer ts.Close()
	svc, err := NewBitbucketCloudServiceBearerToken(ts.URL, "TOKEN", "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, 101, pullRequests[0].Number)
	assert.Equal(t, "feature/foo-bar", pullRequests[0].Branch)
	assert.Equal(t, "1a8dd249c04a", pullRequests[0].HeadSHA)
}

func TestListPullRequestNoAuthCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Empty(t, r.Header.Get("Authorization"))
		defaultHandlerCloud(t)(w, r)
	}))
	defer ts.Close()
	svc, err := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, 101, pullRequests[0].Number)
	assert.Equal(t, "feature/foo-bar", pullRequests[0].Branch)
	assert.Equal(t, "1a8dd249c04a", pullRequests[0].HeadSHA)
}

func TestListPullRequestBasicAuthCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, "Basic dXNlcjpwYXNzd29yZA==", r.Header.Get("Authorization"))
		defaultHandlerCloud(t)(w, r)
	}))
	defer ts.Close()
	svc, err := NewBitbucketCloudServiceBasicAuth(ts.URL, "user", "password", "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, 101, pullRequests[0].Number)
	assert.Equal(t, "feature/foo-bar", pullRequests[0].Branch)
	assert.Equal(t, "1a8dd249c04a", pullRequests[0].HeadSHA)
}

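// TestListPullRequestPaginationCloud verifies that ListPullRequests follows the "next" link of a paginated response and aggregates the pull requests from every page.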
func TestListPullRequestPaginationCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		var err error
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err = io.WriteString(w, fmt.Sprintf(`{
				"size": 2,
				"pagelen": 1,
				"page": 1,
				"next": "http://%s/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=2",
				"values": [
					{
						"id": 101,
						"source": {
							"branch": {
								"name": "feature-101"
							},
							"commit": {
								"type": "commit",
								"hash": "1a8dd249c04a"
							}
						}
					},
					{
						"id": 102,
						"source": {
							"branch": {
								"name": "feature-102"
							},
							"commit": {
								"type": "commit",
								"hash": "4cf807e67a6d"
							}
						}
					}
				]
			}`, r.Host))
		case "/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=2":
			_, err = io.WriteString(w, fmt.Sprintf(`{
				"size": 2,
				"pagelen": 1,
				"page": 2,
				"previous": "http://%s/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=1",
				"values": [
					{
						"id": 103,
						"source": {
							"branch": {
								"name": "feature-103"
							},
							"commit": {
								"type": "commit",
								"hash": "6344d9623e3b"
							}
						}
					}
				]
			}`, r.Host))
		default:
			t.Fail()
		}
		if err != nil {
			t.Fail()
		}
	}))
	defer ts.Close()
	svc, err := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.NoError(t, err)
	assert.Equal(t, 3, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:  101,
		Branch:  "feature-101",
		HeadSHA: "1a8dd249c04a",
	}, *pullRequests[0])
	assert.Equal(t, PullRequest{
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "4cf807e67a6d",
	}, *pullRequests[1])
	assert.Equal(t, PullRequest{
		Number:  103,
		Branch:  "feature-103",
		HeadSHA: "6344d9623e3b",
	}, *pullRequests[2])
}

func TestListResponseErrorCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(500)
	}))
	defer ts.Close()
	svc, _ := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	_, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.Error(t, err)
}

func TestListResponseMalformedCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err := io.WriteString(w, `[{
				"size": 1,
				"pagelen": 10,
				"page": 1,
				"values": [{ "id": 101 }]
			}]`)
			if err != nil {
				t.Fail()
			}
		default:
			t.Fail()
		}
	}))
	defer ts.Close()
	svc, _ := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	_, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.Error(t, err)
}

func TestListResponseMalformedValuesCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err := io.WriteString(w, `{
				"size": 1,
				"pagelen": 10,
				"page": 1,
				"values": { "id": 101 }
			}`)
			if err != nil {
				t.Fail()
			}
		default:
			t.Fail()
		}
	}))
	defer ts.Close()
	svc, _ := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	_, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.Error(t, err)
}

func TestListResponseEmptyCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err := io.WriteString(w, `{
				"size": 1,
				"pagelen": 10,
				"page": 1,
				"values": []
			}`)
			if err != nil {
				t.Fail()
			}
		default:
			t.Fail()
		}
	}))
	defer ts.Close()
	svc, err := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{})
	assert.NoError(t, err)
	assert.Empty(t, pullRequests)
}

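// TestListPullRequestBranchMatchCloud exercises the branchMatch filter: a regex matching a subset of branches, a narrower regex, and an invalid regex that must surface an error.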
func TestListPullRequestBranchMatchCloud(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		var err error
		switch r.RequestURI {
		case "/repositories/OWNER/REPO/pullrequests/":
			_, err = io.WriteString(w, fmt.Sprintf(`{
				"size": 2,
				"pagelen": 1,
				"page": 1,
				"next": "http://%s/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=2",
				"values": [
					{
						"id": 101,
						"source": {
							"branch": {
								"name": "feature-101"
							},
							"commit": {
								"type": "commit",
								"hash": "1a8dd249c04a"
							}
						}
					},
					{
						"id": 200,
						"source": {
							"branch": {
								"name": "feature-200"
							},
							"commit": {
								"type": "commit",
								"hash": "4cf807e67a6d"
							}
						}
					}
				]
			}`, r.Host))
		case "/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=2":
			_, err = io.WriteString(w, fmt.Sprintf(`{
				"size": 2,
				"pagelen": 1,
				"page": 2,
				"previous": "http://%s/repositories/OWNER/REPO/pullrequests/?pagelen=1&page=1",
				"values": [
					{
						"id": 102,
						"source": {
							"branch": {
								"name": "feature-102"
							},
							"commit": {
								"type": "commit",
								"hash": "6344d9623e3b"
							}
						}
					}
				]
			}`, r.Host))
		default:
			t.Fail()
		}
		if err != nil {
			t.Fail()
		}
	}))
	defer ts.Close()
	regexp := `feature-1[\d]{2}`
	svc, err := NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err := ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{
		{
			BranchMatch: &regexp,
		},
	})
	assert.NoError(t, err)
	assert.Equal(t, 2, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:  101,
		Branch:  "feature-101",
		HeadSHA: "1a8dd249c04a",
	}, *pullRequests[0])
	assert.Equal(t, PullRequest{
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "6344d9623e3b",
	}, *pullRequests[1])

	regexp = `.*2$`
	svc, err = NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	pullRequests, err = ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{
		{
			BranchMatch: &regexp,
		},
	})
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "6344d9623e3b",
	}, *pullRequests[0])

	regexp = `[\d{2}`
	svc, err = NewBitbucketCloudServiceNoAuth(ts.URL, "OWNER", "REPO")
	assert.NoError(t, err)
	_, err = ListPullRequests(context.Background(), svc, []v1alpha1.PullRequestGeneratorFilter{
		{
			BranchMatch: &regexp,
		},
	})
	assert.Error(t, err)
}

@@ -66,11 +66,9 @@ func (b *BitbucketService) List(_ context.Context) ([]*PullRequest, error) {

	for _, pull := range pulls {
		pullRequests = append(pullRequests, &PullRequest{
			Number:       pull.ID,
			Branch:       pull.FromRef.DisplayID, // ID: refs/heads/main DisplayID: main
			TargetBranch: pull.ToRef.DisplayID,
			HeadSHA:      pull.FromRef.LatestCommit, // This is not defined in the official docs, but works in practice
			Labels:       []string{}, // Not supported by library
			Number:  pull.ID,
			Branch:  pull.FromRef.DisplayID, // ID: refs/heads/main DisplayID: main
			HeadSHA: pull.FromRef.LatestCommit, // This is not defined in the official docs, but works in practice
		})
	}

@@ -24,11 +24,6 @@ func defaultHandler(t *testing.T) func(http.ResponseWriter, *http.Request) {
				"values": [
					{
						"id": 101,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "master",
							"id": "refs/heads/master"
						},
						"fromRef": {
							"id": "refs/heads/feature-ABC-123",
							"displayId": "feature-ABC-123",
@@ -60,7 +55,6 @@ func TestListPullRequestNoAuth(t *testing.T) {
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, 101, pullRequests[0].Number)
	assert.Equal(t, "feature-ABC-123", pullRequests[0].Branch)
	assert.Equal(t, "master", pullRequests[0].TargetBranch)
	assert.Equal(t, "cb3cf2e4d1517c83e720d2585b9402dbef71f992", pullRequests[0].HeadSHA)
}

@@ -77,11 +71,6 @@ func TestListPullRequestPagination(t *testing.T) {
				"values": [
					{
						"id": 101,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "master",
							"id": "refs/heads/master"
						},
						"fromRef": {
							"id": "refs/heads/feature-101",
							"displayId": "feature-101",
@@ -90,11 +79,6 @@ func TestListPullRequestPagination(t *testing.T) {
					},
					{
						"id": 102,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "branch",
							"id": "refs/heads/branch"
						},
						"fromRef": {
							"id": "refs/heads/feature-102",
							"displayId": "feature-102",
@@ -112,11 +96,6 @@ func TestListPullRequestPagination(t *testing.T) {
				"values": [
					{
						"id": 200,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "master",
							"id": "refs/heads/master"
						},
						"fromRef": {
							"id": "refs/heads/feature-200",
							"displayId": "feature-200",
@@ -140,25 +119,19 @@ func TestListPullRequestPagination(t *testing.T) {
	assert.NoError(t, err)
	assert.Equal(t, 3, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:       101,
		Branch:       "feature-101",
		TargetBranch: "master",
		HeadSHA:      "ab3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  101,
		Branch:  "feature-101",
		HeadSHA: "ab3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[0])
	assert.Equal(t, PullRequest{
		Number:       102,
		Branch:       "feature-102",
		TargetBranch: "branch",
		HeadSHA:      "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[1])
	assert.Equal(t, PullRequest{
		Number:       200,
		Branch:       "feature-200",
		TargetBranch: "master",
		HeadSHA:      "cb3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  200,
		Branch:  "feature-200",
		HeadSHA: "cb3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[2])
}

@@ -182,7 +155,7 @@ func TestListPullRequestBasicAuth(t *testing.T) {

func TestListResponseError(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusInternalServerError)
		w.WriteHeader(500)
	}))
	defer ts.Close()
	svc, _ := NewBitbucketServiceNoAuth(context.Background(), ts.URL, "PROJECT", "REPO")
@@ -255,11 +228,6 @@ func TestListPullRequestBranchMatch(t *testing.T) {
				"values": [
					{
						"id": 101,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "master",
							"id": "refs/heads/master"
						},
						"fromRef": {
							"id": "refs/heads/feature-101",
							"displayId": "feature-101",
@@ -268,11 +236,6 @@ func TestListPullRequestBranchMatch(t *testing.T) {
					},
					{
						"id": 102,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "branch",
							"id": "refs/heads/branch"
						},
						"fromRef": {
							"id": "refs/heads/feature-102",
							"displayId": "feature-102",
@@ -290,11 +253,6 @@ func TestListPullRequestBranchMatch(t *testing.T) {
				"values": [
					{
						"id": 200,
						"toRef": {
							"latestCommit": "5b766e3564a3453808f3cd3dd3f2e5fad8ef0e7a",
							"displayId": "master",
							"id": "refs/heads/master"
						},
						"fromRef": {
							"id": "refs/heads/feature-200",
							"displayId": "feature-200",
@@ -323,18 +281,14 @@ func TestListPullRequestBranchMatch(t *testing.T) {
	assert.NoError(t, err)
	assert.Equal(t, 2, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:       101,
		Branch:       "feature-101",
		TargetBranch: "master",
		HeadSHA:      "ab3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  101,
		Branch:  "feature-101",
		HeadSHA: "ab3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[0])
	assert.Equal(t, PullRequest{
		Number:       102,
		Branch:       "feature-102",
		TargetBranch: "branch",
		HeadSHA:      "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[1])

	regexp = `.*2$`
@@ -348,11 +302,9 @@ func TestListPullRequestBranchMatch(t *testing.T) {
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pullRequests))
	assert.Equal(t, PullRequest{
		Number:       102,
		Branch:       "feature-102",
		TargetBranch: "branch",
		HeadSHA:      "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
		Labels:       []string{},
		Number:  102,
		Branch:  "feature-102",
		HeadSHA: "bb3cf2e4d1517c83e720d2585b9402dbef71f992",
	}, *pullRequests[0])

	regexp = `[\d{2}`

@@ -26,13 +26,11 @@ func NewGiteaService(ctx context.Context, token, url, owner, repo string, insecu
	if insecure {
		cookieJar, _ := cookiejar.New(nil)

		tr := http.DefaultTransport.(*http.Transport).Clone()
		tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}

		httpClient = &http.Client{
			Jar:       cookieJar,
			Transport: tr,
		}
			Jar: cookieJar,
			Transport: &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			}}
	}
	client, err := gitea.NewClient(url, gitea.SetToken(token), gitea.SetHTTPClient(httpClient))
	if err != nil {
@@ -56,21 +54,10 @@ func (g *GiteaService) List(ctx context.Context) ([]*PullRequest, error) {
	list := []*PullRequest{}
	for _, pr := range prs {
		list = append(list, &PullRequest{
			Number:       int(pr.Index),
			Branch:       pr.Head.Ref,
			TargetBranch: pr.Base.Ref,
			HeadSHA:      pr.Head.Sha,
			Labels:       getGiteaPRLabelNames(pr.Labels),
			Number:  int(pr.Index),
			Branch:  pr.Head.Ref,
			HeadSHA: pr.Head.Sha,
		})
	}
	return list, nil
}

// Get the Gitea pull request label names.
func getGiteaPRLabelNames(giteaLabels []*gitea.Label) []string {
	var labelNames []string
	for _, giteaLabel := range giteaLabels {
		labelNames = append(labelNames, giteaLabel.Name)
	}
	return labelNames
}

Some files were not shown because too many files have changed in this diff.