Compare commits

No commits in common. "426f5d9a21873787376f695e08e578f5d422f147" and "e368bbd94d80a6ef01f77ce2e8a610de9da04042" have entirely different histories.

426f5d9a21...e368bbd94d
@@ -4,6 +4,5 @@
     <file url="file://$PROJECT_DIR$/infra/modules/argocd/values.yaml" dialect="yaml" />
     <file url="file://$PROJECT_DIR$/infra/modules/fusionauth/values.yaml" dialect="yaml" />
     <file url="file://$PROJECT_DIR$/infra/modules/mongodb/values.yaml" dialect="yaml" />
-    <file url="file://$PROJECT_DIR$/infra/modules/zot/values.yaml.tftpl" dialect="TFTPL" />
   </component>
 </project>
@@ -1,70 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="AutoImportSettings">
-    <option name="autoReloadType" value="SELECTIVE" />
-  </component>
-  <component name="ChangeListManager">
-    <list default="true" id="009bc178-658e-4c81-9bb8-8d7bf6b8cbc6" name="Changes" comment="">
-      <change beforePath="$PROJECT_DIR$/../infra/clusters/app-365zon/.terraform/terraform.tfstate" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/clusters/app-365zon/.terraform/terraform.tfstate" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/clusters/app-365zon/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/clusters/app-365zon/main.tf" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/modules/mijn-365zon-nl/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/mijn-365zon-nl/main.tf" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/modules/minio/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/main.tf" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/modules/minio/values.yaml.tftpl" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/values.yaml.tftpl" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/modules/minio/variables.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/variables.tf" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/../infra/modules/mongodb/values.yaml" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/mongodb/values.yaml" afterDir="false" />
-    </list>
-    <option name="SHOW_DIALOG" value="false" />
-    <option name="HIGHLIGHT_CONFLICTS" value="true" />
-    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
-    <option name="LAST_RESOLUTION" value="IGNORE" />
-  </component>
-  <component name="Git.Settings">
-    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/.." />
-  </component>
-  <component name="ProjectColorInfo">{
-  "associatedIndex": 1
-}</component>
-  <component name="ProjectId" id="2oqTXEtODybqnAKfjaqPi9uslRP" />
-  <component name="ProjectViewState">
-    <option name="hideEmptyMiddlePackages" value="true" />
-    <option name="showLibraryContents" value="true" />
-  </component>
-  <component name="PropertiesComponent"><![CDATA[{
-  "keyToString": {
-    "RunOnceActivity.ShowReadmeOnStart": "true",
-    "RunOnceActivity.git.unshallow": "true",
-    "git-widget-placeholder": "main",
-    "last_opened_file_path": "/home/lamelos/Projects/fourlights/devops",
-    "node.js.detected.package.eslint": "true",
-    "node.js.detected.package.tslint": "true",
-    "node.js.selected.package.eslint": "(autodetect)",
-    "node.js.selected.package.tslint": "(autodetect)",
-    "nodejs_package_manager_path": "npm",
-    "vue.rearranger.settings.migration": "true"
-  }
-}]]></component>
-  <component name="SharedIndexes">
-    <attachedChunks>
-      <set>
-        <option value="bundled-js-predefined-d6986cc7102b-deb605915726-JavaScript-WS-243.22562.222" />
-      </set>
-    </attachedChunks>
-  </component>
-  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
-  <component name="TaskManager">
-    <task active="true" id="Default" summary="Default task">
-      <changelist id="009bc178-658e-4c81-9bb8-8d7bf6b8cbc6" name="Changes" comment="" />
-      <created>1731596143702</created>
-      <option name="number" value="Default" />
-      <option name="presentableId" value="Default" />
-      <updated>1731596143702</updated>
-      <workItem from="1731596144788" duration="1417000" />
-      <workItem from="1736261138378" duration="1228000" />
-      <workItem from="1736775177111" duration="7000" />
-    </task>
-    <servers />
-  </component>
-  <component name="TypeScriptGeneratedFilesManager">
-    <option name="version" value="3" />
-  </component>
-</project>
@@ -59,7 +59,6 @@ resource "helm_release" "argocd" {
       oauth_client_id     = var.oauth_client_id,
       oauth_client_secret = var.oauth_client_secret,
       oauth_redirect_uri  = var.oauth_redirect_uri
-      tls                 = var.tls
     })
   ]
 }
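For orientation, the hunk above sits inside the values argument of the ArgoCD Helm release. A minimal sketch of the surrounding pattern, using only the oauth_* names that appear in this diff (the chart repository URL and the namespace variable are assumptions, not taken from the repo):

resource "helm_release" "argocd" {
  name       = "argocd"
  repository = "https://argoproj.github.io/argo-helm"  # assumed; the actual repo URL is not shown in this diff
  chart      = "argo-cd"
  namespace  = var.namespace                           # assumed variable name

  values = [
    # Render values.yaml.tftpl, substituting the OAuth settings referenced above.
    templatefile("${path.module}/values.yaml.tftpl", {
      oauth_client_id     = var.oauth_client_id,
      oauth_client_secret = var.oauth_client_secret,
      oauth_redirect_uri  = var.oauth_redirect_uri
    })
  ]
}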
@@ -42,27 +42,14 @@ server:
     hostname: ${ service_uri }
     annotations:
       kubernetes.io/ingress.class: traefik
-      %{ if tls }
       traefik.ingress.kubernetes.io/router.entrypoints: web,websecure
       traefik.ingress.kubernetes.io/router.middlewares: default-redirect-to-https@kubernetescrd,default-preserve-host-headers@kubernetescrd
-      %{ else }
-      traefik.ingress.kubernetes.io/router.entrypoints: web
-      traefik.ingress.kubernetes.io/router.middlewares: default-preserve-host-headers@kubernetescrd
-      %{ endif }
-    %{ if tls }
     extraTls:
       - hosts:
           - ${ service_uri }
         secretName: argocd-tls
-    %{ endif }

   config:
-    rbac: |
-      scopes: '[groups]'
-      "policy.csv": |
-        g, admin, role:admin
-        g, user, role:readonly
-      "policy.default": ''
     %{ if oauth_uri != null }
     dex.config: |
       connectors:
@@ -70,9 +57,9 @@ server:
           id: oidc
           name: OIDC
           config:
-            issuer: "${ oauth_issuer }"
-            clientID: "${ oauth_client_id }"
-            clientSecret: "${ oauth_client_secret }"
+            issuer: ${ oauth_issuer }
+            clientID: ${ oauth_client_id }
+            clientSecret: ${ oauth_client_secret }
            insecureSkipEmailVerified: true
            insecureEnableGroups: true
            scopes:
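A note on the quoting change above: unquoted scalars are valid YAML, but a rendered secret containing characters such as ':', '#', or a leading '{' would change the document's meaning, so keeping the interpolated values quoted (the left-hand form) is generally the safer habit in a template like this.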
@@ -80,7 +67,6 @@ server:
               - email
               - openid
               - groups
-            logoutURL: "${ oauth_redirect_uri }"
             claimMapping:
               name: fullName # ArgoCD expects 'name', FusionAuth provides 'fullName'
               preferred_username: email
@@ -64,8 +64,3 @@ variable "oauth_redirect_uri" {
   description = "OAuth redirect URI"
   default     = null
 }
-
-variable "tls" {
-  type    = bool
-  default = false
-}
@@ -1,7 +1,26 @@
 config:
   bookmarks:
+    - Developer:
+        - Github:
+            - abbr: GH
+              href: https://github.com/
   services:
+    - My First Group:
+        - My First Service:
+            href: http://localhost/
+            description: Homepage is awesome
+
+    - My Second Group:
+        - My Second Service:
+            href: http://localhost/
+            description: Homepage is the best
+
+    - My Third Group:
+        - My Third Service:
+            href: http://localhost/
+            description: Homepage is 😎
   widgets:
+    # show the kubernetes widget, with the cluster summary and individual nodes
     - kubernetes:
         cluster:
           show: true
@@ -14,6 +33,9 @@ config:
           cpu: true
           memory: true
           showLabel: true
+    - search:
+        provider: duckduckgo
+        target: _blank
   kubernetes:
     mode: cluster
   settings:
@@ -29,13 +51,13 @@ enableRbac: true
 ingress:
   main:
     enabled: true
-    #annotations:
-    #  # Example annotations to add Homepage to your Homepage!
-    #  gethomepage.dev/enabled: "true"
-    #  gethomepage.dev/name: "Homepage"
-    #  gethomepage.dev/description: "Dynamically Detected Homepage"
-    #  gethomepage.dev/group: "Dynamic"
-    #  gethomepage.dev/icon: "homepage.png"
+    annotations:
+      # Example annotations to add Homepage to your Homepage!
+      gethomepage.dev/enabled: "true"
+      gethomepage.dev/name: "Homepage"
+      gethomepage.dev/description: "Dynamically Detected Homepage"
+      gethomepage.dev/group: "Dynamic"
+      gethomepage.dev/icon: "homepage.png"
     hosts:
       - host: ${service_uri}
         paths:
@@ -6,22 +6,15 @@ resource "random_password" "minio_access_key" {
 resource "random_password" "minio_secret_key" {
   length  = 40
   special = true
-  #override_special = "!#$%&*()-_=+[]{}<>:?"
-  #min_special      = 2
-  #min_upper        = 2
-  #min_lower        = 2
-  #min_numeric      = 2
 }

 resource "helm_release" "minio" {
   name             = "minio"
-  repository       = "oci://registry-1.docker.io/bitnamicharts"
+  repository       = "https://charts.bitnami.com/bitnami"
   chart            = "minio"
   namespace        = var.namespace
   create_namespace = true
-  version          = "16.0.0"
-  wait             = true
-  wait_for_jobs    = true
+  version          = "14.7.16"

   set_sensitive {
     name = "auth.rootUser"
@@ -65,7 +58,6 @@ resource "helm_release" "minio" {
       admin        = var.admin,
       tls          = var.mode == "distributed" ? false : var.tls
       ingressClass = var.ingressClass
-      displayOnHomepage = var.displayOnHomepage
     })
   ]
 }
@@ -74,13 +66,3 @@ output "installed" {
   value      = true
   depends_on = [helm_release.minio]
 }
-
-output "access_key" {
-  value     = random_password.minio_access_key.result
-  sensitive = true
-}
-
-output "secret_key" {
-  value     = random_password.minio_secret_key.result
-  sensitive = true
-}
@@ -1,28 +1,10 @@
-resource "null_resource" "health_check" {
-  depends_on = [var.wait_on]
-
-  provisioner "local-exec" {
-    command = <<-EOT
-      until curl -s -f "https://${var.server}/minio/health/live" || [[ $attempts -ge 10 ]]; do
-        sleep 10
-        attempts=$((attempts+1))
-      done
-      if [[ $attempts -ge 10 ]]; then
-        echo "Minio health check failed after maximum attempts"
-        exit 1
-      fi
-    EOT
-  }
-}
-
 resource "minio_s3_bucket" "overlay" {
-  depends_on = [null_resource.health_check]
+  depends_on = [var.wait_on]
   bucket     = var.name
   acl        = "private"
 }

 resource "minio_s3_bucket_policy" "overlay" {
-  depends_on = [minio_s3_bucket.overlay]
   bucket = minio_s3_bucket.overlay.bucket
   policy = jsonencode({
     "Version" : "2012-10-17",
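The deleted health check rolls its own retry loop and assumes a bash-like shell for the [[ ]] tests and the unset $attempts counter. If a replacement were ever wanted, curl's built-in retry flags express the same wait more compactly; a hedged sketch reusing only names from the deleted block:

resource "null_resource" "health_check" {
  depends_on = [var.wait_on]

  provisioner "local-exec" {
    # --retry-all-errors (curl >= 7.71) retries connection failures as well as HTTP errors;
    # after 10 failed attempts curl exits non-zero, failing the resource and the apply.
    command = "curl --fail --silent --retry 10 --retry-delay 10 --retry-all-errors https://${var.server}/minio/health/live"
  }
}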
@@ -38,7 +20,7 @@ resource "minio_s3_bucket_policy" "overlay" {
           "s3:GetBucketLocation"
         ],
         "Resource" : [
-          minio_s3_bucket.overlay.arn
+          "arn:aws:s3:::bouwroute"
         ]
       },
       {
@@ -52,7 +34,7 @@ resource "minio_s3_bucket_policy" "overlay" {
           "s3:ListBucket"
         ],
         "Resource" : [
-          minio_s3_bucket.overlay.arn
+          "arn:aws:s3:::bouwroute"
         ],
         "Condition" : {
           "StringEquals" : {
@@ -73,7 +55,7 @@ resource "minio_s3_bucket_policy" "overlay" {
           "s3:GetObject"
         ],
         "Resource" : [
-          "${minio_s3_bucket.overlay.arn}/**"
+          "arn:aws:s3:::bouwroute/**"
         ]
       }
     ]
@@ -81,12 +63,10 @@ resource "minio_s3_bucket_policy" "overlay" {
 }

 resource "minio_iam_user" "overlay" {
-  depends_on = [null_resource.health_check]
   name = var.name
 }

 resource "minio_iam_policy" "overlay" {
-  depends_on = [minio_s3_bucket.overlay]
   name   = minio_s3_bucket.overlay.bucket
   policy = jsonencode({
     Version = "2012-10-17"
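These policy hunks swap derived references for the hardcoded bucket name "bouwroute". For comparison, the left-hand convention keeps the policy tied to whatever var.name produced; a condensed sketch using only resource attributes visible in this diff:

policy = jsonencode({
  Version = "2012-10-17"
  Statement = [
    {
      Effect   = "Allow"
      Action   = ["s3:ListBucket"]
      Resource = [minio_s3_bucket.overlay.arn]          # resolves to arn:aws:s3:::<var.name>
    },
    {
      Effect   = "Allow"
      Action   = ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"]
      Resource = ["${minio_s3_bucket.overlay.arn}/*"]   # objects inside the bucket
    }
  ]
})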
@@ -94,7 +74,7 @@ resource "minio_iam_policy" "overlay" {
       {
         Effect   = "Allow"
         Action   = ["s3:ListBucket"]
-        Resource = [minio_s3_bucket.overlay.arn]
+        Resource = ["arn:aws:s3:::${var.name}"]
       },
       {
         Effect = "Allow"
@@ -103,7 +83,7 @@ resource "minio_iam_policy" "overlay" {
           "s3:PutObject",
           "s3:DeleteObject"
         ]
-        Resource = ["${minio_s3_bucket.overlay.arn}/*"]
+        Resource = ["arn:aws:s3:::${var.name}/*"]
       }
     ]
   })
@@ -111,14 +91,11 @@ resource "minio_iam_policy" "overlay" {


 resource "minio_iam_user_policy_attachment" "overlay" {
-  depends_on = [minio_iam_user.overlay, minio_iam_policy.overlay]
-
   user_name   = minio_iam_user.overlay.id
   policy_name = minio_iam_policy.overlay.id
 }

 resource "minio_iam_service_account" "overlay" {
-  depends_on = [minio_iam_user.overlay, minio_s3_bucket.overlay]
   target_user = minio_iam_user.overlay.name
   policy      = jsonencode({
     Version = "2012-10-17"
@@ -126,7 +103,7 @@ resource "minio_iam_service_account" "overlay" {
       {
         Effect   = "Allow"
         Action   = ["s3:ListBucket"]
-        Resource = [minio_s3_bucket.overlay.arn]
+        Resource = ["arn:aws:s3:::${var.name}"]
       },
       {
         Effect = "Allow"
@@ -135,16 +112,12 @@ resource "minio_iam_service_account" "overlay" {
           "s3:PutObject",
           "s3:DeleteObject"
         ]
-        Resource = ["${minio_s3_bucket.overlay.arn}/*"]
+        Resource = ["arn:aws:s3:::${var.name}/*"]
       }
     ]
   })
 }

-output "bucket" {
-  value = var.name
-}
-
 output "access_key" {
   value     = minio_iam_service_account.overlay.access_key
   sensitive = true
@@ -2,7 +2,7 @@ terraform {
   required_providers {
     minio = {
       source  = "aminueza/minio"
-      version = "~> 3.3.0"
+      version = "~> 2.5.0"
     }
   }
 }
@@ -22,13 +22,6 @@ ingress:
     ingress.kubernetes.io/proxy-body-size: "0"
     nginx.ingress.kubernetes.io/proxy-body-size: "0"
     %{ endif }
-    %{ if displayOnHomepage }
-    gethomepage.dev/enabled: "true"
-    gethomepage.dev/name: "Minio"
-    gethomepage.dev/description: "S3-Compatible cloud storage"
-    gethomepage.dev/group: "Tools"
-    gethomepage.dev/icon: "minio.png"
-    %{ endif }

 apiIngress:
   enabled: true
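The %{ if } ... %{ endif } lines in this template are Terraform template directives, evaluated by templatefile() before Helm ever parses the YAML. A minimal sketch of how the removed flag was presumably threaded through from main.tf (matching the displayOnHomepage variable deleted in the variables.tf hunk below):

values = [
  templatefile("${path.module}/values.yaml.tftpl", {
    # When false, everything between %{ if displayOnHomepage } and %{ endif }
    # is dropped from the rendered values file.
    displayOnHomepage = var.displayOnHomepage
  })
]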
@@ -65,7 +65,3 @@ variable "storageSize" {
   default = "6Gi"
 }

-variable "displayOnHomepage" {
-  type    = bool
-  default = false
-}
@@ -56,16 +56,3 @@ output "installed" {
   value      = true
   depends_on = [helm_release.mongodb]
 }
-
-output "connection_string" {
-  value = format(
-    "mongodb://%s:%s@%s/%s?replicaSet=rs0&authSource=admin",
-    "root",
-    random_password.mongodb_root_password.result,
-    join(",", [
-      for i in range(var.replicas) : format("mongodb-%d.mongodb-headless.mongodb.svc.cluster.local:27017", i)
-    ]),
-    "admin"
-  )
-  sensitive = true
-}
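To make the deleted output concrete: with var.replicas = 3, the range/join pair yields the hosts mongodb-0, mongodb-1, and mongodb-2 on the headless service, so the rendered value (password elided) would be:

mongodb://root:<password>@mongodb-0.mongodb-headless.mongodb.svc.cluster.local:27017,mongodb-1.mongodb-headless.mongodb.svc.cluster.local:27017,mongodb-2.mongodb-headless.mongodb.svc.cluster.local:27017/admin?replicaSet=rs0&authSource=admin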
@@ -16,14 +16,14 @@ mongodb:
   readinessProbe:
     initialDelaySeconds: 30
     periodSeconds: 10
-    timeoutSeconds: 15
+    timeoutSeconds: 5
     failureThreshold: 3
     successThreshold: 1

   livenessProbe:
     initialDelaySeconds: 60
     periodSeconds: 20
-    timeoutSeconds: 15
+    timeoutSeconds: 5
     failureThreshold: 6

   # Proper shutdown handling
@@ -55,11 +55,3 @@ auth:
       - ${ database }
     %{ endfor ~}
     %{ endif }
-
-resources:
-  limits:
-    cpu: 1000m
-    memory: 1.5Gi
-  requests:
-    cpu: 500m
-    memory: 1Gi
@@ -1,6 +1,4 @@
 resource "kubernetes_namespace" "postgresql" {
-  count = var.enabled ? 1 : 0
-
   metadata {
     name = var.namespace
   }
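The removed count line is the standard enable/disable idiom this module used throughout: gate every resource on a boolean, then index cross-references. A condensed sketch of the pattern as it appears across the postgresql hunks (all names taken from this diff):

variable "enabled" {
  type    = bool
  default = true
}

resource "kubernetes_namespace" "postgresql" {
  count = var.enabled ? 1 : 0

  metadata {
    name = var.namespace
  }
}

resource "helm_release" "postgresql" {
  count      = var.enabled ? 1 : 0
  name       = "postgresql"
  repository = "https://charts.bitnami.com/bitnami"
  chart      = "postgresql"
  version    = "16.0.5"
  # count.index is always 0 here; the reference also makes Terraform order the
  # release after the namespace without an explicit depends_on.
  namespace  = kubernetes_namespace.postgresql[count.index].metadata.0.name
}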
@@ -13,30 +11,19 @@ resource "kubernetes_namespace" "postgresql" {
 resource "random_password" "postgresql_user_password" {
   length  = 40
   special = true
-  override_special = "!#$%&*()-_=+[]{}<>:?"
-  min_special      = 2
-  min_upper        = 2
-  min_lower        = 2
-  min_numeric      = 2
 }

 resource "random_password" "postgresql_root_password" {
   length  = 40
   special = true
-  override_special = "!#$%&*()-_=+[]{}<>:?"
-  min_special      = 2
-  min_upper        = 2
-  min_lower        = 2
-  min_numeric      = 2
 }

 resource "kubernetes_secret" "postgresql_auth" {
-  count      = var.enabled ? 1 : 0
   type       = "generic"
   depends_on = [var.wait_on]
   metadata {
     name      = "postgresql-auth"
-    namespace = kubernetes_namespace.postgresql[count.index].metadata.0.name
+    namespace = kubernetes_namespace.postgresql.metadata.0.name
   }

   data = {
@@ -46,12 +33,11 @@ resource "kubernetes_secret" "postgresql_auth" {
 }

 resource "helm_release" "postgresql" {
-  count      = var.enabled ? 1 : 0
   depends_on = [var.wait_on, kubernetes_secret.postgresql_auth]
   name       = "postgresql"
   repository = "https://charts.bitnami.com/bitnami"
   chart      = "postgresql"
-  namespace  = kubernetes_namespace.postgresql[count.index].metadata.0.name
+  namespace  = kubernetes_namespace.postgresql.metadata.0.name
   version    = "16.0.5"
   wait       = true

@@ -17,7 +17,6 @@ resource "random_password" "tenant" {
 }

 resource "kubernetes_job" "create-tenant" {
-  count      = var.enabled ? 1 : 0
   depends_on = [var.wait_on]

   metadata {
@@ -109,5 +108,5 @@ output "username" {
 }

 output "job_name" {
-  value = var.enabled ? kubernetes_job.create-tenant[0].metadata[0].name : null
+  value = kubernetes_job.create-tenant.metadata[0].name
 }
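The guard being removed here exists because a count-gated resource has no [0] element when disabled. Terraform's one() function expresses the same fallback without a ternary; a hedged alternative sketch for the same output:

output "job_name" {
  # one() returns the single instance's value, or null when count made the list empty
  value = one(kubernetes_job.create-tenant[*].metadata[0].name)
}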
@@ -38,8 +38,3 @@ variable "k8s_config_yaml" {
   description = "Content of k8s config yaml file"
   type        = string
 }
-
-variable "enabled" {
-  type    = bool
-  default = true
-}
@@ -16,8 +16,3 @@ variable "namespace" {
 variable "username" {
   type = string
 }
-
-variable "enabled" {
-  type    = bool
-  default = true
-}
@@ -41,8 +41,3 @@ output "installed" {
   value      = true
   depends_on = [helm_release.rabbitmq]
 }
-
-output "connection_string" {
-  value     = "rabbitmq://user:${random_password.password.result}@rabbitmq-headless.${var.namespace}.svc.cluster.local:5672/"
-  sensitive = true
-}
@@ -11,11 +11,11 @@ ports:
     port: 8000
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     forwardedHeaders:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 0
         idleTimeout: 0
         readTimeout: 0
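One observation on the right-hand side of this and the following ports hunks: Traefik's entryPoint option is spelled respondingTimeouts (as on the left); the respondingTimouts spelling would most likely be ignored, silently reverting the write/idle/read timeouts to their defaults.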
@@ -26,11 +26,11 @@ ports:
     port: 8443
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     forwardedHeaders:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 0
         idleTimeout: 0
         readTimeout: 0
@@ -41,9 +41,9 @@ ports:
     port: 2223
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 600s
         idleTimeout: 60s
         readTimeout: 600s
@@ -54,9 +54,9 @@ ports:
     port: 8993
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 600s
         idleTimeout: 300s
         readTimeout: 600s
@@ -67,9 +67,9 @@ ports:
     port: 8995
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 600s
         idleTimeout: 300s
         readTimeout: 600s
@@ -80,9 +80,9 @@ ports:
     port: 4190
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 600s
         idleTimeout: 300s
         readTimeout: 600s
@@ -93,7 +93,7 @@ ports:
     port: 8025
     protocol: TCP
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 300s
         idleTimeout: 300s
         readTimeout: 300s
@@ -104,9 +104,9 @@ ports:
     port: 8465
     protocol: TCP
     proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
+      trustedIPs: [127.0.0.1/8,10.0.0.0/8]
     transport:
-      respondingTimeouts:
+      respondingTimouts:
         writeTimeout: 300s
         idleTimeout: 300s
         readTimeout: 300s
@@ -1,124 +0,0 @@
-terraform {
-  required_providers {
-    slugify = {
-      source  = "public-cloud-wl/slugify"
-      version = "0.1.1"
-    }
-  }
-}
-
-locals {
-  authority    = "https://${var.zitadel_domain}"
-  slug_project = provider::slugify::slug(var.project)
-  slug_name    = provider::slugify::slug(var.name)
-
-  cluster = "${local.slug_project}.${var.cluster_domain}"
-  uri     = "https://${local.slug_name}.${local.cluster}"
-}
-
-module "zitadel_project_application_api" {
-  source  = "../project/application/api"
-  wait_on = var.wait_on
-
-  org_id     = var.org_id
-  project_id = var.project_id
-
-  name = "${var.name} API"
-}
-
-module "zitadel_project_application_ua" {
-  source  = "../project/application/user-agent"
-  wait_on = module.zitadel_project_application_api.installed
-
-  org_id     = var.org_id
-  project_id = var.project_id
-
-  name = "${ var.name } (Swagger)"
-
-  redirect_uris             = ["${local.uri}/swagger/oauth2-redirect.html", "${local.uri}/hangfire/signin-oidc", "${local.uri}/signin-oidc"]
-  post_logout_redirect_uris = [local.uri]
-}
-
-
-resource "kubernetes_secret" "user-agent" {
-  type       = "Opaque"
-  depends_on = [module.zitadel_project_application_ua]
-
-  metadata {
-    name      = "${local.slug_name}-user-agent"
-    namespace = var.namespace
-  }
-
-  data = {
-    "authority" = local.authority
-    "audience"  = var.project_id
-    "client_id" = module.zitadel_project_application_ua.client_id
-  }
-}
-
-resource "kubernetes_secret" "api" {
-  type       = "Opaque"
-  depends_on = [module.zitadel_project_application_api]
-
-  metadata {
-    name      = "${local.slug_name}-api"
-    namespace = var.namespace
-  }
-
-  data = {
-    "authority"     = local.authority
-    "audience"      = var.project_id
-    "client_id"     = module.zitadel_project_application_api.client_id
-    "client_secret" = module.zitadel_project_application_api.client_secret
-  }
-}
-
-module "zitadel_service_account" {
-  count   = var.service_account ? 1 : 0
-  wait_on = module.zitadel_project_application_api.installed
-  source  = "../service-account"
-
-  org_id = var.org_id
-
-  user_name = "${local.slug_name}@${ local.cluster }"
-  name      = "${var.name} @ ${var.project}"
-
-  with_secret       = true
-  access_token_type = "ACCESS_TOKEN_TYPE_JWT"
-}
-
-module "zitadel_project_user_grant" {
-  count  = var.service_account ? 1 : 0
-  source = "../project/user-grant"
-
-  org_id = var.org_id
-
-  project_id = var.project_id
-  user_id    = module.zitadel_service_account[0].user_id
-
-  roles = var.roles
-}
-
-resource "kubernetes_secret" "service-account" {
-  count      = var.service_account ? 1 : 0
-  type       = "Opaque"
-  depends_on = [module.zitadel_service_account]
-
-  metadata {
-    name      = "${local.slug_name}-service-account"
-    namespace = var.namespace
-  }
-
-  data = {
-    "authority"     = local.authority
-    "audience"      = var.project_id
-    "client_id"     = module.zitadel_service_account[count.index].client_id
-    "client_secret" = module.zitadel_service_account[count.index].client_secret
-    "scope"         = join(" ", concat(["openid", "profile", "urn:zitadel:iam:org:project:id:${var.project_id}:aud"], var.roles))
-  }
-}
-
-output "installed" {
-  value      = true
-  depends_on = [kubernetes_secret.service-account]
-}
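The deleted locals above use provider-defined functions, a Terraform 1.8+ feature invoked as provider::<provider>::<function>(...). A minimal sketch (the example input/output comment is illustrative of typical slugification, not taken from the repo):

terraform {
  required_providers {
    slugify = {
      source  = "public-cloud-wl/slugify"
      version = "0.1.1"
    }
  }
}

locals {
  # e.g. "My App" would presumably become "my-app"
  slug_name = provider::slugify::slug(var.name)
}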
@@ -1,44 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type = string
-}
-
-variable "project_id" {
-  type = string
-}
-
-variable "name" {
-  type = string
-}
-
-variable "project" {
-  type = string
-}
-
-
-variable "roles" {
-  type        = list(string)
-  description = "Roles to be granted"
-}
-
-variable "namespace" {
-  type = string
-}
-
-variable "service_account" {
-  type    = bool
-  default = true
-}
-
-variable "zitadel_domain" {
-  type = string
-}
-
-variable "cluster_domain" {
-  type = string
-}
@@ -1,82 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source = "zitadel/zitadel"
-    }
-  }
-}
-
-resource "zitadel_org_idp_google" "default" {
-  depends_on          = [var.wait_on]
-  org_id              = var.org_id
-  name                = "Google"
-  client_id           = var.client_id
-  client_secret       = var.client_secret
-  scopes              = var.options.scopes
-  is_linking_allowed  = var.options.is_linking_allowed
-  is_creation_allowed = var.options.is_creation_allowed
-  is_auto_creation    = var.options.is_auto_creation
-  is_auto_update      = var.options.is_auto_update
-  auto_linking        = var.options.auto_linking
-}
-
-resource "zitadel_login_policy" "default" {
-  depends_on = [zitadel_org_idp_google.default]
-
-  org_id                        = var.org_id
-  user_login                    = false
-  allow_register                = true
-  allow_external_idp            = true
-  force_mfa                     = false
-  force_mfa_local_only          = false
-  passwordless_type             = "PASSWORDLESS_TYPE_ALLOWED"
-  hide_password_reset           = "false"
-  password_check_lifetime       = "240h0m0s"
-  external_login_check_lifetime = "240h0m0s"
-  multi_factor_check_lifetime   = "24h0m0s"
-  mfa_init_skip_lifetime        = "720h0m0s"
-  second_factor_check_lifetime  = "24h0m0s"
-  ignore_unknown_usernames      = true
-  default_redirect_uri          = "https://${var.domain}"
-  second_factors                = ["SECOND_FACTOR_TYPE_OTP", "SECOND_FACTOR_TYPE_U2F"]
-  multi_factors                 = ["MULTI_FACTOR_TYPE_U2F_WITH_VERIFICATION"]
-  idps                          = [zitadel_org_idp_google.default.id]
-  allow_domain_discovery        = true
-  disable_login_with_email      = true
-  disable_login_with_phone      = true
-}
-
-#resource "zitadel_action" "verify-email-from-google-idp" {
-#  org_id = var.org_id
-#  name   = "trustEmailVerification"
-#  script = templatefile("${path.module}/verify-email.action.tftpl", {
-#    trusted_idp = zitadel_org_idp_google.default.id,
-#  })
-#  allowed_to_fail = false
-#  timeout         = "10s"
-#}
-
-#resource "zitadel_trigger_actions" "verify-email-from-google-idp" {
-#  org_id       = var.org_id
-#  flow_type    = "FLOW_TYPE_EXTERNAL_AUTHENTICATION"
-#  trigger_type = "TRIGGER_TYPE_PRE_CREATION"
-#  action_ids   = [zitadel_action.verify-email-from-google-idp.id]
-#}
-#
-#resource "zitadel_trigger_actions" "internal" {
-#  org_id       = var.org_id
-#  flow_type    = "FLOW_TYPE_INTERNAL_AUTHENTICATION"
-#  trigger_type = "TRIGGER_TYPE_PRE_CREATION"
-#  action_ids   = [zitadel_action.verify-email-from-google-idp.id]
-#}
-
-output "installed" {
-  value = true
-  depends_on = [
-    zitadel_org_idp_google.default, zitadel_login_policy.default,
-  ]
-}
-
-output "idp_id" {
-  value = zitadel_org_idp_google.default.id
-}
@@ -1,43 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "client_id" {
-  type        = string
-  description = "Google Client ID"
-}
-
-variable "client_secret" {
-  type        = string
-  description = "Google Client Secret"
-}
-
-variable "options" {
-  type = object({
-    scopes              = list(string)
-    is_linking_allowed  = bool
-    is_creation_allowed = bool
-    is_auto_creation    = bool
-    is_auto_update      = bool
-    auto_linking        = string
-  })
-  default = {
-    scopes              = ["openid", "profile", "email"],
-    is_linking_allowed  = true
-    is_creation_allowed = true
-    is_auto_creation    = true
-    is_auto_update      = true
-    auto_linking        = "AUTO_LINKING_OPTION_USERNAME"
-  }
-}
-
-variable "domain" {
-  type = string
-}
@@ -1,15 +0,0 @@
-/**
- * Set first and lastname of a user on just in time provisioning for okta.
- * Useful if you like to fill the first and lastname with the name stored on okta, so the user doesn't have to fill himself.
- * Also set email to verified, so the user doesn't get a verification email
- *
- * Flow: External Authentication, Trigger: Post Authentication
- *
- * @param ctx
- * @param api
- */
-let logger = require("zitadel/log")
-
-function trustEmailVerification(ctx, api) {
-  api.setEmailVerified(true);
-}
@@ -1,3 +0,0 @@
-locals {
-  service_uri = join(".", [var.service_name, var.server_dns])
-}
@@ -1,90 +0,0 @@
-terraform {
-  required_providers {
-    kubernetes = {
-      source  = "hashicorp/kubernetes"
-      version = "2.31.0"
-    }
-  }
-}
-
-resource "kubernetes_namespace" "zitadel" {
-  count = var.enabled ? 1 : 0
-  metadata {
-    name = var.namespace
-  }
-
-  lifecycle {
-    ignore_changes = [metadata]
-  }
-}
-
-resource "random_password" "zitadel_masterkey" {
-  length  = 32
-  special = true
-}
-
-resource "kubernetes_secret" "zitadel" {
-  count = var.enabled ? 1 : 0
-  metadata {
-    name      = "zitadel"
-    namespace = kubernetes_namespace.zitadel[count.index].metadata[0].name
-  }
-  data = {
-    masterkey = random_password.zitadel_masterkey.result
-  }
-}
-
-resource "helm_release" "zitadel" {
-  count            = var.enabled ? 1 : 0
-  depends_on       = [var.wait_on, kubernetes_secret.zitadel]
-  name             = "zitadel"
-  repository       = "https://charts.zitadel.com"
-  chart            = "zitadel"
-  namespace        = kubernetes_namespace.zitadel[count.index].metadata[0].name
-  version          = "8.12.0"
-  create_namespace = false
-  wait             = true
-  wait_for_jobs    = true
-
-  values = [
-    templatefile("${path.module}/values.yaml.tftpl", {
-      service_uri            = local.service_uri,
-      database               = var.database,
-      database_username      = var.database_username,
-      database_password      = var.database_password,
-      database_root_username = var.database_root_password != null ? var.database_root_username : null,
-      database_root_password = var.database_root_password
-      display_on_homepage    = var.display_on_homepage
-    })
-  ]
-}
-
-data "kubernetes_secret" "zitadel_admin" {
-  depends_on = [helm_release.zitadel]
-  metadata {
-    name      = "zitadel-admin-sa"
-    namespace = var.namespace
-  }
-}
-
-resource "local_file" "zitadel_jwt_profile_file" {
-  content  = data.kubernetes_secret.zitadel_admin.data["zitadel-admin-sa.json"]
-  filename = format("%s/%s", path.root, "zitadel-admin-sa.json")
-}
-
-output "jwt_profile_file" {
-  value = local_file.zitadel_jwt_profile_file.filename
-}
-
-output "installed" {
-  value      = true
-  depends_on = [helm_release.zitadel, local_file.zitadel_jwt_profile_file]
-}
-
-output "server" {
-  value = local.service_uri
-}
-
-output "uri" {
-  value = "https://${local.service_uri}"
-}
@@ -1,38 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_application_api" "default" {
-  depends_on = [var.wait_on]
-
-  org_id     = var.org_id
-  project_id = var.project_id
-
-  name             = var.name
-  auth_method_type = "API_AUTH_METHOD_TYPE_BASIC"
-  // TODO: Change this to private key jwt in the future
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_application_api.default]
-}
-
-output "application_id" {
-  value = zitadel_application_api.default.id
-}
-
-output "client_id" {
-  value     = zitadel_application_api.default.client_id
-  sensitive = true
-}
-
-output "client_secret" {
-  value     = zitadel_application_api.default.client_secret
-  sensitive = true
-}
@@ -1,20 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "project_id" {
-  type        = string
-  description = "Project Id"
-}
-
-variable "name" {
-  type        = string
-  description = "Application name"
-}
@@ -1,63 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_application_oidc" "default" {
-  depends_on = [var.wait_on]
-
-  org_id = var.org_id
-
-  grant_types = ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE"]
-  name        = var.name
-  project_id  = var.project_id
-
-  redirect_uris  = var.redirect_uris
-  response_types = ["OIDC_RESPONSE_TYPE_CODE"]
-
-  # // If selected, the requested roles of the authenticated user are added to the access token.
-  access_token_type           = "OIDC_TOKEN_TYPE_JWT"
-  access_token_role_assertion = true
-
-  # BEARER uses an Opaque token, which needs the introspection endpoint and `urn:zitadel:iam:org:project:id:<API_PROJECT_ID>:aud` scope
-  #access_token_type = "OIDC_TOKEN_TYPE_BEARER"
-
-  # // If you want to add additional Origins to your app which is not used as a redirect you can do that here.
-  #additional_origins = []
-
-  app_type         = "OIDC_APP_TYPE_USER_AGENT"
-  auth_method_type = "OIDC_AUTH_METHOD_TYPE_NONE"
-
-  # // Redirect URIs must begin with https:// unless dev_mode is true
-  #dev_mode = false
-
-  # // If selected, the requested roles of the authenticated user are added to the ID token.
-  #id_token_role_assertion = false
-  # // Enables clients to retrieve profile, email, phone and address claims from ID token.
-  #id_token_userinfo_assertion = false
-
-  post_logout_redirect_uris = var.post_logout_redirect_uris
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_application_oidc.default]
-}
-
-output "application_id" {
-  value = zitadel_application_oidc.default.id
-}
-
-output "client_id" {
-  value     = zitadel_application_oidc.default.client_id
-  sensitive = true
-}
-
-output "client_secret" {
-  value     = zitadel_application_oidc.default.client_secret
-  sensitive = true
-}
@@ -1,30 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "project_id" {
-  type        = string
-  description = "Project Id"
-}
-
-variable "name" {
-  type        = string
-  description = "Application name"
-}
-
-variable "redirect_uris" {
-  type = list(string)
-}
-
-variable "post_logout_redirect_uris" {
-  type    = list(string)
-  default = []
-}
@@ -1,61 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_application_oidc" "default" {
-  depends_on = [var.wait_on]
-
-  org_id = var.org_id
-
-  grant_types = ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE"]
-  name        = var.name
-  project_id  = var.project_id
-
-  redirect_uris  = var.redirect_uris
-  response_types = ["OIDC_RESPONSE_TYPE_CODE"]
-
-  # // If selected, the requested roles of the authenticated user are added to the access token.
-  #access_token_type           = "OIDC_TOKEN_TYPE_JWT"
-  #access_token_role_assertion = true
-
-  # BEARER uses an Opaque token, which needs the introspection endpoint and `urn:zitadel:iam:org:project:id:<API_PROJECT_ID>:aud` scope
-  access_token_type = "OIDC_TOKEN_TYPE_BEARER"
-
-  # // If you want to add additional Origins to your app which is not used as a redirect you can do that here.
-  #additional_origins = []
-
-  app_type         = "OIDC_APP_TYPE_WEB"
-  auth_method_type = var.auth_method_type
-
-  # // Redirect URIs must begin with https:// unless dev_mode is true
-  #dev_mode = false
-
-  id_token_role_assertion     = var.id_token_role_assertion
-  id_token_userinfo_assertion = var.id_token_userinfo_assertion
-
-  post_logout_redirect_uris = var.post_logout_redirect_uris
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_application_oidc.default]
-}
-
-output "application_id" {
-  value = zitadel_application_oidc.default.id
-}
-
-output "client_id" {
-  value     = zitadel_application_oidc.default.client_id
-  sensitive = true
-}
-
-output "client_secret" {
-  value     = zitadel_application_oidc.default.client_secret
-  sensitive = true
-}
@@ -1,47 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "project_id" {
-  type        = string
-  description = "Project Id"
-}
-
-variable "name" {
-  type        = string
-  description = "Application name"
-}
-
-variable "redirect_uris" {
-  type = list(string)
-}
-
-variable "post_logout_redirect_uris" {
-  type    = list(string)
-  default = []
-}
-
-variable "auth_method_type" {
-  type    = string
-  default = "OIDC_AUTH_METHOD_TYPE_NONE"
-}
-
-variable "id_token_role_assertion" {
-  type        = bool
-  default     = false
-  description = "If selected, the requested roles of the authenticated user are added to the ID token."
-}
-
-variable "id_token_userinfo_assertion" {
-  type        = bool
-  default     = false
-  description = "Enables clients to retrieve profile, email, phone and address claims from ID token."
-}
@@ -1,36 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_project" "default" {
-  depends_on               = [var.wait_on]
-  org_id                   = var.org_id
-  name                     = var.name
-  project_role_assertion   = true
-  project_role_check       = true
-  has_project_check        = true
-  private_labeling_setting = "PRIVATE_LABELING_SETTING_ENFORCE_PROJECT_RESOURCE_OWNER_POLICY"
-}
-
-resource "zitadel_project_member" "default" {
-  count = length(var.owners)
-
-  org_id     = var.org_id
-  project_id = zitadel_project.default.id
-  user_id    = var.owners[count.index]
-  roles      = ["PROJECT_OWNER"]
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_project.default, zitadel_project_member.default]
-}
-
-output "project_id" {
-  value = zitadel_project.default.id
-}
@@ -1,34 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_project_role" "default" {
-  count      = length(var.roles)
-  depends_on = [var.wait_on]
-
-  org_id       = var.org_id
-  project_id   = var.project_id
-  role_key     = var.roles[count.index]
-  display_name = var.roles[count.index]
-  group        = var.group
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_project_role.default]
-}
-
-output "role_ids" {
-  value = toset([
-    for role in zitadel_project_role.default : role.id
-  ])
-}
-
-output "roles" {
-  value = var.roles
-}
@@ -1,27 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "project_id" {
-  type        = string
-  description = "Project Id"
-}
-
-variable "group" {
-  type        = string
-  description = "Optional group name"
-  default     = null
-}
-
-variable "roles" {
-  type        = list(string)
-  description = "Roles to be added"
-  default     = []
-}
@@ -1,26 +0,0 @@
-terraform {
-  required_providers {
-    zitadel = {
-      source  = "zitadel/zitadel"
-      version = "2.0.2"
-    }
-  }
-}
-
-resource "zitadel_user_grant" "default" {
-  depends_on = [var.wait_on]
-
-  org_id     = var.org_id
-  project_id = var.project_id
-  user_id    = var.user_id
-  role_keys  = var.roles
-}
-
-output "installed" {
-  value      = true
-  depends_on = [zitadel_user_grant.default]
-}
-
-output "user_grant_id" {
-  value = zitadel_user_grant.default.id
-}
@@ -1,26 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "project_id" {
-  type        = string
-  description = "Project Id"
-}
-
-variable "user_id" {
-  type        = string
-  description = "User Id"
-}
-
-variable "roles" {
-  type        = list(string)
-  description = "Roles to be granted"
-  default     = []
-}
@@ -1,21 +0,0 @@
-variable "wait_on" {
-  type        = any
-  description = "Resources to wait on"
-  default     = true
-}
-
-variable "org_id" {
-  type        = string
-  description = "Organisation Id"
-}
-
-variable "name" {
-  type        = string
-  description = "Name of the project"
-}
-
-variable "owners" {
-  type        = list(string)
-  description = "User IDs to be granted `PROJECT_OWNER` role"
-  default     = []
-}
@@ -1,33 +0,0 @@
locals {
  k8s_config = yamldecode(var.k8s_config_yaml)
  k8s_host   = local.k8s_config.clusters[0].cluster.server
  k8s_auth = try(
    {
      token       = local.k8s_config.users[0].user.token
      using_token = true
    },
    {
      client_certificate = base64decode(local.k8s_config.users[0].user["client-certificate-data"])
      client_key         = base64decode(local.k8s_config.users[0].user["client-key-data"])
      using_token        = false
    }
  )
}

provider "kubernetes" {
  host     = local.k8s_host
  insecure = true

  token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
  client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
  client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}

provider "helm" {
  kubernetes {
    host     = local.k8s_host
    insecure = true

    token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
    client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
    client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
  }
}
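The try() above works because referencing a missing attribute is an error in Terraform, and try() returns the first argument that evaluates without one; token auth therefore wins whenever the kubeconfig carries users[0].user.token, and the client-certificate pair is only used as a fallback. A minimal, self-contained sketch of that behaviour (the local names here are illustrative):

locals {
  user = { name = "default" } # no "token" attribute present
  auth = try(
    { token = local.user.token, using_token = true }, # errors: local.user has no token
    { using_token = false }                           # so try() falls through to this
  )
  # auth.using_token evaluates to false in this sketch
}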
@@ -1,38 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

resource "zitadel_machine_user" "default" {
  depends_on = [var.wait_on]

  org_id            = var.org_id
  user_name         = var.user_name
  name              = var.name
  description       = var.description
  with_secret       = var.with_secret
  access_token_type = var.access_token_type
}

output "installed" {
  value      = true
  depends_on = [zitadel_machine_user.default]
}

output "user_id" {
  value = zitadel_machine_user.default.id
}

output "client_id" {
  value     = zitadel_machine_user.default.client_id
  sensitive = true
}

output "client_secret" {
  value     = zitadel_machine_user.default.client_secret
  sensitive = true
}
@@ -1,33 +0,0 @@
variable "wait_on" {
  type        = any
  description = "Resources to wait on"
  default     = true
}

variable "org_id" {
  type        = string
  description = "Organisation Id"
}

variable "user_name" {
  type = string
}

variable "name" {
  type = string
}

variable "description" {
  type    = string
  default = null
}

variable "with_secret" {
  type    = bool
  default = false
}

variable "access_token_type" {
  type    = string
  default = "ACCESS_TOKEN_TYPE_JWT"
}
@@ -1,28 +0,0 @@
/**
 * Sets the roles as an additional claim in the token, with the roles as the value.
 *
 * The role claims of the token look like the following:
 *
 *   // added by the code below
 *   "groups": ["{roleName}", "{roleName}", ...],
 *
 * Flow: Complement token. Triggers: Pre Userinfo creation, Pre access token creation.
 *
 * @param ctx
 * @param api
 */
function groupsClaim(ctx, api) {
  if (ctx.v1.user.grants === undefined || ctx.v1.user.grants.count == 0) {
    return;
  }

  let grants = [];
  ctx.v1.user.grants.grants.forEach((claim) => {
    claim.roles.forEach((role) => {
      grants.push(role);
    });
  });

  api.v1.claims.setClaim("groups", grants);
  api.v1.claims.setClaim("scope", grants);
}
@@ -1,46 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

resource "zitadel_org" "default" {
  depends_on = [var.wait_on]
  name       = var.name
  is_default = true
}

// resource "zitadel_action" "groups-claim" {
//   org_id          = zitadel_org.default.id
//   name            = "groupsClaim"
//   script          = templatefile("${path.module}/groupsClaim.action.tftpl", {})
//   allowed_to_fail = true
//   timeout         = "10s"
// }
//
// resource "zitadel_trigger_actions" "groups-claim-pre-user-info" {
//   org_id       = zitadel_org.default.id
//   flow_type    = "FLOW_TYPE_CUSTOMISE_TOKEN"
//   trigger_type = "TRIGGER_TYPE_PRE_USERINFO_CREATION"
//   action_ids   = [zitadel_action.groups-claim.id]
// }
//
// resource "zitadel_trigger_actions" "groups-claim-pre-access-token" {
//   org_id       = zitadel_org.default.id
//   flow_type    = "FLOW_TYPE_CUSTOMISE_TOKEN"
//   trigger_type = "TRIGGER_TYPE_PRE_ACCESS_TOKEN_CREATION"
//   action_ids   = [zitadel_action.groups-claim.id]
// }

output "org_id" {
  value = zitadel_org.default.id
}

output "installed" {
  value      = true
  depends_on = [zitadel_org.default]
}
@@ -1,20 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

resource "zitadel_org_member" "default" {
  depends_on = [var.wait_on]
  org_id     = var.org_id
  user_id    = var.user_id
  roles      = ["ORG_OWNER"]
}

output "installed" {
  value      = true
  depends_on = [zitadel_org_member.default]
}
@@ -1,15 +0,0 @@
variable "wait_on" {
  type        = any
  description = "Resources to wait on"
  default     = true
}

variable "org_id" {
  type        = string
  description = "Zitadel Organization ID"
}

variable "user_id" {
  type        = string
  description = "Zitadel User ID"
}
@@ -1,11 +0,0 @@
variable "wait_on" {
  type        = any
  description = "Resources to wait on"
  default     = true
}

variable "name" {
  type        = string
  description = "Name of the tenant"
  default     = "fourlights"
}
@@ -1,31 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

resource "zitadel_human_user" "default" {
  depends_on = [var.wait_on]

  org_id = var.org_id

  email      = var.email
  user_name  = var.user_name
  first_name = var.first_name
  last_name  = var.last_name

  is_email_verified = true
  initial_password  = "Password1!"
}

output "installed" {
  value      = true
  depends_on = [zitadel_human_user.default]
}

output "user_id" {
  value = zitadel_human_user.default.id
}
@@ -1,26 +0,0 @@
variable "wait_on" {
  type        = any
  description = "Resources to wait on"
  default     = true
}

variable "org_id" {
  type        = string
  description = "Organisation Id"
}

variable "user_name" {
  type = string
}

variable "first_name" {
  type = string
}

variable "last_name" {
  type = string
}

variable "email" {
  type = string
}
@@ -1,76 +0,0 @@
zitadel:
  masterkeySecretName: "zitadel"
  configmapConfig:
    Log:
      Level: 'info'
    LogStore:
      Access:
        Stdout:
          Enabled: true
    ExternalSecure: true
    ExternalDomain: ${ service_uri }
    ExternalPort: 443
    TLS:
      Enabled: false
    FirstInstance:
      Org:
        Machine:
          Machine:
            Username: zitadel-admin-sa
            Name: Admin
          MachineKey:
            ExpirationDate: "2026-01-01T00:00:00Z"
            Type: 1
    Database:
      Postgres:
        Host: postgresql-hl.postgresql.svc.cluster.local
        Port: 5432
        Database: ${ database }
        MaxOpenConns: 20
        MaxIdleConns: 10
        MaxConnLifetime: 30m
        MaxConnIdleTime: 5m
        User:
          Username: ${ database_username }
          Password: "${ database_password }"
          SSL:
            Mode: disable
        %{ if database_root_username != null }Admin:
          Username: ${ database_root_username }
          Password: "${ database_root_password }"
          SSL:
            Mode: disable
        %{ endif }

readinessProbe:
  initialDelaySeconds: 5
  periodSeconds: 5
  failureThreshold: 10

startupProbe:
  periodSeconds: 5
  failureThreshold: 30

service:
  annotations:
    traefik.ingress.kubernetes.io/service.serversscheme: h2c

ingress:
  enabled: true
  className: traefik
  annotations:
    kubernetes.io/ingress.class: traefik
    traefik.ingress.kubernetes.io/router.entrypoints: web
    traefik.ingress.kubernetes.io/router.middlewares: default-preserve-host-headers@kubernetescrd
    %{ if display_on_homepage }gethomepage.dev/enabled: "true"
    gethomepage.dev/name: "Zitadel"
    gethomepage.dev/description: "Identity and Access Management"
    gethomepage.dev/group: "Tools"
    gethomepage.dev/icon: "zitadel.png"
    %{ endif }
  hosts:
    - host: ${service_uri}
      paths:
        - path: /
          pathType: Prefix
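A sketch of how this template might be fed to a Helm release; the helm_release wiring and chart repository here are assumptions, but the template variables match the declarations in the variables.tf that follows:

resource "helm_release" "zitadel" {
  name       = "zitadel"
  repository = "https://charts.zitadel.com" # assumed chart repository
  chart      = "zitadel"
  namespace  = var.namespace

  values = [
    templatefile("${path.module}/values.yaml.tftpl", {
      service_uri            = "${var.service_name}.${var.server_dns}"
      database               = var.database
      database_username      = var.database_username
      database_password      = var.database_password
      database_root_username = var.database_root_username
      database_root_password = var.database_root_password
      display_on_homepage    = var.display_on_homepage
    })
  ]
}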
@@ -1,61 +0,0 @@
variable "service_name" {
  type        = string
  description = "Name of the service"
  default     = "auth"
}

variable "server_dns" {
  type        = string
  description = "Domain for the server"
}

variable "k8s_config_yaml" {
  description = "Content of k8s config yaml file"
  type        = string
}

variable "wait_on" {
  type        = any
  description = "Resources to wait on"
  default     = true
}

variable "namespace" {
  type = string
}

variable "database" {
  type    = string
  default = "zitadel"
}

variable "database_username" {
  type    = string
  default = "zitadel"
}

variable "database_password" {
  type      = string
  sensitive = true
}

variable "database_root_username" {
  type    = string
  default = "postgres"
}

variable "database_root_password" {
  type      = string
  sensitive = true
  default   = null
}

variable "display_on_homepage" {
  type    = bool
  default = false
}

variable "enabled" {
  type    = bool
  default = true
}
@@ -1,56 +0,0 @@
resource "helm_release" "zot" {
  name             = "zot"
  repository       = "https://zotregistry.dev/helm-charts"
  chart            = "zot"
  namespace        = "registry"
  create_namespace = true

  values = [
    templatefile("${path.module}/values.yaml.tftpl", { service_uri = var.service_uri })
  ]
}

resource "kubernetes_manifest" "traefik_middleware_request_body" {
  depends_on = [helm_release.zot]
  manifest = {
    apiVersion = "traefik.io/v1alpha1"
    kind       = "Middleware"
    metadata = {
      name      = "request-body"
      namespace = "registry"
    }
    spec = {
      buffering = {
        maxRequestBodyBytes = 0
      }
    }
  }
}

resource "kubernetes_manifest" "traefik_middleware_request_timeouts" {
  depends_on = [helm_release.zot]
  manifest = {
    apiVersion = "traefik.io/v1alpha1"
    kind       = "Middleware"
    metadata = {
      name      = "request-timeouts"
      namespace = "registry"
    }
    spec = {
      headers = {
        customRequestHeaders = {
          "X-Forwarded-Timeout-Read"  = "3600s"
          "X-Forwarded-Timeout-Write" = "3600s"
        }
      }
    }
  }
}

output "installed" {
  value = true
  depends_on = [
    kubernetes_manifest.traefik_middleware_request_body, kubernetes_manifest.traefik_middleware_request_timeouts,
    helm_release.zot
  ]
}
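A minimal sketch of calling this module from a cluster configuration; the module path and registry host below are hypothetical, and service_uri is the module's only input:

module "zot" {
  source = "../../modules/zot" # hypothetical path

  service_uri = "registry.venus.fourlights.dev" # hypothetical registry host
}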
@@ -1,38 +0,0 @@
ingress:
  enabled: true
  className: "traefik"
  annotations:
    traefik.ingress.kubernetes.io/router.entrypoints: web
    traefik.ingress.kubernetes.io/router.middlewares: registry-request-body@kubernetescrd,registry-request-timeouts@kubernetescrd,default-preserve-host-headers@kubernetescrd
    gethomepage.dev/enabled: "true"
    gethomepage.dev/name: "Registry"
    gethomepage.dev/description: "OCI Registry"
    gethomepage.dev/group: "Tools"
    gethomepage.dev/icon: "docker.png"
  hosts:
    - host: ${ service_uri }
      paths:
        - path: /
persistence: true
pvc:
  create: true
  name: zot
  accessMode: "ReadWriteOnce"
  storage: 8Gi
service:
  type: ClusterIP
  port: 5000
mountConfig: true
configFiles:
  config.json: |-
    {
      "storage": { "rootDirectory": "/var/lib/registry" },
      "http": { "address": "0.0.0.0", "port": "5000" },
      "log": { "level": "error" },
      "extensions": {
        "scrub": {
          "enable": true,
          "interval": "12h"
        }
      }
    }
@@ -1 +0,0 @@
variable "service_uri" { type = string }
@@ -1,81 +0,0 @@
locals {
  name = "365Zon"
}

resource "kubernetes_namespace" "tenant" {
  depends_on = [var.wait_on]

  metadata {
    name = lower(local.name)
  }

  lifecycle {
    ignore_changes = [metadata]
  }
}

module "bootstrap-zitadel" {
  source = "./zitadel"

  namespace = kubernetes_namespace.tenant.metadata[0].name
  org_id    = var.org_id
  user_id   = var.user_id
  name      = local.name
}

// create uploads bucket in minio
module "minio" {
  source = "../../modules/minio/tenant"

  access_key = var.minio_access_key
  secret_key = var.minio_secret_key
  server     = var.minio_server

  name = "365zon"
}

// create minio secret
resource "kubernetes_secret" "storage" {
  metadata {
    name      = "storage"
    namespace = kubernetes_namespace.tenant.metadata[0].name
  }

  data = {
    Storage__AccountName  = module.minio.access_key
    Storage__AccountKey   = module.minio.secret_key
    Storage__BlobUri      = var.minio_api_uri
    Storage__S3BucketName = module.minio.bucket
  }
}

resource "kubernetes_secret" "connection_strings" {
  metadata {
    name      = "connection-strings"
    namespace = kubernetes_namespace.tenant.metadata[0].name
  }

  data = {
    ConnectionStrings__DocumentDb = var.mongodb_connection_string
    ConnectionStrings__ServiceBus = var.rabbitmq_connection_string
  }
}

// Okay, so now we have the identity stuff in order, and we have secrets to use for it.
// Next, we need to set up:
// - the wildcard TLS (*.365zon.venus.fourlights.dev)
// - ArgoCD for all relevant apps
//
output "minio_access_key" {
  value     = module.minio.access_key
  sensitive = true
}

output "minio_secret_key" {
  value     = module.minio.secret_key
  sensitive = true
}

output "minio_bucket" {
  value = module.minio.bucket
}
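The double-underscore keys (Storage__AccountName, ConnectionStrings__DocumentDb) follow the .NET configuration convention where __ stands in for the ':' section separator, so exposing the secrets as environment variables is enough for the apps to pick them up. A hypothetical consumer, sketched with the kubernetes provider (workload name and image are made up):

resource "kubernetes_deployment" "api" {
  metadata {
    name      = "api" # hypothetical workload
    namespace = kubernetes_namespace.tenant.metadata[0].name
  }

  spec {
    selector {
      match_labels = { app = "api" }
    }

    template {
      metadata {
        labels = { app = "api" }
      }

      spec {
        container {
          name  = "api"
          image = "registry.example/365zon/api" # hypothetical image

          # Surface both secrets as environment variables.
          env_from {
            secret_ref { name = kubernetes_secret.storage.metadata[0].name }
          }
          env_from {
            secret_ref { name = kubernetes_secret.connection_strings.metadata[0].name }
          }
        }
      }
    }
  }
}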
@@ -1,50 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

provider "zitadel" {
  domain           = var.domain
  insecure         = "false"
  jwt_profile_file = var.jwt_profile_file
}

locals {
  k8s_config_path = format("%s/%s", path.root, "../kubeconfig")
  k8s_config_yaml = file(local.k8s_config_path)
  k8s_config      = yamldecode(local.k8s_config_yaml)
  k8s_host        = local.k8s_config.clusters[0].cluster.server
  k8s_auth = try(
    {
      token       = local.k8s_config.users[0].user.token
      using_token = true
    },
    {
      client_certificate = base64decode(local.k8s_config.users[0].user["client-certificate-data"])
      client_key         = base64decode(local.k8s_config.users[0].user["client-key-data"])
      using_token        = false
    }
  )
}

provider "kubernetes" {
  host     = local.k8s_host
  insecure = true

  token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
  client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
  client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}

provider "helm" {
  kubernetes {
    host     = local.k8s_host
    insecure = true

    token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
    client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
    client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
  }
}
@@ -1,26 +0,0 @@
variable "domain" { type = string }
variable "jwt_profile_file" { type = string }
variable "org_id" { type = string }
variable "user_id" { type = string }
variable "minio_access_key" {
  type      = string
  sensitive = true
}
variable "minio_secret_key" {
  type      = string
  sensitive = true
}
variable "minio_api_uri" { type = string }
variable "minio_server" { type = string }
variable "mongodb_connection_string" {
  type      = string
  sensitive = true
}
variable "rabbitmq_connection_string" {
  type      = string
  sensitive = true
}
variable "wait_on" {
  type    = any
  default = true
}
@@ -1,153 +0,0 @@
locals {
  tld         = "fourlights.dev"
  cluster_dns = "venus.${local.tld}"
  domain      = "zitadel.${local.cluster_dns}"
  org_domain  = "fourlights.${local.domain}"
}

module "zitadel_project" {
  source = "../../../modules/zitadel/project"

  org_id = var.org_id
  name   = var.name
  owners = [var.user_id]
}

// TODO: add action for setting roles as scopes

module "zitadel_project_operator_roles" {
  source = "../../../modules/zitadel/project/roles"

  wait_on    = module.zitadel_project.installed
  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  group      = "Operator"
  roles = [
    "manage:profiles", "manage:contacts", "manage:addresses", "manage:enquiries", "manage:flowstates",
    "manage:flowevents", "manage:files", "manage:brands"
  ]
}

module "zitadel_project_configurator_roles" {
  source  = "../../../modules/zitadel/project/roles"
  wait_on = module.zitadel_project_operator_roles.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  group      = "Configurator"
  roles = [
    "manage:flows"
  ]
}

module "zitadel_project_developer_roles" {
  source  = "../../../modules/zitadel/project/roles"
  wait_on = module.zitadel_project_configurator_roles.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  group      = "Developer"
  roles = [
    "manage:jobs", "manage:infrastructure"
  ]
}

module "zitadel_project_user_grant" {
  source     = "../../../modules/zitadel/project/user-grant"
  wait_on    = module.zitadel_project_developer_roles.installed
  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  user_id    = var.user_id
  roles      = concat(module.zitadel_project_developer_roles.roles, module.zitadel_project_configurator_roles.roles, module.zitadel_project_operator_roles.roles)
}

// TODO: Move External (and 365zon Push service account) to own project
// TODO: Add grant for external project
// TODO: Add read roles

module "zitadel_project_application_core" {
  source  = "../../../modules/zitadel/api-m2m-swagger"
  wait_on = module.zitadel_project_user_grant.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id

  name           = "Core"
  zitadel_domain = local.domain
  cluster_domain = local.cluster_dns

  namespace = var.namespace
  project   = var.name

  service_account = false
  roles           = []
}

module "zitadel_project_application_salesforce" {
  source  = "../../../modules/zitadel/api-m2m-swagger"
  wait_on = module.zitadel_project_application_core.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id

  name           = "Salesforce"
  zitadel_domain = local.domain
  cluster_domain = local.cluster_dns

  namespace = var.namespace
  project   = var.name

  roles = module.zitadel_project_operator_roles.roles
}

module "zitadel_project_application_external" {
  source  = "../../../modules/zitadel/api-m2m-swagger"
  wait_on = module.zitadel_project_application_salesforce.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id

  name           = "External"
  zitadel_domain = local.domain
  cluster_domain = local.cluster_dns

  namespace = var.namespace
  project   = var.name

  roles = module.zitadel_project_operator_roles.roles
}

module "zitadel_project_application_module_internal" {
  source  = "../../../modules/zitadel/api-m2m-swagger"
  wait_on = module.zitadel_project_application_external.installed

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id

  name           = "Internal"
  zitadel_domain = local.domain
  cluster_domain = local.cluster_dns

  namespace = var.namespace
  project   = var.name

  roles = module.zitadel_project_operator_roles.roles
}

// TODO: Application for Front-End (implicit, authorization_code, refresh_token)
// TODO: Update API applications with callback apiDomain/swagger/oauth2-redirect.html to allow logging in for Swagger (and probably Hangfire?)
// TODO: Put all the relevant secrets into a secret manager
// TODO: Set up OpenTelemetry and update the appinsights bits to use that.

output "org_id" {
  value = var.org_id
}

output "project_id" {
  value = module.zitadel_project.project_id
}

output "installed" {
  value      = true
  depends_on = [module.zitadel_project_application_external.installed]
}
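The wait_on/installed pairing that threads through these modules is a hand-rolled cross-module depends_on: each module exposes an installed output whose depends_on only resolves once its resources exist, and the next module takes that value as its wait_on input. Reduced to its skeleton (the resource arguments here are placeholders):

variable "wait_on" {
  type    = any
  default = true
}

resource "zitadel_project" "default" {
  depends_on = [var.wait_on] # nothing starts until the upstream module finished

  org_id = "0"       # placeholder
  name   = "example" # placeholder
}

output "installed" {
  value      = true
  depends_on = [zitadel_project.default]
}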
@@ -1,8 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}
@@ -1,15 +0,0 @@
variable "org_id" {
  type = string
}

variable "user_id" {
  type = string
}

variable "namespace" {
  type = string
}

variable "name" {
  type = string
}
@@ -1,28 +0,0 @@
/**
 * Sets the roles as an additional claim in the token, with the roles as the value.
 *
 * The role claims of the token look like the following:
 *
 *   // added by the code below
 *   "groups": ["{roleName}", "{roleName}", ...],
 *
 * Flow: Complement token. Triggers: Pre Userinfo creation, Pre access token creation.
 *
 * @param ctx
 * @param api
 */
function groupsClaim(ctx, api) {
  if (ctx.v1.user.grants === undefined || ctx.v1.user.grants.count == 0) {
    return;
  }

  let grants = [];
  ctx.v1.user.grants.grants.forEach((claim) => {
    claim.roles.forEach((role) => {
      grants.push(role);
    });
  });

  api.v1.claims.setClaim("groups", grants);
  api.v1.claims.setClaim("scope", grants);
}
@@ -1,113 +0,0 @@
locals {
  argocd_uri = "https://${var.argocd_service_domain}"
}

module "zitadel_project" {
  source = "../../../modules/zitadel/project"

  org_id = var.org_id
  name   = var.name
  owners = [var.user_id]
}

module "zitadel_project_roles_user" {
  source = "../../../modules/zitadel/project/roles"

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  group      = "Users"
  roles      = ["user"]
}

module "zitadel_project_roles_admin" {
  source = "../../../modules/zitadel/project/roles"

  org_id     = var.org_id
  project_id = module.zitadel_project.project_id
  group      = "Admins"
  roles      = ["admin"]
}

module "zitadel_application_argocd" {
  source = "../../../modules/zitadel/project/application/web"

  name       = "ArgoCD"
  org_id     = var.org_id
  project_id = module.zitadel_project.project_id

  redirect_uris             = ["${local.argocd_uri}/api/dex/callback"]
  post_logout_redirect_uris = [local.argocd_uri]

  auth_method_type            = "OIDC_AUTH_METHOD_TYPE_BASIC"
  id_token_role_assertion     = true
  id_token_userinfo_assertion = true
}

resource "zitadel_action" "groups-claim" {
  org_id          = var.org_id
  name            = "groupsClaim"
  script          = templatefile("${path.module}/groupsClaim.action.tftpl", {})
  allowed_to_fail = true
  timeout         = "10s"
}

resource "zitadel_trigger_actions" "groups-claim-pre-user-info" {
  org_id       = var.org_id
  flow_type    = "FLOW_TYPE_CUSTOMISE_TOKEN"
  trigger_type = "TRIGGER_TYPE_PRE_USERINFO_CREATION"
  action_ids   = [zitadel_action.groups-claim.id]
}

resource "zitadel_trigger_actions" "groups-claim-pre-access-token" {
  org_id       = var.org_id
  flow_type    = "FLOW_TYPE_CUSTOMISE_TOKEN"
  trigger_type = "TRIGGER_TYPE_PRE_ACCESS_TOKEN_CREATION"
  action_ids   = [zitadel_action.groups-claim.id]
}

module "zitadel_project_user_grant" {
  source = "../../../modules/zitadel/project/user-grant"

  org_id = var.org_id

  project_id = module.zitadel_project.project_id
  user_id    = var.user_id

  roles = module.zitadel_project_roles_admin.roles
}

output "client_id" {
  value = module.zitadel_application_argocd.client_id
}

output "client_secret" {
  value = module.zitadel_application_argocd.client_secret
}

output "scopes" {
  value = ["openid", "profile", "email", "groups"]
}

output "logoutSuffix" {
  value = "oidc/v1/end_session"
}

output "user_roles" {
  value = module.zitadel_project_roles_user.roles
}

output "admin_roles" {
  value = module.zitadel_project_roles_admin.roles
}

output "project_id" {
  value = module.zitadel_project.project_id
}

output "installed" {
  value = true
  depends_on = [
    module.zitadel_project_user_grant.installed,
    zitadel_trigger_actions.groups-claim-pre-access-token, zitadel_trigger_actions.groups-claim-pre-user-info
  ]
}
@@ -1,14 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

provider "zitadel" {
  domain           = var.domain
  insecure         = "false"
  jwt_profile_file = var.jwt_profile_file
}
@@ -1,17 +0,0 @@
variable "org_id" {
  type = string
}

variable "user_id" {
  type = string
}

variable "name" {
  type    = string
  default = "argocd"
}

variable "domain" { type = string }
variable "jwt_profile_file" { type = string }

variable "argocd_service_domain" { type = string }
@@ -1,54 +0,0 @@
module "zitadel-tenant" {
  source = "../../../modules/zitadel/tenant"

  name = "fourlights"
}

module "zitadel-idp-google" {
  source  = "../../../modules/zitadel/identity-provider/google"
  wait_on = module.zitadel-tenant.installed

  org_id        = module.zitadel-tenant.org_id
  client_id     = "783390190667-quvko2l2kr9ksgeo3pn6pn6t8c1mai9n.apps.googleusercontent.com"
  client_secret = "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"

  domain = var.domain

  options = {
    scopes              = ["openid", "profile", "email"]
    is_auto_creation    = true
    is_auto_update      = true
    is_creation_allowed = true
    is_linking_allowed  = true

    auto_linking = "AUTO_LINKING_OPTION_USERNAME"
  }
}

module "zitadel-user" {
  source  = "../../../modules/zitadel/user"
  wait_on = module.zitadel-tenant.installed

  org_id = module.zitadel-tenant.org_id

  first_name = "Thomas"
  last_name  = "Rijpstra"
  user_name  = "thomas@fourlights.nl"
  email      = "thomas@fourlights.nl"
}

module "zitadel-org-owner" {
  source  = "../../../modules/zitadel/tenant/role-owner"
  wait_on = module.zitadel-user.installed

  org_id  = module.zitadel-tenant.org_id
  user_id = module.zitadel-user.user_id
}

output "org_id" {
  value = module.zitadel-tenant.org_id
}

output "user_id" {
  value = module.zitadel-user.user_id
}
@@ -1,50 +0,0 @@
terraform {
  required_providers {
    zitadel = {
      source  = "zitadel/zitadel"
      version = "2.0.2"
    }
  }
}

provider "zitadel" {
  domain           = var.domain
  insecure         = "false"
  jwt_profile_file = var.jwt_profile_file
}

locals {
  k8s_config_path = format("%s/%s", path.root, "../kubeconfig")
  k8s_config_yaml = file(local.k8s_config_path)
  k8s_config      = yamldecode(local.k8s_config_yaml)
  k8s_host        = local.k8s_config.clusters[0].cluster.server
  k8s_auth = try(
    {
      token       = local.k8s_config.users[0].user.token
      using_token = true
    },
    {
      client_certificate = base64decode(local.k8s_config.users[0].user["client-certificate-data"])
      client_key         = base64decode(local.k8s_config.users[0].user["client-key-data"])
      using_token        = false
    }
  )
}

provider "kubernetes" {
  host     = local.k8s_host
  insecure = true

  token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
  client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
  client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}

provider "helm" {
  kubernetes {
    host     = local.k8s_host
    insecure = true

    token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
    client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
    client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
  }
}
@@ -1,2 +0,0 @@
variable "domain" { type = string }
variable "jwt_profile_file" { type = string }
@@ -1,2 +0,0 @@
kubeconfig
*.lock.hcl

Binary file not shown.
@@ -0,0 +1,19 @@
apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJlRENDQVIyZ0F3SUJBZ0lCQURBS0JnZ3Foa2pPUFFRREFqQWpNU0V3SHdZRFZRUUREQmhyTTNNdGMyVnkKZG1WeUxXTmhRREUzTXprM09ESTROelF3SGhjTk1qVXdNakUzTURrd01URTBXaGNOTXpVd01qRTFNRGt3TVRFMApXakFqTVNFd0h3WURWUVFEREJock0zTXRjMlZ5ZG1WeUxXTmhRREUzTXprM09ESTROelF3V1RBVEJnY3Foa2pPClBRSUJCZ2dxaGtqT1BRTUJCd05DQUFUWVNEV1Jwbmd6TE5ySGphTmhqdmM1SU82a2dibVpwaER4WVROTG11MjAKaWxaQnZLRlZRdW5kV3ZEQ1VrcGJNRjNsOTRuSmxaYVByK3lDSnJpVVh0UjZvMEl3UURBT0JnTlZIUThCQWY4RQpCQU1DQXFRd0R3WURWUjBUQVFIL0JBVXdBd0VCL3pBZEJnTlZIUTRFRmdRVVQ5bVZxTGcvSFBCUS91L3MzbHAwCjhJQ0RDc013Q2dZSUtvWkl6ajBFQXdJRFNRQXdSZ0loQUpjMkJkMjd0SzNZTFpwa01yOFNMSEIvbngzd1E1MU0KRnRaYnBNVzJudVNXQWlFQTMyUmcyVHZNQW9LYll5bnhySkk3U3g5eWszZHFsSWd5TW15d2M5d1JicmM9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
    server: https://10.110.36.47:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJrakNDQVRlZ0F3SUJBZ0lJZFh2OWlXRHR6SE13Q2dZSUtvWkl6ajBFQXdJd0l6RWhNQjhHQTFVRUF3d1kKYXpOekxXTnNhV1Z1ZEMxallVQXhOek01TnpneU9EYzBNQjRYRFRJMU1ESXhOekE1TURFeE5Gb1hEVEkyTURJeApOekE1TURFeE5Gb3dNREVYTUJVR0ExVUVDaE1PYzNsemRHVnRPbTFoYzNSbGNuTXhGVEFUQmdOVkJBTVRESE41CmMzUmxiVHBoWkcxcGJqQlpNQk1HQnlxR1NNNDlBZ0VHQ0NxR1NNNDlBd0VIQTBJQUJKNlNVZm5ESVJndVRDMjkKaWFjVTdTM3VPWkw1RERGZjJPQi9IakdTWEErQlRGaE5VOGtMSHBxZlZYeWVKbHNkd09mR1QvL2JQbENsWFYvdQowc0wyTW5halNEQkdNQTRHQTFVZER3RUIvd1FFQXdJRm9EQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFmCkJnTlZIU01FR0RBV2dCUXdoZkJDTWRocVpXMW96WlEzZG84d1VYOEpCREFLQmdncWhrak9QUVFEQWdOSkFEQkcKQWlFQXczSFpKY1cwaGI3ZUwxSktvcTJ2cExFaFVxVncxRG1oTGJtcUNQTVdmcEFDSVFDRkhXcDhoTTNMdTROTgpGUnYxc2pkYS93VjdmSVpUcUsyZHVNOUNPQVc5emc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCi0tLS0tQkVHSU4gQ0VSVElGSUNBVEUtLS0tLQpNSUlCZHpDQ0FSMmdBd0lCQWdJQkFEQUtCZ2dxaGtqT1BRUURBakFqTVNFd0h3WURWUVFEREJock0zTXRZMnhwClpXNTBMV05oUURFM016azNPREk0TnpRd0hoY05NalV3TWpFM01Ea3dNVEUwV2hjTk16VXdNakUxTURrd01URTAKV2pBak1TRXdId1lEVlFRRERCaHJNM010WTJ4cFpXNTBMV05oUURFM016azNPREk0TnpRd1dUQVRCZ2NxaGtqTwpQUUlCQmdncWhrak9QUU1CQndOQ0FBUjJCcXE5cVhESmZGeVQ1VVpEY3Z6SHVPdDg2TEZ5WTlDb1oxL0xxeldGClZMdHVQYUFXc3BUdUtZckJieTRZRlBQQlQ1M0RkS1F5cjhhWG5HUDRWenlxbzBJd1FEQU9CZ05WSFE4QkFmOEUKQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVVNSVh3UWpIWWFtVnRhTTJVTjNhUApNRkYvQ1FRd0NnWUlLb1pJemowRUF3SURTQUF3UlFJZ1lmS01YQ3lFelBmM05wN3paLzVYTnFxeTdjTDBpMXBWCkpjZzNzYmtMbXB3Q0lRRDlzYVpmekswRlUrNWljWFpLZmUyVFg0WW5sNS96aFVGR2FHb2RTb1ovUXc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    client-key-data: LS0tLS1CRUdJTiBFQyBQUklWQVRFIEtFWS0tLS0tCk1IY0NBUUVFSUtlQVpqUzhNM1ZBd2l6cWo0UDN6RURuQmNaYldrcDJPekt2VlNpUSs0azRvQW9HQ0NxR1NNNDkKQXdFSG9VUURRZ0FFbnBKUitjTWhHQzVNTGIySnB4VHRMZTQ1a3ZrTU1WL1k0SDhlTVpKY0Q0Rk1XRTFUeVFzZQptcDlWZko0bVd4M0E1OFpQLzlzK1VLVmRYKzdTd3ZZeWRnPT0KLS0tLS1FTkQgRUMgUFJJVkFURSBLRVktLS0tLQo=
@ -1,372 +1,143 @@
|
||||||
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-write
|
#!/usr/bin/env -S deno run --allow-run --allow-read --allow-write
|
||||||
|
|
||||||
// Note: TypeScript errors related to Deno imports and namespace can be safely ignored
|
|
||||||
// These are only relevant when running the script with the Deno runtime
|
|
||||||
import { Command } from "https://deno.land/x/cliffy@v1.0.0-rc.4/command/mod.ts";
|
import { Command } from "https://deno.land/x/cliffy@v1.0.0-rc.4/command/mod.ts";
|
||||||
import { delay } from "https://deno.land/std/async/mod.ts";
|
import { delay } from "https://deno.land/std/async/mod.ts";
|
||||||
import { exists } from "https://deno.land/std/fs/mod.ts";
|
|
||||||
|
|
||||||
// Configuration constants
|
const alpineImage = "alpine/edge/cloud"
|
||||||
const alpineImage = "alpine/edge/cloud";
|
const alpineConfig = ['--profile', 'cloud-init-alpine']
|
||||||
const alpineConfig = ['--profile', 'cloud-init-alpine'];
|
const archImage = "archlinux/current/cloud"
|
||||||
const archImage = "archlinux/current/cloud";
|
const archConfig = ['--profile', 'cloud-init-arch']
|
||||||
const archConfig = ['--profile', 'cloud-init-arch'];
|
|
||||||
|
|
||||||
const getIp = (i: number) => `10.110.36.${109 + i}`;
|
|
||||||
|
|
||||||
const image = archImage;
|
const image = archImage
|
||||||
const config = archConfig;
|
const config = archConfig
|
||||||
|
|
||||||
// Enhanced logging function with timestamps and log levels
|
const findIP4 = (name: string, nodeList: any) => {
|
||||||
const log = {
|
const ip4 = nodeList?.find((n) => n.name === name)?.state?.network?.eth0?.addresses?.find((n) => n.family === 'inet')?.address;
|
||||||
debug: (message: string) => console.log(`[${new Date().toISOString()}] [DEBUG] ${message}`),
|
return ip4;
|
||||||
info: (message: string) => console.log(`[${new Date().toISOString()}] [INFO] ${message}`),
|
|
||||||
success: (message: string) => console.log(`[${new Date().toISOString()}] [SUCCESS] ✅ ${message}`),
|
|
||||||
warning: (message: string) => console.log(`[${new Date().toISOString()}] [WARNING] ⚠️ ${message}`),
|
|
||||||
error: (message: string) => console.error(`[${new Date().toISOString()}] [ERROR] ❌ ${message}`),
|
|
||||||
skip: (message: string) => console.log(`[${new Date().toISOString()}] [SKIP] ⏩ ${message}`),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Helper function to execute commands with proper error handling
|
|
||||||
async function executeCommand(
|
|
||||||
cmdArray: string[],
|
|
||||||
description: string,
|
|
||||||
options: {
|
|
||||||
stdout?: "piped" | "inherit" | "null",
|
|
||||||
stderr?: "piped" | "inherit" | "null",
|
|
||||||
throwOnError?: boolean
|
|
||||||
} = {stdout: 'piped', stderr: 'piped', throwOnError: true}
|
|
||||||
): Promise<{ success: boolean; output?: string; error?: string }> {
|
|
||||||
const {stdout = "piped", stderr = "piped", throwOnError = true} = options;
|
|
||||||
|
|
||||||
log.debug(`Executing: ${cmdArray.join(" ")}`);
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Use Deno.Command API which is the modern replacement for Deno.run
|
|
||||||
const command = new Deno.Command(cmdArray[0], {
|
|
||||||
args: cmdArray.slice(1),
|
|
||||||
stdout: stdout === "piped" ? "piped" : stdout === "inherit" ? "inherit" : "null",
|
|
||||||
stderr: stderr === "piped" ? "piped" : stderr === "inherit" ? "inherit" : "null",
|
|
||||||
});
|
|
||||||
|
|
||||||
const {code, stdout: stdoutOutput, stderr: stderrOutput} = await command.output();
|
|
||||||
|
|
||||||
const stdoutText = stdout === "piped" ? new TextDecoder().decode(stdoutOutput).trim() : "";
|
|
||||||
const stderrText = stderr === "piped" ? new TextDecoder().decode(stderrOutput).trim() : "";
|
|
||||||
|
|
||||||
if (code !== 0) {
|
|
||||||
log.error(`Failed to ${description}: ${stderrText || "Unknown error"}`);
|
|
||||||
if (throwOnError) {
|
|
||||||
throw new Error(`Command failed: ${cmdArray.join(" ")}\n${stderrText}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: code === 0,
|
|
||||||
output: stdoutText,
|
|
||||||
error: stderrText
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
||||||
log.error(`Exception while ${description}: ${errorMessage}`);
|
|
||||||
if (throwOnError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
return {success: false, error: errorMessage};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if VM is ready for SSH connections
|
const setupCluster = async (numMasters: number) => {
|
||||||
async function isVmReadyForSsh(ip: string, user: string, sshKeyPath: string, maxAttempts = 30): Promise<boolean> {
|
const hostname = await Deno.run({
|
||||||
log.info(`Checking if VM at ${ip} is ready for SSH connections...`);
|
cmd: ["hostnamectl", "hostname"],
|
||||||
|
stdout: "piped",
|
||||||
|
}).output().then((output) => new TextDecoder().decode(output).trim());
|
||||||
|
|
||||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
const user = await Deno.run({
|
||||||
log.debug(`SSH readiness check attempt ${attempt}/${maxAttempts}`);
|
cmd: ["whoami"],
|
||||||
|
stdout: "piped",
|
||||||
const {success} = await executeCommand(
|
}).output().then((output) => new TextDecoder().decode(output).trim());
|
||||||
["ssh", "-o", "StrictHostKeyChecking=no", "-o", "ConnectTimeout=5", `${user}@${ip}`, "-i", sshKeyPath, "echo", "ready"],
|
|
||||||
`check SSH connectivity to ${ip}`,
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (success) {
|
|
||||||
log.success(`VM at ${ip} is ready for SSH connections`);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
log.debug(`VM at ${ip} not ready yet, waiting...`);
|
|
||||||
await delay(2000); // Wait 2 seconds between attempts
|
|
||||||
}
|
|
||||||
|
|
||||||
log.error(`VM at ${ip} is not ready for SSH connections after ${maxAttempts} attempts`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if VM is running
|
|
||||||
async function isVmRunning(vmName: string): Promise<boolean> {
|
|
||||||
const {success, output} = await executeCommand(
|
|
||||||
["incus", "list", vmName, "--format", "json"],
|
|
||||||
`check if VM ${vmName} is running`,
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!success || !output) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const vmInfo = JSON.parse(output);
|
|
||||||
return vmInfo.length > 0 && vmInfo[0].status === "Running";
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
||||||
log.error(`Failed to parse VM status: ${errorMessage}`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Cleanup function to handle failures
|
|
||||||
async function cleanup(vmNames: string[], shouldRemove = false): Promise<void> {
|
|
||||||
log.info("Starting cleanup process...");
|
|
||||||
|
|
||||||
return;
|
|
||||||
|
|
||||||
for (const vmName of vmNames) {
|
|
||||||
// Check if VM exists
|
|
||||||
const {success, output} = await executeCommand(
|
|
||||||
["incus", "list", vmName, "--format", "csv"],
|
|
||||||
`check if VM ${vmName} exists`,
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (success && output) {
|
|
||||||
// Stop VM if it's running
|
|
||||||
const isRunning = await isVmRunning(vmName);
|
|
||||||
if (isRunning) {
|
|
||||||
log.info(`Stopping VM ${vmName}...`);
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "stop", vmName, "--force"],
|
|
||||||
`stop VM ${vmName}`,
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove VM if requested
|
|
||||||
if (shouldRemove) {
|
|
||||||
log.info(`Removing VM ${vmName}...`);
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "delete", vmName],
|
|
||||||
`remove VM ${vmName}`,
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log.success("Cleanup completed");
|
|
||||||
}
|
|
||||||
|
|
||||||
const setupCluster = async (numMasters: number, forceCleanup = false) => {
|
|
||||||
log.info(`Starting setup of k3s cluster with ${numMasters} master nodes`);
|
|
||||||
|
|
||||||
const createdVMs: string[] = [];
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Get hostname and user
|
|
||||||
const {output: hostname} = await executeCommand(
|
|
||||||
["hostnamectl", "hostname"],
|
|
||||||
"get hostname"
|
|
||||||
);
|
|
||||||
|
|
||||||
const {output: user} = await executeCommand(
|
|
||||||
["whoami"],
|
|
||||||
"get current user"
|
|
||||||
);
|
|
||||||
|
|
||||||
const sshKeyPubFileName = `/home/${user}/.ssh/nl.fourlights.${hostname}.pub`;
|
const sshKeyPubFileName = `/home/${user}/.ssh/nl.fourlights.${hostname}.pub`;
|
||||||
const sshKeyPrivateFileName = `/home/${user}/.ssh/nl.fourlights.${hostname}`;
|
const sshKeyPrivateFileName = `/home/${user}/.ssh/nl.fourlights.${hostname}`;
|
||||||
|
|
||||||
// Check if SSH keys exist
|
|
||||||
if (!await exists(sshKeyPubFileName) || !await exists(sshKeyPrivateFileName)) {
|
|
||||||
log.error(`Required SSH keys not found: ${sshKeyPubFileName} or ${sshKeyPrivateFileName}`);
|
|
||||||
throw new Error("SSH keys not found");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Step 1: Create Low-Resource Profile (if not exists)
|
// Step 1: Create Low-Resource Profile (if not exists)
|
||||||
const {success: profileExists} = await executeCommand(
|
const profileExists = await Deno.run({
|
||||||
["incus", "profile", "show", "low-resource"],
|
cmd: ["incus", "profile", "show", "low-resource"],
|
||||||
"check if low-resource profile exists",
|
stdout: "null",
|
||||||
{throwOnError: false}
|
stderr: "null",
|
||||||
);
|
}).status().then((status) => status.success);
|
||||||
|
|
||||||
if (!profileExists) {
|
if (!profileExists) {
|
||||||
log.info("Creating low-resource profile...");
|
await Deno.run({
|
||||||
await executeCommand(
|
cmd: ["incus", "profile", "create", "low-resource"],
|
||||||
["incus", "profile", "create", "low-resource"],
|
}).status();
|
||||||
"create low-resource profile"
|
await Deno.run({
|
||||||
);
|
cmd: ["incus", "profile", "set", "low-resource", "limits.cpu=1", "limits.memory=512MB"],
|
||||||
await executeCommand(
|
}).status();
|
||||||
["incus", "profile", "set", "low-resource", "limits.cpu=1", "limits.memory=512MB"],
|
await Deno.run({
|
||||||
"set low-resource profile limits"
|
cmd: ["incus", "profile", "device", "add", "low-resource", "root", "disk", "pool=default", "path=/"],
|
||||||
);
|
}).status();
|
||||||
await executeCommand(
|
await Deno.run({
|
||||||
["incus", "profile", "device", "add", "low-resource", "root", "disk", "pool=default", "path=/"],
|
cmd: ["incus", "profile", "device", "add", "low-resource", "eth-0", "nic", "network=incusbr0"],
|
||||||
"add root disk to low-resource profile"
|
}).status();
|
||||||
);
|
console.log("✅ Low-resource profile created.");
|
||||||
// await executeCommand(
|
|
||||||
// ["incus", "profile", "device", "add", "low-resource", "eth-0", "nic", "network=incusbr0"],
|
|
||||||
// "add network interface to low-resource profile"
|
|
||||||
// );
|
|
||||||
log.success("Low-resource profile created");
|
|
||||||
} else {
|
} else {
|
||||||
log.skip("Low-resource profile already exists");
|
console.log("⏩ Low-resource profile already exists.");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read SSH key
|
|
||||||
const sshKey = await Deno.readTextFile(sshKeyPubFileName);
|
const sshKey = await Deno.readTextFile(sshKeyPubFileName);
|
||||||
|
|
||||||
// Step 3: Launch VMs (if not already running)
|
// Step 3: Launch VMs (if not already running)
|
||||||
for (let i = 1; i <= numMasters; i++) {
|
for (let i = 1; i <= numMasters; i++) {
|
||||||
const vmName = `k3s-master${i}`;
|
const vmName = `k3s-master${i}`;
|
||||||
|
const vmExists = await Deno.run({
|
||||||
|
cmd: ["incus", "list", vmName, "--format", "csv"],
|
||||||
|
stdout: "piped",
|
||||||
|
}).output().then((output) => new TextDecoder().decode(output).trim() !== "");
|
||||||
|
|
||||||
const {success: vmExists, output: vmOutput} = await executeCommand(
|
if (!vmExists) {
|
||||||
["incus", "list", vmName, "--format", "csv"],
|
await Deno.run({
|
||||||
`check if VM ${vmName} exists`,
|
cmd: ["incus", "launch", `images:${image}`, vmName, "--profile", "low-resource", "-c", "user.timezone=\"Europe/Amsterdam\"", "-c", `user.ssh_key=\"${sshKey}\"`, ...config],
|
||||||
{throwOnError: false}
|
}).status();
|
||||||
);
|
console.log(`✅ VM ${vmName} launched.`);
|
||||||
|
|
||||||
if (!vmExists || !vmOutput) {
|
|
||||||
log.info(`Creating VM ${vmName}...`);
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "init", `images:${image}`, vmName, "--profile", "low-resource", "-c", "user.timezone=\"Europe/Amsterdam\"", "-c", `user.ssh_key=\"${sshKey}\"`, ...config],
|
|
||||||
`initialize VM ${vmName}`
|
|
||||||
);
|
|
||||||
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "config", 'device', 'add', vmName, 'eth0', 'nic', 'nictype=bridged', 'parent=incusbr0', `ipv4.address=${getIp(i)}`],
|
|
||||||
`configure network for VM ${vmName}`
|
|
||||||
);
|
|
||||||
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "start", vmName],
|
|
||||||
`start VM ${vmName}`
|
|
||||||
);
|
|
||||||
|
|
||||||
createdVMs.push(vmName);
|
|
||||||
log.success(`VM ${vmName} started`);
|
|
||||||
} else {
|
} else {
|
||||||
// Check if VM is running, if not, start it
|
console.log(`⏩ VM ${vmName} already exists.`);
|
||||||
const isRunning = await isVmRunning(vmName);
|
|
||||||
if (!isRunning) {
|
|
||||||
log.info(`Starting existing VM ${vmName}...`);
|
|
||||||
await executeCommand(
|
|
||||||
["incus", "start", vmName],
|
|
||||||
`start VM ${vmName}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
log.skip(`VM ${vmName} already exists`);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Step 4: Install k3sup (if not installed)
|
// Step 4: Install k3sup (if not installed)
|
||||||
const {success: k3supInstalled} = await executeCommand(
|
const k3supInstalled = await Deno.run({
|
||||||
["which", "k3sup"],
|
cmd: ["which", "k3sup"],
|
||||||
"check if k3sup is installed",
|
stdout: "null",
|
||||||
{throwOnError: false}
|
stderr: "null",
|
||||||
);
|
}).status().then((status) => status.success);
|
||||||
|
|
||||||
if (!k3supInstalled) {
|
if (!k3supInstalled) {
|
||||||
log.info("Installing k3sup...");
|
await Deno.run({
|
||||||
await executeCommand(
|
cmd: ["sh", "-c", "curl -sLS https://get.k3sup.dev | sh"],
|
||||||
["sh", "-c", "curl -sLS https://get.k3sup.dev | sh"],
|
}).status();
|
||||||
"install k3sup"
|
console.log("✅ k3sup installed.");
|
||||||
);
|
|
||||||
log.success("k3sup installed");
|
|
||||||
} else {
|
} else {
|
||||||
log.skip("k3sup already installed");
|
console.log("⏩ k3sup already installed.");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Step 5: Wait for VMs to be ready
|
// Step 5: Bootstrap First Master Node (if not already bootstrapped)
|
||||||
const firstMasterIP = getIp(1);
|
let firstMasterIP;
|
||||||
log.info(`Waiting for first master node (${firstMasterIP}) to be ready...`);
|
let nodes;
|
||||||
|
while (firstMasterIP === undefined) {
|
||||||
const vmReady = await isVmReadyForSsh(firstMasterIP, "picard", sshKeyPrivateFileName);
|
nodes = await Deno.run({
|
||||||
if (!vmReady) {
|
cmd: ["incus", "list", "--format", "json"],
|
||||||
throw new Error(`First master node at ${firstMasterIP} is not ready for SSH connections`);
|
stdout: "piped",
|
||||||
|
}).output().then((output) => JSON.parse(new TextDecoder().decode(output)));
|
||||||
|
firstMasterIP = findIP4('k3s-master1', nodes)
|
||||||
|
await delay(1000)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if kubeconfig exists
|
const kubeconfigExists = await Deno.stat("./kubeconfig").then(() => true).catch(() => false);
|
||||||
const kubeconfigExists = await exists("./kubeconfig");
|
|
||||||
|
|
||||||
if (!kubeconfigExists) {
|
if (!kubeconfigExists) {
|
||||||
log.info("Bootstrapping first master node...");
|
await Deno.run({
|
||||||
await executeCommand(
|
cmd: ["k3sup", "install", "--ip", firstMasterIP, "--user", "picard", "--cluster", "--ssh-key", sshKeyPrivateFileName],
|
||||||
["k3sup", "install", "--ip", firstMasterIP, "--user", "picard", "--cluster", "--ssh-key", sshKeyPrivateFileName],
|
}).status();
|
||||||
"bootstrap first master node"
|
console.log("✅ First master node bootstrapped.");
|
||||||
);
|
|
||||||
log.success("First master node bootstrapped");
|
|
||||||
} else {
|
} else {
|
||||||
log.skip("First master node already bootstrapped");
|
console.log("⏩ First master node already bootstrapped.");
|
||||||
}
|
}
|
||||||
|
|
||||||
     // Step 6: Join Additional Master Nodes (if not already joined)
     for (let i = 2; i <= numMasters; i++) {
       const vmName = `k3s-master${i}`;
-      const vmIP = getIp(i);
+      const vmIP = findIP4(vmName, nodes)

-      // Wait for VM to be ready
-      log.info(`Waiting for ${vmName} (${vmIP}) to be ready...`);
-      const nodeReady = await isVmReadyForSsh(vmIP, "picard", sshKeyPrivateFileName);
-      if (!nodeReady) {
-        log.warning(`VM ${vmName} is not ready for SSH connections, skipping join operation`);
-        continue;
-      }
-
-      const {success: joined} = await executeCommand(
-        ["kubectl", "--kubeconfig=./kubeconfig", "get", "nodes", vmName],
-        `check if ${vmName} has joined the cluster`,
-        {throwOnError: false}
-      );
+      const joined = await Deno.run({
+        cmd: ["kubectl", "get", "nodes", vmName],
+        stdout: "null",
+        stderr: "null",
+      }).status().then((status) => status.success);

       if (!joined) {
-        log.info(`Joining ${vmName} to the cluster...`);
-        await executeCommand(
-          ["k3sup", "join", "--server", "--ip", vmIP, "--server-ip", firstMasterIP, "--user", "picard", "--ssh-key", sshKeyPrivateFileName],
-          `join ${vmName} to the cluster`
-        );
-        log.success(`VM ${vmName} joined the cluster`);
+        await Deno.run({
+          cmd: ["k3sup", "join", "--ip", vmIP, "--server-ip", firstMasterIP, "--user", "picard", "--ssh-key", sshKeyPrivateFileName],
+        }).status();
+        console.log(`✅ VM ${vmName} joined the cluster.`);
       } else {
-        log.skip(`VM ${vmName} already joined the cluster`);
+        console.log(`⏩ VM ${vmName} already joined the cluster.`);
       }
     }

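Two behavioural notes on this hunk. First, k3sup join enrols the target as an agent unless --server is passed, so the outgoing command joined k3s-master2..n as additional control-plane servers while the incoming one joins them as workers. Second, Deno.run is deprecated and was removed in Deno 2; the same join probe with the replacement API, using an example vmName:

// The idempotency probe rewritten with Deno.Command instead of the
// deprecated Deno.run; vmName is an illustrative value.
const vmName = "k3s-master2";
const joined = (
  await new Deno.Command("kubectl", {
    args: ["get", "nodes", vmName],
    stdout: "null",
    stderr: "null",
  }).output()
).success;
console.log(joined ? `${vmName} already joined` : `${vmName} has not joined yet`);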
log.success("HA k3s cluster setup complete! 🚀");
|
console.log("🚀 HA k3s cluster setup complete!");
|
||||||
|
|
||||||
// Verify cluster status
|
|
||||||
log.info("Verifying cluster status...");
|
|
||||||
const {success: clusterVerified, output: nodesOutput} = await executeCommand(
|
|
||||||
["kubectl", "--kubeconfig=./kubeconfig", "get", "nodes", "-o", "wide"],
|
|
||||||
"verify cluster nodes",
|
|
||||||
{throwOnError: false}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (clusterVerified) {
|
|
||||||
log.info("Cluster nodes:");
|
|
||||||
console.log(nodesOutput);
|
|
||||||
} else {
|
|
||||||
log.warning("Could not verify cluster status");
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
||||||
log.error(`Failed to set up cluster: ${errorMessage}`);
|
|
||||||
|
|
||||||
if (createdVMs.length > 0) {
|
|
||||||
log.warning("An error occurred during setup. Cleaning up created resources...");
|
|
||||||
await cleanup(createdVMs, forceCleanup);
|
|
||||||
}
|
|
||||||
|
|
||||||
Deno.exit(1);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
await new Command()
|
await new Command()
|
||||||
|
|
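The removed error path records every VM it creates in createdVMs and rolls back by deleting them when setup fails. The cleanup helper is defined outside this hunk; a sketch under the assumption that it shells out to incus delete, whose --force flag removes instances that are still running:

// Hypothetical cleanup() matching the removed call site
// `await cleanup(createdVMs, forceCleanup)`; the real helper may differ.
async function cleanup(vms: string[], force: boolean): Promise<void> {
  for (const vm of vms) {
    const args = ["delete", vm, ...(force ? ["--force"] : [])];
    const { success } = await new Deno.Command("incus", { args }).output();
    if (!success) {
      console.error(`Failed to delete ${vm}; try: incus delete --force ${vm}`);
    }
  }
}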
@@ -374,6 +145,5 @@ await new Command()
   .version("0.1.0")
   .description("Automate the setup of an HA k3s cluster using incus and k3sup")
   .option("-m, --masters <numMasters:number>", "Number of master nodes", {default: 3})
-  .option("-c, --cleanup", "Force cleanup of VMs if setup fails", {default: false})
-  .action(({masters, cleanup}) => setupCluster(masters, cleanup))
+  .action(({masters}) => setupCluster(masters))
   .parse(Deno.args);
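The Command builder with typed options like <numMasters:number> matches Cliffy, the common CLI framework for Deno scripts; the import below is an assumed example pin, not necessarily what the repository uses. With the outgoing flags, and assuming the entrypoint is called setup.ts, an invocation would look like: deno run -A setup.ts --masters 3 --cleanup

// Assumed import for the Command builder (Cliffy; example version pin):
import { Command } from "https://deno.land/x/cliffy@v1.0.0-rc.3/command/mod.ts";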
@@ -0,0 +1,77 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/argoproj-labs/argocd" {
+  version     = "7.0.2"
+  constraints = "7.0.2"
+  hashes = [
+    "h1:4lbS20EczuzhSNSOjp1mJoe2YbcXniBTzxmJHd+rjIE=",
+    "zh:083686eaeaa7b51ebaac42c3c7b01a15f020a735dc8dbe50aa6a6bff16888943",
+    "zh:16b1b813f33874844fadc747c57ae99cf8f119c119b3776a105c154fc4a54488",
+    "zh:25ed8dca5da5faa52392c7938c61dd9a83bc6388ad771062cecfc15c44bc3d8e",
+    "zh:3907351bbcb6a0c1c1abeb33dac5d70f798b0ecc05559f2ede40ae84b9079983",
+    "zh:3a737237f03b9b28de26b1fe9d20bcfa53f580489fc28d774396e5de38906fd3",
+    "zh:64421961cc342cec8280899352444a96ad1b09144fa933dc3a0dfb9bbae809a9",
+    "zh:9702119789cc42b98dc9d1a8d7666b608a964cf1355e3cf500b82bed1898f2fd",
+    "zh:9cc9ad41a6ce25aac40b9dd2291fc4d90a223add197155decdca7d2d82fc60f1",
+    "zh:a239381a36bf6041d6520c8db83fb281fd2417f4540c895e07db052dd108a72f",
+    "zh:ecca66064fff07719eec2ef35cd62d1cb65cf4a11f9ce96f3a9b9b7c78d614a5",
+  ]
+}
+
+provider "registry.terraform.io/hashicorp/helm" {
+  version = "2.17.0"
+  hashes = [
+    "h1:K5FEjxvDnxb1JF1kG1xr8J3pNGxoaR3Z0IBG9Csm/Is=",
+    "zh:06fb4e9932f0afc1904d2279e6e99353c2ddac0d765305ce90519af410706bd4",
+    "zh:104eccfc781fc868da3c7fec4385ad14ed183eb985c96331a1a937ac79c2d1a7",
+    "zh:129345c82359837bb3f0070ce4891ec232697052f7d5ccf61d43d818912cf5f3",
+    "zh:3956187ec239f4045975b35e8c30741f701aa494c386aaa04ebabffe7749f81c",
+    "zh:66a9686d92a6b3ec43de3ca3fde60ef3d89fb76259ed3313ca4eb9bb8c13b7dd",
+    "zh:88644260090aa621e7e8083585c468c8dd5e09a3c01a432fb05da5c4623af940",
+    "zh:a248f650d174a883b32c5b94f9e725f4057e623b00f171936dcdcc840fad0b3e",
+    "zh:aa498c1f1ab93be5c8fbf6d48af51dc6ef0f10b2ea88d67bcb9f02d1d80d3930",
+    "zh:bf01e0f2ec2468c53596e027d376532a2d30feb72b0b5b810334d043109ae32f",
+    "zh:c46fa84cc8388e5ca87eb575a534ebcf68819c5a5724142998b487cb11246654",
+    "zh:d0c0f15ffc115c0965cbfe5c81f18c2e114113e7a1e6829f6bfd879ce5744fbb",
+    "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
+  ]
+}
+
+provider "registry.terraform.io/hashicorp/kubernetes" {
+  version = "2.35.1"
+  hashes = [
+    "h1:Av0Wk8g2XjY2oap7nyWNHEgfCRfphdJvrkqJjEM2ZKM=",
+    "zh:12212ca5ae47823ce14bfafb909eeb6861faf1e2435fb2fc4a8b334b3544b5f5",
+    "zh:3f49b3d77182df06b225ab266667de69681c2e75d296867eb2cf06a8f8db768c",
+    "zh:40832494d19f8a2b3cd0c18b80294d0b23ef6b82f6f6897b5fe00248a9997460",
+    "zh:739a5ddea61a77925ee7006a29c8717377a2e9d0a79a0bbd98738d92eec12c0d",
+    "zh:a02b472021753627c5c39447a56d125a32214c29ff9108fc499f2dcdf4f1cc4f",
+    "zh:b78865b3867065aa266d6758c9601a2756741478f5735a838c20d633d65e085b",
+    "zh:d362e87464683f5632790e66920ea803adb54c2bc0cb24b6fd9a314d2b1efffd",
+    "zh:d98206fe88c2c9a52b8d2d0cb2c877c812a4a51d19f9d8428e63cbd5fd8a304d",
+    "zh:dfa320946b1ce3f3615c42b3447a28dc9f604c06d8b9a6fe289855ab2ade4d11",
+    "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
+    "zh:fc1debd2e695b5222d2ccc8b24dab65baba4ee2418ecce944e64d42e79474cb5",
+    "zh:fdaf960443720a238c09e519aeb30faf74f027ac5d1e0a309c3b326888e031d7",
+  ]
+}
+
+provider "registry.terraform.io/hashicorp/random" {
+  version = "3.6.3"
+  hashes = [
+    "h1:Fnaec9vA8sZ8BXVlN3Xn9Jz3zghSETIKg7ch8oXhxno=",
+    "zh:04ceb65210251339f07cd4611885d242cd4d0c7306e86dda9785396807c00451",
+    "zh:448f56199f3e99ff75d5c0afacae867ee795e4dfda6cb5f8e3b2a72ec3583dd8",
+    "zh:4b4c11ccfba7319e901df2dac836b1ae8f12185e37249e8d870ee10bb87a13fe",
+    "zh:4fa45c44c0de582c2edb8a2e054f55124520c16a39b2dfc0355929063b6395b1",
+    "zh:588508280501a06259e023b0695f6a18149a3816d259655c424d068982cbdd36",
+    "zh:737c4d99a87d2a4d1ac0a54a73d2cb62974ccb2edbd234f333abd079a32ebc9e",
+    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+    "zh:a357ab512e5ebc6d1fda1382503109766e21bbfdfaa9ccda43d313c122069b30",
+    "zh:c51bfb15e7d52cc1a2eaec2a903ac2aff15d162c172b1b4c17675190e8147615",
+    "zh:e0951ee6fa9df90433728b96381fb867e3db98f66f735e0c3e24f8f16903f0ad",
+    "zh:e3cdcb4e73740621dabd82ee6a37d6cfce7fee2a03d8074df65086760f5cf556",
+    "zh:eff58323099f1bd9a0bec7cb04f717e7f1b2774c7d612bf7581797e1622613a0",
+  ]
+}
@@ -1,80 +1,9 @@
 locals {
   tld          = "fourlights.dev"
   cluster_dns  = "venus.${local.tld}"
+  bridge_dns   = "bridge.${local.cluster_dns}"
   is_installed = true
-  node_count   = 1
-}
-
-resource "kubernetes_manifest" "traefik-helm-config" {
-  manifest = {
-    apiVersion = "helm.cattle.io/v1"
-    kind       = "HelmChartConfig"
-    metadata = {
-      name      = "traefik"
-      namespace = "kube-system"
-    }
-    spec = {
-      valuesContent = <<EOF
-serversTransport:
-  forwardingTimeouts:
-    dialTimeout: 0
-    responseHeaderTimeout: 0
-    idleConnTimeout: 0
-logs:
-  general:
-    level: ERROR
-ports:
-  web:
-    proxyProtocol:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
-    forwardedHeaders:
-      trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
-transport:
-  respondingTimeouts:
-    writeTimeout: 0
-    idleTimeout: 0
-    readTimeout: 0
-EOF
-    }
-  }
-}
-
-resource "kubernetes_manifest" "preserve-host-middleware" {
-  depends_on = [local.is_installed]
-  manifest = {
-    apiVersion = "traefik.io/v1alpha1"
-    kind       = "Middleware"
-    metadata = {
-      name      = "preserve-host-headers"
-      namespace = "default" # NOTE: Hardcoded by design
-    }
-    spec = {
-      headers = {
-        customRequestHeaders = {
-          "X-Forwarded-Proto" = "https"
-          "X-Forwarded-Port"  = "443"
-        }
-      }
-    }
-  }
-}
-
-resource "kubernetes_manifest" "https-redirect-middleware" {
-  depends_on = [local.is_installed]
-  manifest = {
-    apiVersion = "traefik.io/v1alpha1"
-    kind       = "Middleware"
-    metadata = {
-      name      = "redirect-to-https"
-      namespace = "default" # NOTE: Hardcoded by design
-    }
-    spec = {
-      redirectScheme = {
-        permanent = true
-        scheme    = "https"
-      }
-    }
-  }
+  node_count   = 3
 }

 module "homepage" {
@@ -103,8 +32,6 @@ module "minio" {
   admin        = true
   ingressClass = "traefik"
   storageSize  = "10Gi"
-
-  displayOnHomepage = true
 }

 module "mongodb" {
@@ -130,138 +57,3 @@ module "rabbitmq" {
   admin        = true
   ingressClass = "traefik"
 }
-
-module "postgresql" {
-  source = "../../infra/modules/postgresql"
-
-  namespace       = "postgresql"
-  k8s_config_yaml = local.k8s_config_yaml
-  username        = "bridge"
-}
-
-module "zitadel-db" {
-  source  = "../../infra/modules/postgresql/tenant"
-  wait_on = module.postgresql.installed
-
-  name            = "zitadel"
-  root_password   = module.postgresql.root_password
-  k8s_config_yaml = local.k8s_config_yaml
-}
-
-module "zitadel" {
-  source          = "../../infra/modules/zitadel"
-  wait_on         = module.zitadel-db.installed
-  k8s_config_yaml = local.k8s_config_yaml
-
-  server_dns = local.cluster_dns
-
-  service_name = "zitadel"
-  namespace    = "zitadel"
-
-  database_password      = module.zitadel-db.password
-  database_root_password = module.postgresql.root_password
-
-  display_on_homepage = true
-}
-
-module "zitadel-bootstrap" {
-  source = "../../infra/tenants/fourlights/zitadel"
-
-  domain           = module.zitadel.server
-  jwt_profile_file = module.zitadel.jwt_profile_file
-}
-
-
-module "redis" {
-  source = "../../infra/modules/redis"
-
-  namespace       = "redis"
-  k8s_config_yaml = local.k8s_config_yaml
-}
-
-module "tenant-365zon" {
-  source = "../../infra/tenants/365zon"
-
-  wait_on = module.minio.installed
-
-  org_id           = module.zitadel-bootstrap.org_id
-  user_id          = module.zitadel-bootstrap.user_id
-  domain           = module.zitadel.server
-  jwt_profile_file = module.zitadel.jwt_profile_file
-
-  minio_access_key = module.minio.minio_access_key
-  minio_secret_key = module.minio.minio_secret_key
-  minio_server     = module.minio.minio_server
-  minio_api_uri    = module.minio.minio_api_uri
-
-  mongodb_connection_string  = module.mongodb.connection_string
-  rabbitmq_connection_string = module.rabbitmq.connection_string
-}
-
-module "zitadel-argocd" {
-  source = "../../infra/tenants/argocd/zitadel"
-
-  org_id           = module.zitadel-bootstrap.org_id
-  user_id          = module.zitadel-bootstrap.user_id
-  domain           = module.zitadel.server
-  jwt_profile_file = module.zitadel.jwt_profile_file
-
-  argocd_service_domain = "argocd.${local.cluster_dns}"
-}
-
-module "argocd" {
-  source  = "../../infra/modules/argocd"
-  wait_on = module.zitadel-argocd.installed
-
-  namespace       = "argocd"
-  k8s_config_yaml = local.k8s_config_yaml
-
-  redis_db_start_index = 0
-  redis_password       = module.redis.password
-  server_dns           = local.cluster_dns
-
-  oauth_uri           = module.zitadel.server
-  oauth_client_id     = module.zitadel-argocd.client_id
-  oauth_client_secret = module.zitadel-argocd.client_secret
-  oauth_redirect_uri  = "https://${module.zitadel.server}/${module.zitadel-argocd.logoutSuffix}"
-  oauth_issuer        = "https://${module.zitadel.server}"
-}
-
-/*
-argocd project
-*/
-
-output "argocd-root-password" {
-  value     = module.argocd.admin_password
-  sensitive = true
-}
-
-output "mongodb-connection-string" {
-  value     = module.mongodb.connection_string
-  sensitive = true
-}
-
-output "rabbitmq-connection-string" {
-  value     = module.rabbitmq.connection_string
-  sensitive = true
-}
-
-output "minio-access-key" {
-  value     = module.tenant-365zon.minio_access_key
-  sensitive = true
-}
-
-output "minio-secret-key" {
-  value     = module.tenant-365zon.minio_secret_key
-  sensitive = true
-}
-
-output "minio-root-access-key" {
-  value     = module.minio.minio_access_key
-  sensitive = true
-}
-
-output "minio-root-secret-key" {
-  value     = module.minio.minio_secret_key
-  sensitive = true
-}
@@ -1 +0,0 @@
{"type":"serviceaccount","keyId":"313768085818048552","key":"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAtdNVdOOD80x5NDMusGpdfWvo91N9MnOETO0RLhhyrSRyO6vo\nfxKD68nKK/SwUpq8dl9vNCzBFqOsbz2tRPp3jeV+6YdMwGgnQQxEVpOfRftd0718\nycQcaWIauuU3xuyA+rj74CWjlg+R9b5dWbo2p/2Ey+ygO60LWrRhDWnNslRpbPco\nrw2StHnPqpORQLn0Oj7qyns6lSeq20hwWJu9+IbTYrWPA0HnO7WZZBUgukX0prai\n4edAZJfwoo7UJKH6ZETvQbCVuA8UMHqLbUB0U4se9+d+OuotoghVCMH3HAcVh/7A\ndQvTVa6ix6DiXjtqtQytt1+fRL7bCcntRn1kyQIDAQABAoIBAC3wPRERQo8/7QeB\nPvSXixNbJjsGvwT2JqEA7GxHBQI1yR7GajFgzi/OhePhKINWUPNfXUtDW22K4NAi\nNxrMZVRWfWAnLP8X0YMfxExTc9RMlAIhR9v6TmtZvAMoUpVRv6yY/Bo/qDsLqAb8\nl71JzPFYniqfmEQ7jjjWhgbLiorZVyZsRSBuaTYIqJccbq/zZ/O+D3xXdSEwbOri\nxPhqjsWQz6q3jxcc3FAAmzxEMwFBwx8pbocVUyCn43LifLjuXk831SMg6l9Q5mCd\nEi7UYXkZzcPtrdo3mg682FEsSna7VFUlBBl/kEXdSvuGRMZZfEYsx8TrI524sDe7\nPxN2LgECgYEAwHjNlgBox3kMg6uO5USQo5CzCHMOQhsIITwax353LPrsuhYssFUS\nvMyrSlZDvw87xuZEVEhUDuvMQlUOcCvNjHvstt1HPB3hku4KlZrgRUBmgsVGi2sG\nlAxczrp/1qenVU3z1Y2u6DCTM3ASPyb4smYvLDRgm1aTD8VY2pAfaaECgYEA8dbz\nUAEeagH6Au6VQ6404ZrjrrokzsjjLUzuhRah/zlpKWqaOEfldSCEHu0sZSPG20T/\nt2KhKqNzpTsSv6H0QmhD3k1/b42Sr2bu8WbKbQTOeuY64TA341PS7vGzVh5iXN5H\nAo0D5hUoiFhPlXI5Xzpo5sDy8pnX18DREnwnOikCgYBouNHTDcH01m3yrkN/hwDT\nngVrUX6uhRq1SoifhrW9KYHn9ey2vHMHeqywM6OErvstS1heK0RhIfbvGGxUp+Cc\n0UiIbnk1wmRbl1z27V+dDl84Q7IQZVkc8GUGrf6kgm/PQCytQvuppdRRpmanKcMi\n/eoouQ7fNgmqCQxBTy1oQQKBgAoFuneqVDRYeJ/+ezke0xo2bREkrbnUIXYTJh9l\n3LjTDESnIlUKxbug6VjOw3Q9k1Qq+94BvGQj+frzA8flUlYeNBHWbF2XJGkYfvaK\nKAF0nYoCWJZUhTxqkOdOJPyArdrja1XzuiGi5tcfscyjuzTzGr3VaTwcnBFY8FEv\nzABhAoGBAJDlWEJYN94VWkbYpA0ak4CopZONaK3zo+LPnd8RteRF1Bb7nbVLxoWK\n3JyRdjKBGgZHjJVjeVvfHZd2RcH/toKsJ8Oj3ImfMFLlBmx6C8nzLVvIKCjmGpRV\nVISNo5nUrEn9/9vghSlWTSiV/jDm4ExPqn4am6xNyscjNo8aPiNo\n-----END RSA PRIVATE KEY-----\n","expirationDate":"2026-01-01T00:00:00Z","userId":"313768085817983016"}