Compare commits

...

9 Commits

Author SHA1 Message Date
Thomas Rijpstra 344baa16aa
wip: wuadlets 2025-10-03 14:11:03 +02:00
Thomas Rijpstra b801f8f201
chore(terraform): remove helm provider submodule 2025-09-22 12:01:20 +02:00
Thomas Rijpstra 24dd89d071
feat: add provisioner support with incus and lxc
Introduced Provisioner enum type and refactored VM setup logic to support different provisioners like incus and lxc. Added bootstrap function for incus.
2025-06-20 09:28:18 +02:00
Thomas Rijpstra 426f5d9a21
WIP 2025-06-18 12:52:38 +02:00
Thomas Rijpstra 4abf8bc6a9
wip 2025-03-07 19:12:54 +01:00
Thomas Rijpstra d726c0f4a3
WIP 2025-03-06 19:46:23 +01:00
Thomas Rijpstra d348029f3b
WIP 2025-03-01 16:24:28 +01:00
Thomas Rijpstra df5c5b9a8e
WIP 2025-02-22 11:55:35 +01:00
Thomas Rijpstra 8d6cd81788
WIP 2025-02-22 11:55:14 +01:00
144 changed files with 29014 additions and 665 deletions

View File

@ -1,8 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="TemplateDataLanguageMappings">
<file url="file://$PROJECT_DIR$/infra/modules/argocd/values.yaml" dialect="yaml" />
<file url="file://$PROJECT_DIR$/infra/modules/fusionauth/values.yaml" dialect="yaml" />
<file url="file://$PROJECT_DIR$/infra/modules/mongodb/values.yaml" dialect="yaml" />
<file url="file://$PROJECT_DIR$/infra/modules/monitoring/monitoring-values.yaml.tftpl" dialect="TFTPL" />
<file url="file://$PROJECT_DIR$/infra/modules/zot/values.yaml.tftpl" dialect="TFTPL" />
</component>
</project>

View File

@ -2,5 +2,10 @@
<project version="4">
<component name="TerraformProjectSettings">
<option name="toolPath" value="/usr/bin/terraform" />
<option name="ignoredTemplateCandidatePaths">
<set>
<option value="file://$PROJECT_DIR$/infra/modules/monitoring/monitoring-values.yaml.tftpl" />
</set>
</option>
</component>
</project>

View File

@ -2,5 +2,6 @@
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$/shuttles/terraform/terraform-provider-helm" vcs="Git" />
</component>
</project>

View File

@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AutoImportSettings">
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="ChangeListManager">
<list default="true" id="009bc178-658e-4c81-9bb8-8d7bf6b8cbc6" name="Changes" comment="">
<change beforePath="$PROJECT_DIR$/../infra/clusters/app-365zon/.terraform/terraform.tfstate" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/clusters/app-365zon/.terraform/terraform.tfstate" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/clusters/app-365zon/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/clusters/app-365zon/main.tf" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/modules/mijn-365zon-nl/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/mijn-365zon-nl/main.tf" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/modules/minio/main.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/main.tf" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/modules/minio/values.yaml.tftpl" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/values.yaml.tftpl" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/modules/minio/variables.tf" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/minio/variables.tf" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../infra/modules/mongodb/values.yaml" beforeDir="false" afterPath="$PROJECT_DIR$/../infra/modules/mongodb/values.yaml" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/.." />
</component>
<component name="ProjectColorInfo">{
&quot;associatedIndex&quot;: 1
}</component>
<component name="ProjectId" id="2oqTXEtODybqnAKfjaqPi9uslRP" />
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.git.unshallow": "true",
"git-widget-placeholder": "main",
"last_opened_file_path": "/home/lamelos/Projects/fourlights/devops",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="SharedIndexes">
<attachedChunks>
<set>
<option value="bundled-js-predefined-d6986cc7102b-deb605915726-JavaScript-WS-243.22562.222" />
</set>
</attachedChunks>
</component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="009bc178-658e-4c81-9bb8-8d7bf6b8cbc6" name="Changes" comment="" />
<created>1731596143702</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1731596143702</updated>
<workItem from="1731596144788" duration="1417000" />
<workItem from="1736261138378" duration="1228000" />
<workItem from="1736775177111" duration="7000" />
</task>
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
</project>

View File

@ -2,22 +2,22 @@
# Manual edits may be lost in future updates.
provider "registry.terraform.io/aminueza/minio" {
version = "2.5.1"
constraints = "~> 2.5.0"
version = "3.3.0"
constraints = "~> 3.3.0"
hashes = [
"h1:03gfmXf78G9h9XCHwavPwAwCjg1xmQIp4e5aAv6xIbI=",
"zh:0710a1fcd8e3501237990344160b0193860c2e643e73c728bf832e3d3fde971a",
"zh:0b2f25fbb59d056299faec7fb09012ef0545bd25e7ffa55a04a5c10c28908713",
"zh:0e0179fe12c855bcf5dbcf6858373eaa6e9dd790010096a3fcc667a23224388d",
"zh:23f6118cefb2fae443de98197490a9ba56fa51f1e324d1811709e0fdfc22ed7d",
"zh:34875cbaf07fbed8b8c639f38146f19188e57fc2eac4cdeac638b3d675b82ad4",
"zh:5b0fc4934533557af0001630801e9e637ab0e1588fd086f0cd04a52f4a13474f",
"zh:5d8eda5611ce4017688694e566c00609508a2c3a0e9aa587f6a58dcd1cb9846c",
"zh:70855ab6327a1b79b0619d0ed3538513f98fdfadae6fe60e986dbbf2891151f8",
"zh:7330d66c56a67a4c36f2fc2f1d7042503f5b4d0ec66a9bbe2b72920fb56b85de",
"zh:764597f7be92426cd63f7ae82d2845a1f2677d2b86921f19facf93fdbb80f503",
"zh:7dd947c72366377a16adc7bf0c0d09c32ade09dcedbcbf411da057ca970fb9e8",
"zh:9db57839cdc1d667271d5589ca4d9e791b665c0248e37c9ccdc79c0cef39aaed",
"h1:apkVsmgFVWd1jpCMnPR1Kd8WJB2UkYRiS2kc1Meefz8=",
"zh:0c0ac1602465eaeb7045410a8ad22ee6eb82233f7bfda78bb07c58d3697bf62b",
"zh:26a097f7523b222bb7808b76ec9fdac8c5974e440dc2438d16ef8fa4562bf297",
"zh:47ade5b7a7ce2755291e0e4ae2125298bef682190a9917a4ca384edb23a338f4",
"zh:74769c5e1615b3fc930b9acc2f966dd7c053c31146d1eca19666bee8a7512088",
"zh:8b4a72b05ce50f41b6b1c8f2cd692509c0814f2d5fb4869a98e42b9eb22430f3",
"zh:8bcb1c844ab14b780c4547834d4f7755e4f2ac643f8061e8cfaa98becc6a78b5",
"zh:8e44bae37b1f984908f427d64154c090136b6e8b8e0c1229df7b03945e59509d",
"zh:974bba8e806aa3265ddc558657f93b0465877a8687f691d366dd34a90e059f97",
"zh:a5d029fb41b6e0f1f4d742e326918e725f3ebd57cb5170fdb39f82a26ce52d1c",
"zh:a7a405574406ff525105880ca7b86614b5bced701f74b7e748d8e2976b5880cd",
"zh:f83cf18fd194129ca06dfa3fc5bc8b951df71fa04cdbcb0bf651277f06c17d5f",
"zh:fa2eefadf213ad934c75bc1679408ad4b765ff86853b473f837ad83b68a44c77",
]
}
@ -39,78 +39,183 @@ provider "registry.terraform.io/argoproj-labs/argocd" {
]
}
provider "registry.terraform.io/hashicorp/helm" {
version = "2.16.1"
provider "registry.terraform.io/hashicorp/external" {
version = "2.3.5"
hashes = [
"h1:TerRBdq69SxIWg3ET2VE0bcP0BYRIWZOp1QxXj/14Fk=",
"zh:0003f6719a32aee9afaeeb001687fc0cfc8c2d5f54861298cf1dc5711f3b4e65",
"zh:16cd5bfee09e7bb081b8b4470f31a9af508e52220fd97fd81c6dda725d9422fe",
"zh:51817de8fdc2c2e36785f23fbf4ec022111bd1cf7679498c16ad0ad7471c16db",
"zh:51b95829b2873be40a65809294bffe349e40cfccc3ff6fee0f471d01770e0ebd",
"zh:56b158dde897c47e1460181fc472c3e920aa23db40579fdc2aad333c1456d2dd",
"zh:916641d26c386959eb982e680028aa677b787687ef7c1283241e45620bc8df50",
"zh:aec15ca8605babba77b283f2ca35daca53e006d567e1c3a3daf50497035b820b",
"zh:c2cecf710b87c8f3a4d186da2ea12cf08041f97ae0c6db82649720d6ed929d65",
"zh:dbdd96f17aea25c7db2d516ab8172a5e683c6686c72a1a44173d2fe96319be39",
"zh:de11e180368434a796b1ab6f20fde7554dc74f7800e063b8e4c8ec3a86d0be63",
"h1:smKSos4zs57pJjQrNuvGBpSWth2el9SgePPbPHo0aps=",
"zh:6e89509d056091266532fa64de8c06950010498adf9070bf6ff85bc485a82562",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:86868aec05b58dc0aa1904646a2c26b9367d69b890c9ad70c33c0d3aa7b1485a",
"zh:a2ce38fda83a62fa5fb5a70e6ca8453b168575feb3459fa39803f6f40bd42154",
"zh:a6c72798f4a9a36d1d1433c0372006cc9b904e8cfd60a2ae03ac5b7d2abd2398",
"zh:a8a3141d2fc71c86bf7f3c13b0b3be8a1b0f0144a47572a15af4dfafc051e28a",
"zh:aa20a1242eb97445ad26ebcfb9babf2cd675bdb81cac5f989268ebefa4ef278c",
"zh:b58a22445fb8804e933dcf835ab06c29a0f33148dce61316814783ee7f4e4332",
"zh:cb5626a661ee761e0576defb2a2d75230a3244799d380864f3089c66e99d0dcc",
"zh:d1acb00d20445f682c4e705c965e5220530209c95609194c2dc39324f3d4fcce",
"zh:d91a254ba77b69a29d8eae8ed0e9367cbf0ea6ac1a85b58e190f8cb096a40871",
"zh:f6592327673c9f85cdb6f20336faef240abae7621b834f189c4a62276ea5db41",
]
}
provider "registry.terraform.io/hashicorp/helm" {
version = "3.0.2"
constraints = ">= 2.0.0"
hashes = [
"h1:+tHGl509bhyUrvvj9GQTBsdK+ImHJnRuo6ppDZPavqY=",
"zh:2778de76c7dfb2e85c75fe6de3c11172a25551ed499bfb9e9f940a5be81167b0",
"zh:3b4c436a41e4fbae5f152852a9bd5c97db4460af384e26977477a40adf036690",
"zh:617a372f5bb2288f3faf5fd4c878a68bf08541cf418a3dbb8a19bc41ad4a0bf2",
"zh:84de431479548c96cb61c495278e320f361e80ab4f8835a5425ece24a9b6d310",
"zh:8b4cf5f81d10214e5e1857d96cff60a382a22b9caded7f5d7a92e5537fc166c1",
"zh:baeb26a00ffbcf3d507cdd940b2a2887eee723af5d3319a53eec69048d5e341e",
"zh:ca05a8814e9bf5fbffcd642df3a8d9fae9549776c7057ceae6d6f56471bae80f",
"zh:ca4bf3f94dedb5c5b1a73568f2dad7daf0ef3f85e688bc8bc2d0e915ec148366",
"zh:d331f2129fd3165c4bda875c84a65555b22eb007801522b9e017d065ac69b67e",
"zh:e583b2b478dde67da28e605ab4ef6521c2e390299b471d7d8ef05a0b608dcdad",
"zh:f238b86611647c108c073d265f8891a2738d3158c247468ae0ff5b1a3ac4122a",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
"zh:f827a9c1540d210c56053a2d5d5a6abda924896ffa8eeedc94054cf6d44c5f60",
]
}
provider "registry.terraform.io/hashicorp/kubernetes" {
version = "2.34.0"
version = "2.31.0"
constraints = ">= 2.0.0, 2.31.0"
hashes = [
"h1:QOiO85qZnkUm7kAtuPkfblchuKPWUqRdNVWE5agpr8k=",
"zh:076b451dc8629c49f4260de6d43595e98ac5f1bdbebb01d112659ef94d99451f",
"zh:0c29855dbd3c6ba82fce680fa5ac969d4e09e20fecb4ed40166b778bd19895a4",
"zh:583b4dfcea4d8392dd7904c00b2ff41bbae78d238e8b72e5ad580370a24a4ecb",
"zh:5e20844d8d1af052381d00de4febd4055ad0f3c3c02795c361265b9ef72a1075",
"zh:766b7ab7c4727c62b5887c3922e0467c4cc355ba0dc3aabe465ebb86bc1caabb",
"zh:776a5000b441d7c8262d17d4a4aa4aa9760ae64de4cb7172961d9e007e0be1e5",
"zh:7838f509235116e55adeeecbe6def3da1b66dd3c4ce0de02fc7dc66a60e1d630",
"zh:931e5581ec66c145c1d29198bd23fddc8d0c5cbf4cda22e02dba65644c7842f2",
"zh:95e728efa2a31a63b879fd093507466e509e3bfc9325eb35ea3dc28fed15c6f7",
"zh:972b9e3ca2b6a1057dcf5003fc78cabb0dd8847580bddeb52d885ebd64df38ea",
"zh:ef6114217965d55f5bddbd7a316b8f85f15b8a77c075fcbed95813039d522e0a",
"h1:wGHbATbv/pBVTST1MtEn0zyVhZbzZJD2NYq2EddASHY=",
"zh:0d16b861edb2c021b3e9d759b8911ce4cf6d531320e5dc9457e2ea64d8c54ecd",
"zh:1bad69ed535a5f32dec70561eb481c432273b81045d788eb8b37f2e4a322cc40",
"zh:43c58e3912fcd5bb346b5cb89f31061508a9be3ca7dd4cd8169c066203bcdfb3",
"zh:4778123da9206918a92dfa73cc711475d2b9a8275ff25c13a30513c523ac9660",
"zh:8bfa67d2db03b3bfae62beebe6fb961aee8d91b7a766efdfe4d337b33dfd23dd",
"zh:9020bb5729db59a520ade5e24984b737e65f8b81751fbbd343926f6d44d22176",
"zh:90431dbfc5b92498bfbce38f0b989978c84421a6c33245b97788a46b563fbd6e",
"zh:b71a061dda1244f6a52500e703a9524b851e7b11bbf238c17bbd282f27d51cb2",
"zh:d6232a7651b834b89591b94bf4446050119dcde740247e6083a4d55a2cefd28a",
"zh:d89fba43e699e28e2b5e92fff2f75fc03dbc8de0df9dacefe1a8836f8f430753",
"zh:ef85c0b744f5ba1b10dadc3c11e331ba4225c45bb733e024d7218c24b02b0512",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
]
}
provider "registry.terraform.io/hashicorp/random" {
version = "3.6.3"
provider "registry.terraform.io/hashicorp/local" {
version = "2.5.3"
hashes = [
"h1:Fnaec9vA8sZ8BXVlN3Xn9Jz3zghSETIKg7ch8oXhxno=",
"zh:04ceb65210251339f07cd4611885d242cd4d0c7306e86dda9785396807c00451",
"zh:448f56199f3e99ff75d5c0afacae867ee795e4dfda6cb5f8e3b2a72ec3583dd8",
"zh:4b4c11ccfba7319e901df2dac836b1ae8f12185e37249e8d870ee10bb87a13fe",
"zh:4fa45c44c0de582c2edb8a2e054f55124520c16a39b2dfc0355929063b6395b1",
"zh:588508280501a06259e023b0695f6a18149a3816d259655c424d068982cbdd36",
"zh:737c4d99a87d2a4d1ac0a54a73d2cb62974ccb2edbd234f333abd079a32ebc9e",
"h1:1Nkh16jQJMp0EuDmvP/96f5Unnir0z12WyDuoR6HjMo=",
"zh:284d4b5b572eacd456e605e94372f740f6de27b71b4e1fd49b63745d8ecd4927",
"zh:40d9dfc9c549e406b5aab73c023aa485633c1b6b730c933d7bcc2fa67fd1ae6e",
"zh:6243509bb208656eb9dc17d3c525c89acdd27f08def427a0dce22d5db90a4c8b",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:a357ab512e5ebc6d1fda1382503109766e21bbfdfaa9ccda43d313c122069b30",
"zh:c51bfb15e7d52cc1a2eaec2a903ac2aff15d162c172b1b4c17675190e8147615",
"zh:e0951ee6fa9df90433728b96381fb867e3db98f66f735e0c3e24f8f16903f0ad",
"zh:e3cdcb4e73740621dabd82ee6a37d6cfce7fee2a03d8074df65086760f5cf556",
"zh:eff58323099f1bd9a0bec7cb04f717e7f1b2774c7d612bf7581797e1622613a0",
"zh:885d85869f927853b6fe330e235cd03c337ac3b933b0d9ae827ec32fa1fdcdbf",
"zh:bab66af51039bdfcccf85b25fe562cbba2f54f6b3812202f4873ade834ec201d",
"zh:c505ff1bf9442a889ac7dca3ac05a8ee6f852e0118dd9a61796a2f6ff4837f09",
"zh:d36c0b5770841ddb6eaf0499ba3de48e5d4fc99f4829b6ab66b0fab59b1aaf4f",
"zh:ddb6a407c7f3ec63efb4dad5f948b54f7f4434ee1a2607a49680d494b1776fe1",
"zh:e0dafdd4500bec23d3ff221e3a9b60621c5273e5df867bc59ef6b7e41f5c91f6",
"zh:ece8742fd2882a8fc9d6efd20e2590010d43db386b920b2a9c220cfecc18de47",
"zh:f4c6b3eb8f39105004cf720e202f04f57e3578441cfb76ca27611139bc116a82",
]
}
provider "registry.terraform.io/hashicorp/null" {
version = "3.2.4"
hashes = [
"h1:hkf5w5B6q8e2A42ND2CjAvgvSN3puAosDmOJb3zCVQM=",
"zh:59f6b52ab4ff35739647f9509ee6d93d7c032985d9f8c6237d1f8a59471bbbe2",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:795c897119ff082133150121d39ff26cb5f89a730a2c8c26f3a9c1abf81a9c43",
"zh:7b9c7b16f118fbc2b05a983817b8ce2f86df125857966ad356353baf4bff5c0a",
"zh:85e33ab43e0e1726e5f97a874b8e24820b6565ff8076523cc2922ba671492991",
"zh:9d32ac3619cfc93eb3c4f423492a8e0f79db05fec58e449dee9b2d5873d5f69f",
"zh:9e15c3c9dd8e0d1e3731841d44c34571b6c97f5b95e8296a45318b94e5287a6e",
"zh:b4c2ab35d1b7696c30b64bf2c0f3a62329107bd1a9121ce70683dec58af19615",
"zh:c43723e8cc65bcdf5e0c92581dcbbdcbdcf18b8d2037406a5f2033b1e22de442",
"zh:ceb5495d9c31bfb299d246ab333f08c7fb0d67a4f82681fbf47f2a21c3e11ab5",
"zh:e171026b3659305c558d9804062762d168f50ba02b88b231d20ec99578a6233f",
"zh:ed0fe2acdb61330b01841fa790be00ec6beaac91d41f311fb8254f74eb6a711f",
]
}
provider "registry.terraform.io/hashicorp/random" {
version = "3.7.2"
hashes = [
"h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=",
"zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f",
"zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc",
"zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab",
"zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3",
"zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212",
"zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34",
"zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967",
"zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d",
"zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62",
"zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0",
]
}
provider "registry.terraform.io/hashicorp/vault" {
version = "4.5.0"
version = "5.1.0"
hashes = [
"h1:oKiQcEqj/HTCMzgGtZ531D/jnnM0i7iguSM8pU7aK8U=",
"zh:0a9301aa6a9b59db97682be568329526033bb50a4a308ad695c2a1877c1241c3",
"zh:0f8fee69ea4eaa27b86a391edc7de8e8b215e3c48f7074bab799986d5f707014",
"zh:2a2e51fe280e07700920bc8ed29b77e5c79fada0e4d5315d55ec0d2893bb5eed",
"zh:3fc7d9016bebe26a4c779ce6b87b181ed6a1af12499419726b8b0a0e3eaa7234",
"h1:x9cfzSpsdqUfrKM4qD/Mfqvy66ZWKrLtA+PZx7HhqZ8=",
"zh:121c84975a3732d2c68db6b555c37a4520f3c283fd916e25e472e784518662a7",
"zh:2a80da4424db091d3b9846a569b0ae3e60f3e95b0a988ff94f3986391a62c93d",
"zh:2bcb3aadf97aecf0b9f98393affd766b929eafd3cb68ed4f26419405c3e8ec64",
"zh:5a5f11db49784e9be251fbad2bb3a46c5f9999ab4e1ea7940f120b3743afca28",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:813a9e4875e58dbca2526b3088c0f76dbb2a66b10b910497a0b703518eaa73cd",
"zh:889ed6f21b94f89b8cbc4224454ced01a2792f12f53379d2fb1a2f2749bf624a",
"zh:acf9c01d403584015005083e64d8479d167e4f54e87e540311010133fcb5b023",
"zh:b377945a4b6a75c79793cb92c873aacc9c087c2a6e5792a1613f3aa2f3693848",
"zh:be243567b2a76ba2a546449e89764f707477cf25dcdd6d7f3b808ddf40aaf9f6",
"zh:d879fa16f391fb75b779067c751f3b8f80f5f4d73b2ff86814662038878a0ce4",
"zh:e47fb3daac933f5dcb034379fe77c0bf834512da7348e7643457c9af3b2ab36b",
"zh:976b000cf6fa75c33c3b3a3c2e5c67a8c1958744b0521a1f807de9f8855dc961",
"zh:a4321ce4ff9422d90c3c85bb4835a84563eb761d0714d1db9c81ca810a48fd7c",
"zh:a7464751eaf0b9cc7afb03e098d7efcf8c559215f3de7f34a56458e75709c94d",
"zh:ae434febd2590e58040308d18bf772f796b2fad90670be263acdd447db8fb106",
"zh:c61a27d8c9daa483feb4e3fecd42fa9f2887c5588433bb15df6d62572a7bb6f4",
"zh:dd2e8bdc76f09f8a47485d129140cd6126ad722014f6704ad5d8c4f18014571d",
"zh:f15d32b1eaeb419533e586b0c2c1e2b2f732925b3a094e31e9669cd6e6e735f0",
]
}
provider "registry.terraform.io/public-cloud-wl/slugify" {
version = "0.1.1"
constraints = "0.1.1"
hashes = [
"h1:iOJEMYX1bLfUnKjSxluQkKijr5NgWSqb2lU9Ag2Q12w=",
"zh:13f77dedcc74256053ac51512372510d722116bf58e119fac203fe599d667720",
"zh:2223be634f684f76e265efdaafdf95a948ba9e44f09f8a89540bdb564eff17f1",
"zh:73e8b763c796d57186756cf0bab75323e2d92c873f1df8eccd8a7e336a2e3e81",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:9f83adcf17de03afb5c27111cb26c580dc5296dffd40fca4571e81ad0bad3bad",
"zh:a5414ade8cbae9aea10dee79e43da247ceecb7e4a54e76d39906ee60b7365a7d",
"zh:bd118ead731e129c92c0dfe3c9a2ebbd8fa25ba6508deaaaccb9ac3a7f70af2d",
"zh:c8ce48ad921956edcee0643cb6184442f3deb438e5605a53794dfd6e8f89a559",
"zh:d96da8a32ef2b807ed3bd943294c6e1d0bd5fc3a793deb762f74d0c54aeff335",
"zh:e30a218b474afe082e005faf51c323ed8747d46845bfacab4cd3adc0c51704ec",
"zh:e3cd265c38da6e65974ac1b9b6be608ba0534178f16f059ad13672de6846e32e",
"zh:f2ded7f8c771a603ad3e2df84986b5f175c38049b7a9ab4a3cd384abafb33dff",
"zh:f2ece1996cf686583afd19384041204a32e08389dc6f4f105501584e653e797d",
"zh:fa2418b74cea55d29dad24f5095aaf30d6253d63ebac3c0c47949b3de8087c88",
"zh:fdc8d3fbca6a19db203802e7a7337075e39b9ffb7a3887a7583e379be61bde17",
]
}
provider "registry.terraform.io/zitadel/zitadel" {
version = "2.0.2"
constraints = "2.0.2"
hashes = [
"h1:iymeaNBrZ4smcr7eHrxO4gbXQ6bx/enKyj3RQ6xZRYA=",
"zh:01e16af0dda9372696b5e1d43ec709aed79829b49ee69a4f9606a248752f672d",
"zh:048c4e726fb846cfe9ab0a0a1f86d3f8922442154b086e2bd8e389b32f69f2f0",
"zh:3a3f6bea621c9d480f1f288cffebace8620979b9260cfeae8f9af5d9a25ed490",
"zh:4d349e584786589bc2037cee691ff1678296f5351e6491aa34dcb08ecbe1dcb7",
"zh:80741c78179788be8d7e33e471e1311197cd4e1067803d438463d0a8ac871a60",
"zh:89178d30f5ec49551e6a6ebc5eb589ab6631012dcec0d03ea7130b1029890e51",
"zh:94cd3b1fe3d1d39bcb3b70208b044bde4c5ce5152e12b29f0fa0ff1085e12863",
"zh:97299c172ada852705f8ca9fa91eeee12c6259263baae3ca53cf41e3130b1731",
"zh:a33d53acc640dc93b81352ba633cf392bc8c7614a72d320d59d3dcdb22d73fc4",
"zh:a95c15960baf8157f79a6490361455767d48e4dd3ce2ef1d0051743f6152733b",
"zh:ae66ad95c7039e6ef844c39389c9077ce7dbb501b6af02afb26a223fd289dbcb",
"zh:b8a9cb3b53653c06d52607368c406112ee1abc6d66dc4aedaedddbb46a66ea8f",
"zh:d48693ecdc985bb4167af0c3164240c13c4ea48167d28f706e7893cbdb20540a",
"zh:f6db1ec30bfbcf4423ab2d29979b775423ba37008fd48a766b5a1cf87a131859",
"zh:fed4e95dc9aaf361c8ff57f819d31fa25152b9e6cb90b7202d8be9ab1446b081",
]
}

View File

@ -1,6 +1,6 @@
{
"version": 3,
"terraform_version": "1.10.0",
"terraform_version": "1.12.1",
"backend": {
"type": "s3",
"config": {

View File

@ -0,0 +1,20 @@
apiVersion: v1
kind: Config
clusters:
- name: "app-365zon"
cluster:
server: "https://rancher.bridge.fourlights.dev/k8s/clusters/c-m-fh8pgmtb"
users:
- name: "app-365zon"
user:
token: "kubeconfig-user-9slw5nc6r2:7xq7zjsdfsvnjx9wnhh78r8nwmgqjfk4272mjsc4vwzhh5tcqv5swb"
contexts:
- name: "app-365zon"
context:
user: "app-365zon"
cluster: "app-365zon"
current-context: "app-365zon"

View File

@ -0,0 +1,20 @@
apiVersion: v1
kind: Config
clusters:
- name: "app-365zon"
cluster:
server: "https://rancher.bridge.fourlights.dev/k8s/clusters/c-m-fh8pgmtb"
users:
- name: "app-365zon"
user:
token: "kubeconfig-user-9slw5k78ws:dvh4q8rn5d6w9tqc5vpl6mnnr8p9b7pxzjn9n6sxmxqqnhpqc54dzz"
contexts:
- name: "app-365zon"
context:
user: "app-365zon"
cluster: "app-365zon"
current-context: "app-365zon"

View File

@ -8,30 +8,32 @@ locals {
module "cluster-init" {
source = "../../modules/cluster/init-rke2"
k8s_config_yaml = data.minio_s3_object.k8s_yaml.content
k8s_config_yaml = local.k8s_config_yaml
}
module "minio" {
source = "../../modules/minio"
wait_on = module.cluster-init.installed
k8s_config_yaml = data.minio_s3_object.k8s_yaml.content
k8s_config_yaml = local.k8s_config_yaml
server_dns = local.cluster_dns
service_name = "storage"
namespace = "minio"
mode = "distributed"
replicas = local.node_count
admin_server_dns = local.cluster_dns # Restricted admin access, access via bridge
tls = false # TLS termination happens on the bridge ingress
admin = true
ingressClass = "nginx"
storageSize = "40Gi"
storageSize = "20Gi"
}
module "mongodb" {
source = "../../modules/mongodb"
wait_on = module.cluster-init.installed
k8s_config_yaml = data.minio_s3_object.k8s_yaml.content
k8s_config_yaml = local.k8s_config_yaml
namespace = "mongodb"
replicas = local.node_count
@ -50,7 +52,7 @@ module "mongodb" {
module "rabbitmq" {
source = "../../modules/rabbitmq"
wait_on = module.cluster-init.installed
k8s_config_yaml = data.minio_s3_object.k8s_yaml.content
k8s_config_yaml = local.k8s_config_yaml
server_dns = "local" # Restricted admin access, access via bridge
@ -62,28 +64,155 @@ module "rabbitmq" {
ingressClass = "nginx"
}
# THESE SHOULD BE IN BRIDGE
# generate ed25519 for argocd: `ssh-keygen -t ed25519 -C "argocd.bridge.fourlights.dev" -f argocd.bridge.fourlights.dev
# add ed25519.pub to github repo deploy keys
# add argocd repo secret
# add argocd updated github.com known hosts
# add argocd application
# setup secrets
module "postgresql" {
source = "../../modules/postgresql"
resource "vault_kv_secret_v2" "cluster" {
mount = var.cluster
name = "minio"
delete_all_versions = true
data_json = jsonencode({
access_key = minio_iam_service_account.cluster.access_key
secret_key = minio_iam_service_account.cluster.secret_key
})
depends_on = [
var.wait_on,
minio_iam_service_account.cluster
]
namespace = "postgresql"
k8s_config_yaml = local.k8s_config_yaml
username = "bridge"
}
module "zitadel-db" {
source = "../../modules/postgresql/tenant"
wait_on = module.postgresql.installed
name = "zitadel"
root_password = module.postgresql.root_password
k8s_config_yaml = local.k8s_config_yaml
}
module "zitadel" {
source = "../../modules/zitadel"
wait_on = module.zitadel-db.installed
k8s_config_yaml = local.k8s_config_yaml
server_dns = local.cluster_dns
service_name = "zitadel"
namespace = "zitadel"
database_password = module.zitadel-db.password
database_root_password = module.postgresql.root_password
display_on_homepage = true
ingressClass = "nginx"
}
module "zitadel-bootstrap" {
source = "../../tenants/fourlights/zitadel"
domain = module.zitadel.server
jwt_profile_file = module.zitadel.jwt_profile_file
k8s_config_yaml = local.k8s_config_yaml
}
module "redis" {
source = "../../modules/redis"
namespace = "redis"
k8s_config_yaml = local.k8s_config_yaml
}
module "tenant-365zon" {
source = "../../tenants/365zon"
wait_on = module.minio.installed
k8s_config_yaml = local.k8s_config_yaml
org_id = module.zitadel-bootstrap.org_id
user_id = module.zitadel-bootstrap.user_id
domain = module.zitadel.server
jwt_profile_file = module.zitadel.jwt_profile_file
minio_access_key = module.minio.minio_access_key
minio_secret_key = module.minio.minio_secret_key
minio_server = module.minio.minio_server
minio_api_uri = module.minio.minio_api_uri
mongodb_connection_string = module.mongodb.connection_string
rabbitmq_connection_string = module.rabbitmq.connection_string
}
module "zitadel-argocd" {
source = "../../tenants/argocd/zitadel"
org_id = module.zitadel-bootstrap.org_id
user_id = module.zitadel-bootstrap.user_id
domain = module.zitadel.server
jwt_profile_file = module.zitadel.jwt_profile_file
argocd_service_domain = "argocd.${ local.cluster_dns}"
}
module "argocd" {
source = "../../modules/argocd"
wait_on = module.zitadel-argocd.installed
namespace = "argocd"
k8s_config_yaml = local.k8s_config_yaml
redis_db_start_index = 0
redis_password = module.redis.password
server_dns = local.cluster_dns
oauth_uri = module.zitadel.server
oauth_client_id = module.zitadel-argocd.client_id
oauth_client_secret = module.zitadel-argocd.client_secret
oauth_redirect_uri = "https://${module.zitadel.server}/${module.zitadel-argocd.logoutSuffix}"
oauth_issuer = "https://${module.zitadel.server}"
ingressClass = "nginx"
}
module "monitoring" {
source = "../../modules/monitoring"
wait_on = module.argocd.installed
namespace = "monitoring"
k8s_config_yaml = local.k8s_config_yaml
server_dns = local.cluster_dns
ingressClass = "nginx"
}
output "argocd-root-password" {
value = module.argocd.admin_password
sensitive = true
}
output "mongodb-connection-string" {
value = module.mongodb.connection_string
sensitive = true
}
output "rabbitmq-connection-string" {
value = module.rabbitmq.connection_string
sensitive = true
}
output "minio-access-key" {
value = module.tenant-365zon.minio_access_key
sensitive = true
}
output "minio-secret-key" {
value = module.tenant-365zon.minio_secret_key
sensitive = true
}
output "monitoring" {
value = module.monitoring.access_instructions
sensitive = true
}
output "minio-root-access-key" {
value = module.minio.minio_access_key
sensitive = true
}
output "minio-root-secret-key" {
value = module.minio.minio_secret_key
sensitive = true
}

View File

@ -2,7 +2,7 @@ terraform {
required_providers {
minio = {
source = "aminueza/minio"
version = "~> 2.5.0"
version = "~> 3.3.0"
}
}
}
@ -21,7 +21,8 @@ data "minio_s3_object" "k8s_yaml" {
}
locals {
k8s_config = yamldecode(data.minio_s3_object.k8s_yaml.content)
k8s_config_yaml = file("./kubeconfig")
k8s_config = yamldecode(local.k8s_config_yaml)
k8s_host = local.k8s_config.clusters[0].cluster.server
k8s_auth = try(
{

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
{"type":"serviceaccount","keyId":"328362785044575662","key":"-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA1+ggA05PsZZzpklcqEAck8UUkn1H5HJXiD3to0IU2vnLQYk9\n/pxHM30n2uWLOflL3OWeqy0rDfR2tp3a0gziGO3MV6EuDPg73WZqHMwxa4RkPOMO\nLcHcfnp4bG9RxZgbvj2pFkvPJ/j5KC54HDyol2urWuGSVKKIGY7bUE4HD0n+ejr6\nLoEeGtIV17ttipqvF2jPMXHvH/yjsQWPofRQh6kqnxTuwyesjlEXyezMP1+WfpCa\n/kmGfowocyIuMNHUSeymLfBX3KIYVWp+/HukP1J781RQLPivdh8hiPZT560gXh72\niSKCLgTLuUt72LFs+1XnKuq2gIOFvP8jpae+GQIDAQABAoIBAEkc+SJFdaVMrpMk\nCrWLKGI9/e5xcFQDCHNBjWSYrxlMN7FUuWKsJ0xfWIy+6LvUGQWP2IBdBlRucXj7\n0asJ49LPTEBse3y23wbldI+V8jJXnd4kgZurJ3DJJliKBeXk0ZhFpym+uELwA+fA\nfLoLLIhCVL+s3XG9rFsDTm9OTQO2ykPvwGAx151nitBnLJm6ms+meBOPc/f/nsH+\nIR6W3Fm4hWjSeKdeWSJG6ePkJqbXLTNYhxnkbgNg4fz4CCPf39aqS3NbwiUv/dQG\ni8cp/UnsZGF0IlmSyipQirFk9wLd9iX9vNorPPMosD68m/plC3eXFIQATbBXEGaf\nelMUmeUCgYEA2j0M42iiBQ0qsuj4rfdCv8rjaIva5EG6Jp2AMtXmQQulrp31iuKw\njfpMoUAip5HS1PdQuXCK6m7DVS3Nr39pBcGrsumU/nW1h3AWTR4GeWFtcCj5ek7q\nitJKfpGnyfWlpPeluXfGmS6npA36URlhX+FfaE6vEHVa7o/hcw4bF6sCgYEA/UPK\nHCwJNBPP04Q7zGF2ivl/zz9WDwnp3d9QZzpmjCZa3J52tLShs5URC8VeSH6mWh78\nfMoFb8fFiJUSjA1Dh0X1pbhXLeFnJmobHAlHZvdMgGDQSe0VfnmC7tJxwEQ3tdxu\n/E95kNFvXUSq0lSICG8TFwr2zHueJf8sPHvsbUsCgYEAzut3nt6ET/nN9FhEAN4h\n4atCtstzvVw8XYSUGp8Ic7/RVaDbQNXyxIr/wRwIP41ZsybS2SBF9kUS215UWQOa\n1iRs4XPrGuEMPuAdpPzE8q1XkDKpaDgO1ZTRoPQFfM12QtWzwgg4uuCrfjbqkZ5Y\n3wnW5hVEk3xRvUOaZPaI5YUCgYAQC5NqZX/PxNGN3PrM3//aniPyxVh2a3JdlCBL\n6e9FQcjP4Voz6J21+uOxkzd98EgpvXzRixr9EfL5zDf4l36Hu27sqNkdFwKp9F7z\nT8MuSsyV9Yw8osCR2rDo4HxEag39f5GoeoCBJEOh7Q/Fc4WKwz66Xv9zxQEn9xqe\nWluIMQKBgEFkltf0957pEC7pPb/t9/20j7ODKSKrSYE3+SGi9XwUeT74HkU43Euf\na3KWlwMmJcYvtc4RDNuYJ6F9/eAq5c5MqgySEHoMh386u3YzzZBcI5L06pMI4kYb\njhK63OCrlzeILxwcwc00ztYHbjxk/yFOUWQ/OknCzlGE0o8TOPyu\n-----END RSA PRIVATE KEY-----\n","expirationDate":"2026-01-01T00:00:00Z","userId":"328362785044510126"}

View File

@ -61,22 +61,41 @@ provider "registry.terraform.io/fusionauth/fusionauth" {
]
}
provider "registry.terraform.io/hashicorp/helm" {
version = "2.16.1"
provider "registry.terraform.io/hashicorp/external" {
version = "2.3.5"
hashes = [
"h1:TerRBdq69SxIWg3ET2VE0bcP0BYRIWZOp1QxXj/14Fk=",
"zh:0003f6719a32aee9afaeeb001687fc0cfc8c2d5f54861298cf1dc5711f3b4e65",
"zh:16cd5bfee09e7bb081b8b4470f31a9af508e52220fd97fd81c6dda725d9422fe",
"zh:51817de8fdc2c2e36785f23fbf4ec022111bd1cf7679498c16ad0ad7471c16db",
"zh:51b95829b2873be40a65809294bffe349e40cfccc3ff6fee0f471d01770e0ebd",
"zh:56b158dde897c47e1460181fc472c3e920aa23db40579fdc2aad333c1456d2dd",
"zh:916641d26c386959eb982e680028aa677b787687ef7c1283241e45620bc8df50",
"zh:aec15ca8605babba77b283f2ca35daca53e006d567e1c3a3daf50497035b820b",
"zh:c2cecf710b87c8f3a4d186da2ea12cf08041f97ae0c6db82649720d6ed929d65",
"zh:dbdd96f17aea25c7db2d516ab8172a5e683c6686c72a1a44173d2fe96319be39",
"zh:de11e180368434a796b1ab6f20fde7554dc74f7800e063b8e4c8ec3a86d0be63",
"h1:smKSos4zs57pJjQrNuvGBpSWth2el9SgePPbPHo0aps=",
"zh:6e89509d056091266532fa64de8c06950010498adf9070bf6ff85bc485a82562",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:86868aec05b58dc0aa1904646a2c26b9367d69b890c9ad70c33c0d3aa7b1485a",
"zh:a2ce38fda83a62fa5fb5a70e6ca8453b168575feb3459fa39803f6f40bd42154",
"zh:a6c72798f4a9a36d1d1433c0372006cc9b904e8cfd60a2ae03ac5b7d2abd2398",
"zh:a8a3141d2fc71c86bf7f3c13b0b3be8a1b0f0144a47572a15af4dfafc051e28a",
"zh:aa20a1242eb97445ad26ebcfb9babf2cd675bdb81cac5f989268ebefa4ef278c",
"zh:b58a22445fb8804e933dcf835ab06c29a0f33148dce61316814783ee7f4e4332",
"zh:cb5626a661ee761e0576defb2a2d75230a3244799d380864f3089c66e99d0dcc",
"zh:d1acb00d20445f682c4e705c965e5220530209c95609194c2dc39324f3d4fcce",
"zh:d91a254ba77b69a29d8eae8ed0e9367cbf0ea6ac1a85b58e190f8cb096a40871",
"zh:f6592327673c9f85cdb6f20336faef240abae7621b834f189c4a62276ea5db41",
]
}
provider "registry.terraform.io/hashicorp/helm" {
version = "3.0.2"
hashes = [
"h1:+tHGl509bhyUrvvj9GQTBsdK+ImHJnRuo6ppDZPavqY=",
"zh:2778de76c7dfb2e85c75fe6de3c11172a25551ed499bfb9e9f940a5be81167b0",
"zh:3b4c436a41e4fbae5f152852a9bd5c97db4460af384e26977477a40adf036690",
"zh:617a372f5bb2288f3faf5fd4c878a68bf08541cf418a3dbb8a19bc41ad4a0bf2",
"zh:84de431479548c96cb61c495278e320f361e80ab4f8835a5425ece24a9b6d310",
"zh:8b4cf5f81d10214e5e1857d96cff60a382a22b9caded7f5d7a92e5537fc166c1",
"zh:baeb26a00ffbcf3d507cdd940b2a2887eee723af5d3319a53eec69048d5e341e",
"zh:ca05a8814e9bf5fbffcd642df3a8d9fae9549776c7057ceae6d6f56471bae80f",
"zh:ca4bf3f94dedb5c5b1a73568f2dad7daf0ef3f85e688bc8bc2d0e915ec148366",
"zh:d331f2129fd3165c4bda875c84a65555b22eb007801522b9e017d065ac69b67e",
"zh:e583b2b478dde67da28e605ab4ef6521c2e390299b471d7d8ef05a0b608dcdad",
"zh:f238b86611647c108c073d265f8891a2738d3158c247468ae0ff5b1a3ac4122a",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
"zh:f827a9c1540d210c56053a2d5d5a6abda924896ffa8eeedc94054cf6d44c5f60",
]
}
@ -101,100 +120,100 @@ provider "registry.terraform.io/hashicorp/kubernetes" {
}
provider "registry.terraform.io/hashicorp/null" {
version = "3.2.3"
version = "3.2.4"
hashes = [
"h1:+AnORRgFbRO6qqcfaQyeX80W0eX3VmjadjnUFUJTiXo=",
"zh:22d062e5278d872fe7aed834f5577ba0a5afe34a3bdac2b81f828d8d3e6706d2",
"zh:23dead00493ad863729495dc212fd6c29b8293e707b055ce5ba21ee453ce552d",
"zh:28299accf21763ca1ca144d8f660688d7c2ad0b105b7202554ca60b02a3856d3",
"zh:55c9e8a9ac25a7652df8c51a8a9a422bd67d784061b1de2dc9fe6c3cb4e77f2f",
"zh:756586535d11698a216291c06b9ed8a5cc6a4ec43eee1ee09ecd5c6a9e297ac1",
"h1:hkf5w5B6q8e2A42ND2CjAvgvSN3puAosDmOJb3zCVQM=",
"zh:59f6b52ab4ff35739647f9509ee6d93d7c032985d9f8c6237d1f8a59471bbbe2",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:9d5eea62fdb587eeb96a8c4d782459f4e6b73baeece4d04b4a40e44faaee9301",
"zh:a6355f596a3fb8fc85c2fb054ab14e722991533f87f928e7169a486462c74670",
"zh:b5a65a789cff4ada58a5baffc76cb9767dc26ec6b45c00d2ec8b1b027f6db4ed",
"zh:db5ab669cf11d0e9f81dc380a6fdfcac437aea3d69109c7aef1a5426639d2d65",
"zh:de655d251c470197bcbb5ac45d289595295acb8f829f6c781d4a75c8c8b7c7dd",
"zh:f5c68199f2e6076bce92a12230434782bf768103a427e9bb9abee99b116af7b5",
"zh:795c897119ff082133150121d39ff26cb5f89a730a2c8c26f3a9c1abf81a9c43",
"zh:7b9c7b16f118fbc2b05a983817b8ce2f86df125857966ad356353baf4bff5c0a",
"zh:85e33ab43e0e1726e5f97a874b8e24820b6565ff8076523cc2922ba671492991",
"zh:9d32ac3619cfc93eb3c4f423492a8e0f79db05fec58e449dee9b2d5873d5f69f",
"zh:9e15c3c9dd8e0d1e3731841d44c34571b6c97f5b95e8296a45318b94e5287a6e",
"zh:b4c2ab35d1b7696c30b64bf2c0f3a62329107bd1a9121ce70683dec58af19615",
"zh:c43723e8cc65bcdf5e0c92581dcbbdcbdcf18b8d2037406a5f2033b1e22de442",
"zh:ceb5495d9c31bfb299d246ab333f08c7fb0d67a4f82681fbf47f2a21c3e11ab5",
"zh:e171026b3659305c558d9804062762d168f50ba02b88b231d20ec99578a6233f",
"zh:ed0fe2acdb61330b01841fa790be00ec6beaac91d41f311fb8254f74eb6a711f",
]
}
provider "registry.terraform.io/hashicorp/random" {
version = "3.6.3"
version = "3.7.2"
hashes = [
"h1:Fnaec9vA8sZ8BXVlN3Xn9Jz3zghSETIKg7ch8oXhxno=",
"zh:04ceb65210251339f07cd4611885d242cd4d0c7306e86dda9785396807c00451",
"zh:448f56199f3e99ff75d5c0afacae867ee795e4dfda6cb5f8e3b2a72ec3583dd8",
"zh:4b4c11ccfba7319e901df2dac836b1ae8f12185e37249e8d870ee10bb87a13fe",
"zh:4fa45c44c0de582c2edb8a2e054f55124520c16a39b2dfc0355929063b6395b1",
"zh:588508280501a06259e023b0695f6a18149a3816d259655c424d068982cbdd36",
"zh:737c4d99a87d2a4d1ac0a54a73d2cb62974ccb2edbd234f333abd079a32ebc9e",
"h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=",
"zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f",
"zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc",
"zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab",
"zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3",
"zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212",
"zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:a357ab512e5ebc6d1fda1382503109766e21bbfdfaa9ccda43d313c122069b30",
"zh:c51bfb15e7d52cc1a2eaec2a903ac2aff15d162c172b1b4c17675190e8147615",
"zh:e0951ee6fa9df90433728b96381fb867e3db98f66f735e0c3e24f8f16903f0ad",
"zh:e3cdcb4e73740621dabd82ee6a37d6cfce7fee2a03d8074df65086760f5cf556",
"zh:eff58323099f1bd9a0bec7cb04f717e7f1b2774c7d612bf7581797e1622613a0",
"zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34",
"zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967",
"zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d",
"zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62",
"zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0",
]
}
provider "registry.terraform.io/hashicorp/tls" {
version = "4.0.6"
version = "4.1.0"
hashes = [
"h1:dYSb3V94K5dDMtrBRLPzBpkMTPn+3cXZ/kIJdtFL+2M=",
"zh:10de0d8af02f2e578101688fd334da3849f56ea91b0d9bd5b1f7a243417fdda8",
"zh:37fc01f8b2bc9d5b055dc3e78bfd1beb7c42cfb776a4c81106e19c8911366297",
"zh:4578ca03d1dd0b7f572d96bd03f744be24c726bfd282173d54b100fd221608bb",
"zh:6c475491d1250050765a91a493ef330adc24689e8837a0f07da5a0e1269e11c1",
"zh:81bde94d53cdababa5b376bbc6947668be4c45ab655de7aa2e8e4736dfd52509",
"zh:abdce260840b7b050c4e401d4f75c7a199fafe58a8b213947a258f75ac18b3e8",
"zh:b754cebfc5184873840f16a642a7c9ef78c34dc246a8ae29e056c79939963c7a",
"zh:c928b66086078f9917aef0eec15982f2e337914c5c4dbc31dd4741403db7eb18",
"zh:cded27bee5f24de6f2ee0cfd1df46a7f88e84aaffc2ecbf3ff7094160f193d50",
"zh:d65eb3867e8f69aaf1b8bb53bd637c99c6b649ba3db16ded50fa9a01076d1a27",
"zh:ecb0c8b528c7a619fa71852bb3fb5c151d47576c5aab2bf3af4db52588722eeb",
"h1:Ka8mEwRFXBabR33iN/WTIEW6RP0z13vFsDlwn11Pf2I=",
"zh:14c35d89307988c835a7f8e26f1b83ce771e5f9b41e407f86a644c0152089ac2",
"zh:2fb9fe7a8b5afdbd3e903acb6776ef1be3f2e587fb236a8c60f11a9fa165faa8",
"zh:35808142ef850c0c60dd93dc06b95c747720ed2c40c89031781165f0c2baa2fc",
"zh:35b5dc95bc75f0b3b9c5ce54d4d7600c1ebc96fbb8dfca174536e8bf103c8cdc",
"zh:38aa27c6a6c98f1712aa5cc30011884dc4b128b4073a4a27883374bfa3ec9fac",
"zh:51fb247e3a2e88f0047cb97bb9df7c228254a3b3021c5534e4563b4007e6f882",
"zh:62b981ce491e38d892ba6364d1d0cdaadcee37cc218590e07b310b1dfa34be2d",
"zh:bc8e47efc611924a79f947ce072a9ad698f311d4a60d0b4dfff6758c912b7298",
"zh:c149508bd131765d1bc085c75a870abb314ff5a6d7f5ac1035a8892d686b6297",
"zh:d38d40783503d278b63858978d40e07ac48123a2925e1a6b47e62179c046f87a",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
"zh:fb07f708e3316615f6d218cec198504984c0ce7000b9f1eebff7516e384f4b54",
]
}
provider "registry.terraform.io/hashicorp/vault" {
version = "4.5.0"
version = "5.1.0"
hashes = [
"h1:oKiQcEqj/HTCMzgGtZ531D/jnnM0i7iguSM8pU7aK8U=",
"zh:0a9301aa6a9b59db97682be568329526033bb50a4a308ad695c2a1877c1241c3",
"zh:0f8fee69ea4eaa27b86a391edc7de8e8b215e3c48f7074bab799986d5f707014",
"zh:2a2e51fe280e07700920bc8ed29b77e5c79fada0e4d5315d55ec0d2893bb5eed",
"zh:3fc7d9016bebe26a4c779ce6b87b181ed6a1af12499419726b8b0a0e3eaa7234",
"h1:x9cfzSpsdqUfrKM4qD/Mfqvy66ZWKrLtA+PZx7HhqZ8=",
"zh:121c84975a3732d2c68db6b555c37a4520f3c283fd916e25e472e784518662a7",
"zh:2a80da4424db091d3b9846a569b0ae3e60f3e95b0a988ff94f3986391a62c93d",
"zh:2bcb3aadf97aecf0b9f98393affd766b929eafd3cb68ed4f26419405c3e8ec64",
"zh:5a5f11db49784e9be251fbad2bb3a46c5f9999ab4e1ea7940f120b3743afca28",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:813a9e4875e58dbca2526b3088c0f76dbb2a66b10b910497a0b703518eaa73cd",
"zh:889ed6f21b94f89b8cbc4224454ced01a2792f12f53379d2fb1a2f2749bf624a",
"zh:acf9c01d403584015005083e64d8479d167e4f54e87e540311010133fcb5b023",
"zh:b377945a4b6a75c79793cb92c873aacc9c087c2a6e5792a1613f3aa2f3693848",
"zh:be243567b2a76ba2a546449e89764f707477cf25dcdd6d7f3b808ddf40aaf9f6",
"zh:d879fa16f391fb75b779067c751f3b8f80f5f4d73b2ff86814662038878a0ce4",
"zh:e47fb3daac933f5dcb034379fe77c0bf834512da7348e7643457c9af3b2ab36b",
"zh:976b000cf6fa75c33c3b3a3c2e5c67a8c1958744b0521a1f807de9f8855dc961",
"zh:a4321ce4ff9422d90c3c85bb4835a84563eb761d0714d1db9c81ca810a48fd7c",
"zh:a7464751eaf0b9cc7afb03e098d7efcf8c559215f3de7f34a56458e75709c94d",
"zh:ae434febd2590e58040308d18bf772f796b2fad90670be263acdd447db8fb106",
"zh:c61a27d8c9daa483feb4e3fecd42fa9f2887c5588433bb15df6d62572a7bb6f4",
"zh:dd2e8bdc76f09f8a47485d129140cd6126ad722014f6704ad5d8c4f18014571d",
"zh:f15d32b1eaeb419533e586b0c2c1e2b2f732925b3a094e31e9669cd6e6e735f0",
]
}
provider "registry.terraform.io/hetznercloud/hcloud" {
version = "1.49.1"
version = "1.51.0"
constraints = "~> 1.45"
hashes = [
"h1:FKGRNHVbcfQJd8EWrb8Ze5QHkaGr8zI+ZKxBMjvOwPk=",
"zh:3d5f9773da4f8203cf625d04a5a0e4ff7e202684c010a801a945756140c61cde",
"zh:446305d492017cda91e5c15122ec16ff15bfe3ef4d3fd6bcea0cdf7742ab1b86",
"zh:44d4f9156ed8b4f0444bd4dc456825940be49048828565964a192286d28c9f20",
"zh:492ad893d2f89bb17c9beb877c8ceb4a16caf39db1a79030fefeada6c7aa217f",
"zh:68dc552c19ad9d209ec6018445df6e06fb77a637513a53cc66ddce1b024082be",
"zh:7492495ffda6f6c49ab38b539bd2eb965b1150a63fb6b191a27dec07d17601cb",
"zh:850fe92005981ea00db86c3e49ba5b49732fdf1f7bd5530a68f6e272847059fc",
"zh:8cb67f744c233acfb1d68a6c27686315439d944edf733b95f113b4aa63d86713",
"zh:8e13dac46e8c2497772ed1baee701b1d1c26bcc95a63b5c4566c83468f504868",
"zh:c44249c6a8ba931e208a334792686b5355ab2da465cadea03c1ea8e73c02db12",
"zh:d103125a28a85c89aea0cb0c534fe3f504416c4d4fc75c37364b9ec5f66dd77d",
"zh:ed8f64e826aa9bfca95b72892271678cb78411b40d7b404a52404141e05a4ab1",
"zh:f40efad816de00b279bd1e2cbf62c76b0e5b2da150a0764f259984b318e30945",
"zh:f5e912d0873bf4ecc43feba4ceccdf158048080c76d557e47f34749139fdd452",
"h1:yER+O3OKYfxBAO7KVYZzH+4EYrmorCO0J0hlnRUfH00=",
"zh:0e8e78084c12866e8e3873011bcac125780b62afeaa518d4749b9a063ae6e32b",
"zh:145738cee21bcdeea1cf82f0d44f7f239c27c2214249e5e5079668c479522a8a",
"zh:164406be8ee83952f58a449d514837cc6d9763b6d29e72262d5582d5d5b89315",
"zh:1a0e6ffab3196b35ca65eb445622615bb8dddd68d0bf350ed60d25e1e74f67dc",
"zh:3b7729d1bb5cc7a5af60b42a607f7b3fec690192b1efb55e2341cee88405ecb0",
"zh:3bcfc5c40d1b7702f39dac5d2dd9eef58c9c934effb4676e26fbe85fe2057e8f",
"zh:3ce193892dca025b804de6d99316c50a33462eb36336006a9db7ea44be439eba",
"zh:4f92437e1eba8eafe4417f8b61d557ed47f121622305ee2b3c13c31e45c69ca4",
"zh:554c308bf64b603a075a8f13a151a136b68ba382c2d83977a0df26de7dea2d3d",
"zh:8c57aa6032fed5da43a0102a4f26262c0496803b99f2f92e5ceb02c80161e291",
"zh:99cd4d246d0ad3a3529176df22a47f254700f8c4fc33f62c14464259284945b7",
"zh:af38a4d1e93f2392a296970ba4ecea341204e888d579cd74642e9f23a94b3b06",
"zh:f0766d42dd97b3eac6fa614fa5809ff2511c9104f3834d0d4b6e84674f13f092",
"zh:f20f7379876ede225f3b6f0719826706a171ea4c1dd438a8a3103dee8fe43ccc",
]
}

View File

@ -1,6 +1,6 @@
endpoints = { s3 = "https://storage.bridge.fourlights.dev" }
access_key = ""
secret_key = ""
access_key = "T8V84SHIVT6MAV424ES0"
secret_key = "23+N28yBK+cL3O2t9xsstT8jr2TpK+SgORCVIuxc"
bucket = "management"
key = "terraform.tfstate"
region = "eu-central-1"

View File

@ -1,9 +1,9 @@
endpoints = { s3 = "https://storage.bridge.fourlights.dev" }
access_key = ""
secret_key = ""
access_key = "T8V84SHIVT6MAV424ES0"
secret_key = "23+N28yBK+cL3O2t9xsstT8jr2TpK+SgORCVIuxc"
bucket = "management"
region = "eu-central-1"
minio_server = "storage.bridge.fourlights.dev"
rancher_server = "https://rancher.bridge.fourlights.dev"
vault_token = "hvs.CAESIPcy0DY5Jc-d0P2ZRRhiLXr3DmOOawpoA6--QTCoRCqqGh4KHGh2cy5lZjhJdTRINEVKaU55Q21VUTg4ZzZwSWI"
vault_token = "hvs.BsIbdvXLdbQn0v4sR3jSWJa9"
vault_addr = "https://vault.bridge.fourlights.dev"

View File

@ -31,23 +31,23 @@ resource "kubernetes_secret" "argocd-tls" {
}
resource "helm_release" "argocd" {
depends_on = [var.wait_on, kubernetes_secret.argocd-tls]
depends_on = [var.wait_on]
name = "argocd"
repository = "https://charts.bitnami.com/bitnami"
chart = "argo-cd"
namespace = kubernetes_namespace.argocd.metadata[0].name
version = "7.0.20"
version = "9.0.29"
create_namespace = false
wait = true
wait_for_jobs = true
set_sensitive {
set_sensitive = [{
name = "config.secret.argocdServerAdminPassword"
value = random_password.admin_password.result
}
}]
values = [
templatefile("${path.module}/values.yaml", {
templatefile("${path.module}/values.yaml.tftpl", {
service_uri = local.service_uri,
server_dns = var.server_dns,
grpc_service_uri = local.grpc_service_uri,
@ -58,7 +58,9 @@ resource "helm_release" "argocd" {
oauth_issuer = var.oauth_issuer,
oauth_client_id = var.oauth_client_id,
oauth_client_secret = var.oauth_client_secret,
oauth_redirect_uri = var.oauth_redirect_uri
oauth_redirect_uri = var.oauth_redirect_uri,
tls = var.tls,
ingress_class = var.ingressClass
})
]
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -1,74 +0,0 @@
commonEnvVars: &commonEnvVars
- name: REDIS_USERNAME
value: ""
- name: REDIS_PASSWORD
value: ${ redis_password }
commonArgs: &commonRedisArgs
- --redis=${ redis_service_uri }:6379
- --redisdb=${ redis_index }
redis:
enabled: false
redisWait:
enabled: false
externalRedis:
host: ${ redis_service_uri }
password: ${ redis_password }
database: ${ redis_index }
dex:
enabled: true
controller:
extraArgs: *commonRedisArgs
extraEnvVars: *commonEnvVars
repoServer:
extraArgs: *commonRedisArgs
extraEnvVars: *commonEnvVars
server:
extraArgs: *commonRedisArgs
extraEnvVars: *commonEnvVars
url: https://${ service_uri }
insecure: true
ingress:
enabled: true
ingressClassName: traefik
hostname: ${ service_uri }
annotations:
kubernetes.io/ingress.class: traefik
traefik.ingress.kubernetes.io/router.entrypoints: web,websecure
traefik.ingress.kubernetes.io/router.middlewares: default-redirect-to-https@kubernetescrd,default-preserve-host-headers@kubernetescrd
extraTls:
- hosts:
- ${ service_uri }
secretName: argocd-tls
config:
%{ if oauth_uri != null }
dex.config: |
connectors:
- type: oidc
id: oidc
name: OIDC
config:
issuer: ${ oauth_issuer }
clientID: ${ oauth_client_id }
clientSecret: ${ oauth_client_secret }
insecureSkipEmailVerified: true
insecureEnableGroups: true
scopes:
- profile
- email
- openid
- groups
claimMapping:
name: fullName # ArgoCD expects 'name', FusionAuth provides 'fullName'
preferred_username: email
%{ endif }

View File

@ -0,0 +1,104 @@
redis:
enabled: false
redisWait:
enabled: false
externalRedis:
host: ${ redis_service_uri }
password: ${ redis_password }
database: ${ redis_index }
dex:
enabled: true
controller:
extraArgs:
- --redis=${ redis_service_uri }:6379
- --redisdb=${ redis_index }
extraEnvVars:
- name: REDIS_USERNAME
value: ""
- name: REDIS_PASSWORD
value: ${ redis_password }
repoServer:
extraArgs:
- --redis=${ redis_service_uri }:6379
- --redisdb=${ redis_index }
extraEnvVars:
- name: REDIS_USERNAME
value: ""
- name: REDIS_PASSWORD
value: ${ redis_password }
server:
extraArgs:
- --redis=${ redis_service_uri }:6379
- --redisdb=${ redis_index }
extraEnvVars:
- name: REDIS_USERNAME
value: ""
- name: REDIS_PASSWORD
value: ${ redis_password }
url: https://${ service_uri }
insecure: true
ingress:
enabled: true
ingressClassName: ${ingress_class}
hostname: ${ service_uri }
annotations:
kubernetes.io/ingress.class: ${ingress_class}
cert-manager.io/cluster-issuer: letsencrypt
%{ if ingress_class == "traefik" }
%{ if tls }
traefik.ingress.kubernetes.io/router.entrypoints: web,websecure
traefik.ingress.kubernetes.io/router.middlewares: default-redirect-to-https@kubernetescrd,default-preserve-host-headers@kubernetescrd
%{ else }
traefik.ingress.kubernetes.io/router.entrypoints: web
traefik.ingress.kubernetes.io/router.middlewares: default-preserve-host-headers@kubernetescrd
%{ endif }
%{ else }
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
nginx.ingress.kubernetes.io/ssl-passthrough: "true"
%{ endif }
%{ if tls }
extraTls:
- hosts:
- ${ service_uri }
secretName: argocd-tls
%{ endif }
config:
rbac: |
scopes: '[groups]'
"policy.csv": |
g, admin, role:admin
g, user, role:readonly
"policy.default": ''
%{ if oauth_uri != null }
dex.config: |
connectors:
- type: oidc
id: oidc
name: OIDC
config:
issuer: "${ oauth_issuer }"
clientID: "${ oauth_client_id }"
clientSecret: "${ oauth_client_secret }"
insecureSkipEmailVerified: true
insecureEnableGroups: true
scopes:
- profile
- email
- openid
- groups
logoutURL: "${ oauth_redirect_uri }"
getUserInfo: true
claimMapping:
name: fullName
groups: "urn:zitadel:iam:org:project:roles"
preferred_username: email
%{ endif }

View File

@ -64,3 +64,14 @@ variable "oauth_redirect_uri" {
description = "OAuth redirect URI"
default = null
}
variable "tls" {
type = bool
default = false
}
variable "ingressClass" {
description = "Ingress class to use"
type = string
default = "nginx"
}

View File

@ -4,10 +4,10 @@ resource "helm_release" "cert-manager-webhook-hetzner" {
repository = "https://vadimkim.github.io/cert-manager-webhook-hetzner"
chart = "cert-manager-webhook-hetzner"
set {
set = [{
name = "groupName"
value = "acme.${var.tld}"
}
}]
}
resource "kubernetes_secret" "hetzner-secret" {

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -5,22 +5,22 @@ resource "helm_release" "cert_manager" {
create_namespace = true
wait = true
set {
set = [{
name = "installCRDs"
value = "true"
}
set_list {
name = "dnsConfig.nameservers"
value = ["1.1.1.1", "8.8.8.8"]
}
set {
},
{
name = "email"
value = var.email
}
}]
set_list = [{
name = "dnsConfig.nameservers"
value = ["1.1.1.1", "8.8.8.8"]
}]
}
output "installed" {
value = true
depends_on = [helm_release.cert_manager]

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -7,6 +7,106 @@ module "longhorn" {
wait_on = var.wait_on
}
resource "helm_release" "descheduler" {
name = "descheduler"
repository = "https://kubernetes-sigs.github.io/descheduler/"
chart = "descheduler"
namespace = "kube-system"
version = "0.33.0"
values = [
yamlencode({
deschedulerPolicy = {
# Only evict pods older than 5 minutes
maxPodLifeTimeSeconds = 300
# Respect PodDisruptionBudgets
evictLocalStoragePods = false
ignorePvcPods = true
strategies = {
LowNodeUtilization = {
enabled = true
params = {
nodeResourceUtilizationThresholds = {
thresholds = {
cpu = 30
memory = 30
}
targetThresholds = {
cpu = 50
memory = 50
}
}
evictableNamespaces = {
exclude = ["kube-system", "longhorn-system"]
}
}
}
}
}
# Additional settings
schedule = "*/10 * * * *" # Run every 10 minutes
# Don't run on control plane nodes
nodeSelector = {
"node-role.kubernetes.io/control-plane" = null
}
# Resource limits for the descheduler pod itself
resources = {
requests = {
cpu = "100m"
memory = "100Mi"
}
limits = {
cpu = "500m"
memory = "256Mi"
}
}
})
]
}
# resource "kubernetes_config_map" "scheduler_config" {
# metadata {
# name = "scheduler-config"
# namespace = "kube-system"
# }
#
# data = {
# "config.yaml" = yamlencode({
# apiVersion = "kubescheduler.config.k8s.io/v1beta3"
# kind = "KubeSchedulerConfiguration"
# profiles = [{
# schedulerName = "default-scheduler"
# plugins = {
# score = {
# enabled = [
# { name = "NodeResourcesFit", weight = 100 },
# { name = "NodeResourcesBalancedAllocation", weight = 100 },
# { name = "NodeAffinity", weight = 50 },
# { name = "InterPodAffinity", weight = 50 },
# { name = "NodePreferAvoidPods", weight = 10000 },
# { name = "TaintToleration", weight = 100 }
# ]
# }
# }
# pluginConfig = [{
# name = "NodeResourcesBalancedAllocation"
# args = {
# resources = [
# { name = "cpu", weight = 100 },
# { name = "memory", weight = 100 }
# ]
# }
# }]
# }]
# })
# }
# }
# Configure ingress to allow forwarded headers
resource "kubernetes_manifest" "rke2-ingress-nginx-config" {
manifest = {
@ -21,6 +121,25 @@ resource "kubernetes_manifest" "rke2-ingress-nginx-config" {
controller:
config:
use-forwarded-headers: "true"
# Buffer settings to prevent "upstream sent too big header" errors
proxy-buffer-size: "16k"
proxy-buffers: "8 16k"
proxy-busy-buffers-size: "32k"
large-client-header-buffers: "4 16k"
client-header-buffer-size: "16k"
client-body-buffer-size: "16k"
# File upload settings for production
client-max-body-size: "100m"
proxy-body-size: "100m"
proxy-request-buffering: "off"
# Additional production timeouts
proxy-connect-timeout: "600"
proxy-send-timeout: "600"
proxy-read-timeout: "600"
client-body-timeout: "600"
EOT
}
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -36,7 +36,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -4,7 +4,7 @@ resource "helm_release" "homepage" {
chart = "homepage"
namespace = var.namespace
create_namespace = true
version = "2.0.1"
version = "2.1.0"
values = [
templatefile("${path.module}/values.yaml.tftpl", {

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -1,26 +1,7 @@
config:
bookmarks:
- Developer:
- Github:
- abbr: GH
href: https://github.com/
services:
- My First Group:
- My First Service:
href: http://localhost/
description: Homepage is awesome
- My Second Group:
- My Second Service:
href: http://localhost/
description: Homepage is the best
- My Third Group:
- My Third Service:
href: http://localhost/
description: Homepage is 😎
widgets:
# show the kubernetes widget, with the cluster summary and individual nodes
- kubernetes:
cluster:
show: true
@ -33,9 +14,6 @@ config:
cpu: true
memory: true
showLabel: true
- search:
provider: duckduckgo
target: _blank
kubernetes:
mode: cluster
settings:
@ -48,16 +26,20 @@ serviceAccount:
# This enables the service account to access the necessary resources
enableRbac: true
env:
- name: HOMEPAGE_ALLOWED_HOSTS
value: ${service_uri}
ingress:
main:
enabled: true
annotations:
# Example annotations to add Homepage to your Homepage!
gethomepage.dev/enabled: "true"
gethomepage.dev/name: "Homepage"
gethomepage.dev/description: "Dynamically Detected Homepage"
gethomepage.dev/group: "Dynamic"
gethomepage.dev/icon: "homepage.png"
#annotations:
# # Example annotations to add Homepage to your Homepage!
# gethomepage.dev/enabled: "true"
# gethomepage.dev/name: "Homepage"
# gethomepage.dev/description: "Dynamically Detected Homepage"
# gethomepage.dev/group: "Dynamic"
# gethomepage.dev/icon: "homepage.png"
hosts:
- host: ${service_uri}
paths:

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -7,50 +7,38 @@ resource "helm_release" "longhorn" {
create_namespace = true
version = "1.7.1"
set {
name = "defaultSettings.defaultDataPath"
value = "/var/lib/longhorn/"
}
set {
name = "defaultSettings.defaultDataLocality"
value = "best-effort"
}
set {
name = "defaultSettings.storageOverProvisioningPercentage"
value = "90"
}
# set {
# name = "global.cattle.systemDefaultRegistry"
# value = "" # Set this to your private registry if you're using one
# }
set {
name = "csi.kubeletRootDir"
value = "/var/lib/kubelet" # Adjust if your Rancher setup uses a different path
}
set {
name = "enablePSP"
value = "false"
}
set {
name = "serviceMonitor.enabled"
value = "true"
}
set {
name = "persistence.defaultClassReplicaCount"
value = "1"
}
set {
name = "persistence.defaultDataLocality"
value = "best-effort"
}
set = [{
name = "defaultSettings.defaultDataPath"
value = "/var/lib/longhorn/"
},
{
name = "defaultSettings.defaultDataLocality"
value = "best-effort"
},
{
name = "defaultSettings.storageOverProvisioningPercentage"
value = "90"
},
{
name = "csi.kubeletRootDir"
value = "/var/lib/kubelet" # Adjust if your Rancher setup uses a different path
},
{
name = "enablePSP"
value = "false"
},
{
name = "serviceMonitor.enabled"
value = "true"
},
{
name = "persistence.defaultClassReplicaCount"
value = "1"
},
{
name = "persistence.defaultDataLocality"
value = "best-effort"
}]
}
output "installed" {

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -6,50 +6,56 @@ resource "random_password" "minio_access_key" {
resource "random_password" "minio_secret_key" {
length = 40
special = true
#override_special = "!#$%&*()-_=+[]{}<>:?"
#min_special = 2
#min_upper = 2
#min_lower = 2
#min_numeric = 2
}
resource "helm_release" "minio" {
name = "minio"
repository = "https://charts.bitnami.com/bitnami"
repository = "oci://registry-1.docker.io/bitnamicharts"
chart = "minio"
namespace = var.namespace
create_namespace = true
version = "14.7.16"
version = "16.0.0"
wait = true
wait_for_jobs = true
set_sensitive {
name = "auth.rootUser"
value = random_password.minio_access_key.result
}
set_sensitive = [{
name = "auth.rootUser"
value = random_password.minio_access_key.result
},
{
name = "auth.rootPassword"
value = random_password.minio_secret_key.result
}]
set_sensitive {
name = "auth.rootPassword"
value = random_password.minio_secret_key.result
}
set {
set = [{
name = "mode"
value = var.mode
}
},
set {
{
name = "resourcesPreset"
value = "nano"
}
},
set {
{
name = "statefulset.replicaCount"
value = var.replicas
}
},
set {
{
name = "statefulset.drivesPerNode"
value = var.replicas < 4 ? 2 : 1
}
},
set {
{
name = "persistence.size"
value = var.storageSize
}
}]
values = [
templatefile("${path.module}/values.yaml.tftpl", {
@ -58,6 +64,7 @@ resource "helm_release" "minio" {
admin = var.admin,
tls = var.mode == "distributed" ? false : var.tls
ingressClass = var.ingressClass
displayOnHomepage = var.displayOnHomepage
})
]
}
@ -66,3 +73,13 @@ output "installed" {
value = true
depends_on = [helm_release.minio]
}
output "access_key" {
value = random_password.minio_access_key.result
sensitive = true
}
output "secret_key" {
value = random_password.minio_secret_key.result
sensitive = true
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -1,10 +1,28 @@
resource "minio_s3_bucket" "overlay" {
resource "null_resource" "health_check" {
depends_on = [var.wait_on]
provisioner "local-exec" {
command = <<-EOT
until curl -s -f "https://${var.server}/minio/health/live" || [[ $attempts -ge 60 ]]; do
sleep 10
attempts=$((attempts+1))
done
if [[ $attempts -ge 60 ]]; then
echo "Minio health check failed after maximum attempts"
exit 1
fi
EOT
}
}
resource "minio_s3_bucket" "overlay" {
depends_on = [null_resource.health_check]
bucket = var.name
acl = "private"
}
resource "minio_s3_bucket_policy" "overlay" {
depends_on = [minio_s3_bucket.overlay]
bucket = minio_s3_bucket.overlay.bucket
policy = jsonencode({
"Version" : "2012-10-17",
@ -20,7 +38,7 @@ resource "minio_s3_bucket_policy" "overlay" {
"s3:GetBucketLocation"
],
"Resource" : [
"arn:aws:s3:::bouwroute"
minio_s3_bucket.overlay.arn,
]
},
{
@ -34,7 +52,7 @@ resource "minio_s3_bucket_policy" "overlay" {
"s3:ListBucket"
],
"Resource" : [
"arn:aws:s3:::bouwroute"
minio_s3_bucket.overlay.arn,
],
"Condition" : {
"StringEquals" : {
@ -55,7 +73,72 @@ resource "minio_s3_bucket_policy" "overlay" {
"s3:GetObject"
],
"Resource" : [
"arn:aws:s3:::bouwroute/**"
"${minio_s3_bucket.overlay.arn}/**",
]
}
]
})
}
resource "minio_s3_bucket" "uploads" {
depends_on = [null_resource.health_check]
bucket = "uploads"
acl = "private"
}
resource "minio_s3_bucket_policy" "uploads" {
depends_on = [minio_s3_bucket.uploads]
bucket = minio_s3_bucket.uploads.bucket
policy = jsonencode({
"Version" : "2012-10-17",
"Statement" : [
{
"Effect" : "Allow",
"Principal" : {
"AWS" : [
"*"
]
},
"Action" : [
"s3:GetBucketLocation"
],
"Resource" : [
minio_s3_bucket.uploads.arn,
]
},
{
"Effect" : "Allow",
"Principal" : {
"AWS" : [
"*"
]
},
"Action" : [
"s3:ListBucket"
],
"Resource" : [
minio_s3_bucket.uploads.arn,
],
"Condition" : {
"StringEquals" : {
"s3:prefix" : [
"*"
]
}
}
},
{
"Effect" : "Allow",
"Principal" : {
"AWS" : [
"*"
]
},
"Action" : [
"s3:GetObject"
],
"Resource" : [
"${minio_s3_bucket.uploads.arn}/**",
]
}
]
@ -63,10 +146,12 @@ resource "minio_s3_bucket_policy" "overlay" {
}
resource "minio_iam_user" "overlay" {
depends_on = [null_resource.health_check]
name = var.name
}
resource "minio_iam_policy" "overlay" {
depends_on = [minio_s3_bucket.overlay, minio_s3_bucket.uploads]
name = minio_s3_bucket.overlay.bucket
policy = jsonencode({
Version = "2012-10-17"
@ -74,7 +159,7 @@ resource "minio_iam_policy" "overlay" {
{
Effect = "Allow"
Action = ["s3:ListBucket"]
Resource = ["arn:aws:s3:::${var.name}"]
Resource = [minio_s3_bucket.overlay.arn, minio_s3_bucket.uploads.arn, ]
},
{
Effect = "Allow"
@ -83,7 +168,7 @@ resource "minio_iam_policy" "overlay" {
"s3:PutObject",
"s3:DeleteObject"
]
Resource = ["arn:aws:s3:::${var.name}/*"]
Resource = ["${minio_s3_bucket.overlay.arn}/*", "${minio_s3_bucket.uploads.arn}/*"]
}
]
})
@ -91,11 +176,14 @@ resource "minio_iam_policy" "overlay" {
resource "minio_iam_user_policy_attachment" "overlay" {
depends_on = [minio_iam_user.overlay, minio_iam_policy.overlay]
user_name = minio_iam_user.overlay.id
policy_name = minio_iam_policy.overlay.id
}
resource "minio_iam_service_account" "overlay" {
depends_on = [minio_iam_user.overlay, minio_s3_bucket.overlay, minio_s3_bucket.uploads]
target_user = minio_iam_user.overlay.name
policy = jsonencode({
Version = "2012-10-17"
@ -103,7 +191,7 @@ resource "minio_iam_service_account" "overlay" {
{
Effect = "Allow"
Action = ["s3:ListBucket"]
Resource = ["arn:aws:s3:::${var.name}"]
Resource = [minio_s3_bucket.overlay.arn, minio_s3_bucket.uploads.arn]
},
{
Effect = "Allow"
@ -112,12 +200,16 @@ resource "minio_iam_service_account" "overlay" {
"s3:PutObject",
"s3:DeleteObject"
]
Resource = ["arn:aws:s3:::${var.name}/*"]
Resource = ["${minio_s3_bucket.overlay.arn}/*", "${minio_s3_bucket.uploads.arn}/*"]
}
]
})
}
output "bucket" {
value = var.name
}
output "access_key" {
value = minio_iam_service_account.overlay.access_key
sensitive = true

View File

@ -2,7 +2,7 @@ terraform {
required_providers {
minio = {
source = "aminueza/minio"
version = "~> 2.5.0"
version = "~> 3.3.0"
}
}
}

View File

@ -1,6 +1,12 @@
tls:
enabled: ${tobool(tls)}
metrics:
enabled: true
serviceMonitor:
enabled: true
namespace: monitoring
ingress:
enabled: ${tobool(admin)}
tls: ${tobool(tls)}
@ -22,6 +28,13 @@ ingress:
ingress.kubernetes.io/proxy-body-size: "0"
nginx.ingress.kubernetes.io/proxy-body-size: "0"
%{ endif }
%{ if displayOnHomepage }
gethomepage.dev/enabled: "true"
gethomepage.dev/name: "Minio"
gethomepage.dev/description: "S3-Compatible cloud storage"
gethomepage.dev/group: "Tools"
gethomepage.dev/icon: "minio.png"
%{ endif }
apiIngress:
enabled: true
@ -44,3 +57,13 @@ apiIngress:
ingress.kubernetes.io/proxy-body-size: "0"
nginx.ingress.kubernetes.io/proxy-body-size: "0"
%{ endif }
affinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/name: minio
topologyKey: kubernetes.io/hostname

View File

@ -61,7 +61,11 @@ variable "ingressClass" {
}
variable "storageSize" {
type = string
type = string
default = "6Gi"
}
variable "displayOnHomepage" {
type = bool
default = false
}

View File

@ -56,3 +56,16 @@ output "installed" {
value = true
depends_on = [helm_release.mongodb]
}
# Root connection string for the MongoDB replica set, assembled from the
# chart's headless-service pod DNS names (mongodb-<i>.mongodb-headless...).
# NOTE(review): assumes release name "mongodb", namespace "mongodb" and the
# chart-default replica-set name "rs0" — confirm against the helm_release.
output "connection_string" {
  value = format(
    "mongodb://%s:%s@%s/%s?replicaSet=rs0&authSource=admin",
    "root",
    random_password.mongodb_root_password.result,
    join(",", [
      # One host:port entry per replica, addressed through the headless service.
      for i in range(var.replicas) :format("mongodb-%d.mongodb-headless.mongodb.svc.cluster.local:27017", i)
    ]),
    "admin"
  )
  sensitive = true
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -16,14 +16,14 @@ mongodb:
readinessProbe:
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
timeoutSeconds: 15
failureThreshold: 3
successThreshold: 1
livenessProbe:
initialDelaySeconds: 60
periodSeconds: 20
timeoutSeconds: 5
timeoutSeconds: 15
failureThreshold: 6
# Proper shutdown handling
@ -55,3 +55,11 @@ auth:
- ${ database }
%{ endfor ~}
%{ endif }
resources:
limits:
cpu: 1000m
memory: 1.5Gi
requests:
cpu: 500m
memory: 1Gi

View File

@ -0,0 +1,3 @@
locals {
  # Fully-qualified hostname for this service, e.g. "auth.example.com".
  service_uri = "${var.service_name}.${var.server_dns}"
}

View File

@ -0,0 +1,92 @@
# Monitoring stack for k3s cluster with Thanos
terraform {
  # Provider *configuration* (connection details) is supplied by the caller;
  # only version constraints are pinned here.
  required_providers {
    helm = {
      source  = "hashicorp/helm"
      version = ">= 2.0.0"
    }
    kubernetes = {
      source  = "hashicorp/kubernetes"
      version = ">= 2.0.0"
    }
  }
}
# Create monitoring namespace — everything in this module is deployed here.
resource "kubernetes_namespace" "monitoring" {
  metadata {
    name = "monitoring"
  }

  lifecycle {
    # Tolerate labels/annotations added out-of-band (e.g. by controllers).
    ignore_changes = [metadata]
  }
}
# Grafana admin password, generated at apply time and interpolated into the
# access_instructions output. special = false keeps it paste-friendly for
# login forms and shell commands.
resource "random_password" "grafana_admin_password" {
  length  = 40
  special = false
}
# Create secret for remote write authentication
# Basic-auth credentials consumed by Prometheus' remoteWrite/remoteRead
# blocks in monitoring-values.yaml.tftpl, which reference it by name/key.
resource "kubernetes_secret" "prometheus_remote_write_auth" {
  metadata {
    name      = "prometheus-remote-write-auth"
    namespace = kubernetes_namespace.monitoring.metadata[0].name
  }

  data = {
    username = var.remote_write_username
    password = var.remote_write_password
  }
}
# Prometheus + Grafana + Alertmanager stack
resource "helm_release" "kube_prometheus_stack" {
  # var.wait_on lets callers sequence this release after arbitrary resources;
  # the auth secret must exist before Prometheus mounts it.
  depends_on = [var.wait_on, kubernetes_secret.prometheus_remote_write_auth]

  name       = "monitoring"
  repository = "https://prometheus-community.github.io/helm-charts"
  chart      = "kube-prometheus-stack"
  namespace  = kubernetes_namespace.monitoring.metadata[0].name
  version    = "75.9.0" # Specify version for reproducibility

  # Use values from template file
  values = [
    templatefile("${path.module}/monitoring-values.yaml.tftpl", {
      remote_write_url       = var.remote_write_url
      remote_read_url        = var.remote_read_url
      grafana_admin_password = random_password.grafana_admin_password.result
    })
  ]
}
# Output important endpoints
# These are in-cluster service URLs — reachable from pods, not from outside
# the cluster (use port-forwarding for external access).
output "grafana_url" {
  value = "http://monitoring-grafana.${kubernetes_namespace.monitoring.metadata[0].name}.svc.cluster.local"
}

output "alertmanager_url" {
  value = "http://monitoring-kube-prometheus-alertmanager.${kubernetes_namespace.monitoring.metadata[0].name}.svc.cluster.local:9093"
}

output "prometheus_url" {
  value = "http://monitoring-kube-prometheus-prometheus.${kubernetes_namespace.monitoring.metadata[0].name}.svc.cluster.local:9090"
}
# Instructions for accessing services
output "access_instructions" {
value = <<-EOT
To access services from outside the cluster:
Grafana:
kubectl port-forward -n ${kubernetes_namespace.monitoring.metadata[0].name} svc/monitoring-grafana 3000:80
Alertmanager:
kubectl port-forward -n ${kubernetes_namespace.monitoring.metadata[0].name} svc/monitoring-kube-prometheus-alertmanager 9093:9093
Default Grafana credentials:
Username: admin
Password: ${random_password.grafana_admin_password.result}
EOT
}

View File

@ -0,0 +1,135 @@
additionalPrometheusRulesMap:
custom-app-rules:
groups:
- name: aspnetcore
interval: 5m
rules:
- alert: HighRequestLatency
expr: histogram_quantile(0.95, sum by (job, instance) (rate(http_request_duration_seconds_bucket[5m]))) > 0.5
for: 5m
labels:
severity: warning
annotations:
summary: "High request latency on {{ $labels.instance }}"
description: "95th percentile latency is above 500ms (current value: {{ $value }}s)"
- alert: HighErrorRate
expr: 'rate(http_requests_total{status=~"5.."}[5m]) > 0.05'
for: 5m
labels:
severity: critical
annotations:
summary: "High error rate on {{ $labels.instance }}"
description: "Error rate is above 5% (current value: {{ $value }})"
# Prometheus server configuration.
prometheus:
  prometheusSpec:
    # Short local retention — long-term history lives in the remote store.
    retention: 24h
    retentionSize: 10GB
    resources:
      requests:
        memory: 200Mi
        cpu: 100m
      limits:
        memory: 500Mi
        cpu: 500m
    # Remote write to VictoriaMetrics
    remoteWrite:
      - url: ${remote_write_url}
        queueConfig:
          maxSamplesPerSend: 10000
          maxShards: 5
          minShards: 1
          batchSendDeadline: 5s
        # Credentials are read from the prometheus-remote-write-auth secret
        # (created alongside this release) via name/key references.
        basicAuth:
          username:
            name: prometheus-remote-write-auth
            key: username
          password:
            name: prometheus-remote-write-auth
            key: password
        # Only ship allow-listed metric families to cut egress and remote
        # storage; everything else stays local-only.
        writeRelabelConfigs:
          - sourceLabels: ["__name__"]
            regex: "(up|kube_.*|container_.*|node_.*|http_.*|process_.*)"
            action: keep
    # Remote read from VictoriaMetrics for old data
    remoteRead:
      - url: ${remote_read_url}
        basicAuth:
          username:
            name: prometheus-remote-write-auth
            key: username
          password:
            name: prometheus-remote-write-auth
            key: password
        readRecent: false # Only read data older than local retention
alertmanager:
enabled: true
alertmanagerSpec:
replicas: 1
resources:
requests:
memory: 50Mi
cpu: 10m
limits:
memory: 150Mi
cpu: 100m
retention: 24h
grafana:
resources:
requests:
memory: 100Mi
cpu: 50m
limits:
memory: 300Mi
cpu: 200m
persistence:
enabled: true
size: 1Gi
adminUser: admin
adminPassword: ${grafana_admin_password}
kubeStateMetrics:
resources:
requests:
memory: 50Mi
cpu: 10m
limits:
memory: 150Mi
cpu: 100m
nodeExporter:
resources:
requests:
memory: 30Mi
cpu: 10m
limits:
memory: 100Mi
cpu: 100m
prometheusOperator:
resources:
requests:
memory: 100Mi
cpu: 50m
limits:
memory: 300Mi
cpu: 200m
defaultRules:
create: true
rules:
alertmanager: true
etcd: false
general: true
k8s: true
kubernetesApps: true
kubernetesResources: true
kubernetesStorage: true
kubernetesSystem: true
node: true
prometheus: true

View File

@ -0,0 +1,33 @@
# Derive kubernetes/helm provider connection settings from a raw kubeconfig
# passed in as a string (var.k8s_config_yaml). Only the first cluster/user
# entry is used. try() prefers token auth and falls back to client
# certificates when the kubeconfig carries cert/key data instead.
locals {
  k8s_config = yamldecode(var.k8s_config_yaml)
  k8s_host   = local.k8s_config.clusters[0].cluster.server

  k8s_auth = try(
    {
      token       = local.k8s_config.users[0].user.token
      using_token = true
    },
    {
      client_certificate = base64decode(local.k8s_config.users[0].user["client-certificate-data"])
      client_key         = base64decode(local.k8s_config.users[0].user["client-key-data"])
      using_token        = false
    }
  )
}

# NOTE(review): insecure = true skips TLS server verification — acceptable on
# a private cluster, but consider wiring the kubeconfig's CA data instead.
provider "kubernetes" {
  host     = local.k8s_host
  insecure = true

  token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
  client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
  client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}

# helm provider 3.x syntax: `kubernetes` is an attribute (=), not a block.
provider "helm" {
  kubernetes = {
    host     = local.k8s_host
    insecure = true

    token              = local.k8s_auth.using_token ? local.k8s_auth.token : null
    client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
    client_key         = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
  }
}

View File

@ -0,0 +1,60 @@
variable "remote_write_url" {
description = "URL for remote write endpoint on local machine"
type = string
default = "https://metrics.binarysunset.dev/api/v1/write"
}
variable "remote_read_url" {
description = "URL for remote read endpoint on local machine"
type = string
default = "https://metrics.binarysunset.dev/api/v1/read"
}
variable "remote_write_username" {
description = "Username for remote write authentication"
type = string
default = "prometheus"
}
# NOTE(review): ships with a placeholder default — real deployments must
# override this (e.g. TF_VAR_remote_write_password) and never rely on the
# fallback value reaching the remote-write secret.
variable "remote_write_password" {
  description = "Password for remote write authentication"
  type        = string
  default     = "your-secure-password"
  sensitive   = true
}
variable "service_name" {
type = string
description = "Name of the service"
default = "auth"
}
variable "server_dns" {
type = string
description = "Domain for the server"
}
variable "k8s_config_yaml" {
description = "Content of k8s config yaml file"
type = string
}
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "namespace" {
type = string
}
variable "enabled" {
type = bool
default = true
}
variable "ingressClass" {
type = string
default = "nginx"
}

View File

@ -1,4 +1,6 @@
resource "kubernetes_namespace" "postgresql" {
count = var.enabled ? 1 : 0
metadata {
name = var.namespace
}
@ -9,21 +11,32 @@ resource "kubernetes_namespace" "postgresql" {
}
resource "random_password" "postgresql_user_password" {
length = 40
special = true
length = 40
special = true
override_special = "!#$%&*()-_=+[]{}<>:?"
min_special = 2
min_upper = 2
min_lower = 2
min_numeric = 2
}
resource "random_password" "postgresql_root_password" {
length = 40
special = true
length = 40
special = true
override_special = "!#$%&*()-_=+[]{}<>:?"
min_special = 2
min_upper = 2
min_lower = 2
min_numeric = 2
}
resource "kubernetes_secret" "postgresql_auth" {
type = "generic"
count = var.enabled ? 1 : 0
type = "generic"
depends_on = [var.wait_on]
metadata {
name = "postgresql-auth"
namespace = kubernetes_namespace.postgresql.metadata.0.name
namespace = kubernetes_namespace.postgresql[count.index].metadata.0.name
}
data = {
@ -33,11 +46,12 @@ resource "kubernetes_secret" "postgresql_auth" {
}
resource "helm_release" "postgresql" {
count = var.enabled ? 1 : 0
depends_on = [var.wait_on, kubernetes_secret.postgresql_auth]
name = "postgresql"
repository = "https://charts.bitnami.com/bitnami"
chart = "postgresql"
namespace = kubernetes_namespace.postgresql.metadata.0.name
namespace = kubernetes_namespace.postgresql[count.index].metadata.0.name
version = "16.0.5"
wait = true

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -17,6 +17,7 @@ resource "random_password" "tenant" {
}
resource "kubernetes_job" "create-tenant" {
count = var.enabled ? 1 : 0
depends_on = [var.wait_on]
metadata {
@ -108,5 +109,5 @@ output "username" {
}
output "job_name" {
value = kubernetes_job.create-tenant.metadata[0].name
value = var.enabled ? kubernetes_job.create-tenant[0].metadata[0].name : null
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -38,3 +38,8 @@ variable "k8s_config_yaml" {
description = "Content of k8s config yaml file"
type = string
}
variable "enabled" {
type = bool
default = true
}

View File

@ -16,3 +16,8 @@ variable "namespace" {
variable "username" {
type = string
}
variable "enabled" {
type = bool
default = true
}

View File

@ -12,20 +12,19 @@ resource "helm_release" "rabbitmq" {
create_namespace = true
version = "15.1.0"
set_sensitive {
set_sensitive = [{
name = "auth.password"
value = random_password.password.result
}
}]
set {
set = [{
name = "replicaCount"
value = var.replicas
}
set {
},
{
name = "persistence.size"
value = "4Gi"
}
}]
values = [
templatefile("${path.module}/values.yaml.tftpl", {
@ -41,3 +40,8 @@ output "installed" {
value = true
depends_on = [helm_release.rabbitmq]
}
output "connection_string" {
value = "rabbitmq://user:${random_password.password.result}@rabbitmq-headless.${var.namespace}.svc.cluster.local:5672/"
sensitive = true
}

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -10,15 +10,15 @@ resource "helm_release" "rancher_server" {
values = [file("${path.module}/rancher-values.yaml")]
set {
set = [{
name = "hostname"
value = join(".", ["rancher", var.server_dns])
}
},
set {
{
name = "bootstrapPassword"
value = "admin" # TODO: change this once the terraform provider has been updated with the new pw bootstrap logic
}
}]
}
resource "random_password" "admin_password" {

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -11,11 +11,11 @@ ports:
port: 8000
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
forwardedHeaders:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 0
idleTimeout: 0
readTimeout: 0
@ -26,11 +26,11 @@ ports:
port: 8443
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
forwardedHeaders:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 0
idleTimeout: 0
readTimeout: 0
@ -41,9 +41,9 @@ ports:
port: 2223
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 600s
idleTimeout: 60s
readTimeout: 600s
@ -54,9 +54,9 @@ ports:
port: 8993
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 600s
idleTimeout: 300s
readTimeout: 600s
@ -67,9 +67,9 @@ ports:
port: 8995
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 600s
idleTimeout: 300s
readTimeout: 600s
@ -80,9 +80,9 @@ ports:
port: 4190
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 600s
idleTimeout: 300s
readTimeout: 600s
@ -93,7 +93,7 @@ ports:
port: 8025
protocol: TCP
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 300s
idleTimeout: 300s
readTimeout: 300s
@ -104,9 +104,9 @@ ports:
port: 8465
protocol: TCP
proxyProtocol:
trustedIPs: [127.0.0.1/8,10.0.0.0/8]
trustedIPs: [ 127.0.0.1/8,10.0.0.0/8 ]
transport:
respondingTimouts:
respondingTimeouts:
writeTimeout: 300s
idleTimeout: 300s
readTimeout: 300s

View File

@ -40,20 +40,18 @@ resource "helm_release" "vault" {
create_namespace = false
wait = true
set {
set = [{
name = "server.ha.enabled"
value = "false"
}
set {
},
{
name = "server.ha.replicas"
value = "1"
}
set {
},
{
name = "server.ha.raft.enabled"
value = "false"
}
}]
values = [
templatefile("${path.module}/values.yaml.tftpl", {

View File

@ -23,7 +23,7 @@ provider "kubernetes" {
}
provider "helm" {
kubernetes {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null

View File

@ -0,0 +1,124 @@
terraform {
required_providers {
slugify = {
source = "public-cloud-wl/slugify"
version = "0.1.1"
}
}
}
locals {
authority = "https://${var.zitadel_domain}"
slug_project = provider::slugify::slug(var.project)
slug_name = provider::slugify::slug(var.name)
cluster = "${local.slug_project}.${var.cluster_domain}"
uri = var.uri
}
module "zitadel_project_application_api" {
source = "../project/application/api"
wait_on = var.wait_on
org_id = var.org_id
project_id = var.project_id
name = "${var.name} API"
}
module "zitadel_project_application_ua" {
source = "../project/application/user-agent"
wait_on = module.zitadel_project_application_api.installed
org_id = var.org_id
project_id = var.project_id
name = "${ var.name } (Swagger)"
redirect_uris = ["${local.uri}/swagger/oauth2-redirect.html", "${local.uri}/hangfire/signin-oidc", "${local.uri}/signin-oidc"]
post_logout_redirect_uris = [local.uri]
}
resource "kubernetes_secret" "user-agent" {
type = "Opaque"
depends_on = [module.zitadel_project_application_ua]
metadata {
name = "${local.slug_name}-user-agent"
namespace = var.namespace
}
data = {
"authority" = local.authority
"audience" = var.project_id
"client_id" = module.zitadel_project_application_ua.client_id
}
}
resource "kubernetes_secret" "api" {
type = "Opaque"
depends_on = [module.zitadel_project_application_api]
metadata {
name = "${local.slug_name}-api"
namespace = var.namespace
}
data = {
"authority" = local.authority
"audience" = var.project_id
"client_id" = module.zitadel_project_application_api.client_id
"client_secret" = module.zitadel_project_application_api.client_secret
}
}
module "zitadel_service_account" {
count = var.service_account ? 1 : 0
wait_on = module.zitadel_project_application_api.installed
source = "../service-account"
org_id = var.org_id
user_name = "${local.slug_name}@${ local.cluster }"
name = "${var.name} @ ${var.project}"
with_secret = true
access_token_type = "ACCESS_TOKEN_TYPE_JWT"
}
module "zitadel_project_user_grant" {
count = var.service_account ? 1 : 0
source = "../project/user-grant"
org_id = var.org_id
project_id = var.project_id
user_id = module.zitadel_service_account[0].user_id
roles = var.roles
}
resource "kubernetes_secret" "service-account" {
count = var.service_account ? 1 : 0
type = "Opaque"
depends_on = [module.zitadel_service_account]
metadata {
name = "${local.slug_name}-service-account"
namespace = var.namespace
}
data = {
"authority" = local.authority
"audience" = var.project_id
"client_id" = module.zitadel_service_account[count.index].client_id
"client_secret" = module.zitadel_service_account[count.index].client_secret
"scope" = join(" ", concat(["openid", "profile", "urn:zitadel:iam:org:project:id:${var.project_id}:aud"], var.roles))
}
}
output "installed" {
value = true
depends_on = [kubernetes_secret.service-account]
}

View File

@ -0,0 +1,48 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
}
variable "project_id" {
type = string
}
variable "name" {
type = string
}
variable "project" {
type = string
}
variable "roles" {
type = list(string)
description = "Roles to be granted"
}
variable "namespace" {
type = string
}
variable "service_account" {
type = bool
default = true
}
variable "zitadel_domain" {
type = string
}
variable "cluster_domain" {
type = string
}
variable "uri" {
type = string
}

View File

@ -0,0 +1,82 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
}
}
}
resource "zitadel_org_idp_google" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
name = "Google"
client_id = var.client_id
client_secret = var.client_secret
scopes = var.options.scopes
is_linking_allowed = var.options.is_linking_allowed
is_creation_allowed = var.options.is_creation_allowed
is_auto_creation = var.options.is_auto_creation
is_auto_update = var.options.is_auto_update
auto_linking = var.options.auto_linking
}
resource "zitadel_login_policy" "default" {
depends_on = [zitadel_org_idp_google.default]
org_id = var.org_id
user_login = false
allow_register = true
allow_external_idp = true
force_mfa = false
force_mfa_local_only = false
passwordless_type = "PASSWORDLESS_TYPE_ALLOWED"
hide_password_reset = "false"
password_check_lifetime = "240h0m0s"
external_login_check_lifetime = "240h0m0s"
multi_factor_check_lifetime = "24h0m0s"
mfa_init_skip_lifetime = "720h0m0s"
second_factor_check_lifetime = "24h0m0s"
ignore_unknown_usernames = true
default_redirect_uri = "https://${var.domain}"
second_factors = ["SECOND_FACTOR_TYPE_OTP", "SECOND_FACTOR_TYPE_U2F"]
multi_factors = ["MULTI_FACTOR_TYPE_U2F_WITH_VERIFICATION"]
idps = [zitadel_org_idp_google.default.id]
allow_domain_discovery = true
disable_login_with_email = true
disable_login_with_phone = true
}
#resource "zitadel_action" "verify-email-from-google-idp" {
# org_id = var.org_id
# name = "trustEmailVerification"
# script = templatefile("${path.module}/verify-email.action.tftpl", {
# trusted_idp = zitadel_org_idp_google.default.id,
# })
# allowed_to_fail = false
# timeout = "10s"
#}
#resource "zitadel_trigger_actions" "verify-email-from-google-idp" {
# org_id = var.org_id
# flow_type = "FLOW_TYPE_EXTERNAL_AUTHENTICATION"
# trigger_type = "TRIGGER_TYPE_PRE_CREATION"
# action_ids = [zitadel_action.verify-email-from-google-idp.id]
#}
#
#resource "zitadel_trigger_actions" "internal" {
# org_id = var.org_id
# flow_type = "FLOW_TYPE_INTERNAL_AUTHENTICATION"
# trigger_type = "TRIGGER_TYPE_PRE_CREATION"
# action_ids = [zitadel_action.verify-email-from-google-idp.id]
#}
output "installed" {
value = true
depends_on = [
zitadel_org_idp_google.default, zitadel_login_policy.default,
]
}
output "idp_id" {
value = zitadel_org_idp_google.default.id
}

View File

@ -0,0 +1,43 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "client_id" {
type = string
description = "Google Client ID"
}
variable "client_secret" {
type = string
description = "Google Client Secret"
}
variable "options" {
type = object({
scopes = list(string)
is_linking_allowed = bool
is_creation_allowed = bool
is_auto_creation = bool
is_auto_update = bool
auto_linking = string
})
default = {
scopes = ["openid", "profile", "email"],
is_linking_allowed = true
is_creation_allowed = true
is_auto_creation = true
is_auto_update = true
auto_linking = "AUTO_LINKING_OPTION_USERNAME"
}
}
variable "domain" {
type = string
}

View File

@ -0,0 +1,15 @@
/**
 * Zitadel custom action: mark the user's e-mail as verified during
 * just-in-time provisioning from an external IdP, so the user does not
 * receive a verification e-mail. (The previous docblock described an Okta
 * first/last-name sync this function never performed; this module wires the
 * action to the Google IdP.)
 *
 * Flow: External Authentication, Trigger: Post Authentication
 *
 * @param ctx Zitadel action context (unused)
 * @param api Zitadel action API; only setEmailVerified is used
 */
function trustEmailVerification(ctx, api) {
  api.setEmailVerified(true);
}

View File

@ -0,0 +1,3 @@
locals {
service_uri = join(".", [var.service_name, var.server_dns])
}

View File

@ -0,0 +1,91 @@
terraform {
required_providers {
kubernetes = {
source = "hashicorp/kubernetes"
version = "2.31.0"
}
}
}
resource "kubernetes_namespace" "zitadel" {
count = var.enabled ? 1 : 0
metadata {
name = var.namespace
}
lifecycle {
ignore_changes = [metadata]
}
}
resource "random_password" "zitadel_masterkey" {
length = 32
special = true
}
resource "kubernetes_secret" "zitadel" {
count = var.enabled ? 1 : 0
metadata {
name = "zitadel"
namespace = kubernetes_namespace.zitadel[count.index].metadata[0].name
}
data = {
masterkey = random_password.zitadel_masterkey.result
}
}
resource "helm_release" "zitadel" {
count = var.enabled ? 1 : 0
depends_on = [var.wait_on, kubernetes_secret.zitadel]
name = "zitadel"
repository = "https://charts.zitadel.com"
chart = "zitadel"
namespace = kubernetes_namespace.zitadel[count.index].metadata[0].name
version = "8.12.0"
create_namespace = false
wait = true
wait_for_jobs = true
values = [
templatefile("${path.module}/values.yaml.tftpl", {
service_uri = local.service_uri,
database = var.database,
database_username = var.database_username,
database_password = var.database_password,
database_root_username = var.database_root_password != null ? var.database_root_username : null,
database_root_password = var.database_root_password
display_on_homepage = var.display_on_homepage
ingressClass = var.ingressClass
})
]
}
# Reads the admin service-account key that the zitadel chart creates.
# NOTE(review): helm_release.zitadel is count-gated on var.enabled, but this
# data source is not — with enabled = false the secret lookup will fail at
# plan/apply; consider gating it (and the dependent local_file/outputs) too.
data "kubernetes_secret" "zitadel_admin" {
  depends_on = [helm_release.zitadel]
  metadata {
    name      = "zitadel-admin-sa"
    namespace = var.namespace
  }
}
# Persists the admin service-account JWT profile next to the root module so
# downstream tooling (e.g. the zitadel provider) can authenticate with it.
# NOTE(review): this writes a long-lived admin credential to local disk and
# into Terraform state — make sure both are access-controlled.
resource "local_file" "zitadel_jwt_profile_file" {
  content  = data.kubernetes_secret.zitadel_admin.data["zitadel-admin-sa.json"]
  filename = format("%s/%s", path.root, "zitadel-admin-sa.json")
}
output "jwt_profile_file" {
value = local_file.zitadel_jwt_profile_file.filename
}
output "installed" {
value = true
depends_on = [helm_release.zitadel, local_file.zitadel_jwt_profile_file]
}
output "server" {
value = local.service_uri
}
output "uri" {
value = "https://${local.service_uri}"
}

View File

@ -0,0 +1,38 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_application_api" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
project_id = var.project_id
name = var.name
auth_method_type = "API_AUTH_METHOD_TYPE_BASIC"
// TODO: Change this to private key jwt in the future
}
output "installed" {
value = true
depends_on = [zitadel_application_api.default]
}
output "application_id" {
value = zitadel_application_api.default.id
}
output "client_id" {
value = zitadel_application_api.default.client_id
sensitive = true
}
output "client_secret" {
value = zitadel_application_api.default.client_secret
sensitive = true
}

View File

@ -0,0 +1,20 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "project_id" {
type = string
description = "Project Id"
}
variable "name" {
type = string
description = "Application name"
}

View File

@ -0,0 +1,63 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_application_oidc" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
grant_types = ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE"]
name = var.name
project_id = var.project_id
redirect_uris = var.redirect_uris
response_types = ["OIDC_RESPONSE_TYPE_CODE"]
# // If selected, the requested roles of the authenticated user are added to the access token.
access_token_type = "OIDC_TOKEN_TYPE_JWT"
access_token_role_assertion = true
# BEARER uses an Opaque token, which needs the introspection endpoint and `urn:zitadel:iam:org:project:id:<API_PROJECT_ID>:aud` scope
#access_token_type = "OIDC_TOKEN_TYPE_BEARER"
# // If you want to add additional Origins to your app which is not used as a redirect you can do that here.
#additional_origins = []
app_type = "OIDC_APP_TYPE_USER_AGENT"
auth_method_type = "OIDC_AUTH_METHOD_TYPE_NONE"
# // Redirect URIs must begin with https:// unless dev_mode is true
#dev_mode = false
# // If selected, the requested roles of the authenticated user are added to the ID token.
#id_token_role_assertion = false
# // Enables clients to retrieve profile, email, phone and address claims from ID token.
#id_token_userinfo_assertion = false
post_logout_redirect_uris = var.post_logout_redirect_uris
}
output "installed" {
value = true
depends_on = [zitadel_application_oidc.default]
}
output "application_id" {
value = zitadel_application_oidc.default.id
}
output "client_id" {
value = zitadel_application_oidc.default.client_id
sensitive = true
}
output "client_secret" {
value = zitadel_application_oidc.default.client_secret
sensitive = true
}

View File

@ -0,0 +1,30 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "project_id" {
type = string
description = "Project Id"
}
variable "name" {
type = string
description = "Application name"
}
variable "redirect_uris" {
type = list(string)
}
variable "post_logout_redirect_uris" {
type = list(string)
default = []
}

View File

@ -0,0 +1,61 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_application_oidc" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
grant_types = ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE"]
name = var.name
project_id = var.project_id
redirect_uris = var.redirect_uris
response_types = ["OIDC_RESPONSE_TYPE_CODE"]
# // If selected, the requested roles of the authenticated user are added to the access token.
#access_token_type = "OIDC_TOKEN_TYPE_JWT"
#access_token_role_assertion = true
# BEARER uses an Opaque token, which needs the introspection endpoint and `urn:zitadel:iam:org:project:id:<API_PROJECT_ID>:aud` scope
access_token_type = "OIDC_TOKEN_TYPE_BEARER"
# // If you want to add additional Origins to your app which is not used as a redirect you can do that here.
#additional_origins = []
app_type = "OIDC_APP_TYPE_WEB"
auth_method_type = var.auth_method_type
# // Redirect URIs must begin with https:// unless dev_mode is true
#dev_mode = false
id_token_role_assertion = var.id_token_role_assertion
id_token_userinfo_assertion = var.id_token_userinfo_assertion
post_logout_redirect_uris = var.post_logout_redirect_uris
}
output "installed" {
value = true
depends_on = [zitadel_application_oidc.default]
}
output "application_id" {
value = zitadel_application_oidc.default.id
}
output "client_id" {
value = zitadel_application_oidc.default.client_id
sensitive = true
}
output "client_secret" {
value = zitadel_application_oidc.default.client_secret
sensitive = true
}

View File

@ -0,0 +1,47 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "project_id" {
type = string
description = "Project Id"
}
variable "name" {
type = string
description = "Application name"
}
variable "redirect_uris" {
type = list(string)
}
variable "post_logout_redirect_uris" {
type = list(string)
default = []
}
variable "auth_method_type" {
type = string
default = "OIDC_AUTH_METHOD_TYPE_NONE"
}
variable "id_token_role_assertion" {
type = bool
default = false
description = "If selected, the requested roles of the authenticated user are added to the ID token."
}
variable "id_token_userinfo_assertion" {
type = bool
default = false
description = "Enables clients to retrieve profile, email, phone and address claims from ID token."
}

View File

@ -0,0 +1,36 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_project" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
name = var.name
project_role_assertion = true
project_role_check = true
has_project_check = true
private_labeling_setting = "PRIVATE_LABELING_SETTING_ENFORCE_PROJECT_RESOURCE_OWNER_POLICY"
}
resource "zitadel_project_member" "default" {
count = length(var.owners)
org_id = var.org_id
project_id = zitadel_project.default.id
user_id = var.owners[count.index]
roles = ["PROJECT_OWNER"]
}
output "installed" {
value = true
depends_on = [zitadel_project.default, zitadel_project_member.default]
}
output "project_id" {
value = zitadel_project.default.id
}

View File

@ -0,0 +1,34 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_project_role" "default" {
count = length(var.roles)
depends_on = [var.wait_on]
org_id = var.org_id
project_id = var.project_id
role_key = var.roles[count.index]
display_name = var.roles[count.index]
group = var.group
}
output "installed" {
value = true
depends_on = [zitadel_project_role.default]
}
output "role_ids" {
value = toset([
for role in zitadel_project_role.default : role.id
])
}
output "roles" {
value = var.roles
}

View File

@ -0,0 +1,27 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "project_id" {
type = string
description = "Project Id"
}
variable "group" {
type = string
description = "Optional group name"
default = null
}
variable "roles" {
type = list(string)
description = "Roles to be added"
default = []
}

View File

@ -0,0 +1,26 @@
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
resource "zitadel_user_grant" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
project_id = var.project_id
user_id = var.user_id
role_keys = var.roles
}
output "installed" {
value = true
depends_on = [zitadel_user_grant.default]
}
output "user_grant_id" {
value = zitadel_user_grant.default.id
}

View File

@ -0,0 +1,26 @@
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on in main.tf).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "project_id" {
type = string
description = "Project Id"
}
variable "user_id" {
type = string
description = "User Id"
}
# Project role keys to grant; an empty list grants no roles.
variable "roles" {
type = list(string)
description = "Roles to be granted"
default = []
}

View File

@ -0,0 +1,21 @@
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on in main.tf).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "name" {
type = string
description = "Name of the project"
}
# User IDs that receive PROJECT_OWNER; an empty list grants nobody.
variable "owners" {
type = list(string)
description = "User IDs to be granted `PROJECT_OWNER` role"
default = []
}

View File

@ -0,0 +1,33 @@
# Derives kubernetes/helm provider connection settings from a kubeconfig
# passed in as a raw YAML string.
locals {
k8s_config = yamldecode(var.k8s_config_yaml)
# Assumes the kubeconfig holds exactly one cluster and one user entry.
k8s_host = local.k8s_config.clusters[0].cluster.server
# Prefer bearer-token auth when the kubeconfig carries a token; otherwise
# fall back to embedded client certificates. try() returns the first
# candidate object that evaluates without error.
k8s_auth = try(
{
token = local.k8s_config.users[0].user.token
using_token = true
},
{
client_certificate = base64decode(local.k8s_config.users[0].user["client-certificate-data"])
client_key = base64decode(local.k8s_config.users[0].user["client-key-data"])
using_token = false
}
)
}
# NOTE(review): insecure = true disables TLS server verification for both
# providers - acceptable for lab clusters, risky elsewhere; confirm intent.
provider "kubernetes" {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null
client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
client_key = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}
# `kubernetes = { ... }` (attribute, not block) is presumably the helm
# provider v3 syntax - confirm against the pinned provider version.
provider "helm" {
kubernetes = {
host = local.k8s_host
insecure = true
token = local.k8s_auth.using_token ? local.k8s_auth.token : null
client_certificate = local.k8s_auth.using_token ? null : local.k8s_auth.client_certificate
client_key = local.k8s_auth.using_token ? null : local.k8s_auth.client_key
}
}

View File

@ -0,0 +1,38 @@
# Pins the Zitadel provider used by this module.
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
# Service account ("machine user") in the given organisation.
resource "zitadel_machine_user" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
user_name = var.user_name
name = var.name
description = var.description
with_secret = var.with_secret
access_token_type = var.access_token_type
}
# Completion flag so callers can sequence on this module via `wait_on`.
output "installed" {
value = true
depends_on = [zitadel_machine_user.default]
}
# ID of the created machine user.
output "user_id" {
value = zitadel_machine_user.default.id
}
# OAuth client credentials; presumably only populated when
# with_secret = true - confirm with the zitadel provider docs.
output "client_id" {
value = zitadel_machine_user.default.client_id
sensitive = true
}
output "client_secret" {
value = zitadel_machine_user.default.client_secret
sensitive = true
}

View File

@ -0,0 +1,33 @@
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on in main.tf).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
# Login name of the machine user.
variable "user_name" {
type = string
}
# Display name of the machine user.
variable "name" {
type = string
}
variable "description" {
type = string
default = null
}
# When true, a client secret is also requested for the user.
variable "with_secret" {
type = bool
default = false
}
# Zitadel access-token format; JWT by default.
variable "access_token_type" {
type = string
default = "ACCESS_TOKEN_TYPE_JWT"
}

View File

@ -0,0 +1,28 @@
/**
 * Zitadel action: copies the role keys of all of the user's grants into
 * the token as custom claims.
 *
 * The token ends up with two custom claims, both holding the same flat
 * list of granted role keys:
 *
 *   "groups": ["{roleKey}", "{roleKey}", ...],
 *   "scope":  ["{roleKey}", "{roleKey}", ...]
 *
 * Flow: Complement token. Triggers: Pre Userinfo creation,
 * Pre access token creation.
 *
 * @param ctx Zitadel action context (reads ctx.v1.user.grants)
 * @param api Zitadel claims API (api.v1.claims.setClaim)
 */
function groupsClaim(ctx, api) {
  const userGrants = ctx.v1.user.grants;
  // Nothing to add when the user has no grants at all.
  if (userGrants === undefined || userGrants.count == 0) {
    return;
  }
  const roleKeys = [];
  for (const grant of userGrants.grants) {
    for (const roleKey of grant.roles) {
      roleKeys.push(roleKey);
    }
  }
  api.v1.claims.setClaim("groups", roleKeys);
  api.v1.claims.setClaim("scope", roleKeys);
}

View File

@ -0,0 +1,46 @@
# Pins the Zitadel provider used by this module.
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
# Tenant organisation, marked as the instance default org.
resource "zitadel_org" "default" {
depends_on = [var.wait_on]
name = var.name
is_default = true
}
# NOTE(review): disabled groups-claim action kept for reference; re-enable
# all three resources together if the custom token claims are needed again.
// resource "zitadel_action" "groups-claim" {
// org_id = zitadel_org.default.id
// name = "groupsClaim"
// script = templatefile("${path.module}/groupsClaim.action.tftpl", {})
// allowed_to_fail = true
// timeout = "10s"
// }
//
// resource "zitadel_trigger_actions" "groups-claim-pre-user-info" {
// org_id = zitadel_org.default.id
// flow_type = "FLOW_TYPE_CUSTOMISE_TOKEN"
// trigger_type = "TRIGGER_TYPE_PRE_USERINFO_CREATION"
// action_ids = [zitadel_action.groups-claim.id]
// }
//
// resource "zitadel_trigger_actions" "groups-claim-pre-access-token" {
// org_id = zitadel_org.default.id
// flow_type = "FLOW_TYPE_CUSTOMISE_TOKEN"
// trigger_type = "TRIGGER_TYPE_PRE_ACCESS_TOKEN_CREATION"
// action_ids = [zitadel_action.groups-claim.id]
// }
# ID of the created organisation.
output "org_id" {
value = zitadel_org.default.id
}
# Completion flag so callers can sequence on this module via `wait_on`.
output "installed" {
value = true
depends_on = [zitadel_org.default]
}

View File

@ -0,0 +1,20 @@
# Pins the Zitadel provider used by this module.
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
# Makes the given user an ORG_OWNER of the organisation.
resource "zitadel_org_member" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
user_id = var.user_id
roles = ["ORG_OWNER"]
}
# Completion flag so callers can sequence on this module via `wait_on`.
output "installed" {
value = true
depends_on = [zitadel_org_member.default]
}

View File

@ -0,0 +1,15 @@
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on in main.tf).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Zitadel Organization ID"
}
variable "user_id" {
type = string
description = "Zitadel User ID"
}

View File

@ -0,0 +1,11 @@
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on in main.tf).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
# Organisation name; defaults to the company tenant.
variable "name" {
type = string
description = "Name of the tenant"
default = "fourlights"
}

View File

@ -0,0 +1,31 @@
# Pins the Zitadel provider used by this module.
terraform {
required_providers {
zitadel = {
source = "zitadel/zitadel"
version = "2.0.2"
}
}
}
# Human user with a pre-verified email address.
# NOTE(review): initial_password is hard-coded and well-known - acceptable
# only for throwaway dev bootstraps; parameterise before any shared use.
resource "zitadel_human_user" "default" {
depends_on = [var.wait_on]
org_id = var.org_id
email = var.email
user_name = var.user_name
first_name = var.first_name
last_name = var.last_name
is_email_verified = true
initial_password = "Password1!"
}
# Completion flag so callers can sequence on this module via `wait_on`.
output "installed" {
value = true
depends_on = [zitadel_human_user.default]
}
# ID of the created human user.
output "user_id" {
value = zitadel_human_user.default.id
}

View File

@ -0,0 +1,26 @@
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "org_id" {
type = string
description = "Organisation Id"
}
variable "user_name" {
type = string
}
variable "first_name" {
type = string
}
variable "last_name" {
type = string
}
variable "email" {
type = string
}

View File

@ -0,0 +1,82 @@
# Helm values for the Zitadel chart, rendered via Terraform templatefile().
zitadel:
# Pre-created Kubernetes secret holding the Zitadel masterkey.
masterkeySecretName: "zitadel"
configmapConfig:
Log:
Level: 'info'
LogStore:
Access:
Stdout:
Enabled: true
# TLS terminates at the ingress; Zitadel itself serves cleartext.
ExternalSecure: true
ExternalDomain: ${ service_uri }
ExternalPort: 443
TLS:
Enabled: false
# Bootstrap machine user created on first start.
# NOTE(review): MachineKey expires 2026-01-01; Type 1 is presumably the
# JSON key type - confirm against the Zitadel configuration docs.
FirstInstance:
Org:
Machine:
Machine:
Username: zitadel-admin-sa
Name: Admin
MachineKey:
ExpirationDate: "2026-01-01T00:00:00Z"
Type: 1
Database:
Postgres:
Host: postgresql-hl.postgresql.svc.cluster.local
Port: 5432
Database: ${ database }
MaxOpenConns: 20
MaxIdleConns: 10
MaxConnLifetime: 30m
MaxConnIdleTime: 5m
User:
Username: ${ database_username }
Password: "${ database_password }"
# NOTE(review): in-cluster Postgres traffic is unencrypted (SSL disabled).
SSL:
Mode: disable
%{ if database_root_username != null }Admin:
Username: ${ database_root_username }
Password: "${ database_root_password }"
SSL:
Mode: disable
%{ endif }
readinessProbe:
initialDelaySeconds: 5
periodSeconds: 5
failureThreshold: 10
startupProbe:
periodSeconds: 5
failureThreshold: 30
service:
annotations:
# Make Traefik speak h2c (cleartext HTTP/2) to the backing service; the
# gRPC annotations in the nginx branch below imply the API needs HTTP/2.
traefik.ingress.kubernetes.io/service.serversscheme: h2c
ingress:
enabled: true
className: ${ingressClass}
annotations:
kubernetes.io/ingress.class: ${ingressClass}
cert-manager.io/cluster-issuer: letsencrypt
%{ if ingressClass == "traefik" }
traefik.ingress.kubernetes.io/router.entrypoints: web,websecure
traefik.ingress.kubernetes.io/router.middlewares: default-redirect-to-https@kubernetescrd,default-preserve-host-headers@kubernetescrd
%{ else }
nginx.ingress.kubernetes.io/backend-protocol: "GRPC"
nginx.ingress.kubernetes.io/grpc-backend: "true"
%{ endif }
# Optional gethomepage.dev dashboard registration.
%{ if display_on_homepage }gethomepage.dev/enabled: "true"
gethomepage.dev/name: "Zitadel"
gethomepage.dev/description: "Identity and Access Management"
gethomepage.dev/group: "Tools"
gethomepage.dev/icon: "zitadel.png"
%{ endif }
hosts:
- host: ${service_uri}
paths:
- path: /
pathType: Prefix

View File

@ -0,0 +1,67 @@
variable "service_name" {
type = string
description = "Name of the service"
default = "auth"
}
variable "server_dns" {
type = string
description = "Domain for the server"
}
# Raw kubeconfig contents (YAML string), not a file path.
variable "k8s_config_yaml" {
description = "Content of k8s config yaml file"
type = string
}
# Opaque ordering handle: pass anything here to delay this module until it
# is known (consumed via depends_on).
variable "wait_on" {
type = any
description = "Resources to wait on"
default = true
}
variable "namespace" {
type = string
}
# Postgres database name and application credentials for Zitadel.
variable "database" {
type = string
default = "zitadel"
}
variable "database_username" {
type = string
default = "zitadel"
}
variable "database_password" {
type = string
sensitive = true
}
# Admin credentials; the values template only emits the Admin block when
# database_root_username is non-null.
variable "database_root_username" {
type = string
default = "postgres"
}
variable "database_root_password" {
type = string
sensitive = true
default = null
}
# Adds gethomepage.dev annotations to the ingress when true.
variable "display_on_homepage" {
type = bool
default = false
}
variable "enabled" {
type = bool
default = true
}
# NOTE(review): camelCase name is inconsistent with the snake_case
# variables above; renaming would break callers, so it is flagged only.
variable "ingressClass" {
description = "Ingress class to use"
type = string
default = "nginx"
}

56
infra/modules/zot/main.tf Normal file
View File

@ -0,0 +1,56 @@
# Deploys the Zot OCI registry via Helm and provisions the two Traefik
# middlewares its ingress annotations reference, lifting the proxy's
# request-body size limit and extending timeouts so large image layers
# can be pushed and pulled.
resource "helm_release" "zot" {
  name             = "zot"
  repository       = "https://zotregistry.dev/helm-charts"
  chart            = "zot"
  namespace        = "registry"
  create_namespace = true

  values = [
    templatefile("${path.module}/values.yaml.tftpl", { service_uri = var.service_uri })
  ]
}

# Unlimited request body (maxRequestBodyBytes = 0 disables the cap) so
# blob uploads of any size pass through Traefik. The namespace is read
# from the release so the "registry" literal lives in one place; the
# attribute reference also gives Terraform the same create-after-release
# ordering the previous explicit depends_on expressed.
resource "kubernetes_manifest" "traefik_middleware_request_body" {
  manifest = {
    apiVersion = "traefik.io/v1alpha1"
    kind       = "Middleware"
    metadata = {
      name      = "request-body"
      namespace = helm_release.zot.namespace
    }
    spec = {
      buffering = {
        maxRequestBodyBytes = 0
      }
    }
  }
}

# Signals long (1h) read/write timeouts to downstream components via
# custom request headers.
resource "kubernetes_manifest" "traefik_middleware_request_timeouts" {
  manifest = {
    apiVersion = "traefik.io/v1alpha1"
    kind       = "Middleware"
    metadata = {
      name      = "request-timeouts"
      namespace = helm_release.zot.namespace
    }
    spec = {
      headers = {
        customRequestHeaders = {
          "X-Forwarded-Timeout-Read"  = "3600s"
          "X-Forwarded-Timeout-Write" = "3600s"
        }
      }
    }
  }
}

# Completion flag so callers can sequence on this module via `wait_on`.
output "installed" {
  value = true
  depends_on = [
    kubernetes_manifest.traefik_middleware_request_body, kubernetes_manifest.traefik_middleware_request_timeouts,
    helm_release.zot
  ]
}

View File

@ -0,0 +1,38 @@
# Helm values for the Zot registry chart, rendered via templatefile().
ingress:
enabled: true
className: "traefik"
annotations:
# NOTE(review): only the plain-HTTP `web` entrypoint is used (no
# websecure/TLS); confirm this is intentional for a registry endpoint.
traefik.ingress.kubernetes.io/router.entrypoints: web
# Middlewares created in this module's main.tf (unlimited request body,
# long timeouts) plus the cluster-wide preserve-host-headers middleware.
traefik.ingress.kubernetes.io/router.middlewares: registry-request-body@kubernetescrd,registry-request-timeouts@kubernetescrd,default-preserve-host-headers@kubernetescrd
# gethomepage.dev dashboard registration.
gethomepage.dev/enabled: "true"
gethomepage.dev/name: "Registry"
gethomepage.dev/description: "OCI Registry"
gethomepage.dev/group: "Tools"
gethomepage.dev/icon: "docker.png"
hosts:
- host: ${ service_uri }
paths:
- path: /
# 8Gi PVC for image storage.
persistence: true
pvc:
create: true
name: zot
accessMode: "ReadWriteOnce"
storage: 8Gi
service:
type: ClusterIP
port: 5000
# Mount the inline Zot config below instead of the chart default.
mountConfig: true
configFiles:
config.json: |-
{
"storage": { "rootDirectory": "/var/lib/registry" },
"http": { "address": "0.0.0.0", "port": "5000" },
"log": { "level": "error" },
"extensions": {
"scrub": {
"enable": true,
"interval": "12h"
}
}
}

Some files were not shown because too many files have changed in this diff Show More