Compare commits: v1.0.11...atif/suppo
198 commits (SHA1):
92097e398d, e4a57f4a6a, 66b0bf6d27, f6ebe73aea, 2e0f3eddc0, f63b460971, df507ca559, ce5a5b383a,
1b147ae90d, 7992d9d265, 20d97a25dd, 8e0dfcd534, 9752bf89a6, 48c81c9ff4, acd5edffe7, 4dcab99cb0,
50a946df0f, 8a0ac3435c, 438c904567, bd6747f9bc, fb81c8969f, 162808760d, ad1189afff, 94e126f248,
04535a9cd7, 7a9f553564, e11b19d33e, 93c4fb3a8d, 86038f8d37, 120a0e342e, b51932d7ac, 834ffde032,
831f64da56, 236022f870, 4c45d69994, 310d0262bd, f446fbd667, 982c75e86f, 523ad9fe23, 096cd214ce,
6a87fd18e5, fa4b84e8d1, 7e0eacf1f4, cbe48aa072, 89bb023fa5, 66472b0105, cd010baac8, f7fa145855,
f7f9c8b7ef, 889186d553, 352577b833, 4e59ecc606, a40f2b86c3, a2c29ace0a, da4a561cb5, d77ad8ac63,
b1f81afa7f, 883741244b, c3eee866d1, bf175a1247, 8fd54e0e78, e8ee02c044, aebdc9b434, d98bfcb20b,
894e507bb3, 3f8f6181e0, b23d85327c, a8580fe6b9, 49f060549e, b4153a6aaa, 13a8877791, fd2f91c043,
c59eb0c0cc, a381c3ee29, d9d1be08a3, 7a8483d816, ec2c8edfb2, 78f91a542a, 78c948094d, 16f96d3693,
8262b29063, 4ab72575ac, f369697112, f82c7fd7a1, 05a20a9e1f, 90e15cd90c, 5869eb86d4, 25c90001f4,
6409ee2bba, 7d366ff92a, de00f6334f, 264584e673, 83ecba2293, b2807640aa, 33d44fdf17, f335cd343d,
aebf095075, b283ac3129, 5f418c3253, b09c4cb084, 8aff87fdf7, f3c30abeb4, a9a75b675f, ef4c87e48e,
1a0a8659cc, c7a4fced4c, 5ec1b207d1, 702271133f, 652fc6b84f, 8195cf4453, d5cfadb4e7, fba0f842a9,
14e3fc5b6b, 0b6975c266, d530d68b12, 047ccd67ca, c7aa8253e3, 452f41aa86, 29209d546e, aab5e55663,
ff96b3f653, 20795aa2b6, 45456ab394, c652dbe320, 4021d856ba, 72eaf8a9e1, 249cb2fe9e, 49cff4b2aa,
c6b457e7fe, beaa33b682, 0d7bc37f9c, dcd605c52e, f5d41520cf, cd0c730c95, 873207fddf, 282e1f8c57,
c068082e6b, 85e73c2071, 4bdb428244, daed803530, a239212f0b, 67fef297da, aced7547bc, 36fa871e7b,
46bf422d61, 180e10c3ee, a45706ad3a, 5030fcb988, cff60c4a7e, 5a33af28ac, 428f386c4c, 2e43788584,
e8ce194ff7, 1273378ca8, edc163b5f2, c9e418aaf5, 9062b4c004, b2e87ef038, d4db52017d, c36f4e03d7,
443485a2d7, b686f2dbd5, 76c60e9971, b0d6224e23, c50c4259d9, 5f312ced5e, fd985bedac, b0c14be846,
18efe83b89, b93471a381, 33dbae6ea0, f14e6838e4, 2a30982d1a, 47e995f636, 56fdf096c1, 49df203bd6,
8766c670e6, 43304e5d4e, d8f71e4571, d8102e62ec, ed16ba59a9, a8c659ad6f, c4df384f4b, 892174da7c,
24e50e2bbb, dfe69f25ce, 53083a5718, 7de78d2ef5, 89135671b2, ac648cc0a9, 748a180ac3, ec922c7c3d,
9f8eee55b2, 0e7644b284, bf06e8d3ac, 12fd16f701, 1197e6bf0d, c5c521fabd
.github/dependabot.yml (vendored, new file, 6 lines)

@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
.github/workflows/ci.yaml (vendored, 21 changed lines)

@@ -17,7 +17,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: oven-sh/setup-bun@v1
+      - uses: coder/coder/.github/actions/setup-tf@main
+      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest
      - name: Setup

@@ -27,7 +28,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: oven-sh/setup-bun@v1
+        with:
+          fetch-depth: 0 # Needed to get tags
+      - uses: coder/coder/.github/actions/setup-tf@main
+      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest
      - name: Setup

@@ -38,3 +42,16 @@ jobs:
        uses: crate-ci/typos@v1.17.2
      - name: Lint
        run: bun lint
+      - name: Check version
+        shell: bash
+        run: |
+          # check for version changes
+          ./update-version.sh
+          # Check if any changes were made in README.md files
+          if [[ -n "$(git status --porcelain -- '**/README.md')" ]]; then
+            echo "Version mismatch detected. Please run ./update-version.sh and commit the updated README.md files."
+            git diff -- '**/README.md'
+            exit 1
+          else
+            echo "No version mismatch detected. All versions are up to date."
+          fi
.github/workflows/update-readme.yaml (vendored, deleted, 42 lines)

@@ -1,42 +0,0 @@
-name: Update README on Tag
-
-on:
-  workflow_dispatch:
-  push:
-    tags:
-      - 'v*'
-
-jobs:
-  update-readme:
-    permissions:
-      contents: write
-      pull-requests: write
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Get the latest tag
-        id: get-latest-tag
-        run: echo "TAG=$(git describe --tags --abbrev=0 | sed 's/^v//')" >> $GITHUB_OUTPUT
-
-      - name: Run update script
-        run: ./update-version.sh
-
-      - name: Create Pull Request
-        id: create-pr
-        uses: peter-evans/create-pull-request@v5
-        with:
-          commit-message: 'chore: bump version to ${{ env.TAG }} in README.md files'
-          title: 'chore: bump version to ${{ env.TAG }} in README.md files'
-          body: 'This is an auto-generated PR to update README.md files of all modules with the new tag ${{ env.TAG }}'
-          branch: 'update-readme-branch'
-          base: 'main'
-        env:
-          TAG: ${{ steps.get-latest-tag.outputs.TAG }}
-
-      - name: Auto-approve
-        uses: hmarr/auto-approve-action@v4
-        if: github.ref == 'refs/heads/update-readme-branch'
.icons/airflow.svg (new file, 19 lines, 15 KiB)
File diff suppressed because one or more lines are too long

.icons/cursor.svg (new file, 1 line, 1.5 MiB)
File diff suppressed because one or more lines are too long
.icons/desktop.svg (new file, 5 lines, 540 B)

@@ -0,0 +1,5 @@
+<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M31 6V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V6C1 4.35 2.35 3 4 3H28C29.65 3 31 4.35 31 6Z" fill="#2197F3"/>
+<path d="M21 27H17V24C17 23.4478 16.5522 23 16 23C15.4478 23 15 23.4478 15 24V27H11C10.4478 27 10 27.4478 10 28C10 28.5522 10.4478 29 11 29H21C21.5522 29 22 28.5522 22 28C22 27.4478 21.5522 27 21 27Z" fill="#FFC10A"/>
+<path d="M31 17V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V17H31Z" fill="#3F51B5"/>
+</svg>
.icons/github.svg (new file, 1 line, 960 B)

@@ -0,0 +1 @@
+<svg width="98" height="96" xmlns="http://www.w3.org/2000/svg"><path fill-rule="evenodd" clip-rule="evenodd" d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z" fill="#fff"/></svg>
.images/airflow.png (new binary file, 603 KiB)
Binary file not shown.
CONTRIBUTING.md

@@ -1,28 +1,75 @@
 # Contributing
 
-To create a new module, clone this repository and run:
+## Getting started
 
+This repo uses the [Bun runtime](https://bun.sh/) to run all code and tests. To install Bun, you can run this command on Linux/MacOS:
+
 ```shell
-./new.sh MODULE_NAME
+curl -fsSL https://bun.sh/install | bash
+```
+
+Or this command on Windows:
+
+```shell
+powershell -c "irm bun.sh/install.ps1 | iex"
+```
+
+Follow the instructions to ensure that Bun is available globally. Once Bun has been installed, clone this repository. From there, run this script to create a new module:
+
+```shell
+./new.sh NAME_OF_NEW_MODULE
 ```
 
 ## Testing a Module
 
+> **Note:** It is the responsibility of the module author to implement tests for their module. The author must test the module locally before submitting a PR.
+
 A suite of test-helpers exists to run `terraform apply` on modules with variables, and test script output against containers.
 
-Reference existing `*.test.ts` files for implementation.
+The testing suite must be able to run docker containers with the `--network=host` flag. This typically requires running the tests on Linux as this flag does not apply to Docker Desktop for MacOS and Windows. MacOS users can work around this by using something like [colima](https://github.com/abiosoft/colima) or [Orbstack](https://orbstack.dev/) instead of Docker Desktop.
+
+Reference the existing `*.test.ts` files to get an idea for how to set up tests.
+
+You can run all tests in a specific file with this command:
 
 ```shell
-# Run tests for a specific module!
 $ bun test -t '<module>'
 ```
 
+Or run all tests by running this command:
+
+```shell
+$ bun test
+```
+
 You can test a module locally by updating the source as follows
 
 ```tf
 module "example" {
   source = "git::https://github.com/<USERNAME>/<REPO>.git//<MODULE-NAME>?ref=<BRANCH-NAME>"
+  # You may need to remove the 'version' field, it is incompatible with some sources.
 }
 ```
 
-> **Note:** This is the responsibility of the module author to implement tests for their module. and test the module locally before submitting a PR.
+## Releases
+
+> [!WARNING]
+> When creating a new release, make sure that your new version number is fully accurate. If a version number is incorrect or does not exist, we may end up serving incorrect/old data for our various tools and providers.
+
+Much of our release process is automated. To cut a new release:
+
+1. Navigate to [GitHub's Releases page](https://github.com/coder/modules/releases)
+2. Click "Draft a new release"
+3. Click the "Choose a tag" button and type a new release number in the format `v<major>.<minor>.<patch>` (e.g., `v1.18.0`). Then click "Create new tag".
+4. Click the "Generate release notes" button, and clean up the resulting README. Be sure to remove any notes that would not be relevant to end-users (e.g., bumping dependencies).
+5. Once everything looks good, click the "Publish release" button.
+
+Once the release has been cut, a script will run to check whether there are any modules that will require that the new release number be published to Terraform. If there are any, a new pull request will automatically be generated. Be sure to approve this PR and merge it into the `main` branch.
+
+Following that, our automated processes will handle publishing new data for [`registry.coder.com`](https://github.com/coder/registry.coder.com/):
+
+1. Publishing new versions to Coder's [Terraform Registry](https://registry.terraform.io/providers/coder/coder/latest)
+2. Publishing new data to the [Coder Registry](https://registry.coder.com)
+
+> [!NOTE]
+> Some data in `registry.coder.com` is fetched on demand from the Module repo's main branch. This data should be updated almost immediately after a new release, but other changes will take some time to propagate.
README.md

@@ -3,14 +3,14 @@
   Modules
 </h1>
 
-[Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)
+[Module Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)
 
 [![discord](https://img.shields.io/discord/747933592273027093?label=discord)](https://discord.gg/coder)
 [![license](https://img.shields.io/github/license/coder/modules)](./LICENSE)
 
 </div>
 
-Modules extend Templates to create reusable components for your development environment.
+Modules extend Coder Templates to create reusable components for your development environment.
 
 e.g.
apache-airflow/README.md (new file, 23 lines)

@@ -0,0 +1,23 @@
+---
+display_name: airflow
+description: A module that adds Apache Airflow in your Coder template
+icon: ../.icons/airflow.svg
+maintainer_github: coder
+partner_github: nataindata
+verified: true
+tags: [airflow, idea, web, helper]
+---
+
+# airflow
+
+A module that adds Apache Airflow in your Coder template.
+
+```tf
+module "airflow" {
+  source   = "registry.coder.com/modules/apache-airflow/coder"
+  version  = "1.0.13"
+  agent_id = coder_agent.main.id
+}
+```
+
+![Airflow](../.images/airflow.png)
apache-airflow/main.tf (new file, 65 lines)

@@ -0,0 +1,65 @@
+terraform {
+  required_version = ">= 1.0"
+
+  required_providers {
+    coder = {
+      source  = "coder/coder"
+      version = ">= 0.17"
+    }
+  }
+}
+
+# Add required variables for your modules and remove any unneeded variables
+variable "agent_id" {
+  type        = string
+  description = "The ID of a Coder agent."
+}
+
+variable "log_path" {
+  type        = string
+  description = "The path to log airflow to."
+  default     = "/tmp/airflow.log"
+}
+
+variable "port" {
+  type        = number
+  description = "The port to run airflow on."
+  default     = 8080
+}
+
+variable "share" {
+  type    = string
+  default = "owner"
+  validation {
+    condition     = var.share == "owner" || var.share == "authenticated" || var.share == "public"
+    error_message = "Incorrect value. Please set either 'owner', 'authenticated', or 'public'."
+  }
+}
+
+variable "order" {
+  type        = number
+  description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
+  default     = null
+}
+
+resource "coder_script" "airflow" {
+  agent_id     = var.agent_id
+  display_name = "airflow"
+  icon         = "/icon/apache-guacamole.svg"
+  script = templatefile("${path.module}/run.sh", {
+    LOG_PATH : var.log_path,
+    PORT : var.port
+  })
+  run_on_start = true
+}
+
+resource "coder_app" "airflow" {
+  agent_id     = var.agent_id
+  slug         = "airflow"
+  display_name = "airflow"
+  url          = "http://localhost:${var.port}"
+  icon         = "/icon/apache-guacamole.svg"
+  subdomain    = true
+  share        = var.share
+  order        = var.order
+}
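Beyond `agent_id`, the new module exposes `log_path`, `port`, `share`, and `order` inputs; a minimal usage sketch of overriding them from a template (the values below are illustrative, not taken from this PR):

```tf
module "airflow" {
  source   = "registry.coder.com/modules/apache-airflow/coder"
  version  = "1.0.13"
  agent_id = coder_agent.main.id

  # Optional inputs defined in apache-airflow/main.tf above (example values).
  port  = 8088
  share = "authenticated"
  order = 1
}
```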
apache-airflow/run.sh (new file, 19 lines)

@@ -0,0 +1,19 @@
+#!/usr/bin/env sh
+
+BOLD='\033[0;1m'
+
+PATH=$PATH:~/.local/bin
+pip install --upgrade apache-airflow
+
+filename=~/airflow/airflow.db
+if ! [ -f $filename ] || ! [ -s $filename ]; then
+  airflow db init
+fi
+
+export AIRFLOW__CORE__LOAD_EXAMPLES=false
+
+airflow webserver > ${LOG_PATH} 2>&1 &
+
+airflow scheduler >> /tmp/airflow_scheduler.log 2>&1 &
+
+airflow users create -u admin -p admin -r Admin -e admin@admin.com -f Coder -l User
aws-region/README.md

@@ -17,7 +17,7 @@ Customize the preselected parameter value:
 ```tf
 module "aws-region" {
   source  = "registry.coder.com/modules/aws-region/coder"
-  version = "1.0.10"
+  version = "1.0.12"
   default = "us-east-1"
 }
 ```

@@ -37,7 +37,7 @@ Change the display name and icon for a region using the corresponding maps:
 ```tf
 module "aws-region" {
   source  = "registry.coder.com/modules/aws-region/coder"
-  version = "1.0.10"
+  version = "1.0.12"
   default = "ap-south-1"
 
   custom_names = {

@@ -63,7 +63,7 @@ Hide the Asia Pacific regions Seoul and Osaka:
 ```tf
 module "aws-region" {
   source  = "registry.coder.com/modules/aws-region/coder"
-  version = "1.0.10"
+  version = "1.0.12"
   exclude = ["ap-northeast-2", "ap-northeast-3"]
 }
 ```
aws-region/main.test.ts

@@ -1,6 +1,5 @@
 import { describe, expect, it } from "bun:test";
 import {
-  executeScriptInContainer,
   runTerraformApply,
   runTerraformInit,
   testRequiredVariables,

@@ -22,4 +21,13 @@ describe("aws-region", async () => {
     });
     expect(state.outputs.value.value).toBe("us-west-2");
   });
+
+  it("set custom order for coder_parameter", async () => {
+    const order = 99;
+    const state = await runTerraformApply(import.meta.dir, {
+      coder_parameter_order: order.toString(),
+    });
+    expect(state.resources).toHaveLength(1);
+    expect(state.resources[0].instances[0].attributes.order).toBe(order);
+  });
 });
aws-region/main.tf

@@ -51,6 +51,12 @@ variable "exclude" {
   type = list(string)
 }
 
+variable "coder_parameter_order" {
+  type        = number
+  description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
+  default     = null
+}
+
 locals {
   # This is a static list because the regions don't change _that_
   # frequently and including the `aws_regions` data source requires

@@ -176,6 +182,7 @@ data "coder_parameter" "region" {
   display_name = var.display_name
   description  = var.description
   default      = var.default == "" ? null : var.default
+  order        = var.coder_parameter_order
   mutable      = var.mutable
   dynamic "option" {
     for_each = { for k, v in local.regions : k => v if !(contains(var.exclude, k)) }
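The new `coder_parameter_order` input feeds directly into the `order` attribute of the region parameter, letting a template pin where the picker appears; a sketch combining it with the README example above (the value is illustrative):

```tf
module "aws-region" {
  source  = "registry.coder.com/modules/aws-region/coder"
  version = "1.0.12"
  default = "us-east-1"

  # New in this change: position of the region parameter in the UI/CLI (example value).
  coder_parameter_order = 1
}
```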
azure-region/README.md

@@ -14,7 +14,7 @@ This module adds a parameter with all Azure regions, allowing developers to sele
 ```tf
 module "azure_region" {
   source  = "registry.coder.com/modules/azure-region/coder"
-  version = "1.0.2"
+  version = "1.0.12"
   default = "eastus"
 }
 ```

@@ -34,7 +34,7 @@ Change the display name and icon for a region using the corresponding maps:
 ```tf
 module "azure-region" {
   source  = "registry.coder.com/modules/azure-region/coder"
-  version = "1.0.2"
+  version = "1.0.12"
   custom_names = {
     "australia" : "Go Australia!"
   }

@@ -57,7 +57,7 @@ Hide all regions in Australia except australiacentral:
 ```tf
 module "azure-region" {
   source  = "registry.coder.com/modules/azure-region/coder"
-  version = "1.0.2"
+  version = "1.0.12"
   exclude = [
     "australia",
     "australiacentral2",
azure-region/main.test.ts

@@ -1,6 +1,5 @@
 import { describe, expect, it } from "bun:test";
 import {
-  executeScriptInContainer,
   runTerraformApply,
   runTerraformInit,
   testRequiredVariables,

@@ -22,4 +21,13 @@ describe("azure-region", async () => {
     });
     expect(state.outputs.value.value).toBe("westus");
   });
+
+  it("set custom order for coder_parameter", async () => {
+    const order = 99;
+    const state = await runTerraformApply(import.meta.dir, {
+      coder_parameter_order: order.toString(),
+    });
+    expect(state.resources).toHaveLength(1);
+    expect(state.resources[0].instances[0].attributes.order).toBe(order);
+  });
 });
azure-region/main.tf

@@ -50,6 +50,12 @@ variable "exclude" {
   type = list(string)
 }
 
+variable "coder_parameter_order" {
+  type        = number
+  description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
+  default     = null
+}
+
 locals {
   # Note: Options are limited to 64 regions, some redundant regions have been removed.
   all_regions = {

@@ -309,6 +315,7 @@ data "coder_parameter" "region" {
   display_name = var.display_name
   description  = var.description
   default      = var.default == "" ? null : var.default
+  order        = var.coder_parameter_order
   mutable      = var.mutable
   icon         = "/icon/azure.png"
   dynamic "option" {
code-server/README.md

@@ -14,7 +14,7 @@ Automatically install [code-server](https://github.com/coder/code-server) in a w
 ```tf
 module "code-server" {
   source   = "registry.coder.com/modules/code-server/coder"
-  version  = "1.0.10"
+  version  = "1.0.18"
   agent_id = coder_agent.example.id
 }
 ```

@@ -28,7 +28,7 @@ module "code-server" {
 ```tf
 module "code-server" {
   source          = "registry.coder.com/modules/code-server/coder"
-  version         = "1.0.10"
+  version         = "1.0.18"
   agent_id        = coder_agent.example.id
   install_version = "4.8.3"
 }

@@ -41,7 +41,7 @@ Install the Dracula theme from [OpenVSX](https://open-vsx.org/):
 ```tf
 module "code-server" {
   source   = "registry.coder.com/modules/code-server/coder"
-  version  = "1.0.10"
+  version  = "1.0.18"
   agent_id = coder_agent.example.id
   extensions = [
     "dracula-theme.theme-dracula"

@@ -58,7 +58,7 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte
 ```tf
 module "code-server" {
   source     = "registry.coder.com/modules/code-server/coder"
-  version    = "1.0.10"
+  version    = "1.0.18"
   agent_id   = coder_agent.example.id
   extensions = ["dracula-theme.theme-dracula"]
   settings = {

@@ -74,7 +74,7 @@ Just run code-server in the background, don't fetch it from GitHub:
 ```tf
 module "code-server" {
   source     = "registry.coder.com/modules/code-server/coder"
-  version    = "1.0.10"
+  version    = "1.0.18"
   agent_id   = coder_agent.example.id
   extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
 }

@@ -89,7 +89,7 @@ Run an existing copy of code-server if found, otherwise download from GitHub:
 ```tf
 module "code-server" {
   source     = "registry.coder.com/modules/code-server/coder"
-  version    = "1.0.10"
+  version    = "1.0.18"
   agent_id   = coder_agent.example.id
   use_cached = true
   extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]

@@ -101,7 +101,7 @@ Just run code-server in the background, don't fetch it from GitHub:
 ```tf
 module "code-server" {
   source   = "registry.coder.com/modules/code-server/coder"
-  version  = "1.0.10"
+  version  = "1.0.18"
   agent_id = coder_agent.example.id
   offline  = true
 }
code-server/main.tf

@@ -95,6 +95,33 @@ variable "use_cached" {
   default     = false
 }
 
+variable "use_cached_extensions" {
+  type        = bool
+  description = "Uses cached copy of extensions, otherwise do a forced upgrade"
+  default     = false
+}
+
+variable "extensions_dir" {
+  type        = string
+  description = "Override the directory to store extensions in."
+  default     = ""
+}
+
+variable "auto_install_extensions" {
+  type        = bool
+  description = "Automatically install recommended extensions when code-server starts."
+  default     = false
+}
+
+variable "subdomain" {
+  type        = bool
+  description = <<-EOT
+    Determines whether the app will be accessed via it's own subdomain or whether it will be accessed via a path on Coder.
+    If wildcards have not been setup by the administrator then apps with "subdomain" set to true will not be accessible.
+  EOT
+  default     = false
+}
+
 resource "coder_script" "code-server" {
   agent_id     = var.agent_id
   display_name = "code-server"

@@ -110,6 +137,10 @@ resource "coder_script" "code-server" {
     SETTINGS : replace(jsonencode(var.settings), "\"", "\\\""),
     OFFLINE : var.offline,
     USE_CACHED : var.use_cached,
+    USE_CACHED_EXTENSIONS : var.use_cached_extensions,
+    EXTENSIONS_DIR : var.extensions_dir,
+    FOLDER : var.folder,
+    AUTO_INSTALL_EXTENSIONS : var.auto_install_extensions,
   })
   run_on_start = true

@@ -132,7 +163,7 @@ resource "coder_app" "code-server" {
   display_name = var.display_name
   url          = "http://localhost:${var.port}/${var.folder != "" ? "?folder=${urlencode(var.folder)}" : ""}"
   icon         = "/icon/code.svg"
-  subdomain    = false
+  subdomain    = var.subdomain
   share        = var.share
   order        = var.order
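Taken together, the new inputs let a template cache extensions, point them at a shared directory, auto-install workspace-recommended extensions, and serve code-server on a subdomain; a sketch of how a template might wire them up (the directory path and values are examples, not taken from this PR):

```tf
module "code-server" {
  source   = "registry.coder.com/modules/code-server/coder"
  version  = "1.0.18"
  agent_id = coder_agent.example.id

  # New inputs introduced in this diff (example values).
  subdomain               = true
  use_cached_extensions   = true
  extensions_dir          = "/home/coder/.code-server-extensions"
  auto_install_extensions = true
}
```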
code-server/run.sh

@@ -6,10 +6,17 @@ CODE='\033[36;40;1m'
 RESET='\033[0m'
 CODE_SERVER="${INSTALL_PREFIX}/bin/code-server"
 
+# Set extension directory
+EXTENSION_ARG=""
+if [ -n "${EXTENSIONS_DIR}" ]; then
+  EXTENSION_ARG="--extensions-dir=${EXTENSIONS_DIR}"
+  mkdir -p "${EXTENSIONS_DIR}"
+fi
+
 function run_code_server() {
   echo "👷 Running code-server in the background..."
   echo "Check logs at ${LOG_PATH}!"
-  $CODE_SERVER --auth none --port "${PORT}" --app-name "${APP_NAME}" > "${LOG_PATH}" 2>&1 &
+  $CODE_SERVER "$EXTENSION_ARG" --auth none --port "${PORT}" --app-name "${APP_NAME}" > "${LOG_PATH}" 2>&1 &
 }
 
 # Check if the settings file exists...

@@ -19,20 +26,20 @@ if [ ! -f ~/.local/share/code-server/User/settings.json ]; then
   echo "${SETTINGS}" > ~/.local/share/code-server/User/settings.json
 fi
 
-# Check if code-server is already installed for offline or cached mode
+# Check if code-server is already installed for offline
+if [ "${OFFLINE}" = true ]; then
   if [ -f "$CODE_SERVER" ]; then
-  if [ "${OFFLINE}" = true ] || [ "${USE_CACHED}" = true ]; then
     echo "🥳 Found a copy of code-server"
     run_code_server
     exit 0
   fi
-fi
   # Offline mode always expects a copy of code-server to be present
-if [ "${OFFLINE}" = true ]; then
   echo "Failed to find a copy of code-server"
   exit 1
 fi
 
+# If there is no cached install OR we don't want to use a cached install
+if [ ! -f "$CODE_SERVER" ] || [ "${USE_CACHED}" != true ]; then
   printf "$${BOLD}Installing code-server!\n"
 
   ARGS=(

@@ -49,6 +56,23 @@ if [ $? -ne 0 ]; then
   exit 1
 fi
 printf "🥳 code-server has been installed in ${INSTALL_PREFIX}\n\n"
+fi
+
+# Get the list of installed extensions...
+LIST_EXTENSIONS=$($CODE_SERVER --list-extensions $EXTENSION_ARG)
+readarray -t EXTENSIONS_ARRAY <<< "$LIST_EXTENSIONS"
+function extension_installed() {
+  if [ "${USE_CACHED_EXTENSIONS}" != true ]; then
+    return 1
+  fi
+  for _extension in "$${EXTENSIONS_ARRAY[@]}"; do
+    if [ "$_extension" == "$1" ]; then
+      echo "Extension $1 was already installed."
+      return 0
+    fi
+  done
+  return 1
+}
 
 # Install each extension...
 IFS=',' read -r -a EXTENSIONLIST <<< "$${EXTENSIONS}"

@@ -56,12 +80,38 @@ for extension in "$${EXTENSIONLIST[@]}"; do
   if [ -z "$extension" ]; then
     continue
   fi
+  if extension_installed "$extension"; then
+    continue
+  fi
   printf "🧩 Installing extension $${CODE}$extension$${RESET}...\n"
-  output=$($CODE_SERVER --install-extension "$extension")
+  output=$($CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension")
   if [ $? -ne 0 ]; then
     echo "Failed to install extension: $extension: $output"
     exit 1
   fi
 done
 
+if [ "${AUTO_INSTALL_EXTENSIONS}" = true ]; then
+  if ! command -v jq > /dev/null; then
+    echo "jq is required to install extensions from a workspace file."
+    exit 0
+  fi
+
+  WORKSPACE_DIR="$HOME"
+  if [ -n "${FOLDER}" ]; then
+    WORKSPACE_DIR="${FOLDER}"
+  fi
+
+  if [ -f "$WORKSPACE_DIR/.vscode/extensions.json" ]; then
+    printf "🧩 Installing extensions from %s/.vscode/extensions.json...\n" "$WORKSPACE_DIR"
+    extensions=$(jq -r '.recommendations[]' "$WORKSPACE_DIR"/.vscode/extensions.json)
+    for extension in $extensions; do
+      if extension_installed "$extension"; then
+        continue
+      fi
+      $CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension"
+    done
+  fi
+fi
+
 run_code_server
coder-login/README.md

@@ -14,7 +14,7 @@ Automatically logs the user into Coder when creating their workspace.
 ```tf
 module "coder-login" {
   source   = "registry.coder.com/modules/coder-login/coder"
-  version  = "1.0.2"
+  version  = "1.0.15"
   agent_id = coder_agent.example.id
 }
 ```
coder-login/main.test.ts

@@ -1,10 +1,5 @@
-import { describe, expect, it } from "bun:test";
-import {
-  executeScriptInContainer,
-  runTerraformApply,
-  runTerraformInit,
-  testRequiredVariables,
-} from "../test";
+import { describe } from "bun:test";
+import { runTerraformInit, testRequiredVariables } from "../test";
 
 describe("coder-login", async () => {
   await runTerraformInit(import.meta.dir);
coder-login/main.tf

@@ -4,7 +4,7 @@ terraform {
   required_providers {
     coder = {
       source  = "coder/coder"
-      version = ">= 0.12"
+      version = ">= 0.23"
     }
   }
 }

@@ -15,11 +15,12 @@ variable "agent_id" {
 }
 
 data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}
 
 resource "coder_script" "coder-login" {
   agent_id = var.agent_id
   script = templatefile("${path.module}/run.sh", {
-    CODER_USER_TOKEN : data.coder_workspace.me.owner_session_token,
+    CODER_USER_TOKEN : data.coder_workspace_owner.me.session_token,
     CODER_DEPLOYMENT_URL : data.coder_workspace.me.access_url
   })
   display_name = "Coder Login"
cursor/README.md (new file, 35 lines)

@@ -0,0 +1,35 @@
+---
+display_name: Cursor IDE
+description: Add a one-click button to launch Cursor IDE
+icon: ../.icons/cursor.svg
+maintainer_github: coder
+verified: true
+tags: [ide, cursor, helper]
+---
+
+# Cursor IDE
+
+Add a button to open any workspace with a single click in Cursor IDE.
+
+Uses the [Coder Remote VS Code Extension](https://github.com/coder/cursor-coder).
+
+```tf
+module "cursor" {
+  source   = "registry.coder.com/modules/cursor/coder"
+  version  = "1.0.19"
+  agent_id = coder_agent.example.id
+}
+```
+
+## Examples
+
+### Open in a specific directory
+
+```tf
+module "cursor" {
+  source   = "registry.coder.com/modules/cursor/coder"
+  version  = "1.0.19"
+  agent_id = coder_agent.example.id
+  folder   = "/home/coder/project"
+}
+```
cursor/main.test.ts (new file, 88 lines)

@@ -0,0 +1,88 @@
+import { describe, expect, it } from "bun:test";
+import {
+  runTerraformApply,
+  runTerraformInit,
+  testRequiredVariables,
+} from "../test";
+
+describe("cursor", async () => {
+  await runTerraformInit(import.meta.dir);
+
+  testRequiredVariables(import.meta.dir, {
+    agent_id: "foo",
+  });
+
+  it("default output", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+    });
+    expect(state.outputs.cursor_url.value).toBe(
+      "cursor://coder.coder-remote/open?owner=default&workspace=default&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+
+    const coder_app = state.resources.find(
+      (res) => res.type === "coder_app" && res.name === "cursor",
+    );
+
+    expect(coder_app).not.toBeNull();
+    expect(coder_app?.instances.length).toBe(1);
+    expect(coder_app?.instances[0].attributes.order).toBeNull();
+  });
+
+  it("adds folder", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+    });
+    expect(state.outputs.cursor_url.value).toBe(
+      "cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds folder and open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+      open_recent: "true",
+    });
+    expect(state.outputs.cursor_url.value).toBe(
+      "cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds folder but not open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+      openRecent: "false",
+    });
+    expect(state.outputs.cursor_url.value).toBe(
+      "cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      open_recent: "true",
+    });
+    expect(state.outputs.cursor_url.value).toBe(
+      "cursor://coder.coder-remote/open?owner=default&workspace=default&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("expect order to be set", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      order: "22",
+    });
+
+    const coder_app = state.resources.find(
+      (res) => res.type === "coder_app" && res.name === "cursor",
+    );
+
+    expect(coder_app).not.toBeNull();
+    expect(coder_app?.instances.length).toBe(1);
+    expect(coder_app?.instances[0].attributes.order).toBe(22);
+  });
+});
cursor/main.tf (new file, 62 lines)

@@ -0,0 +1,62 @@
+terraform {
+  required_version = ">= 1.0"
+
+  required_providers {
+    coder = {
+      source  = "coder/coder"
+      version = ">= 0.23"
+    }
+  }
+}
+
+variable "agent_id" {
+  type        = string
+  description = "The ID of a Coder agent."
+}
+
+variable "folder" {
+  type        = string
+  description = "The folder to open in Cursor IDE."
+  default     = ""
+}
+
+variable "open_recent" {
+  type        = bool
+  description = "Open the most recent workspace or folder. Falls back to the folder if there is no recent workspace or folder to open."
+  default     = false
+}
+
+variable "order" {
+  type        = number
+  description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
+  default     = null
+}
+
+data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}
+
+resource "coder_app" "cursor" {
+  agent_id     = var.agent_id
+  external     = true
+  icon         = "/icon/cursor.svg"
+  slug         = "cursor"
+  display_name = "Cursor Desktop"
+  order        = var.order
+  url = join("", [
+    "cursor://coder.coder-remote/open",
+    "?owner=",
+    data.coder_workspace_owner.me.name,
+    "&workspace=",
+    data.coder_workspace.me.name,
+    var.folder != "" ? join("", ["&folder=", var.folder]) : "",
+    var.open_recent ? "&openRecent" : "",
+    "&url=",
+    data.coder_workspace.me.access_url,
+    "&token=$SESSION_TOKEN",
+  ])
+}
+
+output "cursor_url" {
+  value       = coder_app.cursor.url
+  description = "Cursor IDE Desktop URL."
+}
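Because `folder` and `open_recent` both feed into the `url` built by the `join()` call above, a template can combine them; a small sketch (the folder path is an example):

```tf
module "cursor" {
  source      = "registry.coder.com/modules/cursor/coder"
  version     = "1.0.19"
  agent_id    = coder_agent.example.id
  folder      = "/home/coder/project" # appended to the URL as &folder=...
  open_recent = true                  # appended to the URL as &openRecent
}
```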
dotfiles/README.md

@@ -9,12 +9,70 @@ tags: [helper]

 # Dotfiles
 
-Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io)! Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
+Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io).
+
+This will prompt the user for their dotfiles repository URL on template creation using a `coder_parameter`.
+
+Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
 
 ```tf
 module "dotfiles" {
   source   = "registry.coder.com/modules/dotfiles/coder"
-  version  = "1.0.2"
+  version  = "1.0.18"
   agent_id = coder_agent.example.id
 }
 ```
+
+## Examples
+
+### Apply dotfiles as the current user
+
+```tf
+module "dotfiles" {
+  source   = "registry.coder.com/modules/dotfiles/coder"
+  version  = "1.0.18"
+  agent_id = coder_agent.example.id
+}
+```
+
+### Apply dotfiles as another user (only works if sudo is passwordless)
+
+```tf
+module "dotfiles" {
+  source   = "registry.coder.com/modules/dotfiles/coder"
+  version  = "1.0.18"
+  agent_id = coder_agent.example.id
+  user     = "root"
+}
+```
+
+### Apply the same dotfiles as the current user and root (the root dotfiles can only be applied if sudo is passwordless)
+
+```tf
+module "dotfiles" {
+  source   = "registry.coder.com/modules/dotfiles/coder"
+  version  = "1.0.18"
+  agent_id = coder_agent.example.id
+}
+
+module "dotfiles-root" {
+  source       = "registry.coder.com/modules/dotfiles/coder"
+  version      = "1.0.18"
+  agent_id     = coder_agent.example.id
+  user         = "root"
+  dotfiles_uri = module.dotfiles.dotfiles_uri
+}
+```
+
+## Setting a default dotfiles repository
+
+You can set a default dotfiles repository for all users by setting the `default_dotfiles_uri` variable:
+
+```tf
+module "dotfiles" {
+  source               = "registry.coder.com/modules/dotfiles/coder"
+  version              = "1.0.18"
+  agent_id             = coder_agent.example.id
+  default_dotfiles_uri = "https://github.com/coder/dotfiles"
+}
+```
dotfiles/main.test.ts

@@ -18,4 +18,23 @@ describe("dotfiles", async () => {
     });
     expect(state.outputs.dotfiles_uri.value).toBe("");
   });
+
+  it("set a default dotfiles_uri", async () => {
+    const default_dotfiles_uri = "foo";
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      default_dotfiles_uri,
+    });
+    expect(state.outputs.dotfiles_uri.value).toBe(default_dotfiles_uri);
+  });
+
+  it("set custom order for coder_parameter", async () => {
+    const order = 99;
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      coder_parameter_order: order.toString(),
+    });
+    expect(state.resources).toHaveLength(2);
+    expect(state.resources[0].instances[0].attributes.order).toBe(order);
+  });
 });
dotfiles/main.tf

@@ -14,30 +14,78 @@ variable "agent_id" {
   description = "The ID of a Coder agent."
 }
 
+variable "default_dotfiles_uri" {
+  type        = string
+  description = "The default dotfiles URI if the workspace user does not provide one"
+  default     = ""
+}
+
+variable "dotfiles_uri" {
+  type        = string
+  description = "The URL to a dotfiles repository. (optional, when set, the user isn't prompted for their dotfiles)"
+
+  default = null
+}
+
+variable "user" {
+  type        = string
+  description = "The name of the user to apply the dotfiles to. (optional, applies to the current user by default)"
+  default     = null
+}
+
+variable "coder_parameter_order" {
+  type        = number
+  description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
+  default     = null
+}
+
+variable "manual_update" {
+  type        = bool
+  description = "If true, this adds a button to workspace page to refresh dotfiles on demand."
+  default     = false
+}
+
 data "coder_parameter" "dotfiles_uri" {
+  count        = var.dotfiles_uri == null ? 1 : 0
   type         = "string"
   name         = "dotfiles_uri"
-  display_name = "Dotfiles URL (optional)"
-  default      = ""
+  display_name = "Dotfiles URL"
+  order        = var.coder_parameter_order
+  default      = var.default_dotfiles_uri
   description  = "Enter a URL for a [dotfiles repository](https://dotfiles.github.io) to personalize your workspace"
   mutable      = true
   icon         = "/icon/dotfiles.svg"
 }
 
-resource "coder_script" "personalize" {
+locals {
+  dotfiles_uri = var.dotfiles_uri != null ? var.dotfiles_uri : data.coder_parameter.dotfiles_uri[0].value
+  user         = var.user != null ? var.user : ""
+}
+
+resource "coder_script" "dotfiles" {
   agent_id = var.agent_id
-  script = <<-EOT
-    DOTFILES_URI="${data.coder_parameter.dotfiles_uri.value}"
-    if [ -n "$${DOTFILES_URI// }" ]; then
-      coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee -a ~/.dotfiles.log
-    fi
-  EOT
+  script = templatefile("${path.module}/run.sh", {
+    DOTFILES_URI : local.dotfiles_uri,
+    DOTFILES_USER : local.user
+  })
   display_name = "Dotfiles"
   icon         = "/icon/dotfiles.svg"
   run_on_start = true
 }
 
+resource "coder_app" "dotfiles" {
+  count        = var.manual_update ? 1 : 0
+  agent_id     = var.agent_id
+  display_name = "Refresh Dotfiles"
+  slug         = "dotfiles"
+  icon         = "/icon/dotfiles.svg"
+  command = templatefile("${path.module}/run.sh", {
+    DOTFILES_URI : local.dotfiles_uri,
+    DOTFILES_USER : local.user
+  })
+}
+
 output "dotfiles_uri" {
   description = "Dotfiles URI"
-  value       = data.coder_parameter.dotfiles_uri.value
+  value       = local.dotfiles_uri
 }
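The new `manual_update` flag only creates the "Refresh Dotfiles" `coder_app` when set, and `coder_parameter_order` positions the prompt in the parameter list; a sketch of enabling both (values are illustrative, not taken from this PR):

```tf
module "dotfiles" {
  source   = "registry.coder.com/modules/dotfiles/coder"
  version  = "1.0.18"
  agent_id = coder_agent.example.id

  manual_update         = true # adds a "Refresh Dotfiles" button to the workspace page
  coder_parameter_order = 2    # position of the Dotfiles URL prompt (example value)
}
```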
dotfiles/run.sh (new file, 23 lines)

@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+DOTFILES_URI="${DOTFILES_URI}"
+DOTFILES_USER="${DOTFILES_USER}"
+
+if [ -n "$${DOTFILES_URI// }" ]; then
+  if [ -z "$DOTFILES_USER" ]; then
+    DOTFILES_USER="$USER"
+  fi
+
+  echo "✨ Applying dotfiles for user $DOTFILES_USER"
+
+  if [ "$DOTFILES_USER" = "$USER" ]; then
+    coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee ~/.dotfiles.log
+  else
+    # The `eval echo ~"$DOTFILES_USER"` part is used to dynamically get the home directory of the user, see https://superuser.com/a/484280
+    # eval echo ~coder -> "/home/coder"
+    # eval echo ~root -> "/root"
+
+    CODER_BIN=$(which coder)
+    DOTFILES_USER_HOME=$(eval echo ~"$DOTFILES_USER")
+    sudo -u "$DOTFILES_USER" sh -c "'$CODER_BIN' dotfiles '$DOTFILES_URI' -y 2>&1 | tee '$DOTFILES_USER_HOME'/.dotfiles.log"
+  fi
+fi
exoscale-instance-type/README.md

@@ -17,7 +17,7 @@ Customize the preselected parameter value:
 ```tf
 module "exoscale-instance-type" {
   source  = "registry.coder.com/modules/exoscale-instance-type/coder"
-  version = "1.0.2"
+  version = "1.0.12"
   default = "standard.medium"
 }
 ```

@@ -45,7 +45,7 @@ Change the display name a type using the corresponding maps:
 ```tf
 module "exoscale-instance-type" {
   source  = "registry.coder.com/modules/exoscale-instance-type/coder"
-  version = "1.0.2"
+  version = "1.0.12"
   default = "standard.medium"
 
   custom_names = {

@@ -79,7 +79,7 @@ Show only gpu1 types
 ```tf
 module "exoscale-instance-type" {
   source        = "registry.coder.com/modules/exoscale-instance-type/coder"
-  version       = "1.0.2"
+  version       = "1.0.12"
   default       = "gpu.large"
   type_category = ["gpu"]
   exclude = [
@@ -31,4 +31,13 @@ describe("exoscale-instance-type", async () => {
|
|||||||
});
|
});
|
||||||
}).toThrow('default value "gpu3.huge" must be defined as one of options');
|
}).toThrow('default value "gpu3.huge" must be defined as one of options');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("set custom order for coder_parameter", async () => {
|
||||||
|
const order = 99;
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
coder_parameter_order: order.toString(),
|
||||||
|
});
|
||||||
|
expect(state.resources).toHaveLength(1);
|
||||||
|
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -56,6 +56,12 @@ variable "exclude" {
|
|||||||
type = list(string)
|
type = list(string)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "coder_parameter_order" {
|
||||||
|
type = number
|
||||||
|
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||||
|
default = null
|
||||||
|
}
|
||||||
|
|
||||||
locals {
|
locals {
|
||||||
# https://www.exoscale.com/pricing/
|
# https://www.exoscale.com/pricing/
|
||||||
|
|
||||||
@@ -257,6 +263,7 @@ data "coder_parameter" "instance_type" {
|
|||||||
display_name = var.display_name
|
display_name = var.display_name
|
||||||
description = var.description
|
description = var.description
|
||||||
default = var.default == "" ? null : var.default
|
default = var.default == "" ? null : var.default
|
||||||
|
order = var.coder_parameter_order
|
||||||
mutable = var.mutable
|
mutable = var.mutable
|
||||||
dynamic "option" {
|
dynamic "option" {
|
||||||
for_each = [for k, v in concat(
|
for_each = [for k, v in concat(
|
||||||
|
|||||||
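A sketch of how a template can use the new `coder_parameter_order` input; the module source and version match the README examples above, while the order value itself is only illustrative:

```tf
module "exoscale-instance-type" {
  source  = "registry.coder.com/modules/exoscale-instance-type/coder"
  version = "1.0.12"
  default = "standard.medium"

  # Show this parameter first in the workspace form (illustrative value).
  coder_parameter_order = 1
}
```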
@@ -17,7 +17,7 @@ Customize the preselected parameter value:
  ```tf
  module "exoscale-zone" {
    source  = "registry.coder.com/modules/exoscale-zone/coder"
-   version = "1.0.2"
+   version = "1.0.12"
    default = "ch-dk-2"
  }

@@ -44,7 +44,7 @@ Change the display name and icon for a zone using the corresponding maps:
  ```tf
  module "exoscale-zone" {
    source  = "registry.coder.com/modules/exoscale-zone/coder"
-   version = "1.0.2"
+   version = "1.0.12"
    default = "at-vie-1"

    custom_names = {
@@ -76,7 +76,7 @@ Hide the Switzerland zones Geneva and Zurich
  ```tf
  module "exoscale-zone" {
    source  = "registry.coder.com/modules/exoscale-zone/coder"
-   version = "1.0.2"
+   version = "1.0.12"
    exclude = ["ch-gva-2", "ch-dk-2"]
  }

@@ -1,6 +1,5 @@
  import { describe, expect, it } from "bun:test";
  import {
-   executeScriptInContainer,
    runTerraformApply,
    runTerraformInit,
    testRequiredVariables,
@@ -22,4 +21,13 @@ describe("exoscale-zone", async () => {
    });
    expect(state.outputs.value.value).toBe("at-vie-1");
  });

+ it("set custom order for coder_parameter", async () => {
+   const order = 99;
+   const state = await runTerraformApply(import.meta.dir, {
+     coder_parameter_order: order.toString(),
+   });
+   expect(state.resources).toHaveLength(1);
+   expect(state.resources[0].instances[0].attributes.order).toBe(order);
+ });
});
@@ -51,6 +51,11 @@ variable "exclude" {
|
|||||||
type = list(string)
|
type = list(string)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "coder_parameter_order" {
|
||||||
|
type = number
|
||||||
|
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||||
|
default = null
|
||||||
|
}
|
||||||
|
|
||||||
locals {
|
locals {
|
||||||
# This is a static list because the zones don't change _that_
|
# This is a static list because the zones don't change _that_
|
||||||
@@ -94,6 +99,7 @@ data "coder_parameter" "zone" {
|
|||||||
display_name = var.display_name
|
display_name = var.display_name
|
||||||
description = var.description
|
description = var.description
|
||||||
default = var.default == "" ? null : var.default
|
default = var.default == "" ? null : var.default
|
||||||
|
order = var.coder_parameter_order
|
||||||
mutable = var.mutable
|
mutable = var.mutable
|
||||||
dynamic "option" {
|
dynamic "option" {
|
||||||
for_each = { for k, v in local.zones : k => v if !(contains(var.exclude, k)) }
|
for_each = { for k, v in local.zones : k => v if !(contains(var.exclude, k)) }
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ A file browser for your workspace.
  ```tf
  module "filebrowser" {
    source   = "registry.coder.com/modules/filebrowser/coder"
-   version  = "1.0.8"
+   version  = "1.0.22"
    agent_id = coder_agent.example.id
  }
  ```
@@ -28,7 +28,7 @@ module "filebrowser" {
  ```tf
  module "filebrowser" {
    source   = "registry.coder.com/modules/filebrowser/coder"
-   version  = "1.0.8"
+   version  = "1.0.22"
    agent_id = coder_agent.example.id
    folder   = "/home/coder/project"
  }
@@ -39,8 +39,19 @@ module "filebrowser" {
  ```tf
  module "filebrowser" {
    source        = "registry.coder.com/modules/filebrowser/coder"
-   version       = "1.0.8"
+   version       = "1.0.22"
    agent_id      = coder_agent.example.id
    database_path = ".config/filebrowser.db"
  }
  ```

+ ### Serve from the same domain (no subdomain)
+
+ ```tf
+ module "filebrowser" {
+   source     = "registry.coder.com/modules/filebrowser/coder"
+   agent_id   = coder_agent.example.id
+   agent_name = "main"
+   subdomain  = false
+ }
+ ```
@@ -88,4 +88,27 @@ describe("filebrowser", async () => {
|
|||||||
"📝 Logs at /tmp/filebrowser.log",
|
"📝 Logs at /tmp/filebrowser.log",
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("runs with subdomain=false", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
agent_name: "main",
|
||||||
|
subdomain: false,
|
||||||
|
});
|
||||||
|
const output = await executeScriptInContainer(state, "alpine");
|
||||||
|
expect(output.exitCode).toBe(0);
|
||||||
|
expect(output.stdout).toEqual([
|
||||||
|
"\u001B[0;1mInstalling filebrowser ",
|
||||||
|
"",
|
||||||
|
"🥳 Installation complete! ",
|
||||||
|
"",
|
||||||
|
"👷 Starting filebrowser in background... ",
|
||||||
|
"",
|
||||||
|
"📂 Serving /root at http://localhost:13339 ",
|
||||||
|
"",
|
||||||
|
"Running 'filebrowser --noauth --root /root --port 13339' ",
|
||||||
|
"",
|
||||||
|
"📝 Logs at /tmp/filebrowser.log",
|
||||||
|
]);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -14,6 +14,21 @@ variable "agent_id" {
|
|||||||
description = "The ID of a Coder agent."
|
description = "The ID of a Coder agent."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
data "coder_workspace" "me" {}
|
||||||
|
|
||||||
|
data "coder_workspace_owner" "me" {}
|
||||||
|
|
||||||
|
variable "agent_name" {
|
||||||
|
type = string
|
||||||
|
description = "The name of the main deployment. (Used to build the subpath for coder_app.)"
|
||||||
|
default = ""
|
||||||
|
validation {
|
||||||
|
# If subdomain is false, then agent_name must be set.
|
||||||
|
condition = var.subdomain || var.agent_name != ""
|
||||||
|
error_message = "The agent_name must be set."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
variable "database_path" {
|
variable "database_path" {
|
||||||
type = string
|
type = string
|
||||||
description = "The path to the filebrowser database."
|
description = "The path to the filebrowser database."
|
||||||
@@ -58,6 +73,15 @@ variable "order" {
|
|||||||
default = null
|
default = null
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "subdomain" {
|
||||||
|
type = bool
|
||||||
|
description = <<-EOT
|
||||||
|
Determines whether the app will be accessed via it's own subdomain or whether it will be accessed via a path on Coder.
|
||||||
|
If wildcards have not been setup by the administrator then apps with "subdomain" set to true will not be accessible.
|
||||||
|
EOT
|
||||||
|
default = true
|
||||||
|
}
|
||||||
|
|
||||||
resource "coder_script" "filebrowser" {
|
resource "coder_script" "filebrowser" {
|
||||||
agent_id = var.agent_id
|
agent_id = var.agent_id
|
||||||
display_name = "File Browser"
|
display_name = "File Browser"
|
||||||
@@ -67,7 +91,9 @@ resource "coder_script" "filebrowser" {
|
|||||||
PORT : var.port,
|
PORT : var.port,
|
||||||
FOLDER : var.folder,
|
FOLDER : var.folder,
|
||||||
LOG_PATH : var.log_path,
|
LOG_PATH : var.log_path,
|
||||||
DB_PATH : var.database_path
|
DB_PATH : var.database_path,
|
||||||
|
SUBDOMAIN : var.subdomain,
|
||||||
|
SERVER_BASE_PATH : var.subdomain ? "" : format("/@%s/%s.%s/apps/filebrowser", data.coder_workspace_owner.me.name, data.coder_workspace.me.name, var.agent_name),
|
||||||
})
|
})
|
||||||
run_on_start = true
|
run_on_start = true
|
||||||
}
|
}
|
||||||
@@ -78,7 +104,7 @@ resource "coder_app" "filebrowser" {
|
|||||||
display_name = "File Browser"
|
display_name = "File Browser"
|
||||||
url = "http://localhost:${var.port}"
|
url = "http://localhost:${var.port}"
|
||||||
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
||||||
subdomain = true
|
subdomain = var.subdomain
|
||||||
share = var.share
|
share = var.share
|
||||||
order = var.order
|
order = var.order
|
||||||
}
|
}
|
||||||
|
|||||||
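For orientation (not part of the diff): with `subdomain = false`, the `SERVER_BASE_PATH` expression above renders a path-style base URL under the workspace owner and workspace name. The values below are hypothetical, shown only to make the `format()` call concrete:

```tf
# Hypothetical values, illustrating the format() expression used above.
locals {
  example_base_path = format(
    "/@%s/%s.%s/apps/filebrowser",
    "alice", # data.coder_workspace_owner.me.name
    "dev",   # data.coder_workspace.me.name
    "main",  # var.agent_name
  )
  # => "/@alice/dev.main/apps/filebrowser"
}
```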
@@ -17,6 +17,9 @@ if [ "${DB_PATH}" != "filebrowser.db" ]; then
|
|||||||
DB_FLAG=" -d ${DB_PATH}"
|
DB_FLAG=" -d ${DB_PATH}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# set baseurl to be able to run if sudomain=false; if subdomain=true the SERVER_BASE_PATH value will be ""
|
||||||
|
filebrowser config set --baseurl "${SERVER_BASE_PATH}"$${DB_FLAG} > ${LOG_PATH} 2>&1
|
||||||
|
|
||||||
printf "📂 Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n"
|
printf "📂 Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n"
|
||||||
|
|
||||||
printf "Running 'filebrowser --noauth --root $ROOT_DIR --port ${PORT}$${DB_FLAG}' \n\n"
|
printf "Running 'filebrowser --noauth --root $ROOT_DIR --port ${PORT}$${DB_FLAG}' \n\n"
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ This module adds Google Cloud Platform regions to your Coder template.
  ```tf
  module "gcp_region" {
    source  = "registry.coder.com/modules/gcp-region/coder"
-   version = "1.0.2"
+   version = "1.0.12"
    regions = ["us", "europe"]
  }

@@ -34,7 +34,7 @@ Note: setting `gpu_only = true` and using a default region without GPU support,
  ```tf
  module "gcp_region" {
    source   = "registry.coder.com/modules/gcp-region/coder"
-   version  = "1.0.2"
+   version  = "1.0.12"
    default  = ["us-west1-a"]
    regions  = ["us-west1"]
    gpu_only = false
@@ -50,7 +50,7 @@ resource "google_compute_instance" "example" {
  ```tf
  module "gcp_region" {
    source                 = "registry.coder.com/modules/gcp-region/coder"
-   version                = "1.0.2"
+   version                = "1.0.12"
    regions                = ["europe-west"]
    single_zone_per_region = false
  }
@@ -65,7 +65,7 @@ resource "google_compute_instance" "example" {
  ```tf
  module "gcp_region" {
    source                 = "registry.coder.com/modules/gcp-region/coder"
-   version                = "1.0.2"
+   version                = "1.0.12"
    regions                = ["us", "europe"]
    gpu_only               = true
    single_zone_per_region = true
@@ -40,4 +40,13 @@ describe("gcp-region", async () => {
|
|||||||
});
|
});
|
||||||
expect(state.outputs.value.value).toBe("us-west2-b");
|
expect(state.outputs.value.value).toBe("us-west2-b");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("set custom order for coder_parameter", async () => {
|
||||||
|
const order = 99;
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
coder_parameter_order: order.toString(),
|
||||||
|
});
|
||||||
|
expect(state.resources).toHaveLength(1);
|
||||||
|
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -63,6 +63,12 @@ variable "single_zone_per_region" {
|
|||||||
type = bool
|
type = bool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "coder_parameter_order" {
|
||||||
|
type = number
|
||||||
|
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||||
|
default = null
|
||||||
|
}
|
||||||
|
|
||||||
locals {
|
locals {
|
||||||
zones = {
|
zones = {
|
||||||
# US Central
|
# US Central
|
||||||
@@ -715,6 +721,7 @@ data "coder_parameter" "region" {
|
|||||||
icon = "/icon/gcp.png"
|
icon = "/icon/gcp.png"
|
||||||
mutable = var.mutable
|
mutable = var.mutable
|
||||||
default = var.default != null && var.default != "" && (!var.gpu_only || try(local.zones[var.default].gpu, false)) ? var.default : null
|
default = var.default != null && var.default != "" && (!var.gpu_only || try(local.zones[var.default].gpu, false)) ? var.default : null
|
||||||
|
order = var.coder_parameter_order
|
||||||
dynamic "option" {
|
dynamic "option" {
|
||||||
for_each = {
|
for_each = {
|
||||||
for k, v in local.zones : k => v
|
for k, v in local.zones : k => v
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ This module allows you to automatically clone a repository by URL and skip if it
  ```tf
  module "git-clone" {
    source   = "registry.coder.com/modules/git-clone/coder"
-   version  = "1.0.2"
+   version  = "1.0.18"
    agent_id = coder_agent.example.id
    url      = "https://github.com/coder/coder"
  }
@@ -27,7 +27,7 @@ module "git-clone" {
  ```tf
  module "git-clone" {
    source   = "registry.coder.com/modules/git-clone/coder"
-   version  = "1.0.2"
+   version  = "1.0.18"
    agent_id = coder_agent.example.id
    url      = "https://github.com/coder/coder"
    base_dir = "~/projects/coder"
@@ -41,7 +41,7 @@ To use with [Git Authentication](https://coder.com/docs/v2/latest/admin/git-prov
  ```tf
  module "git-clone" {
    source   = "registry.coder.com/modules/git-clone/coder"
-   version  = "1.0.2"
+   version  = "1.0.18"
    agent_id = coder_agent.example.id
    url      = "https://github.com/coder/coder"
  }
@@ -50,3 +50,123 @@ data "coder_git_auth" "github" {
    id = "github"
  }
  ```
+
+ ## GitHub clone with branch name
+
+ To GitHub clone with a specific branch like `feat/example`
+
+ ```tf
+ # Prompt the user for the git repo URL
+ data "coder_parameter" "git_repo" {
+   name         = "git_repo"
+   display_name = "Git repository"
+   default      = "https://github.com/coder/coder/tree/feat/example"
+ }
+
+ # Clone the repository for branch `feat/example`
+ module "git_clone" {
+   source   = "registry.coder.com/modules/git-clone/coder"
+   version  = "1.0.18"
+   agent_id = coder_agent.example.id
+   url      = data.coder_parameter.git_repo.value
+ }
+
+ # Create a code-server instance for the cloned repository
+ module "code-server" {
+   source   = "registry.coder.com/modules/code-server/coder"
+   version  = "1.0.18"
+   agent_id = coder_agent.example.id
+   order    = 1
+   folder   = "/home/${local.username}/${module.git_clone.folder_name}"
+ }
+
+ # Create a Coder app for the website
+ resource "coder_app" "website" {
+   agent_id     = coder_agent.example.id
+   order        = 2
+   slug         = "website"
+   external     = true
+   display_name = module.git_clone.folder_name
+   url          = module.git_clone.web_url
+   icon         = module.git_clone.git_provider != "" ? "/icon/${module.git_clone.git_provider}.svg" : "/icon/git.svg"
+   count        = module.git_clone.web_url != "" ? 1 : 0
+ }
+ ```
+
+ Configuring `git-clone` for a self-hosted GitHub Enterprise Server running at `github.example.com`
+
+ ```tf
+ module "git-clone" {
+   source   = "registry.coder.com/modules/git-clone/coder"
+   version  = "1.0.18"
+   agent_id = coder_agent.example.id
+   url      = "https://github.example.com/coder/coder/tree/feat/example"
+   git_providers = {
+     "https://github.example.com/" = {
+       provider = "github"
+     }
+   }
+ }
+ ```
+
+ ## GitLab clone with branch name
+
+ To GitLab clone with a specific branch like `feat/example`
+
+ ```tf
+ module "git-clone" {
+   source   = "registry.coder.com/modules/git-clone/coder"
+   version  = "1.0.18"
+   agent_id = coder_agent.example.id
+   url      = "https://gitlab.com/coder/coder/-/tree/feat/example"
+ }
+ ```
+
+ Configuring `git-clone` for a self-hosted GitLab running at `gitlab.example.com`
+
+ ```tf
+ module "git-clone" {
+   source   = "registry.coder.com/modules/git-clone/coder"
+   version  = "1.0.18"
+   agent_id = coder_agent.example.id
+   url      = "https://gitlab.example.com/coder/coder/-/tree/feat/example"
+   git_providers = {
+     "https://gitlab.example.com/" = {
+       provider = "gitlab"
+     }
+   }
+ }
+ ```
+
+ ## Git clone with branch_name set
+
+ Alternatively, you can set the `branch_name` attribute to clone a specific branch.
+
+ For example, to clone the `feat/example` branch:
+
+ ```tf
+ module "git-clone" {
+   source      = "registry.coder.com/modules/git-clone/coder"
+   version     = "1.0.18"
+   agent_id    = coder_agent.example.id
+   url         = "https://github.com/coder/coder"
+   branch_name = "feat/example"
+ }
+ ```
+
+ ## Git clone with different destination folder
+
+ By default, the repository will be cloned into a folder matching the repository name. You can use the `folder_name` attribute to change the name of the destination folder to something else.
+
+ For example, this will clone into the `~/projects/coder/coder-dev` folder:
+
+ ```tf
+ module "git-clone" {
+   source      = "registry.coder.com/modules/git-clone/coder"
+   version     = "1.0.18"
+   agent_id    = coder_agent.example.id
+   url         = "https://github.com/coder/coder"
+   folder_name = "coder-dev"
+   base_dir    = "~/projects/coder"
+ }
+ ```
@@ -36,4 +36,212 @@ describe("git-clone", async () => {
|
|||||||
"Cloning fake-url to ~/fake-url...",
|
"Cloning fake-url to ~/fake-url...",
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("repo_dir should match repo name for https", async () => {
|
||||||
|
const url = "https://github.com/coder/coder.git";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||||
|
expect(state.outputs.folder_name.value).toEqual("coder");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("repo_dir should match repo name for https without .git", async () => {
|
||||||
|
const url = "https://github.com/coder/coder";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("repo_dir should match repo name for ssh", async () => {
|
||||||
|
const url = "git@github.com:coder/coder.git";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||||
|
expect(state.outputs.git_provider.value).toEqual("");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
const https_url = "https://github.com/coder/coder.git";
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("repo_dir should match base_dir/folder_name", async () => {
|
||||||
|
const url = "git@github.com:coder/coder.git";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
folder_name: "foo",
|
||||||
|
url,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/foo");
|
||||||
|
expect(state.outputs.folder_name.value).toEqual("foo");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
const https_url = "https://github.com/coder/coder.git";
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("branch_name should not include query string", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch?ref_type=heads",
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("~/repo-tests.log");
|
||||||
|
expect(state.outputs.folder_name.value).toEqual("repo-tests.log");
|
||||||
|
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("branch_name should not include fragments", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch#name",
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||||
|
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("gitlab url with branch should match", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch",
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||||
|
expect(state.outputs.git_provider.value).toEqual("gitlab");
|
||||||
|
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("github url with branch should match", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url: "https://github.com/michaelbrewer/repo-tests.log/tree/feat/branch",
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||||
|
expect(state.outputs.git_provider.value).toEqual("github");
|
||||||
|
const https_url = "https://github.com/michaelbrewer/repo-tests.log";
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("self-host git url with branch should match", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url: "https://git.example.com/example/project/-/tree/feat/example",
|
||||||
|
git_providers: `
|
||||||
|
{
|
||||||
|
"https://git.example.com/" = {
|
||||||
|
provider = "gitlab"
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/project");
|
||||||
|
expect(state.outputs.git_provider.value).toEqual("gitlab");
|
||||||
|
const https_url = "https://git.example.com/example/project";
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("feat/example");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("handle unsupported git provider configuration", async () => {
|
||||||
|
const t = async () => {
|
||||||
|
await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
url: "foo",
|
||||||
|
git_providers: `
|
||||||
|
{
|
||||||
|
"https://git.example.com/" = {
|
||||||
|
provider = "bitbucket"
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
expect(t).toThrow('Allowed values for provider are "github" or "gitlab".');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("handle unknown git provider url", async () => {
|
||||||
|
const url = "https://git.unknown.com/coder/coder";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
base_dir: "/tmp",
|
||||||
|
url,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("runs with github clone with switch to feat/branch", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
url: "https://github.com/michaelbrewer/repo-tests.log/tree/feat/branch",
|
||||||
|
});
|
||||||
|
const output = await executeScriptInContainer(state, "alpine/git");
|
||||||
|
expect(output.exitCode).toBe(0);
|
||||||
|
expect(output.stdout).toEqual([
|
||||||
|
"Creating directory ~/repo-tests.log...",
|
||||||
|
"Cloning https://github.com/michaelbrewer/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("runs with gitlab clone with switch to feat/branch", async () => {
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch",
|
||||||
|
});
|
||||||
|
const output = await executeScriptInContainer(state, "alpine/git");
|
||||||
|
expect(output.exitCode).toBe(0);
|
||||||
|
expect(output.stdout).toEqual([
|
||||||
|
"Creating directory ~/repo-tests.log...",
|
||||||
|
"Cloning https://gitlab.com/mike.brew/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("runs with github clone with branch_name set to feat/branch", async () => {
|
||||||
|
const url = "https://github.com/michaelbrewer/repo-tests.log";
|
||||||
|
const branch_name = "feat/branch";
|
||||||
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
url,
|
||||||
|
branch_name,
|
||||||
|
});
|
||||||
|
expect(state.outputs.repo_dir.value).toEqual("~/repo-tests.log");
|
||||||
|
expect(state.outputs.clone_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.web_url.value).toEqual(url);
|
||||||
|
expect(state.outputs.branch_name.value).toEqual(branch_name);
|
||||||
|
|
||||||
|
const output = await executeScriptInContainer(state, "alpine/git");
|
||||||
|
expect(output.exitCode).toBe(0);
|
||||||
|
expect(output.stdout).toEqual([
|
||||||
|
"Creating directory ~/repo-tests.log...",
|
||||||
|
"Cloning https://github.com/michaelbrewer/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||||
|
]);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -25,8 +25,56 @@ variable "agent_id" {
|
|||||||
type = string
|
type = string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
variable "git_providers" {
|
||||||
|
type = map(object({
|
||||||
|
provider = string
|
||||||
|
}))
|
||||||
|
description = "A mapping of URLs to their git provider."
|
||||||
|
default = {
|
||||||
|
"https://github.com/" = {
|
||||||
|
provider = "github"
|
||||||
|
},
|
||||||
|
"https://gitlab.com/" = {
|
||||||
|
provider = "gitlab"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
validation {
|
||||||
|
error_message = "Allowed values for provider are \"github\" or \"gitlab\"."
|
||||||
|
condition = alltrue([for provider in var.git_providers : contains(["github", "gitlab"], provider.provider)])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "branch_name" {
|
||||||
|
description = "The branch name to clone. If not provided, the default branch will be cloned."
|
||||||
|
type = string
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "folder_name" {
|
||||||
|
description = "The destination folder to clone the repository into."
|
||||||
|
type = string
|
||||||
|
default = ""
|
||||||
|
}
|
||||||
|
|
||||||
locals {
|
locals {
|
||||||
clone_path = var.base_dir != "" ? join("/", [var.base_dir, replace(basename(var.url), ".git", "")]) : join("/", ["~", replace(basename(var.url), ".git", "")])
|
# Remove query parameters and fragments from the URL
|
||||||
|
url = replace(replace(var.url, "/\\?.*/", ""), "/#.*/", "")
|
||||||
|
|
||||||
|
# Find the git provider based on the URL and determine the tree path
|
||||||
|
provider_key = try(one([for key in keys(var.git_providers) : key if startswith(local.url, key)]), null)
|
||||||
|
provider = try(lookup(var.git_providers, local.provider_key).provider, "")
|
||||||
|
tree_path = local.provider == "gitlab" ? "/-/tree/" : local.provider == "github" ? "/tree/" : ""
|
||||||
|
|
||||||
|
# Remove tree and branch name from the URL
|
||||||
|
clone_url = var.branch_name == "" && local.tree_path != "" ? replace(local.url, "/${local.tree_path}.*/", "") : local.url
|
||||||
|
# Extract the branch name from the URL
|
||||||
|
branch_name = var.branch_name == "" && local.tree_path != "" ? replace(replace(local.url, local.clone_url, ""), "/.*${local.tree_path}/", "") : var.branch_name
|
||||||
|
# Extract the folder name from the URL
|
||||||
|
folder_name = var.folder_name == "" ? replace(basename(local.clone_url), ".git", "") : var.folder_name
|
||||||
|
# Construct the path to clone the repository
|
||||||
|
clone_path = var.base_dir != "" ? join("/", [var.base_dir, local.folder_name]) : join("/", ["~", local.folder_name])
|
||||||
|
# Construct the web URL
|
||||||
|
web_url = startswith(local.clone_url, "git@") ? replace(replace(local.clone_url, ":", "/"), "git@", "https://") : local.clone_url
|
||||||
}
|
}
|
||||||
|
|
||||||
output "repo_dir" {
|
output "repo_dir" {
|
||||||
@@ -34,11 +82,37 @@ output "repo_dir" {
|
|||||||
description = "Full path of cloned repo directory"
|
description = "Full path of cloned repo directory"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
output "git_provider" {
|
||||||
|
value = local.provider
|
||||||
|
description = "The git provider of the repository"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "folder_name" {
|
||||||
|
value = local.folder_name
|
||||||
|
description = "The name of the folder that will be created"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "clone_url" {
|
||||||
|
value = local.clone_url
|
||||||
|
description = "The exact Git repository URL that will be cloned"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "web_url" {
|
||||||
|
value = local.web_url
|
||||||
|
description = "Git https repository URL (may be invalid for unsupported providers)"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "branch_name" {
|
||||||
|
value = local.branch_name
|
||||||
|
description = "Git branch name (may be empty)"
|
||||||
|
}
|
||||||
|
|
||||||
resource "coder_script" "git_clone" {
|
resource "coder_script" "git_clone" {
|
||||||
agent_id = var.agent_id
|
agent_id = var.agent_id
|
||||||
script = templatefile("${path.module}/run.sh", {
|
script = templatefile("${path.module}/run.sh", {
|
||||||
CLONE_PATH = local.clone_path
|
CLONE_PATH = local.clone_path,
|
||||||
REPO_URL : var.url,
|
REPO_URL : local.clone_url,
|
||||||
|
BRANCH_NAME : local.branch_name,
|
||||||
})
|
})
|
||||||
display_name = "Git Clone"
|
display_name = "Git Clone"
|
||||||
icon = "/icon/git.svg"
|
icon = "/icon/git.svg"
|
||||||
|
|||||||
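To make the new URL parsing concrete, here is a sketch using one of the URLs exercised by the tests added later in this change (the asserted values in the comments come from those tests):

```tf
# For this URL the tests assert:
#   clone_url = web_url = "https://gitlab.com/mike.brew/repo-tests.log"
#   branch_name = "feat/branch", folder_name = "repo-tests.log", repo_dir = "~/repo-tests.log"
module "git-clone" {
  source   = "registry.coder.com/modules/git-clone/coder"
  version  = "1.0.18"
  agent_id = coder_agent.example.id
  url      = "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch"
}
```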
@@ -2,6 +2,7 @@

REPO_URL="${REPO_URL}"
CLONE_PATH="${CLONE_PATH}"
+ BRANCH_NAME="${BRANCH_NAME}"
# Expand home if it's specified!
CLONE_PATH="$${CLONE_PATH/#\~/$${HOME}}"

@@ -33,8 +34,13 @@ fi
# Check if the directory is empty
# and if it is, clone the repo, otherwise skip cloning
if [ -z "$(ls -A "$CLONE_PATH")" ]; then
+   if [ -z "$BRANCH_NAME" ]; then
    echo "Cloning $REPO_URL to $CLONE_PATH..."
    git clone "$REPO_URL" "$CLONE_PATH"
+   else
+     echo "Cloning $REPO_URL to $CLONE_PATH on branch $BRANCH_NAME..."
+     git clone "$REPO_URL" -b "$BRANCH_NAME" "$CLONE_PATH"
+   fi
else
  echo "$CLONE_PATH already exists and isn't empty, skipping clone!"
  exit 0
@@ -2,8 +2,8 @@
display_name: Git commit signing
description: Configures Git to sign commits using your Coder SSH key
icon: ../.icons/git.svg
- maintainer_github: phorcys420
+ maintainer_github: coder
- verified: false
+ verified: true
tags: [helper, git]
---

@@ -19,7 +19,7 @@ This module has a chance of conflicting with the user's dotfiles / the personali
  ```tf
  module "git-commit-signing" {
    source   = "registry.coder.com/modules/git-commit-signing/coder"
-   version  = "1.0.9"
+   version  = "1.0.11"
    agent_id = coder_agent.example.id
  }
  ```
@@ -14,7 +14,7 @@ Runs a script that updates git credentials in the workspace to match the user's
  ```tf
  module "git-config" {
    source   = "registry.coder.com/modules/git-config/coder"
-   version  = "1.0.3"
+   version  = "1.0.15"
    agent_id = coder_agent.example.id
  }
  ```
@@ -28,7 +28,7 @@ TODO: Add screenshot
  ```tf
  module "git-config" {
    source   = "registry.coder.com/modules/git-config/coder"
-   version  = "1.0.3"
+   version  = "1.0.15"
    agent_id = coder_agent.example.id
    allow_email_change = true
  }
@@ -41,7 +41,7 @@ TODO: Add screenshot
  ```tf
  module "git-config" {
    source   = "registry.coder.com/modules/git-config/coder"
-   version  = "1.0.3"
+   version  = "1.0.15"
    agent_id = coder_agent.example.id
    allow_username_change = false
    allow_email_change    = false
git-config/main.test.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
import { describe, expect, it } from "bun:test";
import {
  runTerraformApply,
  runTerraformInit,
  testRequiredVariables,
} from "../test";

describe("git-config", async () => {
  await runTerraformInit(import.meta.dir);

  testRequiredVariables(import.meta.dir, {
    agent_id: "foo",
  });

  it("can run apply allow_username_change and allow_email_change disabled", async () => {
    const state = await runTerraformApply(import.meta.dir, {
      agent_id: "foo",
      allow_username_change: "false",
      allow_email_change: "false",
    });

    const resources = state.resources;
    expect(resources).toHaveLength(6);
    expect(resources).toMatchObject([
      { type: "coder_workspace", name: "me" },
      { type: "coder_workspace_owner", name: "me" },
      { type: "coder_env", name: "git_author_email" },
      { type: "coder_env", name: "git_author_name" },
      { type: "coder_env", name: "git_commmiter_email" },
      { type: "coder_env", name: "git_commmiter_name" },
    ]);
  });

  it("can run apply allow_email_change enabled", async () => {
    const state = await runTerraformApply(import.meta.dir, {
      agent_id: "foo",
      allow_email_change: "true",
    });

    const resources = state.resources;
    expect(resources).toHaveLength(8);
    expect(resources).toMatchObject([
      { type: "coder_parameter", name: "user_email" },
      { type: "coder_parameter", name: "username" },
      { type: "coder_workspace", name: "me" },
      { type: "coder_workspace_owner", name: "me" },
      { type: "coder_env", name: "git_author_email" },
      { type: "coder_env", name: "git_author_name" },
      { type: "coder_env", name: "git_commmiter_email" },
      { type: "coder_env", name: "git_commmiter_name" },
    ]);
  });

  it("can run apply allow_email_change enabled", async () => {
    const state = await runTerraformApply(
      import.meta.dir,
      {
        agent_id: "foo",
        allow_username_change: "false",
        allow_email_change: "false",
      },
      { CODER_WORKSPACE_OWNER_EMAIL: "foo@email.com" },
    );

    const resources = state.resources;
    expect(resources).toHaveLength(6);
    expect(resources).toMatchObject([
      { type: "coder_workspace", name: "me" },
      { type: "coder_workspace_owner", name: "me" },
      { type: "coder_env", name: "git_author_email" },
      { type: "coder_env", name: "git_author_name" },
      { type: "coder_env", name: "git_commmiter_email" },
      { type: "coder_env", name: "git_commmiter_name" },
    ]);
  });

  it("set custom order for coder_parameter for both fields", async () => {
    const order = 20;
    const state = await runTerraformApply(import.meta.dir, {
      agent_id: "foo",
      allow_username_change: "true",
      allow_email_change: "true",
      coder_parameter_order: order.toString(),
    });
    const resources = state.resources;
    expect(resources).toHaveLength(8);
    expect(resources).toMatchObject([
      { type: "coder_parameter", name: "user_email" },
      { type: "coder_parameter", name: "username" },
      { type: "coder_workspace", name: "me" },
      { type: "coder_workspace_owner", name: "me" },
      { type: "coder_env", name: "git_author_email" },
      { type: "coder_env", name: "git_author_name" },
      { type: "coder_env", name: "git_commmiter_email" },
      { type: "coder_env", name: "git_commmiter_name" },
    ]);
    // user_email order is the same as the order
    expect(resources[0].instances[0].attributes.order).toBe(order);
    // username order is incremented by 1
    // @ts-ignore: Object is possibly 'null'.
    expect(resources[1].instances[0]?.attributes.order).toBe(order + 1);
  });

  it("set custom order for coder_parameter for just username", async () => {
    const order = 30;
    const state = await runTerraformApply(import.meta.dir, {
      agent_id: "foo",
      allow_email_change: "false",
      allow_username_change: "true",
      coder_parameter_order: order.toString(),
    });
    const resources = state.resources;
    expect(resources).toHaveLength(7);
    expect(resources).toMatchObject([
      { type: "coder_parameter", name: "username" },
      { type: "coder_workspace", name: "me" },
      { type: "coder_workspace_owner", name: "me" },
      { type: "coder_env", name: "git_author_email" },
      { type: "coder_env", name: "git_author_name" },
      { type: "coder_env", name: "git_commmiter_email" },
      { type: "coder_env", name: "git_commmiter_name" },
    ]);
    // user_email was not created
    // username order is incremented by 1
    expect(resources[0].instances[0].attributes.order).toBe(order + 1);
  });
});
@@ -4,7 +4,7 @@ terraform {
  required_providers {
    coder = {
      source  = "coder/coder"
-     version = ">= 0.13"
+     version = ">= 0.23"
    }
  }
}
@@ -26,14 +26,21 @@ variable "allow_email_change" {
  default = false
}

+ variable "coder_parameter_order" {
+   type        = number
+   description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
+   default     = null
+ }

data "coder_workspace" "me" {}
+ data "coder_workspace_owner" "me" {}

data "coder_parameter" "user_email" {
  count   = var.allow_email_change ? 1 : 0
  name    = "user_email"
  type    = "string"
  default = ""
+ order   = var.coder_parameter_order != null ? var.coder_parameter_order + 0 : null
  description  = "Git user.email to be used for commits. Leave empty to default to Coder user's email."
  display_name = "Git config user.email"
  mutable      = true
@@ -44,6 +51,7 @@ data "coder_parameter" "username" {
  name    = "username"
  type    = "string"
  default = ""
+ order   = var.coder_parameter_order != null ? var.coder_parameter_order + 1 : null
  description  = "Git user.name to be used for commits. Leave empty to default to Coder user's Full Name."
  display_name = "Full Name for Git config"
  mutable      = true
@@ -52,23 +60,25 @@ data "coder_parameter" "username" {
resource "coder_env" "git_author_name" {
  agent_id = var.agent_id
  name     = "GIT_AUTHOR_NAME"
- value    = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
+ value    = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
}

resource "coder_env" "git_commmiter_name" {
  agent_id = var.agent_id
  name     = "GIT_COMMITTER_NAME"
- value    = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
+ value    = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
}

resource "coder_env" "git_author_email" {
  agent_id = var.agent_id
  name     = "GIT_AUTHOR_EMAIL"
- value    = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
+ value    = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
+ count    = data.coder_workspace_owner.me.email != "" ? 1 : 0
}

resource "coder_env" "git_commmiter_email" {
  agent_id = var.agent_id
  name     = "GIT_COMMITTER_EMAIL"
- value    = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
+ value    = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
+ count    = data.coder_workspace_owner.me.email != "" ? 1 : 0
}
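A usage sketch for the ordering change in git-config; the module source and version follow the README hunks above, and the order value is illustrative (per the diff, `username` is rendered at `coder_parameter_order + 1`):

```tf
module "git-config" {
  source   = "registry.coder.com/modules/git-config/coder"
  version  = "1.0.15"
  agent_id = coder_agent.example.id

  allow_email_change    = true
  allow_username_change = true

  # user_email gets order 10, username gets order 11 (see the `+ 0` / `+ 1` expressions above).
  coder_parameter_order = 10
}
```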
github-upload-public-key/README.md (new file, 53 lines)
@@ -0,0 +1,53 @@
---
display_name: Github Upload Public Key
description: Automates uploading Coder public key to Github so users don't have to.
icon: ../.icons/github.svg
maintainer_github: coder
verified: true
tags: [helper, git]
---

# github-upload-public-key

Templates that utilize Github External Auth can automatically ensure that the Coder public key is uploaded to Github so that users can clone repositories without needing to upload the public key themselves.

```tf
module "github-upload-public-key" {
  source   = "registry.coder.com/modules/github-upload-public-key/coder"
  version  = "1.0.15"
  agent_id = coder_agent.example.id
}
```

# Requirements

This module requires `curl` and `jq` to be installed inside your workspace.

Github External Auth must be enabled in the workspace for this module to work. The Github app that is configured for external auth must have both read and write permissions to "Git SSH keys" in order to upload the public key. Additionally, a Coder admin must also have the `admin:public_key` scope added to the external auth configuration of the Coder deployment. For example:

```
CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
CODER_EXTERNAL_AUTH_0_SCOPES="repo,workflow,admin:public_key"
```

Note that the default scopes if not provided are `repo,workflow`. If the module is failing to complete after updating the external auth configuration, instruct users of the module to "Unlink" and "Link" their Github account in the External Auth user settings page to get the new scopes.

# Example

Using a coder github external auth with a non-default id: (default is `github`)

```tf
data "coder_external_auth" "github" {
  id = "myauthid"
}

module "github-upload-public-key" {
  source           = "registry.coder.com/modules/github-upload-public-key/coder"
  version          = "1.0.15"
  agent_id         = coder_agent.example.id
  external_auth_id = data.coder_external_auth.github.id
}
```
github-upload-public-key/main.test.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
import { type Server, serve } from "bun";
import { describe, expect, it } from "bun:test";
import {
  createJSONResponse,
  execContainer,
  findResourceInstance,
  runContainer,
  runTerraformApply,
  runTerraformInit,
  testRequiredVariables,
  writeCoder,
} from "../test";

describe("github-upload-public-key", async () => {
  await runTerraformInit(import.meta.dir);

  testRequiredVariables(import.meta.dir, {
    agent_id: "foo",
  });

  it("creates new key if one does not exist", async () => {
    const { instance, id, server } = await setupContainer();
    await writeCoder(id, "echo foo");

    const url = server.url.toString().slice(0, -1);
    const exec = await execContainer(id, [
      "env",
      `CODER_ACCESS_URL=${url}`,
      `GITHUB_API_URL=${url}`,
      "CODER_OWNER_SESSION_TOKEN=foo",
      "CODER_EXTERNAL_AUTH_ID=github",
      "bash",
      "-c",
      instance.script,
    ]);
    expect(exec.stdout).toContain(
      "Your Coder public key has been added to GitHub!",
    );
    expect(exec.exitCode).toBe(0);
    // we need to increase timeout to pull the container
  }, 15000);

  it("does nothing if one already exists", async () => {
    const { instance, id, server } = await setupContainer();
    // use keyword to make server return a existing key
    await writeCoder(id, "echo findkey");

    const url = server.url.toString().slice(0, -1);
    const exec = await execContainer(id, [
      "env",
      `CODER_ACCESS_URL=${url}`,
      `GITHUB_API_URL=${url}`,
      "CODER_OWNER_SESSION_TOKEN=foo",
      "CODER_EXTERNAL_AUTH_ID=github",
      "bash",
      "-c",
      instance.script,
    ]);
    expect(exec.stdout).toContain(
      "Your Coder public key is already on GitHub!",
    );
    expect(exec.exitCode).toBe(0);
  });
});

const setupContainer = async (
  image = "lorello/alpine-bash",
  vars: Record<string, string> = {},
) => {
  const server = await setupServer();
  const state = await runTerraformApply(import.meta.dir, {
    agent_id: "foo",
    ...vars,
  });
  const instance = findResourceInstance(state, "coder_script");
  const id = await runContainer(image);
  return { id, instance, server };
};

const setupServer = async (): Promise<Server> => {
  let url: URL;
  const fakeSlackHost = serve({
    fetch: (req) => {
      url = new URL(req.url);
      if (url.pathname === "/api/v2/users/me/gitsshkey") {
        return createJSONResponse({
          public_key: "exists",
        });
      }

      if (url.pathname === "/user/keys") {
        if (req.method === "POST") {
          return createJSONResponse(
            {
              key: "created",
            },
            201,
          );
        }

        // case: key already exists
        if (req.headers.get("Authorization") === "Bearer findkey") {
          return createJSONResponse([
            {
              key: "foo",
            },
            {
              key: "exists",
            },
          ]);
        }

        // case: key does not exist
        return createJSONResponse([
          {
            key: "foo",
          },
        ]);
      }

      return createJSONResponse(
        {
          error: "not_found",
        },
        404,
      );
    },
    port: 0,
  });

  return fakeSlackHost;
};
github-upload-public-key/main.tf (new file, 43 lines)
@@ -0,0 +1,43 @@
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.23"
    }
  }
}

variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "external_auth_id" {
  type        = string
  description = "The ID of the GitHub external auth."
  default     = "github"
}

variable "github_api_url" {
  type        = string
  description = "The URL of the GitHub instance."
  default     = "https://api.github.com"
}

data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

resource "coder_script" "github_upload_public_key" {
  agent_id = var.agent_id
  script = templatefile("${path.module}/run.sh", {
    CODER_OWNER_SESSION_TOKEN : data.coder_workspace_owner.me.session_token,
    CODER_ACCESS_URL : data.coder_workspace.me.access_url,
    CODER_EXTERNAL_AUTH_ID : var.external_auth_id,
    GITHUB_API_URL : var.github_api_url,
  })
  display_name = "Github Upload Public Key"
  icon         = "/icon/github.svg"
  run_on_start = true
}
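Because `github_api_url` defaults to `https://api.github.com`, pointing the module at a self-hosted GitHub instance is just a matter of overriding that one variable. A minimal sketch, assuming a hypothetical GitHub Enterprise host (`github.example.com`, whose REST API is conventionally served under `/api/v3`):

```tf
module "github-upload-public-key" {
  source   = "registry.coder.com/modules/github-upload-public-key/coder"
  version  = "1.0.15"
  agent_id = coder_agent.example.id

  # Hypothetical GHE deployment; replace with your own API base URL and auth ID.
  github_api_url   = "https://github.example.com/api/v3"
  external_auth_id = "github-enterprise"
}
```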
github-upload-public-key/run.sh (new executable file, 110 lines)
@@ -0,0 +1,110 @@
#!/usr/bin/env bash

# Note: this file is rendered with Terraform's templatefile(), so "${VAR}" below is
# substituted at template time, while "$VAR" (no braces) is read from the shell
# environment when the script runs.
if [ -z "$CODER_ACCESS_URL" ]; then
  if [ -z "${CODER_ACCESS_URL}" ]; then
    echo "CODER_ACCESS_URL is empty!"
    exit 1
  fi
  CODER_ACCESS_URL=${CODER_ACCESS_URL}
fi

if [ -z "$CODER_OWNER_SESSION_TOKEN" ]; then
  if [ -z "${CODER_OWNER_SESSION_TOKEN}" ]; then
    echo "CODER_OWNER_SESSION_TOKEN is empty!"
    exit 1
  fi
  CODER_OWNER_SESSION_TOKEN=${CODER_OWNER_SESSION_TOKEN}
fi

if [ -z "$CODER_EXTERNAL_AUTH_ID" ]; then
  if [ -z "${CODER_EXTERNAL_AUTH_ID}" ]; then
    echo "CODER_EXTERNAL_AUTH_ID is empty!"
    exit 1
  fi
  CODER_EXTERNAL_AUTH_ID=${CODER_EXTERNAL_AUTH_ID}
fi

if [ -z "$GITHUB_API_URL" ]; then
  if [ -z "${GITHUB_API_URL}" ]; then
    echo "GITHUB_API_URL is empty!"
    exit 1
  fi
  GITHUB_API_URL=${GITHUB_API_URL}
fi

echo "Fetching GitHub token..."
GITHUB_TOKEN=$(coder external-auth access-token $CODER_EXTERNAL_AUTH_ID)
if [ $? -ne 0 ]; then
  printf "Authenticate with Github to automatically upload Coder public key:\n$GITHUB_TOKEN\n"
  exit 1
fi

echo "Fetching public key from Coder..."
PUBLIC_KEY_RESPONSE=$(
  curl -L -s \
    -w "\n%%{http_code}" \
    -H 'accept: application/json' \
    -H "cookie: coder_session_token=$CODER_OWNER_SESSION_TOKEN" \
    "$CODER_ACCESS_URL/api/v2/users/me/gitsshkey"
)
PUBLIC_KEY_RESPONSE_STATUS=$(tail -n1 <<< "$PUBLIC_KEY_RESPONSE")
PUBLIC_KEY_BODY=$(sed \$d <<< "$PUBLIC_KEY_RESPONSE")

if [ "$PUBLIC_KEY_RESPONSE_STATUS" -ne 200 ]; then
  echo "Failed to fetch Coder public SSH key with status code $PUBLIC_KEY_RESPONSE_STATUS!"
  echo "$PUBLIC_KEY_BODY"
  exit 1
fi
PUBLIC_KEY=$(jq -r '.public_key' <<< "$PUBLIC_KEY_BODY")
if [ -z "$PUBLIC_KEY" ]; then
  echo "No Coder public SSH key found!"
  exit 1
fi

echo "Fetching public keys from GitHub..."
GITHUB_KEYS_RESPONSE=$(
  curl -L -s \
    -w "\n%%{http_code}" \
    -H "Accept: application/vnd.github+json" \
    -H "Authorization: Bearer $GITHUB_TOKEN" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    $GITHUB_API_URL/user/keys
)
GITHUB_KEYS_RESPONSE_STATUS=$(tail -n1 <<< "$GITHUB_KEYS_RESPONSE")
GITHUB_KEYS_RESPONSE_BODY=$(sed \$d <<< "$GITHUB_KEYS_RESPONSE")

if [ "$GITHUB_KEYS_RESPONSE_STATUS" -ne 200 ]; then
  echo "Failed to fetch GitHub public SSH keys with status code $GITHUB_KEYS_RESPONSE_STATUS!"
  echo "$GITHUB_KEYS_RESPONSE_BODY"
  exit 1
fi

GITHUB_MATCH=$(jq -r --arg PUBLIC_KEY "$PUBLIC_KEY" '.[] | select(.key == $PUBLIC_KEY) | .key' <<< "$GITHUB_KEYS_RESPONSE_BODY")

if [ "$PUBLIC_KEY" = "$GITHUB_MATCH" ]; then
  echo "Your Coder public key is already on GitHub!"
  exit 0
fi

echo "Your Coder public key is not in GitHub. Adding it now..."
CODER_PUBLIC_KEY_NAME="$CODER_ACCESS_URL Workspaces"
UPLOAD_RESPONSE=$(
  curl -L -s \
    -X POST \
    -w "\n%%{http_code}" \
    -H "Accept: application/vnd.github+json" \
    -H "Authorization: Bearer $GITHUB_TOKEN" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    $GITHUB_API_URL/user/keys \
    -d "{\"title\":\"$CODER_PUBLIC_KEY_NAME\",\"key\":\"$PUBLIC_KEY\"}"
)
UPLOAD_RESPONSE_STATUS=$(tail -n1 <<< "$UPLOAD_RESPONSE")
UPLOAD_RESPONSE_BODY=$(sed \$d <<< "$UPLOAD_RESPONSE")

if [ "$UPLOAD_RESPONSE_STATUS" -ne 201 ]; then
  echo "Failed to upload Coder public SSH key with status code $UPLOAD_RESPONSE_STATUS!"
  echo "$UPLOAD_RESPONSE_BODY"
  exit 1
fi

echo "Your Coder public key has been added to GitHub!"
@@ -14,7 +14,7 @@ This module adds a JetBrains Gateway Button to open any workspace with a single
```tf
module "jetbrains_gateway" {
  source = "registry.coder.com/modules/jetbrains-gateway/coder"
-  version = "1.0.9"
+  version = "1.0.21"
  agent_id = coder_agent.example.id
  agent_name = "example"
  folder = "/home/coder/example"
@@ -27,12 +27,12 @@ module "jetbrains_gateway" {

## Examples

-### Add GoLand and WebStorm with the default set to GoLand
+### Add GoLand and WebStorm as options with the default set to GoLand

```tf
module "jetbrains_gateway" {
  source = "registry.coder.com/modules/jetbrains-gateway/coder"
-  version = "1.0.9"
+  version = "1.0.21"
  agent_id = coder_agent.example.id
  agent_name = "example"
  folder = "/home/coder/example"
@@ -41,6 +41,37 @@ module "jetbrains_gateway" {
}
```
+
+### Use the latest release version
+
+```tf
+module "jetbrains_gateway" {
+  source = "registry.coder.com/modules/jetbrains-gateway/coder"
+  version = "1.0.21"
+  agent_id = coder_agent.example.id
+  agent_name = "example"
+  folder = "/home/coder/example"
+  jetbrains_ides = ["GO", "WS"]
+  default = "GO"
+  latest = true
+}
+```
+
+### Use the latest EAP version
+
+```tf
+module "jetbrains_gateway" {
+  source = "registry.coder.com/modules/jetbrains-gateway/coder"
+  version = "1.0.21"
+  agent_id = coder_agent.example.id
+  agent_name = "example"
+  folder = "/home/coder/example"
+  jetbrains_ides = ["GO", "WS"]
+  default = "GO"
+  latest = true
+  channel = "eap"
+}
+```
+
## Supported IDEs

This module and JetBrains Gateway support the following JetBrains IDEs:
@@ -14,6 +14,26 @@ describe("jetbrains-gateway", async () => {
    folder: "/home/foo",
  });

+  it("should create a link with the default values", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      // These are all required.
+      agent_id: "foo",
+      agent_name: "foo",
+      folder: "/home/coder",
+    });
+    expect(state.outputs.url.value).toBe(
+      "jetbrains-gateway://connect#type=coder&workspace=default&owner=default&agent=foo&folder=/home/coder&url=https://mydeployment.coder.com&token=$SESSION_TOKEN&ide_product_code=IU&ide_build_number=241.14494.240&ide_download_link=https://download.jetbrains.com/idea/ideaIU-2024.1.tar.gz",
+    );
+
+    const coder_app = state.resources.find(
+      (res) => res.type === "coder_app" && res.name === "gateway",
+    );
+
+    expect(coder_app).not.toBeNull();
+    expect(coder_app?.instances.length).toBe(1);
+    expect(coder_app?.instances[0].attributes.order).toBeNull();
+  });
+
  it("default to first ide", async () => {
    const state = await runTerraformApply(import.meta.dir, {
      agent_id: "foo",
@@ -6,6 +6,10 @@ terraform {
      source  = "coder/coder"
      version = ">= 0.17"
    }
+    http = {
+      source  = "hashicorp/http"
+      version = ">= 3.0"
+    }
  }
}

@@ -14,6 +18,12 @@ variable "agent_id" {
  description = "The ID of a Coder agent."
}

+variable "slug" {
+  type        = string
+  description = "The slug for the coder_app. Allows reusing the module with the same template."
+  default     = "gateway"
+}
+
variable "agent_name" {
  type        = string
  description = "Agent name."
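The `slug` variable added above exists so the module can be instantiated more than once in a single template without the resulting `coder_app` slugs colliding. A sketch of that pattern, assuming two hypothetical agents defined elsewhere in the template:

```tf
module "gateway_main" {
  source     = "registry.coder.com/modules/jetbrains-gateway/coder"
  version    = "1.0.21"
  agent_id   = coder_agent.main.id # hypothetical agent
  agent_name = "main"
  folder     = "/home/coder/main"
  # slug defaults to "gateway"
}

module "gateway_sidecar" {
  source     = "registry.coder.com/modules/jetbrains-gateway/coder"
  version    = "1.0.21"
  agent_id   = coder_agent.sidecar.id # hypothetical agent
  agent_name = "sidecar"
  folder     = "/home/coder/sidecar"
  slug       = "gateway-sidecar" # avoids colliding with the first instance's app slug
}
```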
@@ -46,6 +56,22 @@ variable "coder_parameter_order" {
  default = null
}

+variable "latest" {
+  type        = bool
+  description = "Whether to fetch the latest version of the IDE."
+  default     = false
+}
+
+variable "channel" {
+  type        = string
+  description = "JetBrains IDE release channel. Valid values are release and eap."
+  default     = "release"
+  validation {
+    condition     = can(regex("^(release|eap)$", var.channel))
+    error_message = "The channel must be either release or eap."
+  }
+}
+
variable "jetbrains_ide_versions" {
  type = map(object({
    build_number = string
@@ -120,6 +146,11 @@ variable "jetbrains_ides" {
  }
}

+data "http" "jetbrains_ide_versions" {
+  for_each = var.latest ? toset(var.jetbrains_ides) : toset([])
+  url      = "https://data.services.jetbrains.com/products/releases?code=${each.key}&latest=true&type=${var.channel}"
+}
+
locals {
  jetbrains_ides = {
    "GO" = {
@@ -128,6 +159,7 @@ locals {
      identifier    = "GO",
      build_number  = var.jetbrains_ide_versions["GO"].build_number,
      download_link = "https://download.jetbrains.com/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["GO"].version
    },
    "WS" = {
      icon = "/icon/webstorm.svg",
@@ -135,6 +167,7 @@ locals {
      identifier    = "WS",
      build_number  = var.jetbrains_ide_versions["WS"].build_number,
      download_link = "https://download.jetbrains.com/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["WS"].version
    },
    "IU" = {
      icon = "/icon/intellij.svg",
@@ -142,6 +175,7 @@ locals {
      identifier    = "IU",
      build_number  = var.jetbrains_ide_versions["IU"].build_number,
      download_link = "https://download.jetbrains.com/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["IU"].version
    },
    "PY" = {
      icon = "/icon/pycharm.svg",
@@ -149,6 +183,7 @@ locals {
      identifier    = "PY",
      build_number  = var.jetbrains_ide_versions["PY"].build_number,
      download_link = "https://download.jetbrains.com/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["PY"].version
    },
    "CL" = {
      icon = "/icon/clion.svg",
@@ -156,6 +191,7 @@ locals {
      identifier    = "CL",
      build_number  = var.jetbrains_ide_versions["CL"].build_number,
      download_link = "https://download.jetbrains.com/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["CL"].version
    },
    "PS" = {
      icon = "/icon/phpstorm.svg",
@@ -163,6 +199,7 @@ locals {
      identifier    = "PS",
      build_number  = var.jetbrains_ide_versions["PS"].build_number,
      download_link = "https://download.jetbrains.com/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["PS"].version
    },
    "RM" = {
      icon = "/icon/rubymine.svg",
@@ -170,6 +207,7 @@ locals {
      identifier    = "RM",
      build_number  = var.jetbrains_ide_versions["RM"].build_number,
      download_link = "https://download.jetbrains.com/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["RM"].version
    }
    "RD" = {
      icon = "/icon/rider.svg",
@@ -177,8 +215,18 @@ locals {
      identifier    = "RD",
      build_number  = var.jetbrains_ide_versions["RD"].build_number,
      download_link = "https://download.jetbrains.com/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
+      version       = var.jetbrains_ide_versions["RD"].version
    }
  }
+
+  icon          = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].icon
+  json_data     = var.latest ? jsondecode(data.http.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].response_body) : {}
+  key           = var.latest ? keys(local.json_data)[0] : ""
+  display_name  = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].name
+  identifier    = data.coder_parameter.jetbrains_ide.value
+  download_link = var.latest ? local.json_data[local.key][0].downloads.linux.link : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
+  build_number  = var.latest ? local.json_data[local.key][0].build : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].build_number
+  version       = var.latest ? local.json_data[local.key][0].version : var.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].version
}

data "coder_parameter" "jetbrains_ide" {
@@ -193,25 +241,28 @@ data "coder_parameter" "jetbrains_ide" {
  dynamic "option" {
    for_each = var.jetbrains_ides
    content {
-      icon  = lookup(local.jetbrains_ides, option.value).icon
-      name  = lookup(local.jetbrains_ides, option.value).name
-      value = lookup(local.jetbrains_ides, option.value).identifier
+      icon  = local.jetbrains_ides[option.value].icon
+      name  = local.jetbrains_ides[option.value].name
+      value = option.value
    }
  }
}

data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}

resource "coder_app" "gateway" {
  agent_id     = var.agent_id
-  slug         = "gateway"
-  display_name = try(lookup(local.jetbrains_ides, data.coder_parameter.jetbrains_ide.value).name, "JetBrains IDE")
-  icon         = try(lookup(local.jetbrains_ides, data.coder_parameter.jetbrains_ide.value).icon, "/icon/gateway.svg")
+  slug         = var.slug
+  display_name = local.display_name
+  icon         = local.icon
  external     = true
  order        = var.order
  url = join("", [
    "jetbrains-gateway://connect#type=coder&workspace=",
    data.coder_workspace.me.name,
+    "&owner=",
+    data.coder_workspace_owner.me.name,
    "&agent=",
    var.agent_name,
    "&folder=",
@@ -221,36 +272,36 @@ resource "coder_app" "gateway" {
    "&token=",
    "$SESSION_TOKEN",
    "&ide_product_code=",
-    local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].identifier,
+    data.coder_parameter.jetbrains_ide.value,
    "&ide_build_number=",
-    local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].build_number,
+    local.build_number,
    "&ide_download_link=",
-    local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
+    local.download_link,
  ])
}

output "identifier" {
-  value = data.coder_parameter.jetbrains_ide.value
+  value = local.identifier
}

-output "name" {
-  value = coder_app.gateway.display_name
+output "display_name" {
+  value = local.display_name
}

output "icon" {
-  value = coder_app.gateway.icon
+  value = local.icon
}

output "download_link" {
-  value = lookup(local.jetbrains_ides, data.coder_parameter.jetbrains_ide.value).download_link
+  value = local.download_link
}

output "build_number" {
-  value = lookup(local.jetbrains_ides, data.coder_parameter.jetbrains_ide.value).build_number
+  value = local.build_number
}

output "version" {
-  value = var.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].version
+  value = local.version
}

output "url" {
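When `latest` is left at `false`, the build that Gateway downloads comes from the `jetbrains_ide_versions` map rather than the JetBrains releases API, so pinning an exact release means overriding that map entry. A sketch with placeholder build and version numbers standing in for whatever GoLand release you actually want to pin:

```tf
module "jetbrains_gateway" {
  source         = "registry.coder.com/modules/jetbrains-gateway/coder"
  version        = "1.0.21"
  agent_id       = coder_agent.example.id
  agent_name     = "example"
  folder         = "/home/coder/example"
  jetbrains_ides = ["GO"]
  default        = "GO"

  # Placeholder values; substitute the release you want to pin.
  jetbrains_ide_versions = {
    "GO" = {
      build_number = "241.14494.238"
      version      = "2024.1"
    }
  }
}
```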
jfrog-oauth/.npmrc.tftpl (new file, 5 lines)
@@ -0,0 +1,5 @@
email=${ARTIFACTORY_EMAIL}
%{ for REPO in REPOS ~}
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
%{ endfor ~}
@@ -17,15 +17,16 @@ Install the JF CLI and authenticate package managers with Artifactory using OAut
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-oauth/coder"
-  version = "1.0.5"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://example.jfrog.io"
  username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"

  package_managers = {
-    "npm" : "npm",
-    "go" : "go",
-    "pypi" : "pypi"
+    npm    = ["npm", "@scoped:npm-scoped"]
+    go     = ["go", "another-go-repo"]
+    pypi   = ["pypi", "extra-index-pypi"]
+    docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
  }
}
```
@@ -44,13 +45,13 @@ Configure the Python pip package manager to fetch packages from Artifactory whil
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-oauth/coder"
-  version = "1.0.5"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://example.jfrog.io"
  username_field = "email"

  package_managers = {
-    "pypi" : "pypi"
+    pypi = ["pypi"]
  }
}
```
@@ -72,15 +73,15 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-oauth/coder"
-  version = "1.0.5"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://example.jfrog.io"
  username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
  configure_code_server = true # Add JFrog extension configuration for code-server
  package_managers = {
-    "npm" : "npm",
-    "go" : "go",
-    "pypi" : "pypi"
+    npm  = ["npm"]
+    go   = ["go"]
+    pypi = ["pypi"]
  }
}
```
@@ -1,19 +1,129 @@
-import { serve } from "bun";
+import { describe, expect, it } from "bun:test";
-import { describe } from "bun:test";
import {
-  createJSONResponse,
+  findResourceInstance,
  runTerraformInit,
+  runTerraformApply,
  testRequiredVariables,
} from "../test";

describe("jfrog-oauth", async () => {
+  type TestVariables = {
+    agent_id: string;
+    jfrog_url: string;
+    package_managers: string;
+
+    username_field?: string;
+    jfrog_server_id?: string;
+    external_auth_id?: string;
+    configure_code_server?: boolean;
+  };
+
  await runTerraformInit(import.meta.dir);

-  testRequiredVariables(import.meta.dir, {
-    agent_id: "some-agent-id",
-    jfrog_url: "http://localhost:8081",
-    package_managers: "{}",
-  });
-});
-
-//TODO add more tests
+  const fakeFrogApi = "localhost:8081/artifactory/api";
+  const fakeFrogUrl = "http://localhost:8081";
+  const user = "default";
+
+  it("can run apply with required variables", async () => {
+    testRequiredVariables<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      package_managers: "{}",
+    });
+  });
+
+  it("generates an npmrc with scoped repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      package_managers: JSON.stringify({
+        npm: ["global", "@foo:foo", "@bar:bar"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const npmrcStanza = `cat << EOF > ~/.npmrc
+email=${user}@example.com
+registry=http://${fakeFrogApi}/npm/global
+//${fakeFrogApi}/npm/global/:_authToken=
+@foo:registry=http://${fakeFrogApi}/npm/foo
+//${fakeFrogApi}/npm/foo/:_authToken=
+@bar:registry=http://${fakeFrogApi}/npm/bar
+//${fakeFrogApi}/npm/bar/:_authToken=
+
+EOF`;
+    expect(coderScript.script).toContain(npmrcStanza);
+    expect(coderScript.script).toContain(
+      'jf npmc --global --repo-resolve "global"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured npm',
+    );
+  });
+
+  it("generates a pip config with extra-indexes", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      package_managers: JSON.stringify({
+        pypi: ["global", "foo", "bar"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const pipStanza = `cat << EOF > ~/.pip/pip.conf
+[global]
+index-url = https://${user}:@${fakeFrogApi}/pypi/global/simple
+extra-index-url =
+https://${user}:@${fakeFrogApi}/pypi/foo/simple
+https://${user}:@${fakeFrogApi}/pypi/bar/simple
+
+EOF`;
+    expect(coderScript.script).toContain(pipStanza);
+    expect(coderScript.script).toContain(
+      'jf pipc --global --repo-resolve "global"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured pypi',
+    );
+  });
+
+  it("registers multiple docker repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      package_managers: JSON.stringify({
+        docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const dockerStanza = ["foo", "bar", "baz"]
+      .map((r) => `register_docker "${r}.jfrog.io"`)
+      .join("\n");
+    expect(coderScript.script).toContain(dockerStanza);
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured docker',
+    );
+  });
+
+  it("sets goproxy with multiple repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      package_managers: JSON.stringify({
+        go: ["foo", "bar", "baz"],
+      }),
+    });
+    const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
+    const proxies = ["foo", "bar", "baz"]
+      .map((r) => `https://${user}:@${fakeFrogApi}/go/${r}`)
+      .join(",");
+    expect(proxyEnv.value).toEqual(proxies);
+
+    const coderScript = findResourceInstance(state, "coder_script");
+    expect(coderScript.script).toContain(
+      'jf goc --global --repo-resolve "foo"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured go',
+    );
+  });
+});
@@ -4,7 +4,7 @@ terraform {
  required_providers {
    coder = {
      source  = "coder/coder"
-      version = ">= 0.12.4"
+      version = ">= 0.23"
    }
  }
}
@@ -53,26 +53,55 @@ variable "configure_code_server" {
}

variable "package_managers" {
-  type        = map(string)
-  description = <<EOF
-A map of package manager names to their respective artifactory repositories.
+  type = object({
+    npm    = optional(list(string), [])
+    go     = optional(list(string), [])
+    pypi   = optional(list(string), [])
+    docker = optional(list(string), [])
+  })
+  description = <<-EOF
+    A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
    For example:
    {
-      "npm": "YOUR_NPM_REPO_KEY",
-      "go": "YOUR_GO_REPO_KEY",
-      "pypi": "YOUR_PYPI_REPO_KEY",
-      "docker": "YOUR_DOCKER_REPO_KEY"
+      npm    = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
+      go     = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
+      pypi   = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
+      docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
    }
  EOF
}

locals {
  # The username field to use for artifactory
-  username   = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
-  jfrog_host = replace(var.jfrog_url, "https://", "")
+  username   = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
+  jfrog_host = split("://", var.jfrog_url)[1]
+  common_values = {
+    JFROG_URL                = var.jfrog_url
+    JFROG_HOST               = local.jfrog_host
+    JFROG_SERVER_ID          = var.jfrog_server_id
+    ARTIFACTORY_USERNAME     = local.username
+    ARTIFACTORY_EMAIL        = data.coder_workspace_owner.me.email
+    ARTIFACTORY_ACCESS_TOKEN = data.coder_external_auth.jfrog.access_token
+  }
+  npmrc = templatefile(
+    "${path.module}/.npmrc.tftpl",
+    merge(
+      local.common_values,
+      {
+        REPOS = [
+          for r in var.package_managers.npm :
+          strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
+        ]
+      }
+    )
+  )
+  pip_conf = templatefile(
+    "${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
+  )
}

data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}

data "coder_external_auth" "jfrog" {
  id = var.external_auth_id
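The `REPOS` expression in the `npmrc` local above is what makes scoped npm entries work: any `"@scope:repo"` string is split into a `SCOPE`/`NAME` pair for the `.npmrc` template, while a plain repository name gets an empty scope. A small sketch of the same expression evaluated on a hypothetical input list:

```tf
locals {
  npm_input = ["global", "@foo:foo"] # hypothetical value of var.package_managers.npm

  npm_repos = [
    for r in local.npm_input :
    strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
  ]

  # npm_repos evaluates to:
  #   [ { SCOPE = "",      NAME = "global" },
  #     { SCOPE = "@foo:", NAME = "foo" } ]
}
```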
@@ -82,19 +111,22 @@ resource "coder_script" "jfrog" {
  agent_id     = var.agent_id
  display_name = "jfrog"
  icon         = "/icon/jfrog.svg"
-  script = templatefile("${path.module}/run.sh", {
-    JFROG_URL : var.jfrog_url,
-    JFROG_HOST : local.jfrog_host,
-    JFROG_SERVER_ID : var.jfrog_server_id,
-    ARTIFACTORY_USERNAME : local.username,
-    ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
-    ARTIFACTORY_ACCESS_TOKEN : data.coder_external_auth.jfrog.access_token,
-    CONFIGURE_CODE_SERVER : var.configure_code_server,
-    REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
-    REPOSITORY_GO : lookup(var.package_managers, "go", ""),
-    REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
-    REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
-  })
+  script = templatefile("${path.module}/run.sh", merge(
+    local.common_values,
+    {
+      CONFIGURE_CODE_SERVER = var.configure_code_server
+      HAS_NPM               = length(var.package_managers.npm) == 0 ? "" : "YES"
+      NPMRC                 = local.npmrc
+      REPOSITORY_NPM        = try(element(var.package_managers.npm, 0), "")
+      HAS_GO                = length(var.package_managers.go) == 0 ? "" : "YES"
+      REPOSITORY_GO         = try(element(var.package_managers.go, 0), "")
+      HAS_PYPI              = length(var.package_managers.pypi) == 0 ? "" : "YES"
+      PIP_CONF              = local.pip_conf
+      REPOSITORY_PYPI       = try(element(var.package_managers.pypi, 0), "")
+      HAS_DOCKER            = length(var.package_managers.docker) == 0 ? "" : "YES"
+      REGISTER_DOCKER       = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
+    }
+  ))
  run_on_start = true
}
@@ -120,10 +152,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
}

resource "coder_env" "goproxy" {
-  count    = lookup(var.package_managers, "go", "") == "" ? 0 : 1
+  count    = length(var.package_managers.go) == 0 ? 0 : 1
  agent_id = var.agent_id
  name     = "GOPROXY"
-  value    = "https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
+  value = join(",", [
+    for repo in var.package_managers.go :
+    "https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
+  ])
}

output "access_token" {
jfrog-oauth/pip.conf.tftpl (new file, 6 lines)
@@ -0,0 +1,6 @@
[global]
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
extra-index-url =
%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
%{ endfor ~}
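This template renders `index-url` from the first pypi repository and one `extra-index-url` entry per remaining repository. A sketch of rendering it directly with hypothetical values, for example to preview the output during development:

```tf
locals {
  pip_conf_preview = templatefile("${path.module}/pip.conf.tftpl", {
    ARTIFACTORY_USERNAME     = "jane.doe"         # hypothetical
    ARTIFACTORY_ACCESS_TOKEN = "REDACTED"         # hypothetical
    JFROG_HOST               = "example.jfrog.io" # hypothetical
    REPOS                    = ["pypi", "extra-index-pypi"]
  })
  # index-url ends up pointing at .../api/pypi/pypi/simple, and extra-index-url
  # lists .../api/pypi/extra-index-pypi/simple.
}
```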
@@ -2,6 +2,21 @@

BOLD='\033[0;1m'

+not_configured() {
+  type=$1
+  echo "🤔 no $type repository is set, skipping $type configuration."
+  echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
+}
+
+config_complete() {
+  echo "🥳 Configuration complete!"
+}
+
+register_docker() {
+  repo=$1
+  echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
+}
+
# check if JFrog CLI is already installed
if command -v jf > /dev/null 2>&1; then
  echo "✅ JFrog CLI is already installed, skipping installation."
@@ -20,52 +35,47 @@ echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFRO
jf c use "${JFROG_SERVER_ID}"

# Configure npm to use the Artifactory "npm" repository.
-if [ -z "${REPOSITORY_NPM}" ]; then
-  echo "🤔 no npm repository is set, skipping npm configuration."
-  echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
+if [ -z "${HAS_NPM}" ]; then
+  not_configured npm
else
  echo "📦 Configuring npm..."
  jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
  cat << EOF > ~/.npmrc
-email=${ARTIFACTORY_EMAIL}
-registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
+${NPMRC}
EOF
-  echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
+  config_complete
fi

# Configure the `pip` to use the Artifactory "python" repository.
-if [ -z "${REPOSITORY_PYPI}" ]; then
-  echo "🤔 no pypi repository is set, skipping pip configuration."
-  echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
+if [ -z "${HAS_PYPI}" ]; then
+  not_configured pypi
else
-  echo "📦 Configuring pip..."
+  echo "🐍 Configuring pip..."
  jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
  mkdir -p ~/.pip
  cat << EOF > ~/.pip/pip.conf
-[global]
-index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
+${PIP_CONF}
EOF
+  config_complete
fi

# Configure Artifactory "go" repository.
-if [ -z "${REPOSITORY_GO}" ]; then
-  echo "🤔 no go repository is set, skipping go configuration."
-  echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
+if [ -z "${HAS_GO}" ]; then
+  not_configured go
else
  echo "🐹 Configuring go..."
  jf goc --global --repo-resolve "${REPOSITORY_GO}"
+  config_complete
fi
-echo "🥳 Configuration complete!"

# Configure the JFrog CLI to use the Artifactory "docker" repository.
-if [ -z "${REPOSITORY_DOCKER}" ]; then
-  echo "🤔 no docker repository is set, skipping docker configuration."
-  echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
+if [ -z "${HAS_DOCKER}" ]; then
+  not_configured docker
else
  if command -v docker > /dev/null 2>&1; then
    echo "🔑 Configuring 🐳 docker credentials..."
    mkdir -p ~/.docker
-    echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
+    ${REGISTER_DOCKER}
  else
    echo "🤔 no docker is installed, skipping docker configuration."
  fi
@@ -96,20 +106,19 @@ echo "📦 Configuring JFrog CLI completion..."
SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
# Generate the completion script
jf completion $SHELLNAME --install
+begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)"
# Add the completion script to the user's shell profile
if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
-  if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
-    echo "" >> ~/.bashrc
-    echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.bashrc
+  if ! grep -q "$begin_stanza" ~/.bashrc; then
+    printf "%s\n" "$begin_stanza" >> ~/.bashrc
    echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
    echo "# END: jf CLI shell completion" >> ~/.bashrc
  else
    echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
  fi
elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
-  if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
-    echo "" >> ~/.zshrc
-    echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.zshrc
+  if ! grep -q "$begin_stanza" ~/.zshrc; then
+    printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
    echo "autoload -Uz compinit" >> ~/.zshrc
    echo "compinit" >> ~/.zshrc
    echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
jfrog-token/.npmrc.tftpl (new file, 5 lines)
@@ -0,0 +1,5 @@
email=${ARTIFACTORY_EMAIL}
%{ for REPO in REPOS ~}
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
%{ endfor ~}
@@ -15,14 +15,15 @@ Install the JF CLI and authenticate package managers with Artifactory using Arti
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-token/coder"
-  version = "1.0.10"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://XXXX.jfrog.io"
  artifactory_access_token = var.artifactory_access_token
  package_managers = {
-    "npm" : "npm",
-    "go" : "go",
-    "pypi" : "pypi"
+    npm    = ["npm", "@scoped:npm-scoped"]
+    go     = ["go", "another-go-repo"]
+    pypi   = ["pypi", "extra-index-pypi"]
+    docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
  }
}
```
@@ -41,14 +42,14 @@ For detailed instructions, please see this [guide](https://coder.com/docs/v2/lat
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-token/coder"
-  version = "1.0.10"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://YYYY.jfrog.io"
  artifactory_access_token = var.artifactory_access_token # An admin access token
  package_managers = {
-    "npm" : "npm-local",
-    "go" : "go-local",
-    "pypi" : "pypi-local"
+    npm  = ["npm-local"]
+    go   = ["go-local"]
+    pypi = ["pypi-local"]
  }
}
```
@@ -74,15 +75,15 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
```tf
module "jfrog" {
  source = "registry.coder.com/modules/jfrog-token/coder"
-  version = "1.0.10"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://XXXX.jfrog.io"
  artifactory_access_token = var.artifactory_access_token
  configure_code_server = true # Add JFrog extension configuration for code-server
  package_managers = {
-    "npm" : "npm",
-    "go" : "go",
-    "pypi" : "pypi"
+    npm  = ["npm"]
+    go   = ["go"]
+    pypi = ["pypi"]
  }
}
```
@@ -94,15 +95,13 @@ data "coder_workspace" "me" {}

module "jfrog" {
  source = "registry.coder.com/modules/jfrog-token/coder"
-  version = "1.0.10"
+  version = "1.0.19"
  agent_id = coder_agent.example.id
  jfrog_url = "https://XXXX.jfrog.io"
  artifactory_access_token = var.artifactory_access_token
-  token_description = "Token for Coder workspace: ${data.coder_workspace.me.owner}/${data.coder_workspace.me.name}"
+  token_description = "Token for Coder workspace: ${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}"
  package_managers = {
-    "npm" : "npm",
-    "go" : "go",
-    "pypi" : "pypi"
+    npm = ["npm"]
  }
}
```
@@ -1,12 +1,29 @@
import { serve } from "bun";
-import { describe } from "bun:test";
+import { describe, expect, it } from "bun:test";
import {
  createJSONResponse,
+  findResourceInstance,
  runTerraformInit,
+  runTerraformApply,
  testRequiredVariables,
} from "../test";

describe("jfrog-token", async () => {
+  type TestVariables = {
+    agent_id: string;
+    jfrog_url: string;
+    artifactory_access_token: string;
+    package_managers: string;
+
+    token_description?: string;
+    check_license?: boolean;
+    refreshable?: boolean;
+    expires_in?: number;
+    username_field?: string;
+    jfrog_server_id?: string;
+    configure_code_server?: boolean;
+  };
+
  await runTerraformInit(import.meta.dir);

  // Run a fake JFrog server so the provider can initialize
@@ -32,10 +49,116 @@ describe("jfrog-token", async () => {
    port: 0,
  });

-  testRequiredVariables(import.meta.dir, {
-    agent_id: "some-agent-id",
-    jfrog_url: "http://" + fakeFrogHost.hostname + ":" + fakeFrogHost.port,
-    artifactory_access_token: "XXXX",
-    package_managers: "{}",
-  });
-});
+  const fakeFrogApi = `${fakeFrogHost.hostname}:${fakeFrogHost.port}/artifactory/api`;
+  const fakeFrogUrl = `http://${fakeFrogHost.hostname}:${fakeFrogHost.port}`;
+  const user = "default";
+  const token = "xxx";
+
+  it("can run apply with required variables", async () => {
+    testRequiredVariables<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      artifactory_access_token: "XXXX",
+      package_managers: "{}",
+    });
+  });
+
+  it("generates an npmrc with scoped repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      artifactory_access_token: "XXXX",
+      package_managers: JSON.stringify({
+        npm: ["global", "@foo:foo", "@bar:bar"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const npmrcStanza = `cat << EOF > ~/.npmrc
+email=${user}@example.com
+registry=http://${fakeFrogApi}/npm/global
+//${fakeFrogApi}/npm/global/:_authToken=xxx
+@foo:registry=http://${fakeFrogApi}/npm/foo
+//${fakeFrogApi}/npm/foo/:_authToken=xxx
+@bar:registry=http://${fakeFrogApi}/npm/bar
+//${fakeFrogApi}/npm/bar/:_authToken=xxx
+
+EOF`;
+    expect(coderScript.script).toContain(npmrcStanza);
+    expect(coderScript.script).toContain(
+      'jf npmc --global --repo-resolve "global"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured npm',
+    );
+  });
+
+  it("generates a pip config with extra-indexes", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      artifactory_access_token: "XXXX",
+      package_managers: JSON.stringify({
+        pypi: ["global", "foo", "bar"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const pipStanza = `cat << EOF > ~/.pip/pip.conf
+[global]
+index-url = https://${user}:${token}@${fakeFrogApi}/pypi/global/simple
+extra-index-url =
+https://${user}:${token}@${fakeFrogApi}/pypi/foo/simple
+https://${user}:${token}@${fakeFrogApi}/pypi/bar/simple
+
+EOF`;
+    expect(coderScript.script).toContain(pipStanza);
+    expect(coderScript.script).toContain(
+      'jf pipc --global --repo-resolve "global"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured pypi',
+    );
+  });
+
+  it("registers multiple docker repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      artifactory_access_token: "XXXX",
+      package_managers: JSON.stringify({
+        docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
+      }),
+    });
+    const coderScript = findResourceInstance(state, "coder_script");
+    const dockerStanza = ["foo", "bar", "baz"]
+      .map((r) => `register_docker "${r}.jfrog.io"`)
+      .join("\n");
+    expect(coderScript.script).toContain(dockerStanza);
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured docker',
+    );
+  });
+
+  it("sets goproxy with multiple repos", async () => {
+    const state = await runTerraformApply<TestVariables>(import.meta.dir, {
+      agent_id: "some-agent-id",
+      jfrog_url: fakeFrogUrl,
+      artifactory_access_token: "XXXX",
+      package_managers: JSON.stringify({
+        go: ["foo", "bar", "baz"],
+      }),
+    });
+    const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
+    const proxies = ["foo", "bar", "baz"]
+      .map((r) => `https://${user}:${token}@${fakeFrogApi}/go/${r}`)
+      .join(",");
+    expect(proxyEnv.value).toEqual(proxies);
+
+    const coderScript = findResourceInstance(state, "coder_script");
+    expect(coderScript.script).toContain(
+      'jf goc --global --repo-resolve "foo"',
+    );
+    expect(coderScript.script).toContain(
+      'if [ -z "YES" ]; then\n not_configured go',
+    );
+  });
+});
jfrog-token/main.tf
@@ -4,7 +4,7 @@ terraform {
   required_providers {
     coder = {
       source  = "coder/coder"
-      version = ">= 0.12.4"
+      version = ">= 0.23"
     }
     artifactory = {
       source  = "registry.terraform.io/jfrog/artifactory"
@@ -80,23 +80,51 @@ variable "configure_code_server" {
 }
 
 variable "package_managers" {
-  type        = map(string)
+  type = object({
+    npm    = optional(list(string), [])
+    go     = optional(list(string), [])
+    pypi   = optional(list(string), [])
+    docker = optional(list(string), [])
+  })
-  description = <<EOF
-A map of package manager names to their respective artifactory repositories.
+  description = <<-EOF
+    A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
     For example:
     {
-      "npm": "YOUR_NPM_REPO_KEY",
-      "go": "YOUR_GO_REPO_KEY",
-      "pypi": "YOUR_PYPI_REPO_KEY",
-      "docker": "YOUR_DOCKER_REPO_KEY"
+      npm    = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
+      go     = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
+      pypi   = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
+      docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
     }
   EOF
 }
 
 locals {
   # The username field to use for artifactory
-  username   = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
-  jfrog_host = replace(var.jfrog_url, "https://", "")
+  username   = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
+  jfrog_host = split("://", var.jfrog_url)[1]
+  common_values = {
+    JFROG_URL                = var.jfrog_url
+    JFROG_HOST               = local.jfrog_host
+    JFROG_SERVER_ID          = var.jfrog_server_id
+    ARTIFACTORY_USERNAME     = local.username
+    ARTIFACTORY_EMAIL        = data.coder_workspace_owner.me.email
+    ARTIFACTORY_ACCESS_TOKEN = artifactory_scoped_token.me.access_token
+  }
+  npmrc = templatefile(
+    "${path.module}/.npmrc.tftpl",
+    merge(
+      local.common_values,
+      {
+        REPOS = [
+          for r in var.package_managers.npm :
+          strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
+        ]
+      }
+    )
+  )
+  pip_conf = templatefile(
+    "${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
+  )
 }
 
 # Configure the Artifactory provider
@@ -117,24 +145,28 @@ resource "artifactory_scoped_token" "me" {
 }
 
 data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}
 
 resource "coder_script" "jfrog" {
   agent_id     = var.agent_id
   display_name = "jfrog"
   icon         = "/icon/jfrog.svg"
-  script = templatefile("${path.module}/run.sh", {
-    JFROG_URL : var.jfrog_url,
-    JFROG_HOST : local.jfrog_host,
-    JFROG_SERVER_ID : var.jfrog_server_id,
-    ARTIFACTORY_USERNAME : local.username,
-    ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
-    ARTIFACTORY_ACCESS_TOKEN : artifactory_scoped_token.me.access_token,
-    CONFIGURE_CODE_SERVER : var.configure_code_server,
-    REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
-    REPOSITORY_GO : lookup(var.package_managers, "go", ""),
-    REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
-    REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
-  })
+  script = templatefile("${path.module}/run.sh", merge(
+    local.common_values,
+    {
+      CONFIGURE_CODE_SERVER = var.configure_code_server
+      HAS_NPM               = length(var.package_managers.npm) == 0 ? "" : "YES"
+      NPMRC                 = local.npmrc
+      REPOSITORY_NPM        = try(element(var.package_managers.npm, 0), "")
+      HAS_GO                = length(var.package_managers.go) == 0 ? "" : "YES"
+      REPOSITORY_GO         = try(element(var.package_managers.go, 0), "")
+      HAS_PYPI              = length(var.package_managers.pypi) == 0 ? "" : "YES"
+      PIP_CONF              = local.pip_conf
+      REPOSITORY_PYPI       = try(element(var.package_managers.pypi, 0), "")
+      HAS_DOCKER            = length(var.package_managers.docker) == 0 ? "" : "YES"
+      REGISTER_DOCKER       = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
+    }
+  ))
   run_on_start = true
 }
 
@@ -160,10 +192,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
 }
 
 resource "coder_env" "goproxy" {
-  count    = lookup(var.package_managers, "go", "") == "" ? 0 : 1
+  count    = length(var.package_managers.go) == 0 ? 0 : 1
   agent_id = var.agent_id
   name     = "GOPROXY"
-  value    = "https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
+  value = join(",", [
+    for repo in var.package_managers.go :
+    "https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
+  ])
 }
 
 output "access_token" {
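For reference, a template consuming the updated module now passes lists of repository keys instead of single strings. A minimal, hypothetical sketch (the registry source path and every repository key below are assumptions for illustration, not taken from this changeset):

```tf
module "jfrog" {
  # Assumed registry path; pin the version that matches your deployment.
  source   = "registry.coder.com/modules/jfrog-token/coder"
  agent_id = coder_agent.example.id

  jfrog_url                = "https://example.jfrog.io"
  artifactory_access_token = var.artifactory_access_token # admin token used to mint per-user scoped tokens

  # Each package manager now takes a list; unused keys can be omitted.
  package_managers = {
    npm    = ["npm-remote", "@mycorp:npm-private"]
    go     = ["go-remote", "go-private"]
    pypi   = ["pypi-remote"]
    docker = ["docker-remote.example.jfrog.io"]
  }
}
```

The first entry of each list becomes the repository the `jf` CLI resolves by default (`REPOSITORY_NPM`, `REPOSITORY_GO`, `REPOSITORY_PYPI`), while the remaining entries are appended as extra npm scopes, extra pip indexes, additional GOPROXY entries, or additional `docker login` registries.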
|
||||||
|
|||||||
jfrog-token/pip.conf.tftpl (new file, 6 lines)
@@ -0,0 +1,6 @@
+[global]
+index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
+extra-index-url =
+%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
+https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
+%{ endfor ~}
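To see what this template renders to, it can be fed through `templatefile` with the same variable names the module passes in. A standalone, illustrative sketch (all values are placeholders and the `output` is hypothetical, not part of the module):

```tf
# Hypothetical debug output for previewing the rendered pip.conf.
output "pip_conf_preview" {
  value = templatefile("${path.module}/pip.conf.tftpl", {
    ARTIFACTORY_USERNAME     = "alice"
    ARTIFACTORY_ACCESS_TOKEN = "REDACTED"
    JFROG_HOST               = "example.jfrog.io"
    REPOS                    = ["pypi-remote", "pypi-private"]
  })
}
```

The first repository in `REPOS` becomes `index-url` and every remaining repository is emitted as an additional `extra-index-url` line, which is what the pip test earlier in this changeset asserts against.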
|
||||||
jfrog-token/run.sh
@@ -2,6 +2,21 @@
 
 BOLD='\033[0;1m'
 
+not_configured() {
+  type=$1
+  echo "🤔 no $type repository is set, skipping $type configuration."
+  echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
+}
+
+config_complete() {
+  echo "🥳 Configuration complete!"
+}
+
+register_docker() {
+  repo=$1
+  echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
+}
+
 # check if JFrog CLI is already installed
 if command -v jf > /dev/null 2>&1; then
   echo "✅ JFrog CLI is already installed, skipping installation."
@@ -11,8 +26,7 @@ else
   sudo chmod 755 /usr/local/bin/jf
 fi
 
-# The jf CLI checks $CI when determining whether to use interactive
-# flows.
+# The jf CLI checks $CI when determining whether to use interactive flows.
 export CI=true
 # Authenticate JFrog CLI with Artifactory.
 echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}"
@@ -20,52 +34,47 @@ echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFRO
 jf c use "${JFROG_SERVER_ID}"
 
 # Configure npm to use the Artifactory "npm" repository.
-if [ -z "${REPOSITORY_NPM}" ]; then
-  echo "🤔 no npm repository is set, skipping npm configuration."
-  echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
+if [ -z "${HAS_NPM}" ]; then
+  not_configured npm
 else
   echo "📦 Configuring npm..."
   jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
   cat << EOF > ~/.npmrc
-email=${ARTIFACTORY_EMAIL}
-registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
+${NPMRC}
 EOF
-  echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
+  config_complete
 fi
 
 # Configure the `pip` to use the Artifactory "python" repository.
-if [ -z "${REPOSITORY_PYPI}" ]; then
-  echo "🤔 no pypi repository is set, skipping pip configuration."
-  echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
+if [ -z "${HAS_PYPI}" ]; then
  not_configured pypi
 else
   echo "🐍 Configuring pip..."
   jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
   mkdir -p ~/.pip
   cat << EOF > ~/.pip/pip.conf
-[global]
-index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
+${PIP_CONF}
 EOF
+  config_complete
 fi
 
 # Configure Artifactory "go" repository.
-if [ -z "${REPOSITORY_GO}" ]; then
-  echo "🤔 no go repository is set, skipping go configuration."
-  echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
+if [ -z "${HAS_GO}" ]; then
+  not_configured go
 else
   echo "🐹 Configuring go..."
   jf goc --global --repo-resolve "${REPOSITORY_GO}"
+  config_complete
 fi
-echo "🥳 Configuration complete!"
 
 # Configure the JFrog CLI to use the Artifactory "docker" repository.
-if [ -z "${REPOSITORY_DOCKER}" ]; then
-  echo "🤔 no docker repository is set, skipping docker configuration."
-  echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
+if [ -z "${HAS_DOCKER}" ]; then
  not_configured docker
 else
   if command -v docker > /dev/null 2>&1; then
     echo "🔑 Configuring 🐳 docker credentials..."
     mkdir -p ~/.docker
-    echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
+    ${REGISTER_DOCKER}
   else
     echo "🤔 no docker is installed, skipping docker configuration."
   fi
@@ -96,20 +105,19 @@ echo "📦 Configuring JFrog CLI completion..."
 SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
 # Generate the completion script
 jf completion $SHELLNAME --install
+begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-token)"
 # Add the completion script to the user's shell profile
 if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
-  if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
-    echo "" >> ~/.bashrc
-    echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.bashrc
+  if ! grep -q "$begin_stanza" ~/.bashrc; then
+    printf "%s\n" "$begin_stanza" >> ~/.bashrc
     echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
     echo "# END: jf CLI shell completion" >> ~/.bashrc
   else
     echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
   fi
 elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
-  if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
-    echo "" >> ~/.zshrc
-    echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.zshrc
+  if ! grep -q "$begin_stanza" ~/.zshrc; then
+    printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
     echo "autoload -Uz compinit" >> ~/.zshrc
     echo "compinit" >> ~/.zshrc
     echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
|
|||||||
@@ -16,7 +16,7 @@ A module that adds Jupyter Notebook in your Coder template.
|
|||||||
```tf
|
```tf
|
||||||
module "jupyter-notebook" {
|
module "jupyter-notebook" {
|
||||||
source = "registry.coder.com/modules/jupyter-notebook/coder"
|
source = "registry.coder.com/modules/jupyter-notebook/coder"
|
||||||
version = "1.0.8"
|
version = "1.0.19"
|
||||||
agent_id = coder_agent.example.id
|
agent_id = coder_agent.example.id
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|||||||
jupyter-notebook/run.sh
@@ -7,14 +7,14 @@ printf "$${BOLD}Installing jupyter-notebook!\n"
 # check if jupyter-notebook is installed
 if ! command -v jupyter-notebook > /dev/null 2>&1; then
   # install jupyter-notebook
-  # check if python3 pip is installed
-  if ! command -v pip3 > /dev/null 2>&1; then
-    echo "pip3 is not installed"
-    echo "Please install pip3 in your Dockerfile/VM image before running this script"
+  # check if pipx is installed
+  if ! command -v pipx > /dev/null 2>&1; then
+    echo "pipx is not installed"
+    echo "Please install pipx in your Dockerfile/VM image before using this module"
     exit 1
   fi
-  # install jupyter-notebook
-  pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyter
+  # install jupyter notebook
+  pipx install -q notebook
   echo "🥳 jupyter-notebook has been installed\n\n"
 else
   echo "🥳 jupyter-notebook is already installed\n\n"
@@ -22,4 +22,4 @@ fi
 
 echo "👷 Starting jupyter-notebook in background..."
 echo "check logs at ${LOG_PATH}"
-$HOME/.local/bin/jupyter notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
+$HOME/.local/bin/jupyter-notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
|||||||
jupyterlab/README.md
@@ -16,7 +16,7 @@ A module that adds JupyterLab in your Coder template.
 ```tf
 module "jupyterlab" {
   source   = "registry.coder.com/modules/jupyterlab/coder"
-  version  = "1.0.8"
+  version  = "1.0.22"
   agent_id = coder_agent.example.id
 }
 ```
|
|||||||
@@ -1,20 +1,20 @@
|
|||||||
import { describe, expect, it } from "bun:test";
|
import { describe, expect, it } from "bun:test";
|
||||||
import {
|
import {
|
||||||
|
execContainer,
|
||||||
executeScriptInContainer,
|
executeScriptInContainer,
|
||||||
|
findResourceInstance,
|
||||||
|
runContainer,
|
||||||
runTerraformApply,
|
runTerraformApply,
|
||||||
runTerraformInit,
|
runTerraformInit,
|
||||||
testRequiredVariables,
|
testRequiredVariables,
|
||||||
findResourceInstance,
|
type TerraformState,
|
||||||
runContainer,
|
|
||||||
TerraformState,
|
|
||||||
execContainer,
|
|
||||||
} from "../test";
|
} from "../test";
|
||||||
|
|
||||||
// executes the coder script after installing pip
|
// executes the coder script after installing pip
|
||||||
const executeScriptInContainerWithPip = async (
|
const executeScriptInContainerWithPip = async (
|
||||||
state: TerraformState,
|
state: TerraformState,
|
||||||
image: string,
|
image: string,
|
||||||
shell: string = "sh",
|
shell = "sh",
|
||||||
): Promise<{
|
): Promise<{
|
||||||
exitCode: number;
|
exitCode: number;
|
||||||
stdout: string[];
|
stdout: string[];
|
||||||
@@ -22,7 +22,7 @@ const executeScriptInContainerWithPip = async (
|
|||||||
}> => {
|
}> => {
|
||||||
const instance = findResourceInstance(state, "coder_script");
|
const instance = findResourceInstance(state, "coder_script");
|
||||||
const id = await runContainer(image);
|
const id = await runContainer(image);
|
||||||
const respPip = await execContainer(id, [shell, "-c", "apk add py3-pip"]);
|
const respPipx = await execContainer(id, [shell, "-c", "apk add pipx"]);
|
||||||
const resp = await execContainer(id, [shell, "-c", instance.script]);
|
const resp = await execContainer(id, [shell, "-c", instance.script]);
|
||||||
const stdout = resp.stdout.trim().split("\n");
|
const stdout = resp.stdout.trim().split("\n");
|
||||||
const stderr = resp.stderr.trim().split("\n");
|
const stderr = resp.stderr.trim().split("\n");
|
||||||
@@ -40,7 +40,7 @@ describe("jupyterlab", async () => {
|
|||||||
agent_id: "foo",
|
agent_id: "foo",
|
||||||
});
|
});
|
||||||
|
|
||||||
it("fails without pip3", async () => {
|
it("fails without pipx", async () => {
|
||||||
const state = await runTerraformApply(import.meta.dir, {
|
const state = await runTerraformApply(import.meta.dir, {
|
||||||
agent_id: "foo",
|
agent_id: "foo",
|
||||||
});
|
});
|
||||||
@@ -48,14 +48,14 @@ describe("jupyterlab", async () => {
|
|||||||
expect(output.exitCode).toBe(1);
|
expect(output.exitCode).toBe(1);
|
||||||
expect(output.stdout).toEqual([
|
expect(output.stdout).toEqual([
|
||||||
"\u001B[0;1mInstalling jupyterlab!",
|
"\u001B[0;1mInstalling jupyterlab!",
|
||||||
"pip3 is not installed",
|
"pipx is not installed",
|
||||||
"Please install pip3 in your Dockerfile/VM image before running this script",
|
"Please install pipx in your Dockerfile/VM image before running this script",
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
// TODO: Add faster test to run with pip3.
|
// TODO: Add faster test to run with pipx.
|
||||||
// currently times out.
|
// currently times out.
|
||||||
// it("runs with pip3", async () => {
|
// it("runs with pipx", async () => {
|
||||||
// ...
|
// ...
|
||||||
// const output = await executeScriptInContainerWithPip(state, "alpine");
|
// const output = await executeScriptInContainerWithPip(state, "alpine");
|
||||||
// ...
|
// ...
|
||||||
|
|||||||
jupyterlab/main.tf
@@ -9,6 +9,9 @@ terraform {
   }
 }
 
+data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}
+
 # Add required variables for your modules and remove any unneeded variables
 variable "agent_id" {
   type        = string
@@ -36,6 +39,12 @@ variable "share" {
   }
 }
 
+variable "subdomain" {
+  type        = bool
+  description = "Determines whether JupyterLab will be accessed via its own subdomain or whether it will be accessed via a path on Coder."
+  default     = true
+}
+
 variable "order" {
   type        = number
   description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
@@ -49,17 +58,18 @@ resource "coder_script" "jupyterlab" {
   script = templatefile("${path.module}/run.sh", {
     LOG_PATH : var.log_path,
     PORT : var.port
+    BASE_URL : var.subdomain ? "" : "/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
   })
   run_on_start = true
 }
 
 resource "coder_app" "jupyterlab" {
   agent_id     = var.agent_id
-  slug         = "jupyterlab"
+  slug         = "jupyterlab" # sync with the usage in URL
   display_name = "JupyterLab"
-  url          = "http://localhost:${var.port}"
+  url          = var.subdomain ? "http://localhost:${var.port}" : "http://localhost:${var.port}/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
   icon         = "/icon/jupyter.svg"
-  subdomain    = true
+  subdomain    = var.subdomain
   share        = var.share
   order        = var.order
 }
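For a template that serves JupyterLab on a Coder path instead of a dedicated subdomain, the new `subdomain` input can simply be switched off. A small sketch (module source and version taken from the README in this changeset; the rest follows the usual README example):

```tf
module "jupyterlab" {
  source    = "registry.coder.com/modules/jupyterlab/coder"
  version   = "1.0.22"
  agent_id  = coder_agent.example.id
  subdomain = false # served under /@<user>/<workspace>/apps/jupyterlab
}
```

With `subdomain = false`, the script receives a non-empty `BASE_URL` and starts `jupyter-lab` with `--ServerApp.base_url`, and the `coder_app` URL points at that same path.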
|
||||||
|
|||||||
jupyterlab/run.sh
@@ -1,5 +1,9 @@
 #!/usr/bin/env sh
 
+if [ -n "${BASE_URL}" ]; then
+  BASE_URL_FLAG="--ServerApp.base_url=${BASE_URL}"
+fi
+
 BOLD='\033[0;1m'
 
 printf "$${BOLD}Installing jupyterlab!\n"
@@ -7,19 +11,25 @@ printf "$${BOLD}Installing jupyterlab!\n"
 # check if jupyterlab is installed
 if ! command -v jupyterlab > /dev/null 2>&1; then
   # install jupyterlab
-  # check if python3 pip is installed
-  if ! command -v pip3 > /dev/null 2>&1; then
-    echo "pip3 is not installed"
-    echo "Please install pip3 in your Dockerfile/VM image before running this script"
+  # check if pipx is installed
+  if ! command -v pipx > /dev/null 2>&1; then
+    echo "pipx is not installed"
+    echo "Please install pipx in your Dockerfile/VM image before running this script"
     exit 1
   fi
   # install jupyterlab
-  pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyterlab
-  echo "🥳 jupyterlab has been installed\n\n"
+  pipx install -q jupyterlab
+  printf "%s\n\n" "🥳 jupyterlab has been installed"
 else
-  echo "🥳 jupyterlab is already installed\n\n"
+  printf "%s\n\n" "🥳 jupyterlab is already installed"
 fi
 
-echo "👷 Starting jupyterlab in background..."
-echo "check logs at ${LOG_PATH}"
-$HOME/.local/bin/jupyter lab --ServerApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
+printf "👷 Starting jupyterlab in background..."
+printf "check logs at ${LOG_PATH}"
+$HOME/.local/bin/jupyter-lab --no-browser \
+  "$BASE_URL_FLAG" \
+  --ServerApp.ip='*' \
+  --ServerApp.port="${PORT}" \
+  --ServerApp.token='' \
+  --ServerApp.password='' \
+  > "${LOG_PATH}" 2>&1 &
|
|||||||
kasmvnc/README.md (new file, 42 lines)
@@ -0,0 +1,42 @@
|
|||||||
|
---
|
||||||
|
display_name: KasmVNC
|
||||||
|
description: A modern open source VNC server
|
||||||
|
icon: ../.icons/kasmvnc.svg
|
||||||
|
maintainer_github: coder
|
||||||
|
verified: true
|
||||||
|
tags: [helper, vnc, desktop]
|
||||||
|
---
|
||||||
|
|
||||||
|
# KasmVNC
|
||||||
|
|
||||||
|
Automatically install [KasmVNC](https://kasmweb.com/kasmvnc) in a workspace, and create an app to access it via the dashboard.
|
||||||
|
|
||||||
|
```tf
|
||||||
|
module "kasmvnc" {
|
||||||
|
source = "registry.coder.com/modules/kasmvnc/coder"
|
||||||
|
version = "1.0.23"
|
||||||
|
agent_id = coder_agent.example.id
|
||||||
|
desktop_environment = "xfce"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Note:** This module only works on workspaces with a pre-installed desktop environment. As an example base image you can use `codercom/enterprise-desktop` image.
|
||||||
|
|
||||||
|
> **Note:** You can also use the kasmtech [custom images](https://kasmweb.com/docs/latest/guide/custom_images.html) by extending them as following:
|
||||||
|
|
||||||
|
```Dockerfile
|
||||||
|
FROM kasmweb/postman:1.16.0
|
||||||
|
ARG USER=kasm-user
|
||||||
|
USER root
|
||||||
|
# Overwrite the existing config file to disable ssl
|
||||||
|
RUN cat <<EOF > /etc/kasmvnc/kasmvnc.yaml
|
||||||
|
network:
|
||||||
|
protocol: http
|
||||||
|
ssl:
|
||||||
|
require_ssl: false
|
||||||
|
udp:
|
||||||
|
public_ip: 127.0.0.1
|
||||||
|
EOF
|
||||||
|
RUN addgroup $USER ssl-cert
|
||||||
|
USER $USER
|
||||||
|
```
|
||||||
kasmvnc/main.test.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
|
|||||||
|
import { describe, expect, it } from "bun:test";
|
||||||
|
import {
|
||||||
|
runTerraformApply,
|
||||||
|
runTerraformInit,
|
||||||
|
testRequiredVariables,
|
||||||
|
} from "../test";
|
||||||
|
|
||||||
|
const allowedDesktopEnvs = ["xfce", "kde", "gnome", "lxde", "lxqt"] as const;
|
||||||
|
type AllowedDesktopEnv = (typeof allowedDesktopEnvs)[number];
|
||||||
|
|
||||||
|
type TestVariables = Readonly<{
|
||||||
|
agent_id: string;
|
||||||
|
desktop_environment: AllowedDesktopEnv;
|
||||||
|
port?: string;
|
||||||
|
kasm_version?: string;
|
||||||
|
}>;
|
||||||
|
|
||||||
|
describe("Kasm VNC", async () => {
|
||||||
|
await runTerraformInit(import.meta.dir);
|
||||||
|
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
desktop_environment: "gnome",
|
||||||
|
});
|
||||||
|
|
||||||
|
it("Successfully installs for all expected Kasm desktop versions", async () => {
|
||||||
|
for (const v of allowedDesktopEnvs) {
|
||||||
|
const applyWithEnv = () => {
|
||||||
|
runTerraformApply<TestVariables>(import.meta.dir, {
|
||||||
|
agent_id: "foo",
|
||||||
|
desktop_environment: v,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
expect(applyWithEnv).not.toThrow();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
kasmvnc/main.tf (new file, 63 lines)
@@ -0,0 +1,63 @@
|
|||||||
|
terraform {
|
||||||
|
required_version = ">= 1.0"
|
||||||
|
|
||||||
|
required_providers {
|
||||||
|
coder = {
|
||||||
|
source = "coder/coder"
|
||||||
|
version = ">= 0.12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "agent_id" {
|
||||||
|
type = string
|
||||||
|
description = "The ID of a Coder agent."
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "port" {
|
||||||
|
type = number
|
||||||
|
description = "The port to run KasmVNC on."
|
||||||
|
default = 6800
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "kasm_version" {
|
||||||
|
type = string
|
||||||
|
description = "Version of KasmVNC to install."
|
||||||
|
default = "1.3.2"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "desktop_environment" {
|
||||||
|
type = string
|
||||||
|
description = "Specifies the desktop environment of the workspace. This should be pre-installed on the workspace."
|
||||||
|
validation {
|
||||||
|
condition = contains(["xfce", "kde", "gnome", "lxde", "lxqt"], var.desktop_environment)
|
||||||
|
error_message = "Invalid desktop environment. Please specify a valid desktop environment."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "coder_script" "kasm_vnc" {
|
||||||
|
agent_id = var.agent_id
|
||||||
|
display_name = "KasmVNC"
|
||||||
|
icon = "/icon/kasmvnc.svg"
|
||||||
|
script = templatefile("${path.module}/run.sh", {
|
||||||
|
PORT : var.port,
|
||||||
|
DESKTOP_ENVIRONMENT : var.desktop_environment,
|
||||||
|
VERSION : var.kasm_version
|
||||||
|
})
|
||||||
|
run_on_start = true
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "coder_app" "kasm_vnc" {
|
||||||
|
agent_id = var.agent_id
|
||||||
|
slug = "kasm-vnc"
|
||||||
|
display_name = "kasmVNC"
|
||||||
|
url = "http://localhost:${var.port}"
|
||||||
|
icon = "/icon/kasmvnc.svg"
|
||||||
|
subdomain = true
|
||||||
|
share = "owner"
|
||||||
|
healthcheck {
|
||||||
|
url = "http://localhost:${var.port}/app"
|
||||||
|
interval = 5
|
||||||
|
threshold = 5
|
||||||
|
}
|
||||||
|
}
|
||||||
kasmvnc/run.sh (new file, 196 lines)
@@ -0,0 +1,196 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Function to check if vncserver is already installed
|
||||||
|
check_installed() {
|
||||||
|
if command -v vncserver &> /dev/null; then
|
||||||
|
echo "A binary with name vncserver already installed."
|
||||||
|
return 0 # Don't exit, just indicate it's installed
|
||||||
|
else
|
||||||
|
return 1 # Indicates not installed
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to download a file using wget, curl, or busybox as a fallback
|
||||||
|
download_file() {
|
||||||
|
local url=$1
|
||||||
|
local output=$2
|
||||||
|
if command -v wget &> /dev/null; then
|
||||||
|
wget $url -O $output
|
||||||
|
elif command -v curl &> /dev/null; then
|
||||||
|
curl -fsSL $url -o $output
|
||||||
|
elif command -v busybox &> /dev/null; then
|
||||||
|
busybox wget -O $output $url
|
||||||
|
else
|
||||||
|
echo "Neither wget, curl, nor busybox is installed. Please install one of them to proceed."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to install kasmvncserver for debian-based distros
|
||||||
|
install_deb() {
|
||||||
|
local url=$1
|
||||||
|
download_file $url /tmp/kasmvncserver.deb
|
||||||
|
sudo apt-get update
|
||||||
|
DEBIAN_FRONTEND=noninteractive sudo apt-get install --yes -qq --no-install-recommends --no-install-suggests /tmp/kasmvncserver.deb
|
||||||
|
sudo usermod -aG ssl-cert $USER
|
||||||
|
rm /tmp/kasmvncserver.deb
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to install kasmvncserver for Oracle 8
|
||||||
|
install_rpm_oracle8() {
|
||||||
|
local url=$1
|
||||||
|
download_file $url /tmp/kasmvncserver.rpm
|
||||||
|
sudo dnf config-manager --set-enabled ol8_codeready_builder
|
||||||
|
sudo dnf install oracle-epel-release-el8 -y
|
||||||
|
sudo dnf localinstall /tmp/kasmvncserver.rpm -y
|
||||||
|
sudo usermod -aG kasmvnc-cert $USER
|
||||||
|
rm /tmp/kasmvncserver.rpm
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to install kasmvncserver for CentOS 7
|
||||||
|
install_rpm_centos7() {
|
||||||
|
local url=$1
|
||||||
|
download_file $url /tmp/kasmvncserver.rpm
|
||||||
|
sudo yum install epel-release -y
|
||||||
|
sudo yum install /tmp/kasmvncserver.rpm -y
|
||||||
|
sudo usermod -aG kasmvnc-cert $USER
|
||||||
|
rm /tmp/kasmvncserver.rpm
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to install kasmvncserver for rpm-based distros
|
||||||
|
install_rpm() {
|
||||||
|
local url=$1
|
||||||
|
download_file $url /tmp/kasmvncserver.rpm
|
||||||
|
sudo rpm -i /tmp/kasmvncserver.rpm
|
||||||
|
rm /tmp/kasmvncserver.rpm
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to install kasmvncserver for Alpine Linux
|
||||||
|
install_alpine() {
|
||||||
|
local url=$1
|
||||||
|
download_file $url /tmp/kasmvncserver.tgz
|
||||||
|
tar -xzf /tmp/kasmvncserver.tgz -C /usr/local/bin/
|
||||||
|
rm /tmp/kasmvncserver.tgz
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check if vncserver is installed, and install if not
|
||||||
|
if ! check_installed; then
|
||||||
|
# Detect system information
|
||||||
|
distro=$(grep "^ID=" /etc/os-release | awk -F= '{print $2}')
|
||||||
|
version=$(grep "^VERSION_ID=" /etc/os-release | awk -F= '{print $2}' | tr -d '"')
|
||||||
|
arch=$(uname -m)
|
||||||
|
|
||||||
|
echo "Detected Distribution: $distro"
|
||||||
|
echo "Detected Version: $version"
|
||||||
|
echo "Detected Architecture: $arch"
|
||||||
|
|
||||||
|
# Map arch to package arch
|
||||||
|
if [[ "$arch" == "x86_64" ]]; then
|
||||||
|
if [[ "$distro" == "ubuntu" || "$distro" == "debian" || "$distro" == "kali" ]]; then
|
||||||
|
arch="amd64"
|
||||||
|
else
|
||||||
|
arch="x86_64"
|
||||||
|
fi
|
||||||
|
elif [[ "$arch" == "aarch64" || "$arch" == "arm64" ]]; then
|
||||||
|
if [[ "$distro" == "ubuntu" || "$distro" == "debian" || "$distro" == "kali" ]]; then
|
||||||
|
arch="arm64"
|
||||||
|
else
|
||||||
|
arch="aarch64"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "Unsupported architecture: $arch"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Installing KASM version: ${VERSION}"
|
||||||
|
case $distro in
|
||||||
|
ubuntu | debian | kali)
|
||||||
|
case $version in
|
||||||
|
"20.04")
|
||||||
|
install_deb "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_focal_${VERSION}_$${arch}.deb"
|
||||||
|
;;
|
||||||
|
"22.04")
|
||||||
|
install_deb "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_jammy_${VERSION}_$${arch}.deb"
|
||||||
|
;;
|
||||||
|
"24.04")
|
||||||
|
install_deb "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_noble_${VERSION}_$${arch}.deb"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unsupported Ubuntu/Debian/Kali version: $${version}"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
;;
|
||||||
|
oracle)
|
||||||
|
if [[ "$version" == "8" ]]; then
|
||||||
|
install_rpm_oracle8 "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_oracle_8_${VERSION}_$${arch}.rpm"
|
||||||
|
else
|
||||||
|
echo "Unsupported Oracle version: $${version}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
centos)
|
||||||
|
if [[ "$version" == "7" ]]; then
|
||||||
|
install_rpm_centos7 "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_centos_core_${VERSION}_$${arch}.rpm"
|
||||||
|
else
|
||||||
|
install_rpm "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_centos_core_${VERSION}_$${arch}.rpm"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
alpine)
|
||||||
|
if [[ "$version" == "3.17" || "$version" == "3.18" || "$version" == "3.19" || "$version" == "3.20" ]]; then
|
||||||
|
install_alpine "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvnc.alpine_$${version}_$${arch}.tgz"
|
||||||
|
else
|
||||||
|
echo "Unsupported Alpine version: $${version}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
fedora | opensuse)
|
||||||
|
install_rpm "https://github.com/kasmtech/KasmVNC/releases/download/v${VERSION}/kasmvncserver_$${distro}_$${version}_${VERSION}_$${arch}.rpm"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unsupported distribution: $${distro}"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
else
|
||||||
|
echo "Skipping installation."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Try to create /etc/kasmvnc/kasmvnc.yaml system-wide
|
||||||
|
# we don't fail as some images might be missing sudo permissions
|
||||||
|
sudo mkdir -p /etc/kasmvnc || true
|
||||||
|
sudo bash -c "cat > /etc/kasmvnc/kasmvnc.yaml <<EOF
|
||||||
|
network:
|
||||||
|
protocol: http
|
||||||
|
websocket_port: ${PORT}
|
||||||
|
ssl:
|
||||||
|
require_ssl: false
|
||||||
|
udp:
|
||||||
|
public_ip: 127.0.0.1
|
||||||
|
EOF" || true
|
||||||
|
|
||||||
|
|
||||||
|
# There could already be a config file in the image at /etc/kasmvnc/kasmvnc.yaml, but we need to set the websocket port
|
||||||
|
mkdir -p "$HOME/.vnc"
|
||||||
|
cat > "$HOME/.vnc/kasmvnc.yaml" <<EOF
|
||||||
|
network:
|
||||||
|
protocol: http
|
||||||
|
websocket_port: ${PORT}
|
||||||
|
ssl:
|
||||||
|
require_ssl: false
|
||||||
|
pem_certificate:
|
||||||
|
pem_key:
|
||||||
|
udp:
|
||||||
|
public_ip: 127.0.0.1
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# This password is not used since we start the server without auth.
|
||||||
|
# The server is protected via the Coder session token / tunnel
|
||||||
|
# and does not listen publicly
|
||||||
|
echo -e "password\npassword\n" | vncpasswd -wo -u $USER
|
||||||
|
|
||||||
|
# Start the server
|
||||||
|
printf "🚀 Starting KasmVNC server...\n"
|
||||||
|
vncserver -select-de ${DESKTOP_ENVIRONMENT} -disableBasicAuth > /tmp/kasmvncserver.log 2>&1 &
|
||||||
lint.ts
@@ -5,14 +5,15 @@ import grayMatter from "gray-matter";
 
 const files = await readdir(".", { withFileTypes: true });
 const dirs = files.filter(
-  (f) => f.isDirectory() && !f.name.startsWith(".") && f.name !== "node_modules"
+  (f) =>
+    f.isDirectory() && !f.name.startsWith(".") && f.name !== "node_modules",
 );
 
 let badExit = false;
 
 // error reports an error to the console and sets badExit to true
 // so that the process will exit with a non-zero exit code.
-const error = (...data: any[]) => {
+const error = (...data: unknown[]) => {
   console.error(...data);
   badExit = true;
 };
@@ -22,7 +23,7 @@ const verifyCodeBlocks = (
   res = {
     codeIsTF: false,
     codeIsHCL: false,
-  }
+  },
 ) => {
   for (const token of tokens) {
     // Check in-depth.
@@ -30,7 +31,12 @@ const verifyCodeBlocks = (
       verifyCodeBlocks(token.items, res);
       continue;
     }
 
     if (token.type === "list_item") {
+      if (token.tokens === undefined) {
+        throw new Error("Tokens are missing for type list_item");
+      }
+
       verifyCodeBlocks(token.tokens, res);
       continue;
     }
@@ -80,8 +86,9 @@ for (const dir of dirs) {
   if (!data.maintainer_github) {
     error(dir.name, "missing maintainer_github");
   }
+
   try {
-    await stat(path.join(".", dir.name, data.icon));
+    await stat(path.join(".", dir.name, data.icon ?? ""));
   } catch (ex) {
     error(dir.name, "icon does not exist", data.icon);
   }
|
|||||||
nodejs/main.test.ts
@@ -1,4 +1,4 @@
-import { describe, expect, it } from "bun:test";
+import { describe } from "bun:test";
 import { runTerraformInit, testRequiredVariables } from "../test";
 
 describe("nodejs", async () => {
|
|||||||
package.json
@@ -8,14 +8,15 @@
     "update-version": "./update-version.sh"
   },
   "devDependencies": {
-    "bun-types": "^1.0.18",
+    "bun-types": "^1.1.23",
     "gray-matter": "^4.0.3",
-    "marked": "^12.0.0",
+    "marked": "^12.0.2",
+    "prettier": "^3.3.3",
     "prettier-plugin-sh": "^0.13.1",
     "prettier-plugin-terraform-formatter": "^1.2.1"
   },
   "peerDependencies": {
-    "typescript": "^5.3.3"
+    "typescript": "^5.5.4"
   },
   "prettier": {
     "plugins": [
|
|||||||
personalize/main.test.ts
@@ -1,13 +1,9 @@
-import { readableStreamToText, spawn } from "bun";
 import { describe, expect, it } from "bun:test";
 import {
   executeScriptInContainer,
   runTerraformApply,
   runTerraformInit,
   testRequiredVariables,
-  runContainer,
-  execContainer,
-  findResourceInstance,
 } from "../test";
 
 describe("personalize", async () => {
|
|||||||
@@ -8,6 +8,7 @@ import {
|
|||||||
runTerraformApply,
|
runTerraformApply,
|
||||||
runTerraformInit,
|
runTerraformInit,
|
||||||
testRequiredVariables,
|
testRequiredVariables,
|
||||||
|
writeCoder,
|
||||||
} from "../test";
|
} from "../test";
|
||||||
|
|
||||||
describe("slackme", async () => {
|
describe("slackme", async () => {
|
||||||
@@ -71,7 +72,7 @@ executed`,
|
|||||||
it("formats execution with milliseconds", async () => {
|
it("formats execution with milliseconds", async () => {
|
||||||
await assertSlackMessage({
|
await assertSlackMessage({
|
||||||
command: "echo test",
|
command: "echo test",
|
||||||
format: `$COMMAND took $DURATION`,
|
format: "$COMMAND took $DURATION",
|
||||||
durationMS: 150,
|
durationMS: 150,
|
||||||
output: "echo test took 150ms",
|
output: "echo test took 150ms",
|
||||||
});
|
});
|
||||||
@@ -80,7 +81,7 @@ executed`,
|
|||||||
it("formats execution with seconds", async () => {
|
it("formats execution with seconds", async () => {
|
||||||
await assertSlackMessage({
|
await assertSlackMessage({
|
||||||
command: "echo test",
|
command: "echo test",
|
||||||
format: `$COMMAND took $DURATION`,
|
format: "$COMMAND took $DURATION",
|
||||||
durationMS: 15000,
|
durationMS: 15000,
|
||||||
output: "echo test took 15.0s",
|
output: "echo test took 15.0s",
|
||||||
});
|
});
|
||||||
@@ -89,7 +90,7 @@ executed`,
|
|||||||
it("formats execution with minutes", async () => {
|
it("formats execution with minutes", async () => {
|
||||||
await assertSlackMessage({
|
await assertSlackMessage({
|
||||||
command: "echo test",
|
command: "echo test",
|
||||||
format: `$COMMAND took $DURATION`,
|
format: "$COMMAND took $DURATION",
|
||||||
durationMS: 120000,
|
durationMS: 120000,
|
||||||
output: "echo test took 2m 0.0s",
|
output: "echo test took 2m 0.0s",
|
||||||
});
|
});
|
||||||
@@ -98,7 +99,7 @@ executed`,
|
|||||||
it("formats execution with hours", async () => {
|
it("formats execution with hours", async () => {
|
||||||
await assertSlackMessage({
|
await assertSlackMessage({
|
||||||
command: "echo test",
|
command: "echo test",
|
||||||
format: `$COMMAND took $DURATION`,
|
format: "$COMMAND took $DURATION",
|
||||||
durationMS: 60000 * 60,
|
durationMS: 60000 * 60,
|
||||||
output: "echo test took 1hr 0m 0.0s",
|
output: "echo test took 1hr 0m 0.0s",
|
||||||
});
|
});
|
||||||
@@ -119,22 +120,16 @@ const setupContainer = async (
|
|||||||
return { id, instance };
|
return { id, instance };
|
||||||
};
|
};
|
||||||
|
|
||||||
const writeCoder = async (id: string, script: string) => {
|
|
||||||
const exec = await execContainer(id, [
|
|
||||||
"sh",
|
|
||||||
"-c",
|
|
||||||
`echo '${script}' > /usr/bin/coder && chmod +x /usr/bin/coder`,
|
|
||||||
]);
|
|
||||||
expect(exec.exitCode).toBe(0);
|
|
||||||
};
|
|
||||||
|
|
||||||
const assertSlackMessage = async (opts: {
|
const assertSlackMessage = async (opts: {
|
||||||
command: string;
|
command: string;
|
||||||
format?: string;
|
format?: string;
|
||||||
durationMS?: number;
|
durationMS?: number;
|
||||||
output: string;
|
output: string;
|
||||||
}) => {
|
}) => {
|
||||||
let url: URL;
|
// Have to use non-null assertion because TS can't tell when the fetch
|
||||||
|
// function will run
|
||||||
|
let url!: URL;
|
||||||
|
|
||||||
const fakeSlackHost = serve({
|
const fakeSlackHost = serve({
|
||||||
fetch: (req) => {
|
fetch: (req) => {
|
||||||
url = new URL(req.url);
|
url = new URL(req.url);
|
||||||
@@ -146,15 +141,16 @@ const assertSlackMessage = async (opts: {
|
|||||||
},
|
},
|
||||||
port: 0,
|
port: 0,
|
||||||
});
|
});
|
||||||
|
|
||||||
const { instance, id } = await setupContainer(
|
const { instance, id } = await setupContainer(
|
||||||
"alpine/curl",
|
"alpine/curl",
|
||||||
opts.format && {
|
opts.format ? { slack_message: opts.format } : undefined,
|
||||||
slack_message: opts.format,
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
await writeCoder(id, "echo 'token'");
|
await writeCoder(id, "echo 'token'");
|
||||||
let exec = await execContainer(id, ["sh", "-c", instance.script]);
|
let exec = await execContainer(id, ["sh", "-c", instance.script]);
|
||||||
expect(exec.exitCode).toBe(0);
|
expect(exec.exitCode).toBe(0);
|
||||||
|
|
||||||
exec = await execContainer(id, [
|
exec = await execContainer(id, [
|
||||||
"sh",
|
"sh",
|
||||||
"-c",
|
"-c",
|
||||||
@@ -162,6 +158,7 @@ const assertSlackMessage = async (opts: {
|
|||||||
fakeSlackHost.hostname
|
fakeSlackHost.hostname
|
||||||
}:${fakeSlackHost.port}" slackme ${opts.command}`,
|
}:${fakeSlackHost.port}" slackme ${opts.command}`,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
expect(exec.stderr.trim()).toBe("");
|
expect(exec.stderr.trim()).toBe("");
|
||||||
expect(url.pathname).toEqual("/api/chat.postMessage");
|
expect(url.pathname).toEqual("/api/chat.postMessage");
|
||||||
expect(url.searchParams.get("channel")).toEqual("token");
|
expect(url.searchParams.get("channel")).toEqual("token");
|
||||||
|
|||||||
test.ts
@@ -1,6 +1,6 @@
|
|||||||
import { readableStreamToText, spawn } from "bun";
|
import { readableStreamToText, spawn } from "bun";
|
||||||
import { afterEach, expect, it } from "bun:test";
|
import { expect, it } from "bun:test";
|
||||||
import { readFile, unlink } from "fs/promises";
|
import { readFile, unlink } from "node:fs/promises";
|
||||||
|
|
||||||
export const runContainer = async (
|
export const runContainer = async (
|
||||||
image: string,
|
image: string,
|
||||||
@@ -21,7 +21,8 @@ export const runContainer = async (
|
|||||||
"-c",
|
"-c",
|
||||||
init,
|
init,
|
||||||
]);
|
]);
|
||||||
let containerID = await readableStreamToText(proc.stdout);
|
|
||||||
|
const containerID = await readableStreamToText(proc.stdout);
|
||||||
const exitCode = await proc.exited;
|
const exitCode = await proc.exited;
|
||||||
if (exitCode !== 0) {
|
if (exitCode !== 0) {
|
||||||
throw new Error(containerID);
|
throw new Error(containerID);
|
||||||
@@ -29,12 +30,14 @@ export const runContainer = async (
|
|||||||
return containerID.trim();
|
return containerID.trim();
|
||||||
};
|
};
|
||||||
|
|
||||||
// executeScriptInContainer finds the only "coder_script"
|
/**
|
||||||
// resource in the given state and runs it in a container.
|
* Finds the only "coder_script" resource in the given state and runs it in a
|
||||||
|
* container.
|
||||||
|
*/
|
||||||
export const executeScriptInContainer = async (
|
export const executeScriptInContainer = async (
|
||||||
state: TerraformState,
|
state: TerraformState,
|
||||||
image: string,
|
image: string,
|
||||||
shell: string = "sh",
|
shell = "sh",
|
||||||
): Promise<{
|
): Promise<{
|
||||||
exitCode: number;
|
exitCode: number;
|
||||||
stdout: string[];
|
stdout: string[];
|
||||||
@@ -76,46 +79,56 @@ export const execContainer = async (
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
export interface TerraformState {
|
type JsonValue =
|
||||||
outputs: {
|
| string
|
||||||
[key: string]: {
|
| number
|
||||||
type: string;
|
| boolean
|
||||||
value: any;
|
| null
|
||||||
};
|
| JsonValue[]
|
||||||
}
|
| { [key: string]: JsonValue };
|
||||||
resources: [
|
|
||||||
{
|
type TerraformStateResource = {
|
||||||
type: string;
|
type: string;
|
||||||
name: string;
|
name: string;
|
||||||
provider: string;
|
provider: string;
|
||||||
|
|
||||||
instances: [
|
instances: [
|
||||||
{
|
{
|
||||||
attributes: {
|
attributes: Record<string, JsonValue>;
|
||||||
[key: string]: any;
|
},
|
||||||
|
];
|
||||||
};
|
};
|
||||||
},
|
|
||||||
];
|
type TerraformOutput = {
|
||||||
},
|
type: string;
|
||||||
];
|
value: JsonValue;
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface TerraformState {
|
||||||
|
outputs: Record<string, TerraformOutput>;
|
||||||
|
resources: [TerraformStateResource, ...TerraformStateResource[]];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type TerraformVariables = Record<string, JsonValue>;
|
||||||
|
|
||||||
export interface CoderScriptAttributes {
|
export interface CoderScriptAttributes {
|
||||||
script: string;
|
script: string;
|
||||||
agent_id: string;
|
agent_id: string;
|
||||||
url: string;
|
url: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// findResourceInstance finds the first instance of the given resource
|
export type ResourceInstance<T extends string = string> =
|
||||||
// type in the given state. If name is specified, it will only find
|
T extends "coder_script" ? CoderScriptAttributes : Record<string, string>;
|
||||||
// the instance with the given name.
|
|
||||||
export const findResourceInstance = <T extends "coder_script" | string>(
|
/**
|
||||||
|
* finds the first instance of the given resource type in the given state. If
|
||||||
|
* name is specified, it will only find the instance with the given name.
|
||||||
|
*/
|
||||||
|
export const findResourceInstance = <T extends string>(
|
||||||
state: TerraformState,
|
state: TerraformState,
|
||||||
type: T,
|
type: T,
|
||||||
name?: string,
|
name?: string,
|
||||||
// if type is "coder_script" return CoderScriptAttributes
|
): ResourceInstance<T> => {
|
||||||
): T extends "coder_script"
|
|
||||||
? CoderScriptAttributes
|
|
||||||
: Record<string, string> => {
|
|
||||||
const resource = state.resources.find(
|
const resource = state.resources.find(
|
||||||
(resource) =>
|
(resource) =>
|
||||||
resource.type === type && (name ? resource.name === name : true),
|
resource.type === type && (name ? resource.name === name : true),
|
||||||
@@ -128,33 +141,41 @@ export const findResourceInstance = <T extends "coder_script" | string>(
|
|||||||
`Resource ${type} has ${resource.instances.length} instances`,
|
`Resource ${type} has ${resource.instances.length} instances`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
return resource.instances[0].attributes as any;
|
|
||||||
|
return resource.instances[0].attributes as ResourceInstance<T>;
|
||||||
};
|
};
|
||||||
|
|
||||||
// testRequiredVariables creates a test-case
|
/**
|
||||||
// for each variable provided and ensures that
|
* Creates a test-case for each variable provided and ensures that the apply
|
||||||
// the apply fails without it.
|
* fails without it.
|
||||||
export const testRequiredVariables = (
|
*/
|
||||||
|
export const testRequiredVariables = <TVars extends TerraformVariables>(
|
||||||
dir: string,
|
dir: string,
|
||||||
vars: Record<string, string>,
|
vars: Readonly<TVars>,
|
||||||
) => {
|
) => {
|
||||||
// Ensures that all required variables are provided.
|
// Ensures that all required variables are provided.
|
||||||
it("required variables", async () => {
|
it("required variables", async () => {
|
||||||
await runTerraformApply(dir, vars);
|
await runTerraformApply(dir, vars);
|
||||||
});
|
});
|
||||||
|
|
||||||
const varNames = Object.keys(vars);
|
const varNames = Object.keys(vars);
|
||||||
varNames.forEach((varName) => {
|
for (const varName of varNames) {
|
||||||
// Ensures that every variable provided is required!
|
// Ensures that every variable provided is required!
|
||||||
it("missing variable " + varName, async () => {
|
it(`missing variable: ${varName}`, async () => {
|
||||||
const localVars = {};
|
const localVars: TerraformVariables = {};
|
-      varNames.forEach((otherVarName) => {
+      for (const otherVarName of varNames) {
         if (otherVarName !== varName) {
           localVars[otherVarName] = vars[otherVarName];
         }
-      });
+      }

       try {
         await runTerraformApply(dir, localVars);
       } catch (ex) {
+        if (!(ex instanceof Error)) {
+          throw new Error("Unknown error generated");
+        }
+
         expect(ex.message).toContain(
           `input variable \"${varName}\" is not set`,
         );
@@ -162,19 +183,26 @@ export const testRequiredVariables = (
       }
       throw new Error(`${varName} is not a required variable!`);
     });
-  });
+  }
 };

-// runTerraformApply runs terraform apply in the given directory
-// with the given variables. It is fine to run in parallel with
-// other instances of this function, as it uses a random state file.
-export const runTerraformApply = async (
+/**
+ * Runs terraform apply in the given directory with the given variables. It is
+ * fine to run in parallel with other instances of this function, as it uses a
+ * random state file.
+ */
+export const runTerraformApply = async <TVars extends TerraformVariables>(
   dir: string,
-  vars: Record<string, string>,
+  vars: Readonly<TVars>,
+  env?: Record<string, string>,
 ): Promise<TerraformState> => {
   const stateFile = `${dir}/${crypto.randomUUID()}.tfstate`;
-  const env = {};
-  Object.keys(vars).forEach((key) => (env[`TF_VAR_${key}`] = vars[key]));
+  const combinedEnv = env === undefined ? {} : { ...env };
+  for (const [key, value] of Object.entries(vars)) {
+    combinedEnv[`TF_VAR_${key}`] = String(value);
+  }
+
   const proc = spawn(
     [
       "terraform",
@@ -188,22 +216,26 @@ export const runTerraformApply = async (
     ],
     {
       cwd: dir,
-      env,
+      env: combinedEnv,
       stderr: "pipe",
       stdout: "pipe",
     },
   );

   const text = await readableStreamToText(proc.stderr);
   const exitCode = await proc.exited;
   if (exitCode !== 0) {
     throw new Error(text);
   }

   const content = await readFile(stateFile, "utf8");
   await unlink(stateFile);
   return JSON.parse(content);
 };

-// runTerraformInit runs terraform init in the given directory.
+/**
+ * Runs terraform init in the given directory.
+ */
 export const runTerraformInit = async (dir: string) => {
   const proc = spawn(["terraform", "init"], {
     cwd: dir,
@@ -221,5 +253,14 @@ export const createJSONResponse = (obj: object, statusCode = 200): Response => {
       "Content-Type": "application/json",
     },
     status: statusCode,
-  })
-}
+  });
+};
+
+export const writeCoder = async (id: string, script: string) => {
+  const exec = await execContainer(id, [
+    "sh",
+    "-c",
+    `echo '${script}' > /usr/bin/coder && chmod +x /usr/bin/coder`,
+  ]);
+  expect(exec.exitCode).toBe(0);
+};
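For context, a hypothetical sketch (not part of this diff) of how a module test might call the updated helper: `vars` is now typed per call site, and the new optional `env` parameter lets a test pass extra process environment, which is copied first before the `TF_VAR_*` entries derived from `vars` are added. The variable values and the `TF_LOG` entry below are illustrative assumptions.

```ts
import { expect, it } from "bun:test";
import { runTerraformApply, runTerraformInit } from "../test";

it("applies with an extra env var", async () => {
  await runTerraformInit(import.meta.dir);

  // Third argument is the new optional env record; TF_VAR_* entries derived
  // from the vars object are layered on top of it.
  const state = await runTerraformApply(
    import.meta.dir,
    { agent_id: "foo" },
    { TF_LOG: "ERROR" }, // assumed example; any extra process env would work
  );

  expect(state.resources).toBeDefined();
});
```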
@@ -1,9 +1,14 @@
 {
   "compilerOptions": {
-    "target": "esnext",
-    "module": "esnext",
+    // If we were just compiling for the tests, we could safely target ESNext at
+    // all times, but just because we've been starting to add more runtime logic
+    // files to some of the modules, erring on the side of caution by having a
+    // older compilation target
+    "target": "ES6",
+    "module": "ESNext",
+    "strict": true,
     "allowSyntheticDefaultImports": true,
-    "moduleResolution": "nodenext",
+    "moduleResolution": "node",
     "types": ["bun-types"]
   }
 }
@@ -1,20 +1,24 @@
 #!/usr/bin/env bash

-# This script updates the version number in the README.md files of all modules
-# to the latest tag in the repository. It is intended to be run from the root
+# This script increments the version number in the README.md files of all modules
+# by 1 patch version. It is intended to be run from the root
 # of the repository or by using the `bun update-version` command.

 set -euo pipefail

 current_tag=$(git describe --tags --abbrev=0)
-previous_tag=$(git describe --tags --abbrev=0 $current_tag^)
-mapfile -t changed_dirs < <(git diff --name-only "$previous_tag"..."$current_tag" -- ':!**/README.md' ':!**/*.test.ts' | xargs dirname | grep -v '^\.' | sort -u)

-LATEST_TAG=$(git describe --abbrev=0 --tags | sed 's/^v//') || exit $?
+# Increment the patch version
+LATEST_TAG=$(echo "$current_tag" | sed 's/^v//' | awk -F. '{print $1"."$2"."$3+1}') || exit $?
+
+# List directories with changes that are not README.md or test files
+mapfile -t changed_dirs < <(git diff --name-only "$current_tag" -- ':!**/README.md' ':!**/*.test.ts' | xargs dirname | grep -v '^\.' | sort -u)
+
+echo "Directories with changes: ${changed_dirs[*]}"
+
+# Iterate over directories and update version in README.md
 for dir in "${changed_dirs[@]}"; do
   if [[ -f "$dir/README.md" ]]; then
-    echo "Bumping version in $dir/README.md"
     file="$dir/README.md"
     tmpfile=$(mktemp /tmp/tempfile.XXXXXX)
     awk -v tag="$LATEST_TAG" '{
@@ -25,5 +29,12 @@ for dir in "${changed_dirs[@]}"; do
       print
     }
   }' "$file" > "$tmpfile" && mv "$tmpfile" "$file"
+
+    # Check if the README.md file has changed
+    if ! git diff --quiet -- "$dir/README.md"; then
+      echo "Bumping version in $dir/README.md from $current_tag to $LATEST_TAG (incremented)"
+    else
+      echo "Version in $dir/README.md is already up to date"
+    fi
   fi
 done
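For clarity, a minimal sketch of the patch-bump rule the awk one-liner above implements, written here in TypeScript and assuming plain `vX.Y.Z` tags: strip the leading `v` and increment the last component.

```ts
// Equivalent of: sed 's/^v//' | awk -F. '{print $1"."$2"."$3+1}'
function bumpPatch(tag: string): string {
  const [major, minor, patch] = tag.replace(/^v/, "").split(".").map(Number);
  return `${major}.${minor}.${patch + 1}`;
}

console.log(bumpPatch("v1.0.19")); // "1.0.20"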
vault-jwt/README.md (new file, 77 lines)
@@ -0,0 +1,77 @@
---
display_name: Hashicorp Vault Integration (JWT)
description: Authenticates with Vault using a JWT from Coder's OIDC provider
icon: ../.icons/vault.svg
maintainer_github: coder
partner_github: hashicorp
verified: true
tags: [helper, integration, vault, jwt, oidc]
---

# Hashicorp Vault Integration (JWT)

This module lets you authenticate with [Hashicorp Vault](https://www.vaultproject.io/) in your Coder workspaces by reusing the [OIDC](https://coder.com/docs/admin/auth#openid-connect) access token from Coder's OIDC authentication method. This requires configuring the Vault [JWT/OIDC](https://developer.hashicorp.com/vault/docs/auth/jwt#configuration) auth method.

```tf
module "vault" {
  source         = "registry.coder.com/modules/vault-jwt/coder"
  version        = "1.0.20"
  agent_id       = coder_agent.example.id
  vault_addr     = "https://vault.example.com"
  vault_jwt_role = "coder" # The Vault role to use for authentication
}
```

Then you can use the Vault CLI in your workspaces to fetch secrets from Vault:

```shell
vault kv get -namespace=coder -mount=secrets coder
```

or using the Vault API:

```shell
curl -H "X-Vault-Token: ${VAULT_TOKEN}" -X GET "${VAULT_ADDR}/v1/coder/secrets/data/coder"
```

## Examples

### Configure Vault integration with a non standard auth path (default is "jwt")

```tf
module "vault" {
  source              = "registry.coder.com/modules/vault-jwt/coder"
  version             = "1.0.20"
  agent_id            = coder_agent.example.id
  vault_addr          = "https://vault.example.com"
  vault_jwt_auth_path = "oidc"
  vault_jwt_role      = "coder" # The Vault role to use for authentication
}
```

### Map workspace owner's group to a Vault role

```tf
data "coder_workspace_owner" "me" {}

module "vault" {
  source         = "registry.coder.com/modules/vault-jwt/coder"
  version        = "1.0.20"
  agent_id       = coder_agent.example.id
  vault_addr     = "https://vault.example.com"
  vault_jwt_role = data.coder_workspace_owner.me.groups[0]
}
```

### Install a specific version of the Vault CLI

```tf
module "vault" {
  source            = "registry.coder.com/modules/vault-jwt/coder"
  version           = "1.0.20"
  agent_id          = coder_agent.example.id
  vault_addr        = "https://vault.example.com"
  vault_jwt_role    = "coder" # The Vault role to use for authentication
  vault_cli_version = "1.17.5"
}
```
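As a rough illustration of what the CLI login in this module performs under the hood, the sketch below does the JWT login exchange against Vault's HTTP API. It assumes `VAULT_ADDR`, a role named `coder`, the default `jwt` mount path, and an OIDC access token in `CODER_OIDC_ACCESS_TOKEN`; it is a hedged sketch, not part of the module.

```ts
// Sketch of the HTTP exchange behind `vault write auth/<path>/login role=<role> jwt=-`:
// Vault verifies the JWT against the configured OIDC provider and returns a
// client token that can be sent as X-Vault-Token on later requests.
async function vaultJwtLogin(): Promise<string> {
  const vaultAddr = process.env.VAULT_ADDR ?? "https://vault.example.com"; // assumed
  const authPath = "jwt"; // module default; override via vault_jwt_auth_path
  const role = "coder"; // assumed role name
  const jwt = process.env.CODER_OIDC_ACCESS_TOKEN ?? "";

  const res = await fetch(`${vaultAddr}/v1/auth/${authPath}/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ role, jwt }),
  });
  if (!res.ok) {
    throw new Error(`Vault login failed with status ${res.status}`);
  }
  const body = await res.json();
  return body.auth.client_token as string;
}

vaultJwtLogin().then((token) => console.log(token.length > 0));
```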
vault-jwt/main.test.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { describe } from "bun:test";
import { runTerraformInit, testRequiredVariables } from "../test";

describe("vault-jwt", async () => {
  await runTerraformInit(import.meta.dir);

  testRequiredVariables(import.meta.dir, {
    agent_id: "foo",
    vault_addr: "foo",
    vault_jwt_role: "foo",
  });
});
vault-jwt/main.tf (new file, 64 lines)
@@ -0,0 +1,64 @@
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.12.4"
    }
  }
}

# Add required variables for your modules and remove any unneeded variables
variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "vault_addr" {
  type        = string
  description = "The address of the Vault server."
}

variable "vault_jwt_auth_path" {
  type        = string
  description = "The path to the Vault JWT auth method."
  default     = "jwt"
}

variable "vault_jwt_role" {
  type        = string
  description = "The name of the Vault role to use for authentication."
}

variable "vault_cli_version" {
  type        = string
  description = "The version of Vault to install."
  default     = "latest"
  validation {
    condition     = can(regex("^(latest|[0-9]+\\.[0-9]+\\.[0-9]+)$", var.vault_cli_version))
    error_message = "Vault version must be in the format 0.0.0 or latest"
  }
}

resource "coder_script" "vault" {
  agent_id     = var.agent_id
  display_name = "Vault (GitHub)"
  icon         = "/icon/vault.svg"
  script = templatefile("${path.module}/run.sh", {
    CODER_OIDC_ACCESS_TOKEN : data.coder_workspace_owner.me.oidc_access_token,
    VAULT_JWT_AUTH_PATH : var.vault_jwt_auth_path,
    VAULT_JWT_ROLE : var.vault_jwt_role,
    VAULT_CLI_VERSION : var.vault_cli_version,
  })
  run_on_start       = true
  start_blocks_login = true
}

resource "coder_env" "vault_addr" {
  agent_id = var.agent_id
  name     = "VAULT_ADDR"
  value    = var.vault_addr
}

data "coder_workspace_owner" "me" {}
vault-jwt/run.sh (new file, 112 lines)
@@ -0,0 +1,112 @@
#!/usr/bin/env bash

# Convert all templated variables to shell variables
VAULT_CLI_VERSION=${VAULT_CLI_VERSION}
VAULT_JWT_AUTH_PATH=${VAULT_JWT_AUTH_PATH}
VAULT_JWT_ROLE=${VAULT_JWT_ROLE}
CODER_OIDC_ACCESS_TOKEN=${CODER_OIDC_ACCESS_TOKEN}

fetch() {
  dest="$1"
  url="$2"
  if command -v curl > /dev/null 2>&1; then
    curl -sSL --fail "$${url}" -o "$${dest}"
  elif command -v wget > /dev/null 2>&1; then
    wget -O "$${dest}" "$${url}"
  elif command -v busybox > /dev/null 2>&1; then
    busybox wget -O "$${dest}" "$${url}"
  else
    printf "curl, wget, or busybox is not installed. Please install curl or wget in your image.\n"
    exit 1
  fi
}

unzip_safe() {
  if command -v unzip > /dev/null 2>&1; then
    command unzip "$@"
  elif command -v busybox > /dev/null 2>&1; then
    busybox unzip "$@"
  else
    printf "unzip or busybox is not installed. Please install unzip in your image.\n"
    exit 1
  fi
}

install() {
  # Get the architecture of the system
  ARCH=$(uname -m)
  if [ "$${ARCH}" = "x86_64" ]; then
    ARCH="amd64"
  elif [ "$${ARCH}" = "aarch64" ]; then
    ARCH="arm64"
  else
    printf "Unsupported architecture: $${ARCH}\n"
    return 1
  fi
  # Fetch the latest version of Vault if VAULT_CLI_VERSION is 'latest'
  if [ "$${VAULT_CLI_VERSION}" = "latest" ]; then
    LATEST_VERSION=$(curl -s https://releases.hashicorp.com/vault/ | grep -v 'rc' | grep -oE 'vault/[0-9]+\.[0-9]+\.[0-9]+' | sed 's/vault\///' | sort -V | tail -n 1)
    printf "Latest version of Vault is %s.\n\n" "$${LATEST_VERSION}"
    if [ -z "$${LATEST_VERSION}" ]; then
      printf "Failed to determine the latest Vault version.\n"
      return 1
    fi
    VAULT_CLI_VERSION=$${LATEST_VERSION}
  fi

  # Check if the vault CLI is installed and has the correct version
  installation_needed=1
  if command -v vault > /dev/null 2>&1; then
    CURRENT_VERSION=$(vault version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
    if [ "$${CURRENT_VERSION}" = "$${VAULT_CLI_VERSION}" ]; then
      printf "Vault version %s is already installed and up-to-date.\n\n" "$${CURRENT_VERSION}"
      installation_needed=0
    fi
  fi

  if [ $${installation_needed} -eq 1 ]; then
    # Download and install Vault
    if [ -z "$${CURRENT_VERSION}" ]; then
      printf "Installing Vault CLI ...\n\n"
    else
      printf "Upgrading Vault CLI from version %s to %s ...\n\n" "$${CURRENT_VERSION}" "${VAULT_CLI_VERSION}"
    fi
    fetch vault.zip "https://releases.hashicorp.com/vault/$${VAULT_CLI_VERSION}/vault_$${VAULT_CLI_VERSION}_linux_$${ARCH}.zip"
    if [ $? -ne 0 ]; then
      printf "Failed to download Vault.\n"
      return 1
    fi
    if ! unzip_safe vault.zip; then
      printf "Failed to unzip Vault.\n"
      return 1
    fi
    rm vault.zip
    if sudo mv vault /usr/local/bin/vault 2> /dev/null; then
      printf "Vault installed successfully!\n\n"
    else
      mkdir -p ~/.local/bin
      if ! mv vault ~/.local/bin/vault; then
        printf "Failed to move Vault to local bin.\n"
        return 1
      fi
      printf "Please add ~/.local/bin to your PATH to use vault CLI.\n"
    fi
  fi
  return 0
}

TMP=$(mktemp -d)
if ! (
  cd "$TMP"
  install
); then
  echo "Failed to install Vault CLI."
  exit 1
fi
rm -rf "$TMP"

# Authenticate with Vault
printf "🔑 Authenticating with Vault ...\n\n"
echo "$${CODER_OIDC_ACCESS_TOKEN}" | vault write auth/"$${VAULT_JWT_AUTH_PATH}"/login role="$${VAULT_JWT_ROLE}" jwt=-
printf "🥳 Vault authentication complete!\n\n"
printf "You can now use Vault CLI to access secrets.\n"
@@ -16,7 +16,7 @@ Uses the [Coder Remote VS Code Extension](https://github.com/coder/vscode-coder)
 ```tf
 module "vscode" {
   source   = "registry.coder.com/modules/vscode-desktop/coder"
-  version  = "1.0.8"
+  version  = "1.0.15"
   agent_id = coder_agent.example.id
 }
 ```
@@ -28,7 +28,7 @@ module "vscode" {
 ```tf
 module "vscode" {
   source   = "registry.coder.com/modules/vscode-desktop/coder"
-  version  = "1.0.8"
+  version  = "1.0.15"
   agent_id = coder_agent.example.id
   folder   = "/home/coder/project"
 }
@@ -18,11 +18,58 @@ describe("vscode-desktop", async () => {
       agent_id: "foo",
     });
     expect(state.outputs.vscode_url.value).toBe(
-      "vscode://coder.coder-remote/open?owner=default&workspace=default&token=$SESSION_TOKEN",
+      "vscode://coder.coder-remote/open?owner=default&workspace=default&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
     );

-    const resources: any = state.resources;
-    expect(resources[1].instances[0].attributes.order).toBeNull();
+    const coder_app = state.resources.find(
+      (res) => res.type === "coder_app" && res.name === "vscode",
+    );
+
+    expect(coder_app).not.toBeNull();
+    expect(coder_app?.instances.length).toBe(1);
+    expect(coder_app?.instances[0].attributes.order).toBeNull();
+  });
+
+  it("adds folder", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+    });
+    expect(state.outputs.vscode_url.value).toBe(
+      "vscode://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds folder and open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+      open_recent: "true",
+    });
+    expect(state.outputs.vscode_url.value).toBe(
+      "vscode://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds folder but not open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      folder: "/foo/bar",
+      openRecent: "false",
+    });
+    expect(state.outputs.vscode_url.value).toBe(
+      "vscode://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
+  });
+
+  it("adds open_recent", async () => {
+    const state = await runTerraformApply(import.meta.dir, {
+      agent_id: "foo",
+      open_recent: "true",
+    });
+    expect(state.outputs.vscode_url.value).toBe(
+      "vscode://coder.coder-remote/open?owner=default&workspace=default&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+    );
   });

   it("expect order to be set", async () => {
@@ -31,7 +78,12 @@ describe("vscode-desktop", async () => {
       order: "22",
     });
-    const resources: any = state.resources;
-    expect(resources[1].instances[0].attributes.order).toBe(22);
+
+    const coder_app = state.resources.find(
+      (res) => res.type === "coder_app" && res.name === "vscode",
+    );
+
+    expect(coder_app).not.toBeNull();
+    expect(coder_app?.instances.length).toBe(1);
+    expect(coder_app?.instances[0].attributes.order).toBe(22);
   });
 });
@@ -4,7 +4,7 @@ terraform {
   required_providers {
     coder = {
       source  = "coder/coder"
-      version = ">= 0.17"
+      version = ">= 0.23"
     }
   }
 }
@@ -20,6 +20,12 @@ variable "folder" {
   default     = ""
 }

+variable "open_recent" {
+  type        = bool
+  description = "Open the most recent workspace or folder. Falls back to the folder if there is no recent workspace or folder to open."
+  default     = false
+}
+
 variable "order" {
   type        = number
   description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
@@ -27,6 +33,7 @@ variable "order" {
 }

 data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}

 resource "coder_app" "vscode" {
   agent_id = var.agent_id
@@ -35,22 +42,17 @@ resource "coder_app" "vscode" {
   slug         = "vscode"
   display_name = "VS Code Desktop"
   order        = var.order
-  url = var.folder != "" ? join("", [
-    "vscode://coder.coder-remote/open?owner=",
-    data.coder_workspace.me.owner,
+  url = join("", [
+    "vscode://coder.coder-remote/open",
+    "?owner=",
+    data.coder_workspace_owner.me.name,
     "&workspace=",
     data.coder_workspace.me.name,
-    "&folder=",
-    var.folder,
+    var.folder != "" ? join("", ["&folder=", var.folder]) : "",
+    var.open_recent ? "&openRecent" : "",
     "&url=",
    data.coder_workspace.me.access_url,
    "&token=$SESSION_TOKEN",
-  ]) : join("", [
-    "vscode://coder.coder-remote/open?owner=",
-    data.coder_workspace.me.owner,
-    "&workspace=",
-    data.coder_workspace.me.name,
-    "&token=$SESSION_TOKEN",
   ])
 }
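To make the URL construction above easier to follow, here is a small sketch of the same shape in TypeScript. The inputs are assumed placeholders (the real values come from the `coder_workspace` and `coder_workspace_owner` data sources), and the output matches the strings asserted in the vscode-desktop tests earlier in this diff.

```ts
// Mirrors the join("", [...]) expression in the coder_app resource: folder and
// openRecent segments are only appended when set.
function vscodeUrl(opts: {
  owner: string;
  workspace: string;
  accessUrl: string;
  folder?: string;
  openRecent?: boolean;
}): string {
  return [
    "vscode://coder.coder-remote/open",
    `?owner=${opts.owner}`,
    `&workspace=${opts.workspace}`,
    opts.folder ? `&folder=${opts.folder}` : "",
    opts.openRecent ? "&openRecent" : "",
    `&url=${opts.accessUrl}`,
    "&token=$SESSION_TOKEN",
  ].join("");
}

console.log(
  vscodeUrl({
    owner: "default",
    workspace: "default",
    folder: "/foo/bar",
    openRecent: true,
    accessUrl: "https://mydeployment.coder.com",
  }),
);
// vscode://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN
```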
@@ -14,7 +14,7 @@ Automatically install [Visual Studio Code Server](https://code.visualstudio.com/
 ```tf
 module "vscode-web" {
   source         = "registry.coder.com/modules/vscode-web/coder"
-  version        = "1.0.10"
+  version        = "1.0.22"
   agent_id       = coder_agent.example.id
   accept_license = true
 }
@@ -29,7 +29,7 @@ module "vscode-web" {
 ```tf
 module "vscode-web" {
   source         = "registry.coder.com/modules/vscode-web/coder"
-  version        = "1.0.10"
+  version        = "1.0.22"
   agent_id       = coder_agent.example.id
   install_prefix = "/home/coder/.vscode-web"
   folder         = "/home/coder"
@@ -42,7 +42,7 @@ module "vscode-web" {
 ```tf
 module "vscode-web" {
   source         = "registry.coder.com/modules/vscode-web/coder"
-  version        = "1.0.10"
+  version        = "1.0.22"
   agent_id       = coder_agent.example.id
   extensions     = ["github.copilot", "ms-python.python", "ms-toolsai.jupyter"]
   accept_license = true
@@ -56,7 +56,7 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte
 ```tf
 module "vscode-web" {
   source         = "registry.coder.com/modules/vscode-web/coder"
-  version        = "1.0.10"
+  version        = "1.0.22"
   agent_id       = coder_agent.example.id
   extensions     = ["dracula-theme.theme-dracula"]
   settings = {
vscode-web/main.test.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { describe, expect, it } from "bun:test";
import { runTerraformApply, runTerraformInit } from "../test";

describe("vscode-web", async () => {
  await runTerraformInit(import.meta.dir);

  it("accept_license should be set to true", () => {
    const t = async () => {
      await runTerraformApply(import.meta.dir, {
        agent_id: "foo",
        accept_license: "false",
      });
    };
    expect(t).toThrow("Invalid value for variable");
  });

  it("use_cached and offline can not be used together", () => {
    const t = async () => {
      await runTerraformApply(import.meta.dir, {
        agent_id: "foo",
        accept_license: "true",
        use_cached: "true",
        offline: "true",
      });
    };
    expect(t).toThrow("Offline and Use Cached can not be used together");
  });

  it("offline and extensions can not be used together", () => {
    const t = async () => {
      await runTerraformApply(import.meta.dir, {
        agent_id: "foo",
        accept_license: "true",
        offline: "true",
        extensions: '["1", "2"]',
      });
    };
    expect(t).toThrow("Offline mode does not allow extensions to be installed");
  });

  // More tests depend on shebang refactors
});
Some files were not shown because too many files have changed in this diff.