Compare commits
244 Commits
6 .github/dependabot.yml (vendored, new file)
@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
202 .github/scripts/check.sh (vendored, new executable file)
@@ -0,0 +1,202 @@
#!/usr/bin/env bash
set -o pipefail
set -u

VERBOSE="${VERBOSE:-0}"
if [[ "${VERBOSE}" -ne "0" ]]; then
  set -x
fi

# List of required environment variables
required_vars=(
  "INSTATUS_API_KEY"
  "INSTATUS_PAGE_ID"
  "INSTATUS_COMPONENT_ID"
  "VERCEL_API_KEY"
)

# Check if each required variable is set
for var in "${required_vars[@]}"; do
  if [[ -z "${!var:-}" ]]; then
    echo "Error: Environment variable '$var' is not set."
    exit 1
  fi
done

REGISTRY_BASE_URL="${REGISTRY_BASE_URL:-https://registry.coder.com}"

status=0
declare -a modules=()
declare -a failures=()

# Collect all module directories containing a main.tf file
for path in $(find . -maxdepth 2 -not -path '*/.*' -type f -name main.tf | cut -d '/' -f 2 | sort -u); do
  modules+=("${path}")
done

echo "Checking modules: ${modules[*]}"

# Function to update the component status on Instatus
update_component_status() {
  local component_status=$1
  # see https://instatus.com/help/api/components
  (curl -X PUT "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/components/$INSTATUS_COMPONENT_ID" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" \
    -d "{\"status\": \"$component_status\"}")
}

# Function to create an incident
create_incident() {
  local incident_name="Degraded Service"
  local message="The following modules are experiencing issues:\n"
  for i in "${!failures[@]}"; do
    message+="$((i + 1)). ${failures[$i]}\n"
  done

  component_status="PARTIALOUTAGE"
  if (( ${#failures[@]} == ${#modules[@]} )); then
    component_status="MAJOROUTAGE"
  fi
  # see https://instatus.com/help/api/incidents
  incident_id=$(curl -s -X POST "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" \
    -d "{
      \"name\": \"$incident_name\",
      \"message\": \"$message\",
      \"components\": [\"$INSTATUS_COMPONENT_ID\"],
      \"status\": \"INVESTIGATING\",
      \"notify\": true,
      \"statuses\": [
        {
          \"id\": \"$INSTATUS_COMPONENT_ID\",
          \"status\": \"PARTIALOUTAGE\"
        }
      ]
    }" | jq -r '.id')

  echo "Created incident with ID: $incident_id"
}

# Function to check for existing unresolved incidents
check_existing_incident() {
  # Fetch the latest incidents with status not equal to "RESOLVED"
  local unresolved_incidents=$(curl -s -X GET "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" | jq -r '.incidents[] | select(.status != "RESOLVED") | .id')

  if [[ -n "$unresolved_incidents" ]]; then
    echo "Unresolved incidents found: $unresolved_incidents"
    return 0 # Indicate that there are unresolved incidents
  else
    echo "No unresolved incidents found."
    return 1 # Indicate that no unresolved incidents exist
  fi
}

force_redeploy_registry () {
  # These are not secret values; safe to just expose directly in script
  local VERCEL_TEAM_SLUG="codercom"
  local VERCEL_TEAM_ID="team_tGkWfhEGGelkkqUUm9nXq17r"
  local VERCEL_APP="registry"

  local latest_res
  latest_res=$(curl "https://api.vercel.com/v6/deployments?app=$VERCEL_APP&limit=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID&target=production&state=BUILDING,INITIALIZING,QUEUED,READY" \
    --fail \
    --silent \
    --header "Authorization: Bearer $VERCEL_API_KEY" \
    --header "Content-Type: application/json"
  )

  # If we have zero deployments, something is VERY wrong. Make the whole
  # script exit with a non-zero status code
  local latest_id
  latest_id=$(echo "${latest_res}" | jq -r '.deployments[0].uid')
  if [[ "${latest_id}" = "null" ]]; then
    echo "Unable to pull any previous deployments for redeployment"
    echo "Please redeploy the latest deployment manually in Vercel."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  local latest_date_ts_seconds
  latest_date_ts_seconds=$(echo "${latest_res}" | jq -r '.deployments[0].createdAt/1000|floor')
  local current_date_ts_seconds
  current_date_ts_seconds="$(date +%s)"
  local max_redeploy_interval_seconds=7200 # 2 hours
  if (( current_date_ts_seconds - latest_date_ts_seconds < max_redeploy_interval_seconds )); then
    echo "The registry was deployed less than 2 hours ago."
    echo "Not automatically re-deploying the registry."
    echo "A human reading this message should decide if a redeployment is necessary."
    echo "Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  local latest_deployment_state
  latest_deployment_state="$(echo "${latest_res}" | jq -r '.deployments[0].state')"
  if [[ "${latest_deployment_state}" != "READY" ]]; then
    echo "Last deployment was not in READY state. Skipping redeployment."
    echo "A human reading this message should decide if a redeployment is necessary."
    echo "Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  echo "============================================================="
  echo "!!! Redeploying registry with deployment ID: ${latest_id} !!!"
  echo "============================================================="

  if ! curl -X POST "https://api.vercel.com/v13/deployments?forceNew=1&skipAutoDetectionConfirmation=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID" \
    --fail \
    --header "Authorization: Bearer $VERCEL_API_KEY" \
    --header "Content-Type: application/json" \
    --data-raw "{ \"deploymentId\": \"${latest_id}\", \"name\": \"${VERCEL_APP}\", \"target\": \"production\" }"; then
    echo "DEPLOYMENT FAILED! Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi
}

# Check each module's accessibility
for module in "${modules[@]}"; do
  # Trim leading/trailing whitespace from module name
  module=$(echo "${module}" | xargs)
  url="${REGISTRY_BASE_URL}/modules/${module}"
  printf "=== Checking module %s at %s\n" "${module}" "${url}"
  status_code=$(curl --output /dev/null --head --silent --fail --location "${url}" --retry 3 --write-out "%{http_code}")
  if (( status_code != 200 )); then
    printf "==> FAIL(%s)\n" "${status_code}"
    status=1
    failures+=("${module}")
  else
    printf "==> OK(%s)\n" "${status_code}"
  fi
done

# Determine overall status and update Instatus component
if (( status == 0 )); then
  echo "All modules are operational."
  # set to OPERATIONAL
  update_component_status "OPERATIONAL"
else
  echo "The following modules have issues: ${failures[*]}"
  # check if all modules are down
  if (( ${#failures[@]} == ${#modules[@]} )); then
    update_component_status "MAJOROUTAGE"
  else
    update_component_status "PARTIALOUTAGE"
  fi

  # Check if there is an existing incident before creating a new one
  if ! check_existing_incident; then
    create_incident
  fi

  # If a module is down, force a redeployment to try getting things back online
  # ASAP
  force_redeploy_registry
fi

exit "${status}"
23 .github/workflows/check.yaml (vendored, new file)
@@ -0,0 +1,23 @@
name: Health
# Check modules health on registry.coder.com
on:
  schedule:
    - cron: "0,15,30,45 * * * *" # Runs every 15 minutes
  workflow_dispatch: # Allows manual triggering of the workflow if needed

jobs:
  run-script:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run check.sh
        run: |
          ./.github/scripts/check.sh
        env:
          INSTATUS_API_KEY: ${{ secrets.INSTATUS_API_KEY }}
          INSTATUS_PAGE_ID: ${{ secrets.INSTATUS_PAGE_ID }}
          INSTATUS_COMPONENT_ID: ${{ secrets.INSTATUS_COMPONENT_ID }}
          VERCEL_API_KEY: ${{ secrets.VERCEL_API_KEY }}
21 .github/workflows/ci.yaml (vendored)
@@ -17,7 +17,8 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v1
      - uses: coder/coder/.github/actions/setup-tf@main
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest
      - name: Setup
@@ -27,7 +28,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v1
        with:
          fetch-depth: 0 # Needed to get tags
      - uses: coder/coder/.github/actions/setup-tf@main
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest
      - name: Setup
@@ -38,3 +42,16 @@ jobs:
        uses: crate-ci/typos@v1.17.2
      - name: Lint
        run: bun lint
      - name: Check version
        shell: bash
        run: |
          # check for version changes
          ./update-version.sh
          # Check if any changes were made in README.md files
          if [[ -n "$(git status --porcelain -- '**/README.md')" ]]; then
            echo "Version mismatch detected. Please run ./update-version.sh and commit the updated README.md files."
            git diff -- '**/README.md'
            exit 1
          else
            echo "No version mismatch detected. All versions are up to date."
          fi
41 .github/workflows/update-readme.yaml (vendored, deleted)
@@ -1,41 +0,0 @@
name: Update README on Tag

on:
  workflow_dispatch:
  push:
    tags:
      - 'v*'

jobs:
  update-readme:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get the latest tag
        id: get-latest-tag
        run: echo "TAG=$(git describe --tags --abbrev=0 | sed 's/^v//')" >> $GITHUB_OUTPUT

      - name: Run update script
        run: ./update-version.sh

      - name: Create Pull Request
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          commit-message: 'chore: bump version to ${{ env.TAG }} in README.md files'
          title: 'chore: bump version to ${{ env.TAG }} in README.md files'
          body: 'This is an auto-generated PR to update README.md files of all modules with the new tag ${{ env.TAG }}'
          branch: 'update-readme-branch'
        env:
          TAG: ${{ steps.get-latest-tag.outputs.TAG }}

      - name: Auto-approve
        uses: hmarr/auto-approve-action@v4
        if: github.ref == 'refs/heads/update-readme-branch'
19 .icons/airflow.svg (new file, 15 KiB)
1 .icons/cursor.svg (new file, 1.5 MiB)
1 .icons/dcv.svg (new file, 1.7 KiB)
@@ -0,0 +1 @@
<svg width="82" height="80" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" overflow="hidden"><g transform="translate(-550 -124)"><g><g><g><g><path d="M551 124 631 124 631 204 551 204Z" fill="#ED7100" fill-rule="evenodd" fill-opacity="1"/><path d="M612.069 162.386C607.327 165.345 600.717 168.353 593.46 170.855 588.339 172.62 583.33 173.978 578.865 174.838 582.727 184.68 589.944 191.037 596.977 189.853 603.514 188.75 608.387 181.093 609.1 170.801L611.096 170.939C610.304 182.347 604.893 190.545 597.309 191.825 596.648 191.937 595.984 191.991 595.323 191.991 587.945 191.991 580.718 185.209 576.871 175.194 575.733 175.38 574.625 175.542 573.584 175.653 572.173 175.803 570.901 175.879 569.769 175.879 565.95 175.879 563.726 175.025 563.141 173.328 562.414 171.218 564.496 168.566 569.328 165.445L570.414 167.125C565.704 170.167 564.814 172.046 565.032 172.677 565.263 173.348 567.279 174.313 573.372 173.665 574.267 173.57 575.216 173.433 576.187 173.28 575.537 171.297 575.014 169.205 574.647 167.028 573.406 159.673 574.056 152.438 576.48 146.654 578.969 140.715 583.031 136.99 587.917 136.166 593.803 135.171 600.075 138.691 604.679 145.579L603.017 146.69C598.862 140.476 593.349 137.28 588.249 138.138 584.063 138.844 580.539 142.143 578.325 147.427 576.046 152.866 575.44 159.709 576.62 166.695 576.988 168.876 577.515 170.966 578.173 172.937 582.618 172.1 587.651 170.742 592.807 168.965 599.927 166.51 606.392 163.572 611.01 160.689 616.207 157.447 617.201 155.444 616.969 154.772 616.769 154.189 615.095 153.299 610.097 153.653L609.957 151.657C615.171 151.289 618.171 152.116 618.86 154.12 619.619 156.32 617.334 159.101 612.069 162.386" fill="#FFFFFF" fill-rule="evenodd" fill-opacity="1"/></g></g></g></g></g></svg>

5 .icons/desktop.svg (new file, 540 B)
@@ -0,0 +1,5 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M31 6V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V6C1 4.35 2.35 3 4 3H28C29.65 3 31 4.35 31 6Z" fill="#2197F3"/>
<path d="M21 27H17V24C17 23.4478 16.5522 23 16 23C15.4478 23 15 23.4478 15 24V27H11C10.4478 27 10 27.4478 10 28C10 28.5522 10.4478 29 11 29H21C21.5522 29 22 28.5522 22 28C22 27.4478 21.5522 27 21 27Z" fill="#FFC10A"/>
<path d="M31 17V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V17H31Z" fill="#3F51B5"/>
</svg>

1 .icons/github.svg (new file, 960 B)
@@ -0,0 +1 @@
<svg width="98" height="96" xmlns="http://www.w3.org/2000/svg"><path fill-rule="evenodd" clip-rule="evenodd" d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z" fill="#fff"/></svg>

(unnamed image modified: 2.2 KiB before, 6.8 KiB after)

1 .icons/node.svg (new file, 2.5 KiB)
@@ -0,0 +1 @@
<svg width="2270" height="2500" viewBox="0 0 256 282" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMinYMin meet"><g fill="#8CC84B"><path d="M116.504 3.58c6.962-3.985 16.03-4.003 22.986 0 34.995 19.774 70.001 39.517 104.99 59.303 6.581 3.707 10.983 11.031 10.916 18.614v118.968c.049 7.897-4.788 15.396-11.731 19.019-34.88 19.665-69.742 39.354-104.616 59.019-7.106 4.063-16.356 3.75-23.24-.646-10.457-6.062-20.932-12.094-31.39-18.15-2.137-1.274-4.546-2.288-6.055-4.36 1.334-1.798 3.719-2.022 5.657-2.807 4.365-1.388 8.374-3.616 12.384-5.778 1.014-.694 2.252-.428 3.224.193 8.942 5.127 17.805 10.403 26.777 15.481 1.914 1.105 3.852-.362 5.488-1.274 34.228-19.345 68.498-38.617 102.72-57.968 1.268-.61 1.969-1.956 1.866-3.345.024-39.245.006-78.497.012-117.742.145-1.576-.767-3.025-2.192-3.67-34.759-19.575-69.5-39.18-104.253-58.76a3.621 3.621 0 0 0-4.094-.006C91.2 39.257 56.465 58.88 21.712 78.454c-1.42.646-2.373 2.071-2.204 3.653.006 39.245 0 78.497 0 117.748a3.329 3.329 0 0 0 1.89 3.303c9.274 5.259 18.56 10.481 27.84 15.722 5.228 2.814 11.647 4.486 17.407 2.33 5.083-1.823 8.646-7.01 8.549-12.407.048-39.016-.024-78.038.036-117.048-.127-1.732 1.516-3.163 3.2-3 4.456-.03 8.918-.06 13.374.012 1.86-.042 3.14 1.823 2.91 3.568-.018 39.263.048 78.527-.03 117.79.012 10.464-4.287 21.85-13.966 26.97-11.924 6.177-26.662 4.867-38.442-1.056-10.198-5.09-19.93-11.097-29.947-16.55C5.368 215.886.555 208.357.604 200.466V81.497c-.073-7.74 4.504-15.197 11.29-18.85C46.768 42.966 81.636 23.27 116.504 3.58z"/><path d="M146.928 85.99c15.21-.979 31.493-.58 45.18 6.913 10.597 5.742 16.472 17.793 16.659 29.566-.296 1.588-1.956 2.464-3.472 2.355-4.413-.006-8.827.06-13.24-.03-1.872.072-2.96-1.654-3.195-3.309-1.268-5.633-4.34-11.212-9.642-13.929-8.139-4.075-17.576-3.87-26.451-3.785-6.479.344-13.446.905-18.935 4.715-4.214 2.886-5.494 8.712-3.99 13.404 1.418 3.369 5.307 4.456 8.489 5.458 18.33 4.794 37.754 4.317 55.734 10.626 7.444 2.572 14.726 7.572 17.274 15.366 3.333 10.446 1.872 22.932-5.56 31.318-6.027 6.901-14.805 10.657-23.56 12.697-11.647 2.597-23.734 2.663-35.562 1.51-11.122-1.268-22.696-4.19-31.282-11.768-7.342-6.375-10.928-16.308-10.572-25.895.085-1.619 1.697-2.748 3.248-2.615 4.444-.036 8.888-.048 13.332.006 1.775-.127 3.091 1.407 3.182 3.08.82 5.367 2.837 11 7.517 14.182 9.032 5.827 20.365 5.428 30.707 5.591 8.568-.38 18.186-.495 25.178-6.158 3.689-3.23 4.782-8.634 3.785-13.283-1.08-3.925-5.186-5.754-8.712-6.95-18.095-5.724-37.736-3.647-55.656-10.12-7.275-2.571-14.31-7.432-17.105-14.906-3.9-10.578-2.113-23.662 6.098-31.765 8.006-8.06 19.563-11.164 30.551-12.275z"/></g></svg>

BIN .images/airflow.png (new file, 603 KiB)
BIN .images/amazon-dcv-windows.png (new file, 3.3 MiB)
BIN .images/hcp-vault-secrets-credentials.png (new file, 174 KiB)
@@ -4,7 +4,7 @@ terraform {
  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.12"
      version = ">= 0.17"
    }
  }
}
@@ -50,6 +50,12 @@ variable "mutable" {
  description = "Whether the parameter is mutable."
  default     = true
}

variable "order" {
  type        = number
  description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
  default     = null
}
# Add other variables here

@@ -69,9 +75,10 @@ resource "coder_app" "MODULE_NAME" {
  slug         = "MODULE_NAME"
  display_name = "MODULE_NAME"
  url          = "http://localhost:${var.port}"
  icon         = loocal.icon_url
  icon         = local.icon_url
  subdomain    = false
  share        = "owner"
  order        = var.order

  # Remove if the app does not have a healthcheck endpoint
  healthcheck {
@@ -1,28 +1,75 @@
# Contributing

To create a new module, clone this repository and run:
## Getting started

This repo uses the [Bun runtime](https://bun.sh/) to run all code and tests. To install Bun, you can run this command on Linux/MacOS:

```shell
./new.sh MODULE_NAME
curl -fsSL https://bun.sh/install | bash
```

Or this command on Windows:

```shell
powershell -c "irm bun.sh/install.ps1 | iex"
```

Follow the instructions to ensure that Bun is available globally. Once Bun has been installed, clone this repository. From there, run this script to create a new module:

```shell
./new.sh NAME_OF_NEW_MODULE
```

## Testing a Module

> **Note:** It is the responsibility of the module author to implement tests for their module. The author must test the module locally before submitting a PR.

A suite of test-helpers exists to run `terraform apply` on modules with variables, and test script output against containers.

Reference existing `*.test.ts` files for implementation.
The testing suite must be able to run docker containers with the `--network=host` flag. This typically requires running the tests on Linux as this flag does not apply to Docker Desktop for MacOS and Windows. MacOS users can work around this by using something like [colima](https://github.com/abiosoft/colima) or [Orbstack](https://orbstack.dev/) instead of Docker Desktop.

Reference the existing `*.test.ts` files to get an idea for how to set up tests.

You can run all tests in a specific file with this command:

```shell
# Run tests for a specific module!
$ bun test -t '<module>'
```

Or run all tests by running this command:

```shell
$ bun test
```

You can test a module locally by updating the source as follows:

```tf
module "example" {
  source = "git::https://github.com/<USERNAME>/<REPO>.git//<MODULE-NAME>?ref=<BRANCH-NAME>"
  # You may need to remove the 'version' field, it is incompatible with some sources.
}
```

> **Note:** This is the responsibility of the module author to implement tests for their module and test the module locally before submitting a PR.
## Releases

> [!WARNING]
> When creating a new release, make sure that your new version number is fully accurate. If a version number is incorrect or does not exist, we may end up serving incorrect/old data for our various tools and providers.

Much of our release process is automated. To cut a new release:

1. Navigate to [GitHub's Releases page](https://github.com/coder/modules/releases)
2. Click "Draft a new release"
3. Click the "Choose a tag" button and type a new release number in the format `v<major>.<minor>.<patch>` (e.g., `v1.18.0`). Then click "Create new tag".
4. Click the "Generate release notes" button, and clean up the resulting notes. Be sure to remove any notes that would not be relevant to end-users (e.g., bumping dependencies).
5. Once everything looks good, click the "Publish release" button.

Once the release has been cut, a script will run to check whether there are any modules that will require that the new release number be published to Terraform. If there are any, a new pull request will automatically be generated. Be sure to approve this PR and merge it into the `main` branch.

Following that, our automated processes will handle publishing new data for [`registry.coder.com`](https://github.com/coder/registry.coder.com/):

1. Publishing new versions to Coder's [Terraform Registry](https://registry.terraform.io/providers/coder/coder/latest)
2. Publishing new data to the [Coder Registry](https://registry.coder.com)

> [!NOTE]
> Some data in `registry.coder.com` is fetched on demand from the Module repo's main branch. This data should be updated almost immediately after a new release, but other changes will take some time to propagate.
@@ -3,14 +3,15 @@
  Modules
</h1>

[Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)
[Module Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)

[](https://discord.gg/coder)
[](./LICENSE)
[](https://github.com/coder/modules/actions/workflows/check.yaml)

</div>

Modules extend Templates to create reusable components for your development environment.
Modules extend Coder Templates to create reusable components for your development environment.

e.g.
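In the rendered README this "e.g." is followed by a module usage block; a minimal sketch of such a block, reusing the code-server module that appears later in this comparison (the version shown is illustrative):

```tf
module "code-server" {
  source   = "registry.coder.com/modules/code-server/coder"
  version  = "1.0.26"
  agent_id = coder_agent.example.id
}
```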
49 amazon-dcv-windows/README.md (new file)
@@ -0,0 +1,49 @@
---
display_name: Amazon DCV Windows
description: Amazon DCV Server and Web Client for Windows
icon: ../.icons/dcv.svg
maintainer_github: coder
verified: true
tags: [windows, amazon, dcv, web, desktop]
---

# Amazon DCV Windows

Amazon DCV is a high-performance remote display protocol that provides a secure way to deliver remote desktop and application streaming from any cloud or data center to any device, over varying network conditions.

![Amazon DCV on a Windows workspace](../.images/amazon-dcv-windows.png)

Enable DCV Server and Web Client on Windows workspaces.

```tf
module "dcv" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/amazon-dcv-windows/coder"
  version  = "1.0.24"
  agent_id = resource.coder_agent.main.id
}


resource "coder_metadata" "dcv" {
  count       = data.coder_workspace.me.start_count
  resource_id = aws_instance.dev.id # id of the instance resource

  item {
    key   = "DCV client instructions"
    value = "Run `coder port-forward ${data.coder_workspace.me.name} -p ${module.dcv[count.index].port}` and connect to **localhost:${module.dcv[count.index].port}${module.dcv[count.index].web_url_path}**"
  }
  item {
    key   = "username"
    value = module.dcv[count.index].username
  }
  item {
    key       = "password"
    value     = module.dcv[count.index].password
    sensitive = true
  }
}
```

## License

Amazon DCV is free to use on AWS EC2 instances but requires a license for other cloud providers. Please see the instructions [here](https://docs.aws.amazon.com/dcv/latest/adminguide/setting-up-license.html#setting-up-license-ec2) for more information.
170 amazon-dcv-windows/install-dcv.ps1 (new file)
@@ -0,0 +1,170 @@
# Terraform variables
$adminPassword = "${admin_password}"
$port = "${port}"
$webURLPath = "${web_url_path}"

function Set-LocalAdminUser {
  Write-Output "[INFO] Starting Set-LocalAdminUser function"
  $securePassword = ConvertTo-SecureString $adminPassword -AsPlainText -Force
  Write-Output "[DEBUG] Secure password created"
  Get-LocalUser -Name Administrator | Set-LocalUser -Password $securePassword
  Write-Output "[INFO] Administrator password set"
  Get-LocalUser -Name Administrator | Enable-LocalUser
  Write-Output "[INFO] User Administrator enabled successfully"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

function Get-VirtualDisplayDriverRequired {
  Write-Output "[INFO] Starting Get-VirtualDisplayDriverRequired function"
  $token = Invoke-RestMethod -Headers @{'X-aws-ec2-metadata-token-ttl-seconds' = '21600'} -Method PUT -Uri http://169.254.169.254/latest/api/token
  Write-Output "[DEBUG] Token acquired: $token"
  $instanceType = Invoke-RestMethod -Headers @{'X-aws-ec2-metadata-token' = $token} -Method GET -Uri http://169.254.169.254/latest/meta-data/instance-type
  Write-Output "[DEBUG] Instance type: $instanceType"
  $OSVersion = ((Get-ItemProperty -Path "Microsoft.PowerShell.Core\Registry::\HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion" -Name ProductName).ProductName) -replace "[^0-9]", ''
  Write-Output "[DEBUG] OS version: $OSVersion"

  # Force boolean result
  $result = (($OSVersion -ne "2019") -and ($OSVersion -ne "2022") -and ($OSVersion -ne "2025")) -and (($instanceType[0] -ne 'g') -and ($instanceType[0] -ne 'p'))
  Write-Output "[INFO] VirtualDisplayDriverRequired result: $result"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
  return [bool]$result
}

function Download-DCV {
  param (
    [bool]$VirtualDisplayDriverRequired
  )
  Write-Output "[INFO] Starting Download-DCV function"

  $downloads = @(
    @{
      Name     = "DCV Display Driver"
      Required = $VirtualDisplayDriverRequired
      Path     = "C:\Windows\Temp\DCVDisplayDriver.msi"
      Uri      = "https://d1uj6qtbmh3dt5.cloudfront.net/nice-dcv-virtual-display-x64-Release.msi"
    },
    @{
      Name     = "DCV Server"
      Required = $true
      Path     = "C:\Windows\Temp\DCVServer.msi"
      Uri      = "https://d1uj6qtbmh3dt5.cloudfront.net/nice-dcv-server-x64-Release.msi"
    }
  )

  foreach ($download in $downloads) {
    if ($download.Required -and -not (Test-Path $download.Path)) {
      try {
        Write-Output "[INFO] Downloading $($download.Name)"

        # Display progress manually (no events)
        $progressActivity = "Downloading $($download.Name)"
        $progressStatus = "Starting download..."
        Write-Progress -Activity $progressActivity -Status $progressStatus -PercentComplete 0

        # Synchronously download the file
        $webClient = New-Object System.Net.WebClient
        $webClient.DownloadFile($download.Uri, $download.Path)

        # Update progress
        Write-Progress -Activity $progressActivity -Status "Completed" -PercentComplete 100

        Write-Output "[INFO] $($download.Name) downloaded successfully."
      } catch {
        Write-Output "[ERROR] Failed to download $($download.Name): $_"
        throw
      }
    } else {
      Write-Output "[INFO] $($download.Name) already exists. Skipping download."
    }
  }

  Write-Output "[INFO] All downloads completed"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

function Install-DCV {
  param (
    [bool]$VirtualDisplayDriverRequired
  )
  Write-Output "[INFO] Starting Install-DCV function"

  if (-not (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue)) {
    if ($VirtualDisplayDriverRequired) {
      Write-Output "[INFO] Installing DCV Display Driver"
      Start-Process "C:\Windows\System32\msiexec.exe" -ArgumentList "/I C:\Windows\Temp\DCVDisplayDriver.msi /quiet /norestart" -Wait
    } else {
      Write-Output "[INFO] DCV Display Driver installation skipped (not required)."
    }
    Write-Output "[INFO] Installing DCV Server"
    Start-Process "C:\Windows\System32\msiexec.exe" -ArgumentList "/I C:\Windows\Temp\DCVServer.msi ADDLOCAL=ALL /quiet /norestart /l*v C:\Windows\Temp\dcv_install_msi.log" -Wait
  } else {
    Write-Output "[INFO] DCV Server already installed, skipping installation."
  }

  # Wait for the service to appear with a timeout
  $timeout = 10 # seconds
  $elapsed = 0
  while (-not (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue) -and ($elapsed -lt $timeout)) {
    Start-Sleep -Seconds 1
    $elapsed++
  }

  if ($elapsed -ge $timeout) {
    Write-Output "[WARNING] Timeout waiting for dcvserver service. A restart is required to complete installation."
    Restart-SystemForDCV
  } else {
    Write-Output "[INFO] dcvserver service detected successfully."
  }
}

function Restart-SystemForDCV {
  Write-Output "[INFO] The system will restart in 10 seconds to finalize DCV installation."
  Start-Sleep -Seconds 10

  # Initiate restart
  Restart-Computer -Force

  # Exit the script after initiating restart
  Write-Output "[INFO] Please wait for the system to restart..."

  Exit 1
}


function Configure-DCV {
  Write-Output "[INFO] Starting Configure-DCV function"
  $dcvPath = "Microsoft.PowerShell.Core\Registry::\HKEY_USERS\S-1-5-18\Software\GSettings\com\nicesoftware\dcv"

  # Create the required paths
  @("$dcvPath\connectivity", "$dcvPath\session-management", "$dcvPath\session-management\automatic-console-session", "$dcvPath\display") | ForEach-Object {
    if (-not (Test-Path $_)) {
      New-Item -Path $_ -Force | Out-Null
    }
  }

  # Set registry keys
  New-ItemProperty -Path "$dcvPath\session-management" -Name create-session -PropertyType DWORD -Value 1 -Force
  New-ItemProperty -Path "$dcvPath\session-management\automatic-console-session" -Name owner -Value Administrator -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name quic-port -PropertyType DWORD -Value $port -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name web-port -PropertyType DWORD -Value $port -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name web-url-path -PropertyType String -Value $webURLPath -Force

  # Attempt to restart service
  if (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue) {
    Restart-Service -Name "dcvserver"
  } else {
    Write-Output "[WARNING] dcvserver service not found. Ensure the system was restarted properly."
  }

  Write-Output "[INFO] DCV configuration completed"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

# Main Script Execution
Write-Output "[INFO] Starting script"
$VirtualDisplayDriverRequired = [bool](Get-VirtualDisplayDriverRequired)
Set-LocalAdminUser
Download-DCV -VirtualDisplayDriverRequired $VirtualDisplayDriverRequired
Install-DCV -VirtualDisplayDriverRequired $VirtualDisplayDriverRequired
Configure-DCV
Write-Output "[INFO] Script completed"
85 amazon-dcv-windows/main.tf (new file)
@@ -0,0 +1,85 @@
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.17"
    }
  }
}

variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "admin_password" {
  type      = string
  default   = "coderDCV!"
  sensitive = true
}

variable "port" {
  type        = number
  description = "The port number for the DCV server."
  default     = 8443
}

variable "subdomain" {
  type        = bool
  description = "Whether to use a subdomain for the DCV server."
  default     = true
}

variable "slug" {
  type        = string
  description = "The slug of the web-dcv coder_app resource."
  default     = "web-dcv"
}

resource "coder_app" "web-dcv" {
  agent_id     = var.agent_id
  slug         = var.slug
  display_name = "Web DCV"
  url          = "https://localhost:${var.port}${local.web_url_path}?username=${local.admin_username}&password=${var.admin_password}"
  icon         = "/icon/dcv.svg"
  subdomain    = var.subdomain
}

resource "coder_script" "install-dcv" {
  agent_id     = var.agent_id
  display_name = "Install DCV"
  icon         = "/icon/dcv.svg"
  run_on_start = true
  script = templatefile("${path.module}/install-dcv.ps1", {
    admin_password : var.admin_password,
    port : var.port,
    web_url_path : local.web_url_path
  })
}

data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

locals {
  web_url_path   = var.subdomain ? "/" : format("/@%s/%s/apps/%s", data.coder_workspace_owner.me.name, data.coder_workspace.me.name, var.slug)
  admin_username = "Administrator"
}

output "web_url_path" {
  value = local.web_url_path
}

output "username" {
  value = local.admin_username
}

output "password" {
  value     = var.admin_password
  sensitive = true
}

output "port" {
  value = var.port
}
23 apache-airflow/README.md (new file)
@@ -0,0 +1,23 @@
---
display_name: airflow
description: A module that adds Apache Airflow to your Coder template
icon: ../.icons/airflow.svg
maintainer_github: coder
partner_github: nataindata
verified: true
tags: [airflow, idea, web, helper]
---

# airflow

A module that adds Apache Airflow to your Coder template.

```tf
module "airflow" {
  source   = "registry.coder.com/modules/apache-airflow/coder"
  version  = "1.0.13"
  agent_id = coder_agent.main.id
}
```

![Apache Airflow](../.images/airflow.png)
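Beyond `agent_id`, the module's `main.tf` (shown next in this comparison) defines optional inputs for the port, log path, sharing level, and app order. A minimal sketch of setting them; the values below are illustrative, not defaults you must use:

```tf
module "airflow" {
  source   = "registry.coder.com/modules/apache-airflow/coder"
  version  = "1.0.13"
  agent_id = coder_agent.main.id

  # Optional inputs from the module's main.tf; the values are illustrative.
  port     = 8080               # port the Airflow webserver listens on (default 8080)
  log_path = "/tmp/airflow.log" # where webserver output is written (default /tmp/airflow.log)
  share    = "owner"            # one of "owner", "authenticated", or "public"
  order    = 1                  # position of the app in the Coder UI
}
```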
65 apache-airflow/main.tf (new file)
@@ -0,0 +1,65 @@
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.17"
    }
  }
}

# Add required variables for your modules and remove any unneeded variables
variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "log_path" {
  type        = string
  description = "The path to log airflow to."
  default     = "/tmp/airflow.log"
}

variable "port" {
  type        = number
  description = "The port to run airflow on."
  default     = 8080
}

variable "share" {
  type    = string
  default = "owner"
  validation {
    condition     = var.share == "owner" || var.share == "authenticated" || var.share == "public"
    error_message = "Incorrect value. Please set either 'owner', 'authenticated', or 'public'."
  }
}

variable "order" {
  type        = number
  description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
  default     = null
}

resource "coder_script" "airflow" {
  agent_id     = var.agent_id
  display_name = "airflow"
  icon         = "/icon/apache-guacamole.svg"
  script = templatefile("${path.module}/run.sh", {
    LOG_PATH : var.log_path,
    PORT : var.port
  })
  run_on_start = true
}

resource "coder_app" "airflow" {
  agent_id     = var.agent_id
  slug         = "airflow"
  display_name = "airflow"
  url          = "http://localhost:${var.port}"
  icon         = "/icon/apache-guacamole.svg"
  subdomain    = true
  share        = var.share
  order        = var.order
}
19 apache-airflow/run.sh (new file)
@@ -0,0 +1,19 @@
#!/usr/bin/env sh

BOLD='\033[0;1m'

PATH=$PATH:~/.local/bin
pip install --upgrade apache-airflow

filename=~/airflow/airflow.db
if ! [ -f $filename ] || ! [ -s $filename ]; then
  airflow db init
fi

export AIRFLOW__CORE__LOAD_EXAMPLES=false

airflow webserver > ${LOG_PATH} 2>&1 &

airflow scheduler >> /tmp/airflow_scheduler.log 2>&1 &

airflow users create -u admin -p admin -r Admin -e admin@admin.com -f Coder -l User
@@ -17,7 +17,7 @@ Customize the preselected parameter value:
```tf
module "aws-region" {
  source  = "registry.coder.com/modules/aws-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  default = "us-east-1"
}

@@ -37,7 +37,7 @@ Change the display name and icon for a region using the corresponding maps:
```tf
module "aws-region" {
  source  = "registry.coder.com/modules/aws-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  default = "ap-south-1"

  custom_names = {
@@ -63,7 +63,7 @@ Hide the Asia Pacific regions Seoul and Osaka:
```tf
module "aws-region" {
  source  = "registry.coder.com/modules/aws-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  exclude = ["ap-northeast-2", "ap-northeast-3"]
}
@@ -1,6 +1,5 @@
import { describe, expect, it } from "bun:test";
import {
  executeScriptInContainer,
  runTerraformApply,
  runTerraformInit,
  testRequiredVariables,
@@ -22,4 +21,13 @@ describe("aws-region", async () => {
    });
    expect(state.outputs.value.value).toBe("us-west-2");
  });

  it("set custom order for coder_parameter", async () => {
    const order = 99;
    const state = await runTerraformApply(import.meta.dir, {
      coder_parameter_order: order.toString(),
    });
    expect(state.resources).toHaveLength(1);
    expect(state.resources[0].instances[0].attributes.order).toBe(order);
  });
});
@@ -51,11 +51,25 @@ variable "exclude" {
  type = list(string)
}

variable "coder_parameter_order" {
  type        = number
  description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
  default     = null
}

locals {
  # This is a static list because the regions don't change _that_
  # frequently and including the `aws_regions` data source requires
  # the provider, which requires a region.
  regions = {
    "af-south-1" = {
      name = "Africa (Cape Town)"
      icon = "/emojis/1f1ff-1f1e6.png"
    }
    "ap-east-1" = {
      name = "Asia Pacific (Hong Kong)"
      icon = "/emojis/1f1ed-1f1f0.png"
    }
    "ap-northeast-1" = {
      name = "Asia Pacific (Tokyo)"
      icon = "/emojis/1f1ef-1f1f5.png"
@@ -72,6 +86,10 @@ locals {
      name = "Asia Pacific (Mumbai)"
      icon = "/emojis/1f1ee-1f1f3.png"
    }
    "ap-south-2" = {
      name = "Asia Pacific (Hyderabad)"
      icon = "/emojis/1f1ee-1f1f3.png"
    }
    "ap-southeast-1" = {
      name = "Asia Pacific (Singapore)"
      icon = "/emojis/1f1f8-1f1ec.png"
@@ -80,18 +98,42 @@ locals {
      name = "Asia Pacific (Sydney)"
      icon = "/emojis/1f1e6-1f1fa.png"
    }
    "ap-southeast-3" = {
      name = "Asia Pacific (Jakarta)"
      icon = "/emojis/1f1ee-1f1e9.png"
    }
    "ap-southeast-4" = {
      name = "Asia Pacific (Melbourne)"
      icon = "/emojis/1f1e6-1f1fa.png"
    }
    "ca-central-1" = {
      name = "Canada (Central)"
      icon = "/emojis/1f1e8-1f1e6.png"
    }
    "ca-west-1" = {
      name = "Canada West (Calgary)"
      icon = "/emojis/1f1e8-1f1e6.png"
    }
    "eu-central-1" = {
      name = "EU (Frankfurt)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "eu-central-2" = {
      name = "Europe (Zurich)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "eu-north-1" = {
      name = "EU (Stockholm)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "eu-south-1" = {
      name = "Europe (Milan)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "eu-south-2" = {
      name = "Europe (Spain)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "eu-west-1" = {
      name = "EU (Ireland)"
      icon = "/emojis/1f1ea-1f1fa.png"
@@ -104,6 +146,14 @@ locals {
      name = "EU (Paris)"
      icon = "/emojis/1f1ea-1f1fa.png"
    }
    "il-central-1" = {
      name = "Israel (Tel Aviv)"
      icon = "/emojis/1f1ee-1f1f1.png"
    }
    "me-south-1" = {
      name = "Middle East (Bahrain)"
      icon = "/emojis/1f1e7-1f1ed.png"
    }
    "sa-east-1" = {
      name = "South America (São Paulo)"
      icon = "/emojis/1f1e7-1f1f7.png"
@@ -132,6 +182,7 @@ data "coder_parameter" "region" {
  display_name = var.display_name
  description  = var.description
  default      = var.default == "" ? null : var.default
  order        = var.coder_parameter_order
  mutable      = var.mutable
  dynamic "option" {
    for_each = { for k, v in local.regions : k => v if !(contains(var.exclude, k)) }
@@ -145,4 +196,4 @@ data "coder_parameter" "region" {

output "value" {
  value = data.coder_parameter.region.value
}
}
@@ -14,7 +14,7 @@ This module adds a parameter with all Azure regions, allowing developers to sele
```tf
module "azure_region" {
  source  = "registry.coder.com/modules/azure-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  default = "eastus"
}

@@ -34,7 +34,7 @@ Change the display name and icon for a region using the corresponding maps:
```tf
module "azure-region" {
  source  = "registry.coder.com/modules/azure-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  custom_names = {
    "australia" : "Go Australia!"
  }
@@ -57,7 +57,7 @@ Hide all regions in Australia except australiacentral:
```tf
module "azure-region" {
  source  = "registry.coder.com/modules/azure-region/coder"
  version = "1.0.2"
  version = "1.0.12"
  exclude = [
    "australia",
    "australiacentral2",
@@ -1,6 +1,5 @@
import { describe, expect, it } from "bun:test";
import {
  executeScriptInContainer,
  runTerraformApply,
  runTerraformInit,
  testRequiredVariables,
@@ -22,4 +21,13 @@ describe("azure-region", async () => {
    });
    expect(state.outputs.value.value).toBe("westus");
  });

  it("set custom order for coder_parameter", async () => {
    const order = 99;
    const state = await runTerraformApply(import.meta.dir, {
      coder_parameter_order: order.toString(),
    });
    expect(state.resources).toHaveLength(1);
    expect(state.resources[0].instances[0].attributes.order).toBe(order);
  });
});
@@ -50,6 +50,12 @@ variable "exclude" {
  type = list(string)
}

variable "coder_parameter_order" {
  type        = number
  description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
  default     = null
}

locals {
  # Note: Options are limited to 64 regions, some redundant regions have been removed.
  all_regions = {
@@ -309,6 +315,7 @@ data "coder_parameter" "region" {
  display_name = var.display_name
  description  = var.description
  default      = var.default == "" ? null : var.default
  order        = var.coder_parameter_order
  mutable      = var.mutable
  icon         = "/icon/azure.png"
  dynamic "option" {
@@ -14,7 +14,7 @@ Automatically install [code-server](https://github.com/coder/code-server) in a w
```tf
module "code-server" {
  source   = "registry.coder.com/modules/code-server/coder"
  version  = "1.0.2"
  version  = "1.0.26"
  agent_id = coder_agent.example.id
}
```
@@ -28,7 +28,7 @@ module "code-server" {
```tf
module "code-server" {
  source          = "registry.coder.com/modules/code-server/coder"
  version         = "1.0.2"
  version         = "1.0.26"
  agent_id        = coder_agent.example.id
  install_version = "4.8.3"
}
@@ -41,7 +41,7 @@ Install the Dracula theme from [OpenVSX](https://open-vsx.org/):
```tf
module "code-server" {
  source     = "registry.coder.com/modules/code-server/coder"
  version    = "1.0.2"
  version    = "1.0.26"
  agent_id   = coder_agent.example.id
  extensions = [
    "dracula-theme.theme-dracula"
@@ -58,7 +58,7 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte
```tf
module "code-server" {
  source     = "registry.coder.com/modules/code-server/coder"
  version    = "1.0.2"
  version    = "1.0.26"
  agent_id   = coder_agent.example.id
  extensions = ["dracula-theme.theme-dracula"]
  settings = {
@@ -74,20 +74,34 @@ Just run code-server in the background, don't fetch it from GitHub:
```tf
module "code-server" {
  source     = "registry.coder.com/modules/code-server/coder"
  version    = "1.0.2"
  version    = "1.0.26"
  agent_id   = coder_agent.example.id
  extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
}
```

### Offline Mode
### Offline and Use Cached Modes

By default the module looks for code-server at `/tmp/code-server` but this can be changed with `install_prefix`.
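A minimal sketch of overriding that location with `install_prefix`; the path shown is illustrative:

```tf
module "code-server" {
  source         = "registry.coder.com/modules/code-server/coder"
  version        = "1.0.26"
  agent_id       = coder_agent.example.id
  install_prefix = "/opt/code-server" # illustrative; the module defaults to /tmp/code-server
}
```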

Run an existing copy of code-server if found, otherwise download from GitHub:

```tf
module "code-server" {
  source     = "registry.coder.com/modules/code-server/coder"
  version    = "1.0.26"
  agent_id   = coder_agent.example.id
  use_cached = true
  extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
}
```

Just run code-server in the background, don't fetch it from GitHub:

```tf
module "code-server" {
  source   = "registry.coder.com/modules/code-server/coder"
  version  = "1.0.2"
  version  = "1.0.26"
  agent_id = coder_agent.example.id
  offline  = true
}
@@ -1,5 +1,9 @@
import { describe, expect, it } from "bun:test";
import { runTerraformInit, testRequiredVariables } from "../test";
import {
  runTerraformApply,
  runTerraformInit,
  testRequiredVariables,
} from "../test";

describe("code-server", async () => {
  await runTerraformInit(import.meta.dir);
@@ -8,5 +12,27 @@ describe("code-server", async () => {
    agent_id: "foo",
  });

  it("use_cached and offline can not be used together", () => {
    const t = async () => {
      await runTerraformApply(import.meta.dir, {
        agent_id: "foo",
        use_cached: "true",
        offline: "true",
      });
    };
    expect(t).toThrow("Offline and Use Cached can not be used together");
  });

  it("offline and extensions can not be used together", () => {
    const t = async () => {
      await runTerraformApply(import.meta.dir, {
        agent_id: "foo",
        offline: "true",
        extensions: '["1", "2"]',
      });
    };
    expect(t).toThrow("Offline mode does not allow extensions to be installed");
  });

  // More tests depend on shebang refactors
});
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -32,8 +32,14 @@ variable "display_name" {
|
||||
default = "code-server"
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug for the code-server application."
|
||||
default = "code-server"
|
||||
}
|
||||
|
||||
variable "settings" {
|
||||
type = map(string)
|
||||
type = any
|
||||
description = "A map of settings to apply to code-server."
|
||||
default = {}
|
||||
}
|
||||
@@ -71,6 +77,51 @@ variable "share" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "offline" {
|
||||
type = bool
|
||||
description = "Just run code-server in the background, don't fetch it from GitHub"
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "use_cached" {
|
||||
type = bool
|
||||
description = "Uses cached copy code-server in the background, otherwise fetched it from GitHub"
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "use_cached_extensions" {
|
||||
type = bool
|
||||
description = "Uses cached copy of extensions, otherwise do a forced upgrade"
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "extensions_dir" {
|
||||
type = string
|
||||
description = "Override the directory to store extensions in."
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "auto_install_extensions" {
|
||||
type = bool
|
||||
description = "Automatically install recommended extensions when code-server starts."
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "subdomain" {
|
||||
type = bool
|
||||
description = <<-EOT
|
||||
Determines whether the app will be accessed via its own subdomain or whether it will be accessed via a path on Coder.
|
||||
If wildcards have not been set up by the administrator, then apps with "subdomain" set to true will not be accessible.
|
||||
EOT
|
||||
default = false
|
||||
}
|
||||
|
||||
resource "coder_script" "code-server" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "code-server"
|
||||
@@ -78,23 +129,43 @@ resource "coder_script" "code-server" {
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
VERSION : var.install_version,
|
||||
EXTENSIONS : join(",", var.extensions),
|
||||
APP_NAME : var.display_name,
|
||||
PORT : var.port,
|
||||
LOG_PATH : var.log_path,
|
||||
INSTALL_PREFIX : var.install_prefix,
|
||||
// This is necessary otherwise the quotes are stripped!
|
||||
SETTINGS : replace(jsonencode(var.settings), "\"", "\\\""),
|
||||
OFFLINE : var.offline,
|
||||
USE_CACHED : var.use_cached,
|
||||
USE_CACHED_EXTENSIONS : var.use_cached_extensions,
|
||||
EXTENSIONS_DIR : var.extensions_dir,
|
||||
FOLDER : var.folder,
|
||||
AUTO_INSTALL_EXTENSIONS : var.auto_install_extensions,
|
||||
})
|
||||
run_on_start = true
|
||||
|
||||
lifecycle {
|
||||
precondition {
|
||||
condition = !var.offline || length(var.extensions) == 0
|
||||
error_message = "Offline mode does not allow extensions to be installed"
|
||||
}
|
||||
|
||||
precondition {
|
||||
condition = !var.offline || !var.use_cached
|
||||
error_message = "Offline and Use Cached can not be used together"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "code-server" {
|
||||
agent_id = var.agent_id
|
||||
slug = "code-server"
|
||||
slug = var.slug
|
||||
display_name = var.display_name
|
||||
url = "http://localhost:${var.port}/${var.folder != "" ? "?folder=${urlencode(var.folder)}" : ""}"
|
||||
icon = "/icon/code.svg"
|
||||
subdomain = false
|
||||
subdomain = var.subdomain
|
||||
share = var.share
|
||||
order = var.order
|
||||
|
||||
healthcheck {
|
||||
url = "http://localhost:${var.port}/healthz"
|
||||
|
||||
@@ -4,39 +4,20 @@ EXTENSIONS=("${EXTENSIONS}")
|
||||
BOLD='\033[0;1m'
|
||||
CODE='\033[36;40;1m'
|
||||
RESET='\033[0m'
|
||||
|
||||
printf "$${BOLD}Installing code-server!\n"
|
||||
|
||||
ARGS=(
|
||||
"--method=standalone"
|
||||
"--prefix=${INSTALL_PREFIX}"
|
||||
)
|
||||
if [ -n "${VERSION}" ]; then
|
||||
ARGS+=("--version=${VERSION}")
|
||||
fi
|
||||
|
||||
output=$(curl -fsSL https://code-server.dev/install.sh | sh -s -- "$${ARGS[@]}")
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Failed to install code-server: $output"
|
||||
exit 1
|
||||
fi
|
||||
printf "🥳 code-server has been installed in ${INSTALL_PREFIX}\n\n"
|
||||
|
||||
CODE_SERVER="${INSTALL_PREFIX}/bin/code-server"
|
||||
|
||||
# Install each extension...
|
||||
IFS=',' read -r -a EXTENSIONLIST <<< "$${EXTENSIONS}"
|
||||
for extension in "$${EXTENSIONLIST[@]}"; do
|
||||
if [ -z "$extension" ]; then
|
||||
continue
|
||||
fi
|
||||
printf "🧩 Installing extension $${CODE}$extension$${RESET}...\n"
|
||||
output=$($CODE_SERVER --install-extension "$extension")
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Failed to install extension: $extension: $output"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
# Set extension directory
|
||||
EXTENSION_ARG=""
|
||||
if [ -n "${EXTENSIONS_DIR}" ]; then
|
||||
EXTENSION_ARG="--extensions-dir=${EXTENSIONS_DIR}"
|
||||
mkdir -p "${EXTENSIONS_DIR}"
|
||||
fi
|
||||
|
||||
function run_code_server() {
|
||||
echo "👷 Running code-server in the background..."
|
||||
echo "Check logs at ${LOG_PATH}!"
|
||||
$CODE_SERVER "$EXTENSION_ARG" --auth none --port "${PORT}" --app-name "${APP_NAME}" > "${LOG_PATH}" 2>&1 &
|
||||
}
|
||||
|
||||
# Check if the settings file exists...
|
||||
if [ ! -f ~/.local/share/code-server/User/settings.json ]; then
|
||||
@@ -45,6 +26,92 @@ if [ ! -f ~/.local/share/code-server/User/settings.json ]; then
|
||||
echo "${SETTINGS}" > ~/.local/share/code-server/User/settings.json
|
||||
fi
|
||||
|
||||
echo "👷 Running code-server in the background..."
|
||||
echo "Check logs at ${LOG_PATH}!"
|
||||
$CODE_SERVER --auth none --port ${PORT} > ${LOG_PATH} 2>&1 &
|
||||
# Check if code-server is already installed for offline
|
||||
if [ "${OFFLINE}" = true ]; then
|
||||
if [ -f "$CODE_SERVER" ]; then
|
||||
echo "🥳 Found a copy of code-server"
|
||||
run_code_server
|
||||
exit 0
|
||||
fi
|
||||
# Offline mode always expects a copy of code-server to be present
|
||||
echo "Failed to find a copy of code-server"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If there is no cached install OR we don't want to use a cached install
|
||||
if [ ! -f "$CODE_SERVER" ] || [ "${USE_CACHED}" != true ]; then
|
||||
printf "$${BOLD}Installing code-server!\n"
|
||||
|
||||
ARGS=(
|
||||
"--method=standalone"
|
||||
"--prefix=${INSTALL_PREFIX}"
|
||||
)
|
||||
if [ -n "${VERSION}" ]; then
|
||||
ARGS+=("--version=${VERSION}")
|
||||
fi
|
||||
|
||||
output=$(curl -fsSL https://code-server.dev/install.sh | sh -s -- "$${ARGS[@]}")
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Failed to install code-server: $output"
|
||||
exit 1
|
||||
fi
|
||||
printf "🥳 code-server has been installed in ${INSTALL_PREFIX}\n\n"
|
||||
fi
|
||||
|
||||
# Get the list of installed extensions...
|
||||
LIST_EXTENSIONS=$($CODE_SERVER --list-extensions $EXTENSION_ARG)
|
||||
readarray -t EXTENSIONS_ARRAY <<< "$LIST_EXTENSIONS"
|
||||
function extension_installed() {
|
||||
if [ "${USE_CACHED_EXTENSIONS}" != true ]; then
|
||||
return 1
|
||||
fi
|
||||
for _extension in "$${EXTENSIONS_ARRAY[@]}"; do
|
||||
if [ "$_extension" == "$1" ]; then
|
||||
echo "Extension $1 was already installed."
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# Install each extension...
|
||||
IFS=',' read -r -a EXTENSIONLIST <<< "$${EXTENSIONS}"
|
||||
for extension in "$${EXTENSIONLIST[@]}"; do
|
||||
if [ -z "$extension" ]; then
|
||||
continue
|
||||
fi
|
||||
if extension_installed "$extension"; then
|
||||
continue
|
||||
fi
|
||||
printf "🧩 Installing extension $${CODE}$extension$${RESET}...\n"
|
||||
output=$($CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension")
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Failed to install extension: $extension: $output"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "${AUTO_INSTALL_EXTENSIONS}" = true ]; then
|
||||
if ! command -v jq > /dev/null; then
|
||||
echo "jq is required to install extensions from a workspace file."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
WORKSPACE_DIR="$HOME"
|
||||
if [ -n "${FOLDER}" ]; then
|
||||
WORKSPACE_DIR="${FOLDER}"
|
||||
fi
|
||||
|
||||
if [ -f "$WORKSPACE_DIR/.vscode/extensions.json" ]; then
|
||||
printf "🧩 Installing extensions from %s/.vscode/extensions.json...\n" "$WORKSPACE_DIR"
|
||||
extensions=$(jq -r '.recommendations[]' "$WORKSPACE_DIR"/.vscode/extensions.json)
|
||||
for extension in $extensions; do
|
||||
if extension_installed "$extension"; then
|
||||
continue
|
||||
fi
|
||||
$CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension"
|
||||
done
|
||||
fi
|
||||
fi
|
||||
|
||||
run_code_server
|
||||
|
||||
@@ -14,7 +14,7 @@ Automatically logs the user into Coder when creating their workspace.
|
||||
```tf
|
||||
module "coder-login" {
|
||||
source = "registry.coder.com/modules/coder-login/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
executeScriptInContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
import { describe } from "bun:test";
|
||||
import { runTerraformInit, testRequiredVariables } from "../test";
|
||||
|
||||
describe("coder-login", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -15,11 +15,12 @@ variable "agent_id" {
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_script" "coder-login" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
CODER_USER_TOKEN : data.coder_workspace.me.owner_session_token,
|
||||
CODER_USER_TOKEN : data.coder_workspace_owner.me.session_token,
|
||||
CODER_DEPLOYMENT_URL : data.coder_workspace.me.access_url
|
||||
})
|
||||
display_name = "Coder Login"
|
||||
|
||||
35
cursor/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
---
|
||||
display_name: Cursor IDE
|
||||
description: Add a one-click button to launch Cursor IDE
|
||||
icon: ../.icons/cursor.svg
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [ide, cursor, helper]
|
||||
---
|
||||
|
||||
# Cursor IDE
|
||||
|
||||
Add a button to open any workspace with a single click in Cursor IDE.
|
||||
|
||||
Uses the [Coder Remote VS Code Extension](https://github.com/coder/cursor-coder).
|
||||
|
||||
```tf
|
||||
module "cursor" {
|
||||
source = "registry.coder.com/modules/cursor/coder"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Open in a specific directory
|
||||
|
||||
```tf
|
||||
module "cursor" {
|
||||
source = "registry.coder.com/modules/cursor/coder"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/project"
|
||||
}
|
||||
```
|
||||
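### Reopen the most recent workspace

The module also exposes an `open_recent` flag (defined in `cursor/main.tf` below); a hedged sketch that reopens the most recent workspace or folder and falls back to the given folder when nothing recent exists:

```tf
module "cursor" {
  source      = "registry.coder.com/modules/cursor/coder"
  version     = "1.0.19"
  agent_id    = coder_agent.example.id
  folder      = "/home/coder/project"
  open_recent = true
}
```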
88
cursor/main.test.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("cursor", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("default output", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "cursor",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBeNull();
|
||||
});
|
||||
|
||||
it("adds folder", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds folder and open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
open_recent: "true",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds folder but not open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
openRecent: "false",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
open_recent: "true",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("expect order to be set", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
order: "22",
|
||||
});
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "cursor",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBe(22);
|
||||
});
|
||||
});
|
||||
62
cursor/main.tf
Normal file
@@ -0,0 +1,62 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "folder" {
|
||||
type = string
|
||||
description = "The folder to open in Cursor IDE."
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "open_recent" {
|
||||
type = bool
|
||||
description = "Open the most recent workspace or folder. Falls back to the folder if there is no recent workspace or folder to open."
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_app" "cursor" {
|
||||
agent_id = var.agent_id
|
||||
external = true
|
||||
icon = "/icon/cursor.svg"
|
||||
slug = "cursor"
|
||||
display_name = "Cursor Desktop"
|
||||
order = var.order
|
||||
url = join("", [
|
||||
"cursor://coder.coder-remote/open",
|
||||
"?owner=",
|
||||
data.coder_workspace_owner.me.name,
|
||||
"&workspace=",
|
||||
data.coder_workspace.me.name,
|
||||
var.folder != "" ? join("", ["&folder=", var.folder]) : "",
|
||||
var.open_recent ? "&openRecent" : "",
|
||||
"&url=",
|
||||
data.coder_workspace.me.access_url,
|
||||
"&token=$SESSION_TOKEN",
|
||||
])
|
||||
}
|
||||
|
||||
output "cursor_url" {
|
||||
value = coder_app.cursor.url
|
||||
description = "Cursor IDE Desktop URL."
|
||||
}
|
||||
@@ -9,12 +9,70 @@ tags: [helper]
|
||||
|
||||
# Dotfiles
|
||||
|
||||
Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io)! Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
|
||||
Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io).
|
||||
|
||||
This will prompt the user for their dotfiles repository URL on template creation using a `coder_parameter`.
|
||||
|
||||
Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Apply dotfiles as the current user
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
### Apply dotfiles as another user (only works if sudo is passwordless)
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
user = "root"
|
||||
}
|
||||
```
|
||||
|
||||
### Apply the same dotfiles as the current user and root (the root dotfiles can only be applied if sudo is passwordless)
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
|
||||
module "dotfiles-root" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
user = "root"
|
||||
dotfiles_uri = module.dotfiles.dotfiles_uri
|
||||
}
|
||||
```
|
||||
|
||||
## Setting a default dotfiles repository
|
||||
|
||||
You can set a default dotfiles repository for all users by setting the `default_dotfiles_uri` variable:
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
default_dotfiles_uri = "https://github.com/coder/dotfiles"
|
||||
}
|
||||
```
|
||||
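## Ordering the dotfiles prompt

The module also exposes a `coder_parameter_order` variable (see `main.tf` below); a minimal sketch that pins the dotfiles prompt near the top of the parameter list (the value `1` is only an illustration):

```tf
module "dotfiles" {
  source                = "registry.coder.com/modules/dotfiles/coder"
  version               = "1.0.18"
  agent_id              = coder_agent.example.id
  coder_parameter_order = 1
}
```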
|
||||
@@ -18,4 +18,23 @@ describe("dotfiles", async () => {
|
||||
});
|
||||
expect(state.outputs.dotfiles_uri.value).toBe("");
|
||||
});
|
||||
|
||||
it("set a default dotfiles_uri", async () => {
|
||||
const default_dotfiles_uri = "foo";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
default_dotfiles_uri,
|
||||
});
|
||||
expect(state.outputs.dotfiles_uri.value).toBe(default_dotfiles_uri);
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter", async () => {
|
||||
const order = 99;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(2);
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,30 +14,78 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "default_dotfiles_uri" {
|
||||
type = string
|
||||
description = "The default dotfiles URI if the workspace user does not provide one"
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "dotfiles_uri" {
|
||||
type = string
|
||||
description = "The URL to a dotfiles repository. (optional, when set, the user isn't prompted for their dotfiles)"
|
||||
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "user" {
|
||||
type = string
|
||||
description = "The name of the user to apply the dotfiles to. (optional, applies to the current user by default)"
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "manual_update" {
|
||||
type = bool
|
||||
description = "If true, this adds a button to workspace page to refresh dotfiles on demand."
|
||||
default = false
|
||||
}
|
||||
|
||||
data "coder_parameter" "dotfiles_uri" {
|
||||
count = var.dotfiles_uri == null ? 1 : 0
|
||||
type = "string"
|
||||
name = "dotfiles_uri"
|
||||
display_name = "Dotfiles URL (optional)"
|
||||
default = ""
|
||||
display_name = "Dotfiles URL"
|
||||
order = var.coder_parameter_order
|
||||
default = var.default_dotfiles_uri
|
||||
description = "Enter a URL for a [dotfiles repository](https://dotfiles.github.io) to personalize your workspace"
|
||||
mutable = true
|
||||
icon = "/icon/dotfiles.svg"
|
||||
}
|
||||
|
||||
resource "coder_script" "personalize" {
|
||||
agent_id = var.agent_id
|
||||
script = <<-EOT
|
||||
DOTFILES_URI="${data.coder_parameter.dotfiles_uri.value}"
|
||||
if [ -n "$${DOTFILES_URI// }" ]; then
|
||||
coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee -a ~/.dotfiles.log
|
||||
fi
|
||||
EOT
|
||||
locals {
|
||||
dotfiles_uri = var.dotfiles_uri != null ? var.dotfiles_uri : data.coder_parameter.dotfiles_uri[0].value
|
||||
user = var.user != null ? var.user : ""
|
||||
}
|
||||
|
||||
resource "coder_script" "dotfiles" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
DOTFILES_URI : local.dotfiles_uri,
|
||||
DOTFILES_USER : local.user
|
||||
})
|
||||
display_name = "Dotfiles"
|
||||
icon = "/icon/dotfiles.svg"
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "dotfiles" {
|
||||
count = var.manual_update ? 1 : 0
|
||||
agent_id = var.agent_id
|
||||
display_name = "Refresh Dotfiles"
|
||||
slug = "dotfiles"
|
||||
icon = "/icon/dotfiles.svg"
|
||||
command = templatefile("${path.module}/run.sh", {
|
||||
DOTFILES_URI : local.dotfiles_uri,
|
||||
DOTFILES_USER : local.user
|
||||
})
|
||||
}
|
||||
|
||||
output "dotfiles_uri" {
|
||||
description = "Dotfiles URI"
|
||||
value = data.coder_parameter.dotfiles_uri.value
|
||||
}
|
||||
value = local.dotfiles_uri
|
||||
}
|
||||
|
||||
23
dotfiles/run.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env bash
|
||||
DOTFILES_URI="${DOTFILES_URI}"
|
||||
DOTFILES_USER="${DOTFILES_USER}"
|
||||
|
||||
if [ -n "$${DOTFILES_URI// }" ]; then
|
||||
if [ -z "$DOTFILES_USER" ]; then
|
||||
DOTFILES_USER="$USER"
|
||||
fi
|
||||
|
||||
echo "✨ Applying dotfiles for user $DOTFILES_USER"
|
||||
|
||||
if [ "$DOTFILES_USER" = "$USER" ]; then
|
||||
coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee ~/.dotfiles.log
|
||||
else
|
||||
# The `eval echo ~"$DOTFILES_USER"` part is used to dynamically get the home directory of the user, see https://superuser.com/a/484280
|
||||
# eval echo ~coder -> "/home/coder"
|
||||
# eval echo ~root -> "/root"
|
||||
|
||||
CODER_BIN=$(which coder)
|
||||
DOTFILES_USER_HOME=$(eval echo ~"$DOTFILES_USER")
|
||||
sudo -u "$DOTFILES_USER" sh -c "'$CODER_BIN' dotfiles '$DOTFILES_URI' -y 2>&1 | tee '$DOTFILES_USER_HOME'/.dotfiles.log"
|
||||
fi
|
||||
fi
|
||||
@@ -17,7 +17,7 @@ Customize the preselected parameter value:
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "standard.medium"
|
||||
}
|
||||
|
||||
@@ -45,7 +45,7 @@ Change the display name of a type using the corresponding maps:
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "standard.medium"
|
||||
|
||||
custom_names = {
|
||||
@@ -79,7 +79,7 @@ Show only gpu1 types
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "gpu.large"
|
||||
type_category = ["gpu"]
|
||||
exclude = [
|
||||
|
||||
@@ -31,4 +31,13 @@ describe("exoscale-instance-type", async () => {
|
||||
});
|
||||
}).toThrow('default value "gpu3.huge" must be defined as one of options');
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter", async () => {
|
||||
const order = 99;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(1);
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -56,6 +56,12 @@ variable "exclude" {
|
||||
type = list(string)
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
locals {
|
||||
# https://www.exoscale.com/pricing/
|
||||
|
||||
@@ -257,6 +263,7 @@ data "coder_parameter" "instance_type" {
|
||||
display_name = var.display_name
|
||||
description = var.description
|
||||
default = var.default == "" ? null : var.default
|
||||
order = var.coder_parameter_order
|
||||
mutable = var.mutable
|
||||
dynamic "option" {
|
||||
for_each = [for k, v in concat(
|
||||
|
||||
@@ -17,7 +17,7 @@ Customize the preselected parameter value:
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "ch-dk-2"
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ Change the display name and icon for a zone using the corresponding maps:
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "at-vie-1"
|
||||
|
||||
custom_names = {
|
||||
@@ -76,7 +76,7 @@ Hide the Switzerland zones Geneva and Zurich
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
exclude = ["ch-gva-2", "ch-dk-2"]
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
executeScriptInContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
@@ -22,4 +21,13 @@ describe("exoscale-zone", async () => {
|
||||
});
|
||||
expect(state.outputs.value.value).toBe("at-vie-1");
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter", async () => {
|
||||
const order = 99;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(1);
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -51,6 +51,11 @@ variable "exclude" {
|
||||
type = list(string)
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
locals {
|
||||
# This is a static list because the zones don't change _that_
|
||||
@@ -94,6 +99,7 @@ data "coder_parameter" "zone" {
|
||||
display_name = var.display_name
|
||||
description = var.description
|
||||
default = var.default == "" ? null : var.default
|
||||
order = var.coder_parameter_order
|
||||
mutable = var.mutable
|
||||
dynamic "option" {
|
||||
for_each = { for k, v in local.zones : k => v if !(contains(var.exclude, k)) }
|
||||
|
||||
@@ -14,7 +14,7 @@ A file browser for your workspace.
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
@@ -28,7 +28,7 @@ module "filebrowser" {
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/project"
|
||||
}
|
||||
@@ -39,8 +39,19 @@ module "filebrowser" {
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
database_path = ".config/filebrowser.db"
|
||||
}
|
||||
```
|
||||
|
||||
### Serve from the same domain (no subdomain)
|
||||
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "main"
|
||||
subdomain = false
|
||||
}
|
||||
```
|
||||
|
||||
@@ -88,4 +88,27 @@ describe("filebrowser", async () => {
|
||||
"📝 Logs at /tmp/filebrowser.log",
|
||||
]);
|
||||
});
|
||||
|
||||
it("runs with subdomain=false", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
agent_name: "main",
|
||||
subdomain: false,
|
||||
});
|
||||
const output = await executeScriptInContainer(state, "alpine");
|
||||
expect(output.exitCode).toBe(0);
|
||||
expect(output.stdout).toEqual([
|
||||
"\u001B[0;1mInstalling filebrowser ",
|
||||
"",
|
||||
"🥳 Installation complete! ",
|
||||
"",
|
||||
"👷 Starting filebrowser in background... ",
|
||||
"",
|
||||
"📂 Serving /root at http://localhost:13339 ",
|
||||
"",
|
||||
"Running 'filebrowser --noauth --root /root --port 13339' ",
|
||||
"",
|
||||
"📝 Logs at /tmp/filebrowser.log",
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,16 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
variable "agent_name" {
|
||||
type = string
|
||||
description = "The name of the coder_agent resource. (Only required if subdomain is false and the template uses multiple agents.)"
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "database_path" {
|
||||
type = string
|
||||
description = "The path to the filebrowser database."
|
||||
@@ -52,26 +62,62 @@ variable "share" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug of the coder_app resource."
|
||||
default = "filebrowser"
|
||||
}
|
||||
|
||||
variable "subdomain" {
|
||||
type = bool
|
||||
description = <<-EOT
|
||||
Determines whether the app will be accessed via its own subdomain or whether it will be accessed via a path on Coder.
|
||||
If wildcards have not been set up by the administrator, then apps with "subdomain" set to true will not be accessible.
|
||||
EOT
|
||||
default = true
|
||||
}
|
||||
|
||||
resource "coder_script" "filebrowser" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "File Browser"
|
||||
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
||||
icon = "/icon/filebrowser.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
LOG_PATH : var.log_path,
|
||||
PORT : var.port,
|
||||
FOLDER : var.folder,
|
||||
LOG_PATH : var.log_path,
|
||||
DB_PATH : var.database_path
|
||||
DB_PATH : var.database_path,
|
||||
SUBDOMAIN : var.subdomain,
|
||||
SERVER_BASE_PATH : local.server_base_path
|
||||
})
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "filebrowser" {
|
||||
agent_id = var.agent_id
|
||||
slug = "filebrowser"
|
||||
slug = var.slug
|
||||
display_name = "File Browser"
|
||||
url = "http://localhost:${var.port}"
|
||||
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
||||
subdomain = true
|
||||
url = local.url
|
||||
icon = "/icon/filebrowser.svg"
|
||||
subdomain = var.subdomain
|
||||
share = var.share
|
||||
order = var.order
|
||||
|
||||
healthcheck {
|
||||
url = local.healthcheck_url
|
||||
interval = 5
|
||||
threshold = 6
|
||||
}
|
||||
}
|
||||
|
||||
locals {
|
||||
server_base_path = var.subdomain ? "" : format("/@%s/%s%s/apps/%s", data.coder_workspace_owner.me.name, data.coder_workspace.me.name, var.agent_name != null ? ".${var.agent_name}" : "", var.slug)
|
||||
url = "http://localhost:${var.port}${local.server_base_path}"
|
||||
healthcheck_url = "http://localhost:${var.port}${local.server_base_path}/health"
|
||||
}
|
||||
@@ -1,9 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
printf "$${BOLD}Installing filebrowser \n\n"
|
||||
|
||||
curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash
|
||||
# Check if filebrowser is installed
|
||||
if ! command -v filebrowser &> /dev/null; then
|
||||
curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash
|
||||
fi
|
||||
|
||||
printf "🥳 Installation complete! \n\n"
|
||||
|
||||
@@ -17,6 +21,9 @@ if [ "${DB_PATH}" != "filebrowser.db" ]; then
|
||||
DB_FLAG=" -d ${DB_PATH}"
|
||||
fi
|
||||
|
||||
# Set the baseurl so filebrowser can run when subdomain=false; when subdomain=true, the SERVER_BASE_PATH value will be ""
|
||||
filebrowser config set --baseurl "${SERVER_BASE_PATH}"$${DB_FLAG} > ${LOG_PATH} 2>&1
|
||||
|
||||
printf "📂 Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n"
|
||||
|
||||
printf "Running 'filebrowser --noauth --root $ROOT_DIR --port ${PORT}$${DB_FLAG}' \n\n"
|
||||
|
||||
@@ -14,7 +14,7 @@ This module adds Google Cloud Platform regions to your Coder template.
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["us", "europe"]
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ Note: setting `gpu_only = true` and using a default region without GPU support,
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = ["us-west1-a"]
|
||||
regions = ["us-west1"]
|
||||
gpu_only = false
|
||||
@@ -50,7 +50,7 @@ resource "google_compute_instance" "example" {
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["europe-west"]
|
||||
single_zone_per_region = false
|
||||
}
|
||||
@@ -65,7 +65,7 @@ resource "google_compute_instance" "example" {
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["us", "europe"]
|
||||
gpu_only = true
|
||||
single_zone_per_region = true
|
||||
|
||||
@@ -40,4 +40,13 @@ describe("gcp-region", async () => {
|
||||
});
|
||||
expect(state.outputs.value.value).toBe("us-west2-b");
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter", async () => {
|
||||
const order = 99;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(1);
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -63,6 +63,12 @@ variable "single_zone_per_region" {
|
||||
type = bool
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
locals {
|
||||
zones = {
|
||||
# US Central
|
||||
@@ -715,6 +721,7 @@ data "coder_parameter" "region" {
|
||||
icon = "/icon/gcp.png"
|
||||
mutable = var.mutable
|
||||
default = var.default != null && var.default != "" && (!var.gpu_only || try(local.zones[var.default].gpu, false)) ? var.default : null
|
||||
order = var.coder_parameter_order
|
||||
dynamic "option" {
|
||||
for_each = {
|
||||
for k, v in local.zones : k => v
|
||||
|
||||
@@ -14,7 +14,7 @@ This module allows you to automatically clone a repository by URL and skip if it
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
}
|
||||
@@ -27,7 +27,7 @@ module "git-clone" {
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
base_dir = "~/projects/coder"
|
||||
@@ -41,7 +41,7 @@ To use with [Git Authentication](https://coder.com/docs/v2/latest/admin/git-prov
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
}
|
||||
@@ -50,3 +50,123 @@ data "coder_git_auth" "github" {
|
||||
id = "github"
|
||||
}
|
||||
```
|
||||
|
||||
## GitHub clone with branch name
|
||||
|
||||
To clone a GitHub repository on a specific branch such as `feat/example`:
|
||||
|
||||
```tf
|
||||
# Prompt the user for the git repo URL
|
||||
data "coder_parameter" "git_repo" {
|
||||
name = "git_repo"
|
||||
display_name = "Git repository"
|
||||
default = "https://github.com/coder/coder/tree/feat/example"
|
||||
}
|
||||
|
||||
# Clone the repository for branch `feat/example`
|
||||
module "git_clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = data.coder_parameter.git_repo.value
|
||||
}
|
||||
|
||||
# Create a code-server instance for the cloned repository
|
||||
module "code-server" {
|
||||
source = "registry.coder.com/modules/code-server/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
order = 1
|
||||
folder = "/home/${local.username}/${module.git_clone.folder_name}"
|
||||
}
|
||||
|
||||
# Create a Coder app for the website
|
||||
resource "coder_app" "website" {
|
||||
agent_id = coder_agent.example.id
|
||||
order = 2
|
||||
slug = "website"
|
||||
external = true
|
||||
display_name = module.git_clone.folder_name
|
||||
url = module.git_clone.web_url
|
||||
icon = module.git_clone.git_provider != "" ? "/icon/${module.git_clone.git_provider}.svg" : "/icon/git.svg"
|
||||
count = module.git_clone.web_url != "" ? 1 : 0
|
||||
}
|
||||
```
|
||||
|
||||
Configuring `git-clone` for a self-hosted GitHub Enterprise Server running at `github.example.com`:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.example.com/coder/coder/tree/feat/example"
|
||||
git_providers = {
|
||||
"https://github.example.com/" = {
|
||||
provider = "github"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## GitLab clone with branch name
|
||||
|
||||
To clone a GitLab repository on a specific branch such as `feat/example`:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://gitlab.com/coder/coder/-/tree/feat/example"
|
||||
}
|
||||
```
|
||||
|
||||
Configuring `git-clone` for a self-hosted GitLab running at `gitlab.example.com`:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://gitlab.example.com/coder/coder/-/tree/feat/example"
|
||||
git_providers = {
|
||||
"https://gitlab.example.com/" = {
|
||||
provider = "gitlab"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Git clone with branch_name set
|
||||
|
||||
Alternatively, you can set the `branch_name` attribute to clone a specific branch.
|
||||
|
||||
For example, to clone the `feat/example` branch:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
branch_name = "feat/example"
|
||||
}
|
||||
```
|
||||
|
||||
## Git clone with different destination folder
|
||||
|
||||
By default, the repository will be cloned into a folder matching the repository name. You can use the `folder_name` attribute to change the name of the destination folder to something else.
|
||||
|
||||
For example, this will clone into the `~/projects/coder/coder-dev` folder:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
folder_name = "coder-dev"
|
||||
base_dir = "~/projects/coder"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -36,4 +36,212 @@ describe("git-clone", async () => {
|
||||
"Cloning fake-url to ~/fake-url...",
|
||||
]);
|
||||
});
|
||||
|
||||
it("repo_dir should match repo name for https", async () => {
|
||||
const url = "https://github.com/coder/coder.git";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||
expect(state.outputs.folder_name.value).toEqual("coder");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
expect(state.outputs.web_url.value).toEqual(url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("repo_dir should match repo name for https without .git", async () => {
|
||||
const url = "https://github.com/coder/coder";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
expect(state.outputs.web_url.value).toEqual(url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("repo_dir should match repo name for ssh", async () => {
|
||||
const url = "git@github.com:coder/coder.git";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||
expect(state.outputs.git_provider.value).toEqual("");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
const https_url = "https://github.com/coder/coder.git";
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("repo_dir should match base_dir/folder_name", async () => {
|
||||
const url = "git@github.com:coder/coder.git";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
folder_name: "foo",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/foo");
|
||||
expect(state.outputs.folder_name.value).toEqual("foo");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
const https_url = "https://github.com/coder/coder.git";
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("branch_name should not include query string", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch?ref_type=heads",
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("~/repo-tests.log");
|
||||
expect(state.outputs.folder_name.value).toEqual("repo-tests.log");
|
||||
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||
});
|
||||
|
||||
it("branch_name should not include fragments", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch#name",
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||
});
|
||||
|
||||
it("gitlab url with branch should match", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch",
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||
expect(state.outputs.git_provider.value).toEqual("gitlab");
|
||||
const https_url = "https://gitlab.com/mike.brew/repo-tests.log";
|
||||
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||
});
|
||||
|
||||
it("github url with branch should match", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url: "https://github.com/michaelbrewer/repo-tests.log/tree/feat/branch",
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/repo-tests.log");
|
||||
expect(state.outputs.git_provider.value).toEqual("github");
|
||||
const https_url = "https://github.com/michaelbrewer/repo-tests.log";
|
||||
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("feat/branch");
|
||||
});
|
||||
|
||||
it("self-host git url with branch should match", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url: "https://git.example.com/example/project/-/tree/feat/example",
|
||||
git_providers: `
|
||||
{
|
||||
"https://git.example.com/" = {
|
||||
provider = "gitlab"
|
||||
}
|
||||
}`,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/project");
|
||||
expect(state.outputs.git_provider.value).toEqual("gitlab");
|
||||
const https_url = "https://git.example.com/example/project";
|
||||
expect(state.outputs.clone_url.value).toEqual(https_url);
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("feat/example");
|
||||
});
|
||||
|
||||
it("handle unsupported git provider configuration", async () => {
|
||||
const t = async () => {
|
||||
await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
url: "foo",
|
||||
git_providers: `
|
||||
{
|
||||
"https://git.example.com/" = {
|
||||
provider = "bitbucket"
|
||||
}
|
||||
}`,
|
||||
});
|
||||
};
|
||||
expect(t).toThrow('Allowed values for provider are "github" or "gitlab".');
|
||||
});
|
||||
|
||||
it("handle unknown git provider url", async () => {
|
||||
const url = "https://git.unknown.com/coder/coder";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/coder");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
expect(state.outputs.web_url.value).toEqual(url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("runs with github clone with switch to feat/branch", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
url: "https://github.com/michaelbrewer/repo-tests.log/tree/feat/branch",
|
||||
});
|
||||
const output = await executeScriptInContainer(state, "alpine/git");
|
||||
expect(output.exitCode).toBe(0);
|
||||
expect(output.stdout).toEqual([
|
||||
"Creating directory ~/repo-tests.log...",
|
||||
"Cloning https://github.com/michaelbrewer/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||
]);
|
||||
});
|
||||
|
||||
it("runs with gitlab clone with switch to feat/branch", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
url: "https://gitlab.com/mike.brew/repo-tests.log/-/tree/feat/branch",
|
||||
});
|
||||
const output = await executeScriptInContainer(state, "alpine/git");
|
||||
expect(output.exitCode).toBe(0);
|
||||
expect(output.stdout).toEqual([
|
||||
"Creating directory ~/repo-tests.log...",
|
||||
"Cloning https://gitlab.com/mike.brew/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||
]);
|
||||
});
|
||||
|
||||
it("runs with github clone with branch_name set to feat/branch", async () => {
|
||||
const url = "https://github.com/michaelbrewer/repo-tests.log";
|
||||
const branch_name = "feat/branch";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
url,
|
||||
branch_name,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("~/repo-tests.log");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
expect(state.outputs.web_url.value).toEqual(url);
|
||||
expect(state.outputs.branch_name.value).toEqual(branch_name);
|
||||
|
||||
const output = await executeScriptInContainer(state, "alpine/git");
|
||||
expect(output.exitCode).toBe(0);
|
||||
expect(output.stdout).toEqual([
|
||||
"Creating directory ~/repo-tests.log...",
|
||||
"Cloning https://github.com/michaelbrewer/repo-tests.log to ~/repo-tests.log on branch feat/branch...",
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -25,8 +25,56 @@ variable "agent_id" {
|
||||
type = string
|
||||
}
|
||||
|
||||
variable "git_providers" {
|
||||
type = map(object({
|
||||
provider = string
|
||||
}))
|
||||
description = "A mapping of URLs to their git provider."
|
||||
default = {
|
||||
"https://github.com/" = {
|
||||
provider = "github"
|
||||
},
|
||||
"https://gitlab.com/" = {
|
||||
provider = "gitlab"
|
||||
},
|
||||
}
|
||||
validation {
|
||||
error_message = "Allowed values for provider are \"github\" or \"gitlab\"."
|
||||
condition = alltrue([for provider in var.git_providers : contains(["github", "gitlab"], provider.provider)])
|
||||
}
|
||||
}
|
||||
|
||||
variable "branch_name" {
|
||||
description = "The branch name to clone. If not provided, the default branch will be cloned."
|
||||
type = string
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "folder_name" {
|
||||
description = "The destination folder to clone the repository into."
|
||||
type = string
|
||||
default = ""
|
||||
}
|
||||
|
||||
locals {
|
||||
clone_path = var.base_dir != "" ? join("/", [var.base_dir, replace(basename(var.url), ".git", "")]) : join("/", ["~", replace(basename(var.url), ".git", "")])
|
||||
# Remove query parameters and fragments from the URL
|
||||
url = replace(replace(var.url, "/\\?.*/", ""), "/#.*/", "")
|
||||
|
||||
# Find the git provider based on the URL and determine the tree path
|
||||
provider_key = try(one([for key in keys(var.git_providers) : key if startswith(local.url, key)]), null)
|
||||
provider = try(lookup(var.git_providers, local.provider_key).provider, "")
|
||||
tree_path = local.provider == "gitlab" ? "/-/tree/" : local.provider == "github" ? "/tree/" : ""
|
||||
|
||||
# Remove tree and branch name from the URL
|
||||
clone_url = var.branch_name == "" && local.tree_path != "" ? replace(local.url, "/${local.tree_path}.*/", "") : local.url
|
||||
# Extract the branch name from the URL
|
||||
branch_name = var.branch_name == "" && local.tree_path != "" ? replace(replace(local.url, local.clone_url, ""), "/.*${local.tree_path}/", "") : var.branch_name
|
||||
# Extract the folder name from the URL
|
||||
folder_name = var.folder_name == "" ? replace(basename(local.clone_url), ".git", "") : var.folder_name
|
||||
# Construct the path to clone the repository
|
||||
clone_path = var.base_dir != "" ? join("/", [var.base_dir, local.folder_name]) : join("/", ["~", local.folder_name])
|
||||
# Construct the web URL
|
||||
web_url = startswith(local.clone_url, "git@") ? replace(replace(local.clone_url, ":", "/"), "git@", "https://") : local.clone_url
|
||||
}
|
||||
|
||||
output "repo_dir" {
|
||||
@@ -34,11 +82,37 @@ output "repo_dir" {
|
||||
description = "Full path of cloned repo directory"
|
||||
}
|
||||
|
||||
output "git_provider" {
|
||||
value = local.provider
|
||||
description = "The git provider of the repository"
|
||||
}
|
||||
|
||||
output "folder_name" {
|
||||
value = local.folder_name
|
||||
description = "The name of the folder that will be created"
|
||||
}
|
||||
|
||||
output "clone_url" {
|
||||
value = local.clone_url
|
||||
description = "The exact Git repository URL that will be cloned"
|
||||
}
|
||||
|
||||
output "web_url" {
|
||||
value = local.web_url
|
||||
description = "Git https repository URL (may be invalid for unsupported providers)"
|
||||
}
|
||||
|
||||
output "branch_name" {
|
||||
value = local.branch_name
|
||||
description = "Git branch name (may be empty)"
|
||||
}
|
||||
|
||||
resource "coder_script" "git_clone" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
CLONE_PATH = local.clone_path
|
||||
REPO_URL : var.url,
|
||||
CLONE_PATH = local.clone_path,
|
||||
REPO_URL : local.clone_url,
|
||||
BRANCH_NAME : local.branch_name,
|
||||
})
|
||||
display_name = "Git Clone"
|
||||
icon = "/icon/git.svg"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
REPO_URL="${REPO_URL}"
|
||||
CLONE_PATH="${CLONE_PATH}"
|
||||
BRANCH_NAME="${BRANCH_NAME}"
|
||||
# Expand home if it's specified!
|
||||
CLONE_PATH="$${CLONE_PATH/#\~/$${HOME}}"
|
||||
|
||||
@@ -33,8 +34,13 @@ fi
|
||||
# Check if the directory is empty
|
||||
# and if it is, clone the repo, otherwise skip cloning
|
||||
if [ -z "$(ls -A "$CLONE_PATH")" ]; then
|
||||
echo "Cloning $REPO_URL to $CLONE_PATH..."
|
||||
git clone "$REPO_URL" "$CLONE_PATH"
|
||||
if [ -z "$BRANCH_NAME" ]; then
|
||||
echo "Cloning $REPO_URL to $CLONE_PATH..."
|
||||
git clone "$REPO_URL" "$CLONE_PATH"
|
||||
else
|
||||
echo "Cloning $REPO_URL to $CLONE_PATH on branch $BRANCH_NAME..."
|
||||
git clone "$REPO_URL" -b "$BRANCH_NAME" "$CLONE_PATH"
|
||||
fi
|
||||
else
|
||||
echo "$CLONE_PATH already exists and isn't empty, skipping clone!"
|
||||
exit 0
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
display_name: Git commit signing
|
||||
description: Configures Git to sign commits using your Coder SSH key
|
||||
icon: ../.icons/git.svg
|
||||
maintainer_github: phorcys420
|
||||
verified: false
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [helper, git]
|
||||
---
|
||||
|
||||
@@ -19,7 +19,7 @@ This module has a chance of conflicting with the user's dotfiles / the personali
|
||||
```tf
|
||||
module "git-commit-signing" {
|
||||
source = "registry.coder.com/modules/git-commit-signing/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.11"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -16,7 +16,7 @@ variable "agent_id" {
|
||||
|
||||
resource "coder_script" "git-commit-signing" {
|
||||
display_name = "Git commit signing"
|
||||
icon = "https://raw.githubusercontent.com/coder/modules/main/.icons/git.svg"
|
||||
icon = "/icon/git.svg"
|
||||
|
||||
script = file("${path.module}/run.sh")
|
||||
run_on_start = true
|
||||
|
||||
@@ -21,7 +21,8 @@ echo "Downloading SSH key"
|
||||
|
||||
ssh_key=$(curl --request GET \
|
||||
--url "${CODER_AGENT_URL}api/v2/workspaceagents/me/gitsshkey" \
|
||||
--header "Coder-Session-Token: ${CODER_AGENT_TOKEN}")
|
||||
--header "Coder-Session-Token: ${CODER_AGENT_TOKEN}" \
|
||||
--silent --show-error)
|
||||
|
||||
jq --raw-output ".public_key" > ~/.ssh/git-commit-signing/coder.pub << EOF
|
||||
$ssh_key
|
||||
|
||||
@@ -14,7 +14,7 @@ Runs a script that updates git credentials in the workspace to match the user's
|
||||
```tf
|
||||
module "git-config" {
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
@@ -28,7 +28,7 @@ TODO: Add screenshot
|
||||
```tf
|
||||
module "git-config" {
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
allow_email_change = true
|
||||
}
|
||||
@@ -41,7 +41,7 @@ TODO: Add screenshot
|
||||
```tf
|
||||
module "git-config" {
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
allow_username_change = false
|
||||
allow_email_change = false
|
||||
|
||||
git-config/main.test.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("git-config", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("can run apply allow_username_change and allow_email_change disabled", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
allow_username_change: "false",
|
||||
allow_email_change: "false",
|
||||
});
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(6);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("can run apply allow_email_change enabled", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
allow_email_change: "true",
|
||||
});
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(8);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "user_email" },
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("can run apply allow_email_change enabled", async () => {
|
||||
const state = await runTerraformApply(
|
||||
import.meta.dir,
|
||||
{
|
||||
agent_id: "foo",
|
||||
allow_username_change: "false",
|
||||
allow_email_change: "false",
|
||||
},
|
||||
{ CODER_WORKSPACE_OWNER_EMAIL: "foo@email.com" },
|
||||
);
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(6);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter for both fields", async () => {
|
||||
const order = 20;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
allow_username_change: "true",
|
||||
allow_email_change: "true",
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(8);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "user_email" },
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
// user_email order is the same as the order
|
||||
expect(resources[0].instances[0].attributes.order).toBe(order);
|
||||
// username order is incremented by 1
|
||||
// @ts-ignore: Object is possibly 'null'.
|
||||
expect(resources[1].instances[0]?.attributes.order).toBe(order + 1);
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter for just username", async () => {
|
||||
const order = 30;
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
allow_email_change: "false",
|
||||
allow_username_change: "true",
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(7);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
// user_email was not created
|
||||
// username order is incremented by 1
|
||||
expect(resources[0].instances[0].attributes.order).toBe(order + 1);
|
||||
});
|
||||
});
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.13"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -26,14 +26,21 @@ variable "allow_email_change" {
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
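A usage sketch of the new `coder_parameter_order` input. Per the test expectations added in this diff, the email parameter takes the given order and the username parameter takes order + 1; the values below are illustrative.

```tf
module "git-config" {
  source                = "registry.coder.com/modules/git-config/coder"
  version               = "1.0.15"
  agent_id              = coder_agent.example.id
  allow_username_change = true
  allow_email_change    = true

  # user_email is rendered at position 20 and username at 21.
  coder_parameter_order = 20
}
```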
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
data "coder_parameter" "user_email" {
|
||||
count = var.allow_email_change ? 1 : 0
|
||||
name = "user_email"
|
||||
type = "string"
|
||||
default = ""
|
||||
order = var.coder_parameter_order != null ? var.coder_parameter_order + 0 : null
|
||||
description = "Git user.email to be used for commits. Leave empty to default to Coder user's email."
|
||||
display_name = "Git config user.email"
|
||||
mutable = true
|
||||
@@ -44,6 +51,7 @@ data "coder_parameter" "username" {
|
||||
name = "username"
|
||||
type = "string"
|
||||
default = ""
|
||||
order = var.coder_parameter_order != null ? var.coder_parameter_order + 1 : null
|
||||
description = "Git user.name to be used for commits. Leave empty to default to Coder user's Full Name."
|
||||
display_name = "Full Name for Git config"
|
||||
mutable = true
|
||||
@@ -52,23 +60,25 @@ data "coder_parameter" "username" {
|
||||
resource "coder_env" "git_author_name" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_AUTHOR_NAME"
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||
}
|
||||
|
||||
resource "coder_env" "git_commmiter_name" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_COMMITTER_NAME"
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||
}
|
||||
|
||||
resource "coder_env" "git_author_email" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_AUTHOR_EMAIL"
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
|
||||
count = data.coder_workspace_owner.me.email != "" ? 1 : 0
|
||||
}
|
||||
|
||||
resource "coder_env" "git_commmiter_email" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_COMMITTER_EMAIL"
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
|
||||
count = data.coder_workspace_owner.me.email != "" ? 1 : 0
|
||||
}
|
||||
|
||||
github-upload-public-key/README.md (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
---
|
||||
display_name: Github Upload Public Key
|
||||
description: Automates uploading the Coder public key to Github so users don't have to.
|
||||
icon: ../.icons/github.svg
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [helper, git]
|
||||
---
|
||||
|
||||
# github-upload-public-key
|
||||
|
||||
Templates that utilize Github External Auth can automatically ensure that the Coder public key is uploaded to Github so that users can clone repositories without needing to upload the public key themselves.
|
||||
|
||||
```tf
|
||||
module "github-upload-public-key" {
|
||||
source = "registry.coder.com/modules/github-upload-public-key/coder"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
# Requirements
|
||||
|
||||
This module requires `curl` and `jq` to be installed inside your workspace.
|
||||
|
||||
Github External Auth must be enabled in the workspace for this module to work. The Github app that is configured for external auth must have both read and write permissions to "Git SSH keys" in order to upload the public key. Additionally, a Coder admin must also have the `admin:public_key` scope added to the external auth configuration of the Coder deployment. For example:
|
||||
|
||||
```
|
||||
CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
|
||||
CODER_EXTERNAL_AUTH_0_TYPE=github
|
||||
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
|
||||
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
|
||||
CODER_EXTERNAL_AUTH_0_SCOPES="repo,workflow,admin:public_key"
|
||||
```
|
||||
|
||||
Note that if no scopes are provided, the defaults are `repo,workflow`. If the module fails to complete after the external auth configuration is updated, instruct users of the module to "Unlink" and then "Link" their Github account in the External Auth user settings page to pick up the new scopes.
|
||||
|
||||
# Example
|
||||
|
||||
Using a Coder GitHub external auth with a non-default ID (the default is `github`):
|
||||
|
||||
```tf
|
||||
data "coder_external_auth" "github" {
|
||||
id = "myauthid"
|
||||
}
|
||||
|
||||
module "github-upload-public-key" {
|
||||
source = "registry.coder.com/modules/github-upload-public-key/coder"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
external_auth_id = data.coder_external_auth.github.id
|
||||
}
|
||||
```
|
||||
github-upload-public-key/main.test.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
import { type Server, serve } from "bun";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
execContainer,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
writeCoder,
|
||||
} from "../test";
|
||||
|
||||
describe("github-upload-public-key", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("creates new key if one does not exist", async () => {
|
||||
const { instance, id, server } = await setupContainer();
|
||||
await writeCoder(id, "echo foo");
|
||||
|
||||
const url = server.url.toString().slice(0, -1);
|
||||
const exec = await execContainer(id, [
|
||||
"env",
|
||||
`CODER_ACCESS_URL=${url}`,
|
||||
`GITHUB_API_URL=${url}`,
|
||||
"CODER_OWNER_SESSION_TOKEN=foo",
|
||||
"CODER_EXTERNAL_AUTH_ID=github",
|
||||
"bash",
|
||||
"-c",
|
||||
instance.script,
|
||||
]);
|
||||
expect(exec.stdout).toContain(
|
||||
"Your Coder public key has been added to GitHub!",
|
||||
);
|
||||
expect(exec.exitCode).toBe(0);
|
||||
// we need to increase timeout to pull the container
|
||||
}, 15000);
|
||||
|
||||
it("does nothing if one already exists", async () => {
|
||||
const { instance, id, server } = await setupContainer();
|
||||
// use keyword to make the server return an existing key
|
||||
await writeCoder(id, "echo findkey");
|
||||
|
||||
const url = server.url.toString().slice(0, -1);
|
||||
const exec = await execContainer(id, [
|
||||
"env",
|
||||
`CODER_ACCESS_URL=${url}`,
|
||||
`GITHUB_API_URL=${url}`,
|
||||
"CODER_OWNER_SESSION_TOKEN=foo",
|
||||
"CODER_EXTERNAL_AUTH_ID=github",
|
||||
"bash",
|
||||
"-c",
|
||||
instance.script,
|
||||
]);
|
||||
expect(exec.stdout).toContain(
|
||||
"Your Coder public key is already on GitHub!",
|
||||
);
|
||||
expect(exec.exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
const setupContainer = async (
|
||||
image = "lorello/alpine-bash",
|
||||
vars: Record<string, string> = {},
|
||||
) => {
|
||||
const server = await setupServer();
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
...vars,
|
||||
});
|
||||
const instance = findResourceInstance(state, "coder_script");
|
||||
const id = await runContainer(image);
|
||||
return { id, instance, server };
|
||||
};
|
||||
|
||||
const setupServer = async (): Promise<Server> => {
|
||||
let url: URL;
|
||||
const fakeSlackHost = serve({
|
||||
fetch: (req) => {
|
||||
url = new URL(req.url);
|
||||
if (url.pathname === "/api/v2/users/me/gitsshkey") {
|
||||
return createJSONResponse({
|
||||
public_key: "exists",
|
||||
});
|
||||
}
|
||||
|
||||
if (url.pathname === "/user/keys") {
|
||||
if (req.method === "POST") {
|
||||
return createJSONResponse(
|
||||
{
|
||||
key: "created",
|
||||
},
|
||||
201,
|
||||
);
|
||||
}
|
||||
|
||||
// case: key already exists
|
||||
if (req.headers.get("Authorization") === "Bearer findkey") {
|
||||
return createJSONResponse([
|
||||
{
|
||||
key: "foo",
|
||||
},
|
||||
{
|
||||
key: "exists",
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
// case: key does not exist
|
||||
return createJSONResponse([
|
||||
{
|
||||
key: "foo",
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
return createJSONResponse(
|
||||
{
|
||||
error: "not_found",
|
||||
},
|
||||
404,
|
||||
);
|
||||
},
|
||||
port: 0,
|
||||
});
|
||||
|
||||
return fakeSlackHost;
|
||||
};
|
||||
github-upload-public-key/main.tf (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "external_auth_id" {
|
||||
type = string
|
||||
description = "The ID of the GitHub external auth."
|
||||
default = "github"
|
||||
}
|
||||
|
||||
variable "github_api_url" {
|
||||
type = string
|
||||
description = "The URL of the GitHub instance."
|
||||
default = "https://api.github.com"
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_script" "github_upload_public_key" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
CODER_OWNER_SESSION_TOKEN : data.coder_workspace_owner.me.session_token,
|
||||
CODER_ACCESS_URL : data.coder_workspace.me.access_url,
|
||||
CODER_EXTERNAL_AUTH_ID : var.external_auth_id,
|
||||
GITHUB_API_URL : var.github_api_url,
|
||||
})
|
||||
display_name = "Github Upload Public Key"
|
||||
icon = "/icon/github.svg"
|
||||
run_on_start = true
|
||||
}
|
||||
github-upload-public-key/run.sh (new executable file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
if [ -z "$CODER_ACCESS_URL" ]; then
|
||||
if [ -z "${CODER_ACCESS_URL}" ]; then
|
||||
echo "CODER_ACCESS_URL is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_ACCESS_URL=${CODER_ACCESS_URL}
|
||||
fi
|
||||
|
||||
if [ -z "$CODER_OWNER_SESSION_TOKEN" ]; then
|
||||
if [ -z "${CODER_OWNER_SESSION_TOKEN}" ]; then
|
||||
echo "CODER_OWNER_SESSION_TOKEN is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_OWNER_SESSION_TOKEN=${CODER_OWNER_SESSION_TOKEN}
|
||||
fi
|
||||
|
||||
if [ -z "$CODER_EXTERNAL_AUTH_ID" ]; then
|
||||
if [ -z "${CODER_EXTERNAL_AUTH_ID}" ]; then
|
||||
echo "CODER_EXTERNAL_AUTH_ID is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_EXTERNAL_AUTH_ID=${CODER_EXTERNAL_AUTH_ID}
|
||||
fi
|
||||
|
||||
if [ -z "$GITHUB_API_URL" ]; then
|
||||
if [ -z "${GITHUB_API_URL}" ]; then
|
||||
echo "GITHUB_API_URL is empty!"
|
||||
exit 1
|
||||
fi
|
||||
GITHUB_API_URL=${GITHUB_API_URL}
|
||||
fi
|
||||
|
||||
echo "Fetching GitHub token..."
|
||||
GITHUB_TOKEN=$(coder external-auth access-token $CODER_EXTERNAL_AUTH_ID)
|
||||
if [ $? -ne 0 ]; then
|
||||
printf "Authenticate with Github to automatically upload Coder public key:\n$GITHUB_TOKEN\n"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Fetching public key from Coder..."
|
||||
PUBLIC_KEY_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-w "\n%%{http_code}" \
|
||||
-H 'accept: application/json' \
|
||||
-H "cookie: coder_session_token=$CODER_OWNER_SESSION_TOKEN" \
|
||||
"$CODER_ACCESS_URL/api/v2/users/me/gitsshkey"
|
||||
)
|
||||
PUBLIC_KEY_RESPONSE_STATUS=$(tail -n1 <<< "$PUBLIC_KEY_RESPONSE")
|
||||
PUBLIC_KEY_BODY=$(sed \$d <<< "$PUBLIC_KEY_RESPONSE")
|
||||
|
||||
if [ "$PUBLIC_KEY_RESPONSE_STATUS" -ne 200 ]; then
|
||||
echo "Failed to fetch Coder public SSH key with status code $PUBLIC_KEY_RESPONSE_STATUS!"
|
||||
echo "$PUBLIC_KEY_BODY"
|
||||
exit 1
|
||||
fi
|
||||
PUBLIC_KEY=$(jq -r '.public_key' <<< "$PUBLIC_KEY_BODY")
|
||||
if [ -z "$PUBLIC_KEY" ]; then
|
||||
echo "No Coder public SSH key found!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Fetching public keys from GitHub..."
|
||||
GITHUB_KEYS_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-w "\n%%{http_code}" \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
$GITHUB_API_URL/user/keys
|
||||
)
|
||||
GITHUB_KEYS_RESPONSE_STATUS=$(tail -n1 <<< "$GITHUB_KEYS_RESPONSE")
|
||||
GITHUB_KEYS_RESPONSE_BODY=$(sed \$d <<< "$GITHUB_KEYS_RESPONSE")
|
||||
|
||||
if [ "$GITHUB_KEYS_RESPONSE_STATUS" -ne 200 ]; then
|
||||
echo "Failed to fetch Coder public SSH key with status code $GITHUB_KEYS_RESPONSE_STATUS!"
|
||||
echo "$GITHUB_KEYS_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
GITHUB_MATCH=$(jq -r --arg PUBLIC_KEY "$PUBLIC_KEY" '.[] | select(.key == $PUBLIC_KEY) | .key' <<< "$GITHUB_KEYS_RESPONSE_BODY")
|
||||
|
||||
if [ "$PUBLIC_KEY" = "$GITHUB_MATCH" ]; then
|
||||
echo "Your Coder public key is already on GitHub!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Your Coder public key is not in GitHub. Adding it now..."
|
||||
CODER_PUBLIC_KEY_NAME="$CODER_ACCESS_URL Workspaces"
|
||||
UPLOAD_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-X POST \
|
||||
-w "\n%%{http_code}" \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
$GITHUB_API_URL/user/keys \
|
||||
-d "{\"title\":\"$CODER_PUBLIC_KEY_NAME\",\"key\":\"$PUBLIC_KEY\"}"
|
||||
)
|
||||
UPLOAD_RESPONSE_STATUS=$(tail -n1 <<< "$UPLOAD_RESPONSE")
|
||||
UPLOAD_RESPONSE_BODY=$(sed \$d <<< "$UPLOAD_RESPONSE")
|
||||
|
||||
if [ "$UPLOAD_RESPONSE_STATUS" -ne 201 ]; then
|
||||
echo "Failed to upload Coder public SSH key with status code $UPLOAD_RESPONSE_STATUS!"
|
||||
echo "$UPLOAD_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Your Coder public key has been added to GitHub!"
|
||||
@@ -14,16 +14,25 @@ This module lets you fetch all or selective secrets from a [HCP Vault Secrets](h
|
||||
|
||||
```tf
|
||||
module "vault" {
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.3"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.7"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
project_id = "aaa-bbb-ccc"
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
To configure the HCP Vault Secrets module, you must create an HCP Service Principal from the HCP Vault Secrets app in the HCP console. This will give you the `HCP_CLIENT_ID` and `HCP_CLIENT_SECRET` that you need to authenticate with HCP Vault Secrets. See the [HCP Vault Secrets documentation](https://developer.hashicorp.com/hcp/docs/vault-secrets) for more information.
|
||||
To configure the HCP Vault Secrets module, follow these steps:
|
||||
|
||||
1. [Create secrets in HCP Vault Secrets](https://developer.hashicorp.com/vault/tutorials/hcp-vault-secrets-get-started/hcp-vault-secrets-create-secret)
|
||||
2. Create an HCP Service Principal from the HCP Vault Secrets app in the HCP console. This will give you the `HCP_CLIENT_ID` and `HCP_CLIENT_SECRET` that you need to authenticate with HCP Vault Secrets.
|
||||

|
||||
3. Set the `HCP_CLIENT_ID` and `HCP_CLIENT_SECRET` variables on the Coder provisioner (recommended) or supply them as inputs to the module (see the sketch after this list).
|
||||
4. Set the `project_id`. This is the ID of the project where the HCP Vault Secrets app is running.
|
||||
|
||||
> See the [HCP Vault Secrets documentation](https://developer.hashicorp.com/hcp/docs/vault-secrets) for more information.
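As a sketch of the second half of step 3 (supplying the credentials as module inputs instead of provisioner environment variables), hypothetical sensitive Terraform variables can be threaded through; the variable names here are illustrative, not part of the module.

```tf
variable "hcp_client_id" {
  type      = string
  sensitive = true
}

variable "hcp_client_secret" {
  type      = string
  sensitive = true
}

module "vault" {
  source        = "registry.coder.com/modules/hcp-vault-secrets/coder"
  version       = "1.0.7"
  agent_id      = coder_agent.example.id
  app_name      = "demo-app"
  project_id    = "aaa-bbb-ccc"
  client_id     = var.hcp_client_id
  client_secret = var.hcp_client_secret
}
```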
|
||||
|
||||
## Fetch All Secrets
|
||||
|
||||
@@ -31,10 +40,11 @@ To fetch all secrets from the HCP Vault Secrets app, skip the `secrets` input.
|
||||
|
||||
```tf
|
||||
module "vault" {
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.3"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.7"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
project_id = "aaa-bbb-ccc"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -44,11 +54,12 @@ To fetch selective secrets from the HCP Vault Secrets app, set the `secrets` inp
|
||||
|
||||
```tf
|
||||
module "vault" {
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.3"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
secrets = ["MY_SECRET_1", "MY_SECRET_2"]
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.7"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
project_id = "aaa-bbb-ccc"
|
||||
secrets = ["MY_SECRET_1", "MY_SECRET_2"]
|
||||
}
|
||||
```
|
||||
|
||||
@@ -59,9 +70,10 @@ Set `client_id` and `client_secret` as module inputs.
|
||||
```tf
|
||||
module "vault" {
|
||||
source = "registry.coder.com/modules/hcp-vault-secrets/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.7"
|
||||
agent_id = coder_agent.example.id
|
||||
app_name = "demo-app"
|
||||
project_id = "aaa-bbb-ccc"
|
||||
client_id = "HCP_CLIENT_ID"
|
||||
client_secret = "HCP_CLIENT_SECRET"
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ terraform {
|
||||
provider "hcp" {
|
||||
client_id = var.client_id
|
||||
client_secret = var.client_secret
|
||||
project_id = var.project_id
|
||||
}
|
||||
|
||||
provider "coder" {}
|
||||
@@ -25,6 +26,11 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "project_id" {
|
||||
type = string
|
||||
description = "The ID of the HCP project."
|
||||
}
|
||||
|
||||
variable "client_id" {
|
||||
type = string
|
||||
description = <<-EOF
|
||||
|
||||
@@ -14,12 +14,12 @@ This module adds a JetBrains Gateway Button to open any workspace with a single
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS", "IU", "PY", "PS", "CL", "RM"]
|
||||
default = "PY"
|
||||
jetbrains_ides = ["CL", "GO", "IU", "PY", "WS"]
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -27,12 +27,12 @@ module "jetbrains_gateway" {
|
||||
|
||||
## Examples
|
||||
|
||||
### Add GoLand and WebStorm with the default set to GoLand
|
||||
### Add GoLand and WebStorm as options with the default set to GoLand
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
@@ -41,6 +41,80 @@ module "jetbrains_gateway" {
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest version of each IDE
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = true
|
||||
}
|
||||
```
|
||||
|
||||
### Use fixed versions set by `jetbrains_ide_versions`
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = false
|
||||
jetbrains_ide_versions = {
|
||||
"IU" = {
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest EAP version
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
default = "GO"
|
||||
latest = true
|
||||
channel = "eap"
|
||||
}
|
||||
```
|
||||
|
||||
### Custom base link
|
||||
|
||||
Because the `ide_download_link` parameter in the `jetbrains-gateway://` URL takes the highest priority within IDEA, the pre-configured download address is overridden when using [IDEA's offline mode](https://www.jetbrains.com/help/idea/fully-offline-mode.html). Therefore, configure the `download_base_link` parameter of the `jetbrains_gateway` module to change the value of `ide_download_link`.
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.25"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
releases_base_link = "https://releases.internal.site/"
|
||||
download_base_link = "https://download.internal.site/"
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||
## Supported IDEs
|
||||
|
||||
This module and JetBrains Gateway support the following JetBrains IDEs:
|
||||
@@ -52,3 +126,4 @@ This module and JetBrains Gateway support the following JetBrains IDEs:
|
||||
- PhpStorm (`PS`)
|
||||
- CLion (`CL`)
|
||||
- RubyMine (`RM`)
|
||||
- Rider (`RD`)
|
||||
|
||||
@@ -11,18 +11,36 @@ describe("jetbrains-gateway", async () => {
|
||||
await testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
agent_name: "foo",
|
||||
folder: "/baz/",
|
||||
folder: "/home/foo",
|
||||
});
|
||||
|
||||
it("should create a link with the default values", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
// These are all required.
|
||||
agent_id: "foo",
|
||||
agent_name: "foo",
|
||||
folder: "/home/coder",
|
||||
});
|
||||
expect(state.outputs.url.value).toBe(
|
||||
"jetbrains-gateway://connect#type=coder&workspace=default&owner=default&agent=foo&folder=/home/coder&url=https://mydeployment.coder.com&token=$SESSION_TOKEN&ide_product_code=IU&ide_build_number=243.21565.193&ide_download_link=https://download.jetbrains.com/idea/ideaIU-2024.3.tar.gz",
|
||||
);
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "gateway",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBeNull();
|
||||
});
|
||||
|
||||
it("default to first ide", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
agent_name: "foo",
|
||||
folder: "/baz/",
|
||||
folder: "/home/foo",
|
||||
jetbrains_ides: '["IU", "GO", "PY"]',
|
||||
});
|
||||
expect(state.outputs.jetbrains_ides.value).toBe(
|
||||
'["IU","232.10203.10","https://download.jetbrains.com/idea/ideaIU-2023.2.4.tar.gz"]',
|
||||
);
|
||||
expect(state.outputs.identifier.value).toBe("IU");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,11 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.11"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
http = {
|
||||
source = "hashicorp/http"
|
||||
version = ">= 3.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,6 +18,12 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug for the coder_app. Allows resuing the module with the same template."
|
||||
default = "gateway"
|
||||
}
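A usage sketch of the new `slug` and `order` inputs added in this diff; all values are illustrative.

```tf
module "jetbrains_gateway" {
  source     = "registry.coder.com/modules/jetbrains-gateway/coder"
  version    = "1.0.25"
  agent_id   = coder_agent.example.id
  agent_name = "example"
  folder     = "/home/coder/example"
  default    = "GO"
  slug       = "backend-gateway" # overrides the default "gateway" slug for the coder_app
  order      = 1                 # position of the app button in the UI
}
```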
|
||||
|
||||
variable "agent_name" {
|
||||
type = string
|
||||
description = "Agent name."
|
||||
@@ -22,6 +32,10 @@ variable "agent_name" {
|
||||
variable "folder" {
|
||||
type = string
|
||||
description = "The directory to open in the IDE. e.g. /home/coder/project"
|
||||
validation {
|
||||
condition = can(regex("^(?:/[^/]+)+$", var.folder))
|
||||
error_message = "The folder must be a full path and must not start with a ~."
|
||||
}
|
||||
}
|
||||
|
||||
variable "default" {
|
||||
@@ -30,8 +44,32 @@ variable "default" {
|
||||
description = "Default IDE"
|
||||
}
|
||||
|
||||
locals {
|
||||
supported_ides = ["IU", "PS", "WS", "PY", "CL", "GO", "RM"]
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "latest" {
|
||||
type = bool
|
||||
description = "Whether to fetch the latest version of the IDE."
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "channel" {
|
||||
type = string
|
||||
description = "JetBrains IDE release channel. Valid values are release and eap."
|
||||
default = "release"
|
||||
validation {
|
||||
condition = can(regex("^(release|eap)$", var.channel))
|
||||
error_message = "The channel must be either release or eap."
|
||||
}
|
||||
}
|
||||
|
||||
variable "jetbrains_ide_versions" {
|
||||
@@ -42,56 +80,59 @@ variable "jetbrains_ide_versions" {
|
||||
description = "The set of versions for each jetbrains IDE"
|
||||
default = {
|
||||
"IU" = {
|
||||
build_number = "232.10203.10"
|
||||
version = "2023.2.4"
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PS" = {
|
||||
build_number = "232.10072.32"
|
||||
version = "2023.2.3"
|
||||
build_number = "243.21565.202"
|
||||
version = "2024.3"
|
||||
}
|
||||
"WS" = {
|
||||
build_number = "232.10203.14"
|
||||
version = "2023.2.4"
|
||||
build_number = "243.21565.180"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "232.10203.26"
|
||||
version = "2023.2.4"
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
"CL" = {
|
||||
build_number = "232.9921.42"
|
||||
version = "2023.2.2"
|
||||
build_number = "243.21565.238"
|
||||
version = "2024.1"
|
||||
}
|
||||
"GO" = {
|
||||
build_number = "232.10203.20"
|
||||
version = "2023.2.4"
|
||||
build_number = "243.21565.208"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RM" = {
|
||||
build_number = "232.10203.15"
|
||||
version = "2023.2.4"
|
||||
build_number = "243.21565.197"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RD" = {
|
||||
build_number = "243.21565.191"
|
||||
version = "2024.3"
|
||||
}
|
||||
|
||||
}
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in var.jetbrains_ide_versions : contains(local.supported_ides, code)
|
||||
for code in keys(var.jetbrains_ide_versions) : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ide_versions must contain a map of valid product codes. Valid product codes are ${join(",", local.supported_ides)}."
|
||||
error_message = "The jetbrains_ide_versions must contain a map of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"])}."
|
||||
}
|
||||
}
|
||||
|
||||
variable "jetbrains_ides" {
|
||||
type = list(string)
|
||||
description = "The list of IDE product codes."
|
||||
default = local.supported_ides
|
||||
default = ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"]
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in var.jetbrains_ides : contains(local.supported_ides, code)
|
||||
for code in var.jetbrains_ides : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ides must be a list of valid product codes. Valid product codes are ${join(",", local.supported_ides)}."
|
||||
error_message = "The jetbrains_ides must be a list of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"])}."
|
||||
}
|
||||
# check if the list is empty
|
||||
validation {
|
||||
@@ -105,76 +146,143 @@ variable "jetbrains_ides" {
|
||||
}
|
||||
}
|
||||
|
||||
locals {
|
||||
jetbrains_ides = {
|
||||
"GO" = {
|
||||
icon = "/icon/goland.svg",
|
||||
name = "GoLand",
|
||||
value = jsonencode(["GO", var.jetbrains_ide_versions["GO"].build_number, "https://download.jetbrains.com/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"])
|
||||
},
|
||||
"WS" = {
|
||||
icon = "/icon/webstorm.svg",
|
||||
name = "WebStorm",
|
||||
value = jsonencode(["WS", var.jetbrains_ide_versions["WS"].build_number, "https://download.jetbrains.com/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"])
|
||||
},
|
||||
"IU" = {
|
||||
icon = "/icon/intellij.svg",
|
||||
name = "IntelliJ IDEA Ultimate",
|
||||
value = jsonencode(["IU", var.jetbrains_ide_versions["IU"].build_number, "https://download.jetbrains.com/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"])
|
||||
},
|
||||
"PY" = {
|
||||
icon = "/icon/pycharm.svg",
|
||||
name = "PyCharm Professional",
|
||||
value = jsonencode(["PY", var.jetbrains_ide_versions["PY"].build_number, "https://download.jetbrains.com/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"])
|
||||
},
|
||||
"CL" = {
|
||||
icon = "/icon/clion.svg",
|
||||
name = "CLion",
|
||||
value = jsonencode(["CL", var.jetbrains_ide_versions["CL"].build_number, "https://download.jetbrains.com/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"])
|
||||
},
|
||||
"PS" = {
|
||||
icon = "/icon/phpstorm.svg",
|
||||
name = "PhpStorm",
|
||||
value = jsonencode(["PS", var.jetbrains_ide_versions["PS"].build_number, "https://download.jetbrains.com/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"])
|
||||
},
|
||||
"RM" = {
|
||||
icon = "/icon/rubymine.svg",
|
||||
name = "RubyMine",
|
||||
value = jsonencode(["RM", var.jetbrains_ide_versions["RM"].build_number, "https://download.jetbrains.com/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"])
|
||||
}
|
||||
variable "releases_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://data.services.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.releases_base_link))
|
||||
error_message = "The releases_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
variable "download_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://download.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.download_base_link))
|
||||
error_message = "The download_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
data "http" "jetbrains_ide_versions" {
|
||||
for_each = var.latest ? toset(var.jetbrains_ides) : toset([])
|
||||
url = "${var.releases_base_link}/products/releases?code=${each.key}&latest=true&type=${var.channel}"
|
||||
}
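A hypothetical sanity-check output, assuming the defaults above (`releases_base_link` and `channel = "release"`); it re-evaluates the same interpolation for a single product code to show the URL being queried when `latest = true`.

```tf
output "goland_releases_url_example" {
  # Resolves to:
  # https://data.services.jetbrains.com/products/releases?code=GO&latest=true&type=release
  value = "${var.releases_base_link}/products/releases?code=GO&latest=true&type=${var.channel}"
}
```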
|
||||
|
||||
locals {
|
||||
jetbrains_ides = {
|
||||
"GO" = {
|
||||
icon = "/icon/goland.svg",
|
||||
name = "GoLand",
|
||||
identifier = "GO",
|
||||
build_number = var.jetbrains_ide_versions["GO"].build_number,
|
||||
download_link = "${var.download_base_link}/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["GO"].version
|
||||
},
|
||||
"WS" = {
|
||||
icon = "/icon/webstorm.svg",
|
||||
name = "WebStorm",
|
||||
identifier = "WS",
|
||||
build_number = var.jetbrains_ide_versions["WS"].build_number,
|
||||
download_link = "${var.download_base_link}/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["WS"].version
|
||||
},
|
||||
"IU" = {
|
||||
icon = "/icon/intellij.svg",
|
||||
name = "IntelliJ IDEA Ultimate",
|
||||
identifier = "IU",
|
||||
build_number = var.jetbrains_ide_versions["IU"].build_number,
|
||||
download_link = "${var.download_base_link}/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["IU"].version
|
||||
},
|
||||
"PY" = {
|
||||
icon = "/icon/pycharm.svg",
|
||||
name = "PyCharm Professional",
|
||||
identifier = "PY",
|
||||
build_number = var.jetbrains_ide_versions["PY"].build_number,
|
||||
download_link = "${var.download_base_link}/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PY"].version
|
||||
},
|
||||
"CL" = {
|
||||
icon = "/icon/clion.svg",
|
||||
name = "CLion",
|
||||
identifier = "CL",
|
||||
build_number = var.jetbrains_ide_versions["CL"].build_number,
|
||||
download_link = "${var.download_base_link}/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["CL"].version
|
||||
},
|
||||
"PS" = {
|
||||
icon = "/icon/phpstorm.svg",
|
||||
name = "PhpStorm",
|
||||
identifier = "PS",
|
||||
build_number = var.jetbrains_ide_versions["PS"].build_number,
|
||||
download_link = "${var.download_base_link}/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PS"].version
|
||||
},
|
||||
"RM" = {
|
||||
icon = "/icon/rubymine.svg",
|
||||
name = "RubyMine",
|
||||
identifier = "RM",
|
||||
build_number = var.jetbrains_ide_versions["RM"].build_number,
|
||||
download_link = "${var.download_base_link}/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RM"].version
|
||||
}
|
||||
"RD" = {
|
||||
icon = "/icon/rider.svg",
|
||||
name = "Rider",
|
||||
identifier = "RD",
|
||||
build_number = var.jetbrains_ide_versions["RD"].build_number,
|
||||
download_link = "${var.download_base_link}/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RD"].version
|
||||
}
|
||||
}
|
||||
|
||||
icon = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].icon
|
||||
json_data = var.latest ? jsondecode(data.http.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].response_body) : {}
|
||||
key = var.latest ? keys(local.json_data)[0] : ""
|
||||
display_name = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].name
|
||||
identifier = data.coder_parameter.jetbrains_ide.value
|
||||
download_link = var.latest ? local.json_data[local.key][0].downloads.linux.link : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
|
||||
build_number = var.latest ? local.json_data[local.key][0].build : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].build_number
|
||||
version = var.latest ? local.json_data[local.key][0].version : var.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].version
|
||||
}
|
||||
|
||||
data "coder_parameter" "jetbrains_ide" {
|
||||
type = "list(string)"
|
||||
type = "string"
|
||||
name = "jetbrains_ide"
|
||||
display_name = "JetBrains IDE"
|
||||
icon = "/icon/gateway.svg"
|
||||
mutable = true
|
||||
# check if default is in the jet_brains_ides list and if it is not empty or null otherwise set it to null
|
||||
default = var.default != null && var.default != "" && contains(var.jetbrains_ides, var.default) ? local.jetbrains_ides[var.default].value : local.jetbrains_ides[var.jetbrains_ides[0]].value
|
||||
default = var.default == "" ? var.jetbrains_ides[0] : var.default
|
||||
order = var.coder_parameter_order
|
||||
|
||||
dynamic "option" {
|
||||
for_each = { for key, value in local.jetbrains_ides : key => value if contains(var.jetbrains_ides, key) }
|
||||
for_each = var.jetbrains_ides
|
||||
content {
|
||||
icon = option.value.icon
|
||||
name = option.value.name
|
||||
value = option.value.value
|
||||
icon = local.jetbrains_ides[option.value].icon
|
||||
name = local.jetbrains_ides[option.value].name
|
||||
value = option.value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_app" "gateway" {
|
||||
agent_id = var.agent_id
|
||||
display_name = data.coder_parameter.jetbrains_ide.option[index(data.coder_parameter.jetbrains_ide.option.*.value, data.coder_parameter.jetbrains_ide.value)].name
|
||||
slug = "gateway"
|
||||
icon = data.coder_parameter.jetbrains_ide.option[index(data.coder_parameter.jetbrains_ide.option.*.value, data.coder_parameter.jetbrains_ide.value)].icon
|
||||
slug = var.slug
|
||||
display_name = local.display_name
|
||||
icon = local.icon
|
||||
external = true
|
||||
order = var.order
|
||||
url = join("", [
|
||||
"jetbrains-gateway://connect#type=coder&workspace=",
|
||||
data.coder_workspace.me.name,
|
||||
"&owner=",
|
||||
data.coder_workspace_owner.me.name,
|
||||
"&agent=",
|
||||
var.agent_name,
|
||||
"&folder=",
|
||||
@@ -184,14 +292,38 @@ resource "coder_app" "gateway" {
|
||||
"&token=",
|
||||
"$SESSION_TOKEN",
|
||||
"&ide_product_code=",
|
||||
jsondecode(data.coder_parameter.jetbrains_ide.value)[0],
|
||||
data.coder_parameter.jetbrains_ide.value,
|
||||
"&ide_build_number=",
|
||||
jsondecode(data.coder_parameter.jetbrains_ide.value)[1],
|
||||
local.build_number,
|
||||
"&ide_download_link=",
|
||||
jsondecode(data.coder_parameter.jetbrains_ide.value)[2],
|
||||
local.download_link,
|
||||
])
|
||||
}
|
||||
|
||||
output "jetbrains_ides" {
|
||||
value = data.coder_parameter.jetbrains_ide.value
|
||||
output "identifier" {
|
||||
value = local.identifier
|
||||
}
|
||||
|
||||
output "display_name" {
|
||||
value = local.display_name
|
||||
}
|
||||
|
||||
output "icon" {
|
||||
value = local.icon
|
||||
}
|
||||
|
||||
output "download_link" {
|
||||
value = local.download_link
|
||||
}
|
||||
|
||||
output "build_number" {
|
||||
value = local.build_number
|
||||
}
|
||||
|
||||
output "version" {
|
||||
value = local.version
|
||||
}
|
||||
|
||||
output "url" {
|
||||
value = coder_app.gateway.url
|
||||
}
|
||||
|
||||
jfrog-oauth/.npmrc.tftpl (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
%{ for REPO in REPOS ~}
|
||||
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
|
||||
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
|
||||
%{ endfor ~}
|
||||
@@ -17,15 +17,16 @@ Install the JF CLI and authenticate package managers with Artifactory using OAut
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
|
||||
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm", "@scoped:npm-scoped"]
|
||||
go = ["go", "another-go-repo"]
|
||||
pypi = ["pypi", "extra-index-pypi"]
|
||||
docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -44,13 +45,13 @@ Configure the Python pip package manager to fetch packages from Artifactory whil
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "email"
|
||||
|
||||
package_managers = {
|
||||
"pypi" : "pypi"
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -72,15 +73,15 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
|
||||
configure_code_server = true # Add JFrog extension configuration for code-server
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm"]
|
||||
go = ["go"]
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,19 +1,129 @@
|
||||
import { serve } from "bun";
|
||||
import { describe } from "bun:test";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
findResourceInstance,
|
||||
runTerraformInit,
|
||||
runTerraformApply,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("jfrog-oauth", async () => {
|
||||
type TestVariables = {
|
||||
agent_id: string;
|
||||
jfrog_url: string;
|
||||
package_managers: string;
|
||||
|
||||
username_field?: string;
|
||||
jfrog_server_id?: string;
|
||||
external_auth_id?: string;
|
||||
configure_code_server?: boolean;
|
||||
};
|
||||
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: "http://localhost:8081",
|
||||
package_managers: "{}",
|
||||
const fakeFrogApi = "localhost:8081/artifactory/api";
|
||||
const fakeFrogUrl = "http://localhost:8081";
|
||||
const user = "default";
|
||||
|
||||
it("can run apply with required variables", async () => {
|
||||
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: "{}",
|
||||
});
|
||||
});
|
||||
|
||||
it("generates an npmrc with scoped repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
npm: ["global", "@foo:foo", "@bar:bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const npmrcStanza = `cat << EOF > ~/.npmrc
|
||||
email=${user}@example.com
|
||||
registry=http://${fakeFrogApi}/npm/global
|
||||
//${fakeFrogApi}/npm/global/:_authToken=
|
||||
@foo:registry=http://${fakeFrogApi}/npm/foo
|
||||
//${fakeFrogApi}/npm/foo/:_authToken=
|
||||
@bar:registry=http://${fakeFrogApi}/npm/bar
|
||||
//${fakeFrogApi}/npm/bar/:_authToken=
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(npmrcStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf npmc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured npm',
|
||||
);
|
||||
});
|
||||
|
||||
it("generates a pip config with extra-indexes", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
pypi: ["global", "foo", "bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const pipStanza = `cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${user}:@${fakeFrogApi}/pypi/global/simple
|
||||
extra-index-url =
|
||||
https://${user}:@${fakeFrogApi}/pypi/foo/simple
|
||||
https://${user}:@${fakeFrogApi}/pypi/bar/simple
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(pipStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf pipc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured pypi',
|
||||
);
|
||||
});
|
||||
|
||||
it("registers multiple docker repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const dockerStanza = ["foo", "bar", "baz"]
|
||||
.map((r) => `register_docker "${r}.jfrog.io"`)
|
||||
.join("\n");
|
||||
expect(coderScript.script).toContain(dockerStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured docker',
|
||||
);
|
||||
});
|
||||
|
||||
it("sets goproxy with multiple repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
go: ["foo", "bar", "baz"],
|
||||
}),
|
||||
});
|
||||
const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
|
||||
const proxies = ["foo", "bar", "baz"]
|
||||
.map((r) => `https://${user}:@${fakeFrogApi}/go/${r}`)
|
||||
.join(",");
|
||||
expect(proxyEnv.value).toEqual(proxies);
|
||||
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
expect(coderScript.script).toContain(
|
||||
'jf goc --global --repo-resolve "foo"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured go',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
//TODO add more tests
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12.4"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,6 +19,12 @@ variable "jfrog_url" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "jfrog_server_id" {
|
||||
type = string
|
||||
description = "The server ID of the JFrog instance for JFrog CLI configuration"
|
||||
default = "0"
|
||||
}
|
||||
|
||||
variable "username_field" {
|
||||
type = string
|
||||
description = "The field to use for the artifactory username. i.e. Coder username or email."
|
||||
@@ -47,26 +53,55 @@ variable "configure_code_server" {
|
||||
}
|
||||
|
||||
variable "package_managers" {
|
||||
type = map(string)
|
||||
description = <<EOF
|
||||
A map of package manager names to their respective artifactory repositories.
|
||||
For example:
|
||||
{
|
||||
"npm": "YOUR_NPM_REPO_KEY",
|
||||
"go": "YOUR_GO_REPO_KEY",
|
||||
"pypi": "YOUR_PYPI_REPO_KEY",
|
||||
"docker": "YOUR_DOCKER_REPO_KEY"
|
||||
}
|
||||
EOF
|
||||
type = object({
|
||||
npm = optional(list(string), [])
|
||||
go = optional(list(string), [])
|
||||
pypi = optional(list(string), [])
|
||||
docker = optional(list(string), [])
|
||||
})
|
||||
description = <<-EOF
|
||||
A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
|
||||
For example:
|
||||
{
|
||||
npm = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
|
||||
go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
|
||||
pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
|
||||
docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
locals {
|
||||
# The username field to use for artifactory
|
||||
username = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
|
||||
jfrog_host = replace(var.jfrog_url, "https://", "")
|
||||
username = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
|
||||
jfrog_host = split("://", var.jfrog_url)[1]
|
||||
common_values = {
|
||||
JFROG_URL = var.jfrog_url
|
||||
JFROG_HOST = local.jfrog_host
|
||||
JFROG_SERVER_ID = var.jfrog_server_id
|
||||
ARTIFACTORY_USERNAME = local.username
|
||||
ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email
|
||||
ARTIFACTORY_ACCESS_TOKEN = data.coder_external_auth.jfrog.access_token
|
||||
}
|
||||
npmrc = templatefile(
|
||||
"${path.module}/.npmrc.tftpl",
|
||||
merge(
|
||||
local.common_values,
|
||||
{
|
||||
REPOS = [
|
||||
for r in var.package_managers.npm :
|
||||
strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
|
||||
]
|
||||
}
|
||||
)
|
||||
)
|
||||
pip_conf = templatefile(
|
||||
"${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
|
||||
)
|
||||
}
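A minimal sketch, using hypothetical repo keys, of how the `REPOS` expression above splits scoped npm entries into the `SCOPE`/`NAME` pairs consumed by `.npmrc.tftpl`:

```tf
locals {
  example_npm_repos = ["global", "@foo:foo-npm"]

  # Same expression as above, applied to the example list:
  #   "global"       => { SCOPE = "",      NAME = "global" }
  #   "@foo:foo-npm" => { SCOPE = "@foo:", NAME = "foo-npm" }
  example_repos = [
    for r in local.example_npm_repos :
    strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
  ]
}
```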
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
data "coder_external_auth" "jfrog" {
|
||||
id = var.external_auth_id
|
||||
@@ -76,18 +111,22 @@ resource "coder_script" "jfrog" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jfrog"
|
||||
icon = "/icon/jfrog.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
JFROG_URL : var.jfrog_url,
|
||||
JFROG_HOST : local.jfrog_host,
|
||||
ARTIFACTORY_USERNAME : local.username,
|
||||
ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
|
||||
ARTIFACTORY_ACCESS_TOKEN : data.coder_external_auth.jfrog.access_token,
|
||||
CONFIGURE_CODE_SERVER : var.configure_code_server,
|
||||
REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
|
||||
REPOSITORY_GO : lookup(var.package_managers, "go", ""),
|
||||
REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
|
||||
REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
|
||||
})
|
||||
script = templatefile("${path.module}/run.sh", merge(
|
||||
local.common_values,
|
||||
{
|
||||
CONFIGURE_CODE_SERVER = var.configure_code_server
|
||||
HAS_NPM = length(var.package_managers.npm) == 0 ? "" : "YES"
|
||||
NPMRC = local.npmrc
|
||||
REPOSITORY_NPM = try(element(var.package_managers.npm, 0), "")
|
||||
HAS_GO = length(var.package_managers.go) == 0 ? "" : "YES"
|
||||
REPOSITORY_GO = try(element(var.package_managers.go, 0), "")
|
||||
HAS_PYPI = length(var.package_managers.pypi) == 0 ? "" : "YES"
|
||||
PIP_CONF = local.pip_conf
|
||||
REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "")
|
||||
HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES"
|
||||
REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
|
||||
}
|
||||
))
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
@@ -113,10 +152,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
|
||||
}
|
||||
|
||||
resource "coder_env" "goproxy" {
|
||||
count = lookup(var.package_managers, "go", "") == "" ? 0 : 1
|
||||
count = length(var.package_managers.go) == 0 ? 0 : 1
|
||||
agent_id = var.agent_id
|
||||
name = "GOPROXY"
|
||||
value = "https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
|
||||
value = join(",", [
|
||||
for repo in var.package_managers.go :
|
||||
"https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
|
||||
])
|
||||
}
|
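A standalone sketch of the comma-separated GOPROXY value this produces, with placeholder credentials and host; the go toolchain consults the listed proxies in order:

```tf
locals {
  username   = "alice"            # placeholder
  token      = "TOKEN"            # placeholder
  jfrog_host = "example.jfrog.io" # placeholder
  goproxy = join(",", [
    for repo in ["go", "another-go-repo"] :
    "https://${local.username}:${local.token}@${local.jfrog_host}/artifactory/api/go/${repo}"
  ])
  # => "https://alice:TOKEN@example.jfrog.io/artifactory/api/go/go,https://alice:TOKEN@example.jfrog.io/artifactory/api/go/another-go-repo"
}
```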
||||
|
||||
output "access_token" {
|
||||
|
||||
jfrog-oauth/pip.conf.tftpl (new Normal file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
|
||||
extra-index-url =
|
||||
%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
|
||||
https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
|
||||
%{ endfor ~}
|
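The template takes the first repository as the index-url and the remaining ones as extra-index-url entries. A standalone sketch of that first/rest split with sample values:

```tf
locals {
  pypi_repos  = ["global", "foo", "bar"]
  index_repo  = try(element(local.pypi_repos, 0), "")                         # "global" -> index-url
  extra_repos = try(slice(local.pypi_repos, 1, length(local.pypi_repos)), []) # ["foo", "bar"] -> extra-index-url
}
```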
||||
@@ -2,6 +2,21 @@
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
not_configured() {
|
||||
type=$1
|
||||
echo "🤔 no $type repository is set, skipping $type configuration."
|
||||
echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
|
||||
}
|
||||
|
||||
config_complete() {
|
||||
echo "🥳 Configuration complete!"
|
||||
}
|
||||
|
||||
register_docker() {
|
||||
repo=$1
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
}
|
||||
|
||||
# check if JFrog CLI is already installed
|
||||
if command -v jf > /dev/null 2>&1; then
|
||||
echo "✅ JFrog CLI is already installed, skipping installation."
|
||||
@@ -15,57 +30,52 @@ fi
|
||||
# flows.
|
||||
export CI=true
|
||||
# Authenticate JFrog CLI with Artifactory.
|
||||
echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite 0
|
||||
echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}"
|
||||
# Set the configured server as the default.
|
||||
jf c use 0
|
||||
jf c use "${JFROG_SERVER_ID}"
|
||||
|
||||
# Configure npm to use the Artifactory "npm" repository.
|
||||
if [ -z "${REPOSITORY_NPM}" ]; then
|
||||
echo "🤔 no npm repository is set, skipping npm configuration."
|
||||
echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
|
||||
if [ -z "${HAS_NPM}" ]; then
|
||||
not_configured npm
|
||||
else
|
||||
echo "📦 Configuring npm..."
|
||||
jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
|
||||
cat << EOF > ~/.npmrc
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
|
||||
${NPMRC}
|
||||
EOF
|
||||
echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure the `pip` to use the Artifactory "python" repository.
|
||||
if [ -z "${REPOSITORY_PYPI}" ]; then
|
||||
echo "🤔 no pypi repository is set, skipping pip configuration."
|
||||
echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
|
||||
if [ -z "${HAS_PYPI}" ]; then
|
||||
not_configured pypi
|
||||
else
|
||||
echo "📦 Configuring pip..."
|
||||
echo "🐍 Configuring pip..."
|
||||
jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
|
||||
mkdir -p ~/.pip
|
||||
cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
|
||||
${PIP_CONF}
|
||||
EOF
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure Artifactory "go" repository.
|
||||
if [ -z "${REPOSITORY_GO}" ]; then
|
||||
echo "🤔 no go repository is set, skipping go configuration."
|
||||
echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
|
||||
if [ -z "${HAS_GO}" ]; then
|
||||
not_configured go
|
||||
else
|
||||
echo "🐹 Configuring go..."
|
||||
jf goc --global --repo-resolve "${REPOSITORY_GO}"
|
||||
config_complete
|
||||
fi
|
||||
echo "🥳 Configuration complete!"
|
||||
|
||||
# Configure the JFrog CLI to use the Artifactory "docker" repository.
|
||||
if [ -z "${REPOSITORY_DOCKER}" ]; then
|
||||
echo "🤔 no docker repository is set, skipping docker configuration."
|
||||
echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
|
||||
if [ -z "${HAS_DOCKER}" ]; then
|
||||
not_configured docker
|
||||
else
|
||||
if command -v docker > /dev/null 2>&1; then
|
||||
echo "🔑 Configuring 🐳 docker credentials..."
|
||||
mkdir -p ~/.docker
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
${REGISTER_DOCKER}
|
||||
else
|
||||
echo "🤔 no docker is installed, skipping docker configuration."
|
||||
fi
|
||||
@@ -96,20 +106,19 @@ echo "📦 Configuring JFrog CLI completion..."
|
||||
SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
|
||||
# Generate the completion script
|
||||
jf completion $SHELLNAME --install
|
||||
begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)"
|
||||
# Add the completion script to the user's shell profile
|
||||
if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
|
||||
echo "" >> ~/.bashrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.bashrc
|
||||
if ! grep -q "$begin_stanza" ~/.bashrc; then
|
||||
printf "%s\n" "$begin_stanza" >> ~/.bashrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
|
||||
echo "# END: jf CLI shell completion" >> ~/.bashrc
|
||||
else
|
||||
echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
|
||||
fi
|
||||
elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
|
||||
echo "" >> ~/.zshrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.zshrc
|
||||
if ! grep -q "$begin_stanza" ~/.zshrc; then
|
||||
printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
|
||||
echo "autoload -Uz compinit" >> ~/.zshrc
|
||||
echo "compinit" >> ~/.zshrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
|
||||
|
||||
jfrog-token/.npmrc.tftpl (new Normal file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
%{ for REPO in REPOS ~}
|
||||
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
|
||||
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
|
||||
%{ endfor ~}
|
||||
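An illustrative REPOS input for the template above, with the lines it would render shown as comments (host and token are placeholders):

```tf
locals {
  repos = [
    { SCOPE = "", NAME = "global" },
    { SCOPE = "@foo:", NAME = "foo" },
  ]
  # Rendered after the email line:
  #   registry=https://example.jfrog.io/artifactory/api/npm/global
  #   //example.jfrog.io/artifactory/api/npm/global/:_authToken=TOKEN
  #   @foo:registry=https://example.jfrog.io/artifactory/api/npm/foo
  #   //example.jfrog.io/artifactory/api/npm/foo/:_authToken=TOKEN
}
```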
@@ -15,14 +15,15 @@ Install the JF CLI and authenticate package managers with Artifactory using Arti
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm", "@scoped:npm-scoped"]
|
||||
go = ["go", "another-go-repo"]
|
||||
pypi = ["pypi", "extra-index-pypi"]
|
||||
docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -41,14 +42,14 @@ For detailed instructions, please see this [guide](https://coder.com/docs/v2/lat
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://YYYY.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token # An admin access token
|
||||
package_managers = {
|
||||
"npm" : "npm-local",
|
||||
"go" : "go-local",
|
||||
"pypi" : "pypi-local"
|
||||
npm = ["npm-local"]
|
||||
go = ["go-local"]
|
||||
pypi = ["pypi-local"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -74,15 +75,33 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
configure_code_server = true # Add JFrog extension configuration for code-server
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm"]
|
||||
go = ["go"]
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Add a custom token description
|
||||
|
||||
```tf
|
||||
data "coder_workspace" "me" {}
|
||||
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
token_description = "Token for Coder workspace: ${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}"
|
||||
package_managers = {
|
||||
npm = ["npm"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,12 +1,29 @@
|
||||
import { serve } from "bun";
|
||||
import { describe } from "bun:test";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
findResourceInstance,
|
||||
runTerraformInit,
|
||||
runTerraformApply,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("jfrog-token", async () => {
|
||||
type TestVariables = {
|
||||
agent_id: string;
|
||||
jfrog_url: string;
|
||||
artifactory_access_token: string;
|
||||
package_managers: string;
|
||||
|
||||
token_description?: string;
|
||||
check_license?: boolean;
|
||||
refreshable?: boolean;
|
||||
expires_in?: number;
|
||||
username_field?: string;
|
||||
jfrog_server_id?: string;
|
||||
configure_code_server?: boolean;
|
||||
};
|
||||
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
// Run a fake JFrog server so the provider can initialize
|
||||
@@ -32,10 +49,116 @@ describe("jfrog-token", async () => {
|
||||
port: 0,
|
||||
});
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: "http://" + fakeFrogHost.hostname + ":" + fakeFrogHost.port,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: "{}",
|
||||
const fakeFrogApi = `${fakeFrogHost.hostname}:${fakeFrogHost.port}/artifactory/api`;
|
||||
const fakeFrogUrl = `http://${fakeFrogHost.hostname}:${fakeFrogHost.port}`;
|
||||
const user = "default";
|
||||
const token = "xxx";
|
||||
|
||||
it("can run apply with required variables", async () => {
|
||||
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: "{}",
|
||||
});
|
||||
});
|
||||
|
||||
it("generates an npmrc with scoped repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
npm: ["global", "@foo:foo", "@bar:bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const npmrcStanza = `cat << EOF > ~/.npmrc
|
||||
email=${user}@example.com
|
||||
registry=http://${fakeFrogApi}/npm/global
|
||||
//${fakeFrogApi}/npm/global/:_authToken=xxx
|
||||
@foo:registry=http://${fakeFrogApi}/npm/foo
|
||||
//${fakeFrogApi}/npm/foo/:_authToken=xxx
|
||||
@bar:registry=http://${fakeFrogApi}/npm/bar
|
||||
//${fakeFrogApi}/npm/bar/:_authToken=xxx
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(npmrcStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf npmc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured npm',
|
||||
);
|
||||
});
|
||||
|
||||
it("generates a pip config with extra-indexes", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
pypi: ["global", "foo", "bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const pipStanza = `cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${user}:${token}@${fakeFrogApi}/pypi/global/simple
|
||||
extra-index-url =
|
||||
https://${user}:${token}@${fakeFrogApi}/pypi/foo/simple
|
||||
https://${user}:${token}@${fakeFrogApi}/pypi/bar/simple
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(pipStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf pipc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured pypi',
|
||||
);
|
||||
});
|
||||
|
||||
it("registers multiple docker repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const dockerStanza = ["foo", "bar", "baz"]
|
||||
.map((r) => `register_docker "${r}.jfrog.io"`)
|
||||
.join("\n");
|
||||
expect(coderScript.script).toContain(dockerStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured docker',
|
||||
);
|
||||
});
|
||||
|
||||
it("sets goproxy with multiple repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
go: ["foo", "bar", "baz"],
|
||||
}),
|
||||
});
|
||||
const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
|
||||
const proxies = ["foo", "bar", "baz"]
|
||||
.map((r) => `https://${user}:${token}@${fakeFrogApi}/go/${r}`)
|
||||
.join(",");
|
||||
expect(proxyEnv.value).toEqual(proxies);
|
||||
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
expect(coderScript.script).toContain(
|
||||
'jf goc --global --repo-resolve "foo"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured go',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12.4"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
artifactory = {
|
||||
source = "registry.terraform.io/jfrog/artifactory"
|
||||
@@ -23,11 +23,23 @@ variable "jfrog_url" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "jfrog_server_id" {
|
||||
type = string
|
||||
description = "The server ID of the JFrog instance for JFrog CLI configuration"
|
||||
default = "0"
|
||||
}
|
||||
|
||||
variable "artifactory_access_token" {
|
||||
type = string
|
||||
description = "The admin-level access token to use for JFrog."
|
||||
}
|
||||
|
||||
variable "token_description" {
|
||||
type = string
|
||||
description = "Free text token description. Useful for filtering and managing tokens."
|
||||
default = "Token for Coder workspace"
|
||||
}
|
||||
|
||||
variable "check_license" {
|
||||
type = bool
|
||||
description = "Toggle for pre-flight checking of Artifactory license. Default to `true`."
|
||||
@@ -68,23 +80,51 @@ variable "configure_code_server" {
|
||||
}
|
||||
|
||||
variable "package_managers" {
|
||||
type = map(string)
|
||||
description = <<EOF
|
||||
A map of package manager names to their respective artifactory repositories.
|
||||
For example:
|
||||
{
|
||||
"npm": "YOUR_NPM_REPO_KEY",
|
||||
"go": "YOUR_GO_REPO_KEY",
|
||||
"pypi": "YOUR_PYPI_REPO_KEY",
|
||||
"docker": "YOUR_DOCKER_REPO_KEY"
|
||||
}
|
||||
EOF
|
||||
type = object({
|
||||
npm = optional(list(string), [])
|
||||
go = optional(list(string), [])
|
||||
pypi = optional(list(string), [])
|
||||
docker = optional(list(string), [])
|
||||
})
|
||||
description = <<-EOF
|
||||
A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
|
||||
For example:
|
||||
{
|
||||
npm = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
|
||||
go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
|
||||
pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
|
||||
docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
locals {
|
||||
# The username field to use for artifactory
|
||||
username = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
|
||||
jfrog_host = replace(var.jfrog_url, "https://", "")
|
||||
username = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
|
||||
jfrog_host = split("://", var.jfrog_url)[1]
|
||||
common_values = {
|
||||
JFROG_URL = var.jfrog_url
|
||||
JFROG_HOST = local.jfrog_host
|
||||
JFROG_SERVER_ID = var.jfrog_server_id
|
||||
ARTIFACTORY_USERNAME = local.username
|
||||
ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email
|
||||
ARTIFACTORY_ACCESS_TOKEN = artifactory_scoped_token.me.access_token
|
||||
}
|
||||
npmrc = templatefile(
|
||||
"${path.module}/.npmrc.tftpl",
|
||||
merge(
|
||||
local.common_values,
|
||||
{
|
||||
REPOS = [
|
||||
for r in var.package_managers.npm :
|
||||
strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
|
||||
]
|
||||
}
|
||||
)
|
||||
)
|
||||
pip_conf = templatefile(
|
||||
"${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
|
||||
)
|
||||
}
|
||||
|
||||
# Configure the Artifactory provider
|
||||
@@ -101,26 +141,32 @@ resource "artifactory_scoped_token" "me" {
|
||||
scopes = ["applied-permissions/user"]
|
||||
refreshable = var.refreshable
|
||||
expires_in = var.expires_in
|
||||
description = var.token_description
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_script" "jfrog" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jfrog"
|
||||
icon = "/icon/jfrog.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
JFROG_URL : var.jfrog_url,
|
||||
JFROG_HOST : local.jfrog_host,
|
||||
ARTIFACTORY_USERNAME : local.username,
|
||||
ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
|
||||
ARTIFACTORY_ACCESS_TOKEN : artifactory_scoped_token.me.access_token,
|
||||
CONFIGURE_CODE_SERVER : var.configure_code_server,
|
||||
REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
|
||||
REPOSITORY_GO : lookup(var.package_managers, "go", ""),
|
||||
REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
|
||||
REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
|
||||
})
|
||||
script = templatefile("${path.module}/run.sh", merge(
|
||||
local.common_values,
|
||||
{
|
||||
CONFIGURE_CODE_SERVER = var.configure_code_server
|
||||
HAS_NPM = length(var.package_managers.npm) == 0 ? "" : "YES"
|
||||
NPMRC = local.npmrc
|
||||
REPOSITORY_NPM = try(element(var.package_managers.npm, 0), "")
|
||||
HAS_GO = length(var.package_managers.go) == 0 ? "" : "YES"
|
||||
REPOSITORY_GO = try(element(var.package_managers.go, 0), "")
|
||||
HAS_PYPI = length(var.package_managers.pypi) == 0 ? "" : "YES"
|
||||
PIP_CONF = local.pip_conf
|
||||
REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "")
|
||||
HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES"
|
||||
REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
|
||||
}
|
||||
))
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
@@ -146,10 +192,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
|
||||
}
|
||||
|
||||
resource "coder_env" "goproxy" {
|
||||
count = lookup(var.package_managers, "go", "") == "" ? 0 : 1
|
||||
count = length(var.package_managers.go) == 0 ? 0 : 1
|
||||
agent_id = var.agent_id
|
||||
name = "GOPROXY"
|
||||
value = "https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
|
||||
value = join(",", [
|
||||
for repo in var.package_managers.go :
|
||||
"https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
|
||||
])
|
||||
}
|
||||
|
||||
output "access_token" {
|
||||
|
||||
jfrog-token/pip.conf.tftpl (new Normal file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
|
||||
extra-index-url =
|
||||
%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
|
||||
https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
|
||||
%{ endfor ~}
|
||||
@@ -2,6 +2,21 @@
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
not_configured() {
|
||||
type=$1
|
||||
echo "🤔 no $type repository is set, skipping $type configuration."
|
||||
echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
|
||||
}
|
||||
|
||||
config_complete() {
|
||||
echo "🥳 Configuration complete!"
|
||||
}
|
||||
|
||||
register_docker() {
|
||||
repo=$1
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
}
|
||||
|
||||
# check if JFrog CLI is already installed
|
||||
if command -v jf > /dev/null 2>&1; then
|
||||
echo "✅ JFrog CLI is already installed, skipping installation."
|
||||
@@ -11,61 +26,55 @@ else
|
||||
sudo chmod 755 /usr/local/bin/jf
|
||||
fi
|
||||
|
||||
# The jf CLI checks $CI when determining whether to use interactive
|
||||
# flows.
|
||||
# The jf CLI checks $CI when determining whether to use interactive flows.
|
||||
export CI=true
|
||||
# Authenticate JFrog CLI with Artifactory.
|
||||
echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite 0
|
||||
echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}"
|
||||
# Set the configured server as the default.
|
||||
jf c use 0
|
||||
jf c use "${JFROG_SERVER_ID}"
|
||||
|
||||
# Configure npm to use the Artifactory "npm" repository.
|
||||
if [ -z "${REPOSITORY_NPM}" ]; then
|
||||
echo "🤔 no npm repository is set, skipping npm configuration."
|
||||
echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
|
||||
if [ -z "${HAS_NPM}" ]; then
|
||||
not_configured npm
|
||||
else
|
||||
echo "📦 Configuring npm..."
|
||||
jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
|
||||
cat << EOF > ~/.npmrc
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
|
||||
${NPMRC}
|
||||
EOF
|
||||
echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure the `pip` to use the Artifactory "python" repository.
|
||||
if [ -z "${REPOSITORY_PYPI}" ]; then
|
||||
echo "🤔 no pypi repository is set, skipping pip configuration."
|
||||
echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
|
||||
if [ -z "${HAS_PYPI}" ]; then
|
||||
not_configured pypi
|
||||
else
|
||||
echo "🐍 Configuring pip..."
|
||||
jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
|
||||
mkdir -p ~/.pip
|
||||
cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
|
||||
${PIP_CONF}
|
||||
EOF
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure Artifactory "go" repository.
|
||||
if [ -z "${REPOSITORY_GO}" ]; then
|
||||
echo "🤔 no go repository is set, skipping go configuration."
|
||||
echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
|
||||
if [ -z "${HAS_GO}" ]; then
|
||||
not_configured go
|
||||
else
|
||||
echo "🐹 Configuring go..."
|
||||
jf goc --global --repo-resolve "${REPOSITORY_GO}"
|
||||
config_complete
|
||||
fi
|
||||
echo "🥳 Configuration complete!"
|
||||
|
||||
# Configure the JFrog CLI to use the Artifactory "docker" repository.
|
||||
if [ -z "${REPOSITORY_DOCKER}" ]; then
|
||||
echo "🤔 no docker repository is set, skipping docker configuration."
|
||||
echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
|
||||
if [ -z "${HAS_DOCKER}" ]; then
|
||||
not_configured docker
|
||||
else
|
||||
if command -v docker > /dev/null 2>&1; then
|
||||
echo "🔑 Configuring 🐳 docker credentials..."
|
||||
mkdir -p ~/.docker
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
${REGISTER_DOCKER}
|
||||
else
|
||||
echo "🤔 no docker is installed, skipping docker configuration."
|
||||
fi
|
||||
@@ -96,20 +105,19 @@ echo "📦 Configuring JFrog CLI completion..."
|
||||
SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
|
||||
# Generate the completion script
|
||||
jf completion $SHELLNAME --install
|
||||
begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-token)"
|
||||
# Add the completion script to the user's shell profile
|
||||
if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
|
||||
echo "" >> ~/.bashrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.bashrc
|
||||
if ! grep -q "$begin_stanza" ~/.bashrc; then
|
||||
printf "%s\n" "$begin_stanza" >> ~/.bashrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
|
||||
echo "# END: jf CLI shell completion" >> ~/.bashrc
|
||||
else
|
||||
echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
|
||||
fi
|
||||
elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
|
||||
echo "" >> ~/.zshrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.zshrc
|
||||
if ! grep -q "$begin_stanza" ~/.zshrc; then
|
||||
printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
|
||||
echo "autoload -Uz compinit" >> ~/.zshrc
|
||||
echo "compinit" >> ~/.zshrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
|
||||
|
||||
@@ -16,7 +16,7 @@ A module that adds Jupyter Notebook in your Coder template.
|
||||
```tf
|
||||
module "jupyter-notebook" {
|
||||
source = "registry.coder.com/modules/jupyter-notebook/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -36,6 +36,12 @@ variable "share" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
resource "coder_script" "jupyter-notebook" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jupyter-notebook"
|
||||
@@ -55,4 +61,5 @@ resource "coder_app" "jupyter-notebook" {
|
||||
icon = "/icon/jupyter.svg"
|
||||
subdomain = true
|
||||
share = var.share
|
||||
order = var.order
|
||||
}
|
||||
|
||||
@@ -7,14 +7,14 @@ printf "$${BOLD}Installing jupyter-notebook!\n"
|
||||
# check if jupyter-notebook is installed
|
||||
if ! command -v jupyter-notebook > /dev/null 2>&1; then
|
||||
# install jupyter-notebook
|
||||
# check if python3 pip is installed
|
||||
if ! command -v pip3 > /dev/null 2>&1; then
|
||||
echo "pip3 is not installed"
|
||||
echo "Please install pip3 in your Dockerfile/VM image before running this script"
|
||||
# check if pipx is installed
|
||||
if ! command -v pipx > /dev/null 2>&1; then
|
||||
echo "pipx is not installed"
|
||||
echo "Please install pipx in your Dockerfile/VM image before using this module"
|
||||
exit 1
|
||||
fi
|
||||
# install jupyter-notebook
|
||||
pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyter
|
||||
# install jupyter notebook
|
||||
pipx install -q notebook
|
||||
echo "🥳 jupyter-notebook has been installed\n\n"
|
||||
else
|
||||
echo "🥳 jupyter-notebook is already installed\n\n"
|
||||
@@ -22,4 +22,4 @@ fi
|
||||
|
||||
echo "👷 Starting jupyter-notebook in background..."
|
||||
echo "check logs at ${LOG_PATH}"
|
||||
$HOME/.local/bin/jupyter notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
||||
$HOME/.local/bin/jupyter-notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
||||
|
||||
@@ -16,7 +16,7 @@ A module that adds JupyterLab in your Coder template.
|
||||
```tf
|
||||
module "jupyterlab" {
|
||||
source = "registry.coder.com/modules/jupyterlab/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
execContainer,
|
||||
executeScriptInContainer,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
TerraformState,
|
||||
execContainer,
|
||||
type TerraformState,
|
||||
} from "../test";
|
||||
|
||||
// executes the coder script after installing pip
|
||||
const executeScriptInContainerWithPip = async (
|
||||
state: TerraformState,
|
||||
image: string,
|
||||
shell: string = "sh",
|
||||
shell = "sh",
|
||||
): Promise<{
|
||||
exitCode: number;
|
||||
stdout: string[];
|
||||
@@ -22,7 +22,7 @@ const executeScriptInContainerWithPip = async (
|
||||
}> => {
|
||||
const instance = findResourceInstance(state, "coder_script");
|
||||
const id = await runContainer(image);
|
||||
const respPip = await execContainer(id, [shell, "-c", "apk add py3-pip"]);
|
||||
const respPipx = await execContainer(id, [shell, "-c", "apk add pipx"]);
|
||||
const resp = await execContainer(id, [shell, "-c", instance.script]);
|
||||
const stdout = resp.stdout.trim().split("\n");
|
||||
const stderr = resp.stderr.trim().split("\n");
|
||||
@@ -40,7 +40,7 @@ describe("jupyterlab", async () => {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("fails without pip3", async () => {
|
||||
it("fails without pipx", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
@@ -48,14 +48,14 @@ describe("jupyterlab", async () => {
|
||||
expect(output.exitCode).toBe(1);
|
||||
expect(output.stdout).toEqual([
|
||||
"\u001B[0;1mInstalling jupyterlab!",
|
||||
"pip3 is not installed",
|
||||
"Please install pip3 in your Dockerfile/VM image before running this script",
|
||||
"pipx is not installed",
|
||||
"Please install pipx in your Dockerfile/VM image before running this script",
|
||||
]);
|
||||
});
|
||||
|
||||
// TODO: Add faster test to run with pip3.
|
||||
// TODO: Add faster test to run with pipx.
|
||||
// currently times out.
|
||||
// it("runs with pip3", async () => {
|
||||
// it("runs with pipx", async () => {
|
||||
// ...
|
||||
// const output = await executeScriptInContainerWithPip(state, "alpine");
|
||||
// ...
|
||||
|
||||
@@ -4,11 +4,14 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
# Add required variables for your modules and remove any unneeded variables
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
@@ -36,6 +39,18 @@ variable "share" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "subdomain" {
|
||||
type = bool
|
||||
description = "Determines whether JupyterLab will be accessed via its own subdomain or whether it will be accessed via a path on Coder."
|
||||
default = true
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
resource "coder_script" "jupyterlab" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jupyterlab"
|
||||
@@ -43,16 +58,18 @@ resource "coder_script" "jupyterlab" {
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
LOG_PATH : var.log_path,
|
||||
PORT : var.port
|
||||
BASE_URL : var.subdomain ? "" : "/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
|
||||
})
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "jupyterlab" {
|
||||
agent_id = var.agent_id
|
||||
slug = "jupyterlab"
|
||||
slug = "jupyterlab" # sync with the usage in URL
|
||||
display_name = "JupyterLab"
|
||||
url = "http://localhost:${var.port}"
|
||||
url = var.subdomain ? "http://localhost:${var.port}" : "http://localhost:${var.port}/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
|
||||
icon = "/icon/jupyter.svg"
|
||||
subdomain = true
|
||||
subdomain = var.subdomain
|
||||
share = var.share
|
||||
order = var.order
|
||||
}
|
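A usage sketch of the new `subdomain` input, reusing the module source and version shown in the README above:

```tf
module "jupyterlab" {
  source    = "registry.coder.com/modules/jupyterlab/coder"
  version   = "1.0.23"
  agent_id  = coder_agent.example.id
  subdomain = false # serve under /@<owner>/<workspace>/apps/jupyterlab instead of a wildcard subdomain
}
```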
||||
|
||||
@@ -1,25 +1,35 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
if [ -n "${BASE_URL}" ]; then
|
||||
BASE_URL_FLAG="--ServerApp.base_url=${BASE_URL}"
|
||||
fi
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
printf "$${BOLD}Installing jupyterlab!\n"
|
||||
|
||||
# check if jupyterlab is installed
|
||||
if ! command -v jupyterlab > /dev/null 2>&1; then
|
||||
if ! command -v jupyter-lab > /dev/null 2>&1; then
|
||||
# install jupyterlab
|
||||
# check if python3 pip is installed
|
||||
if ! command -v pip3 > /dev/null 2>&1; then
|
||||
echo "pip3 is not installed"
|
||||
echo "Please install pip3 in your Dockerfile/VM image before running this script"
|
||||
# check if pipx is installed
|
||||
if ! command -v pipx > /dev/null 2>&1; then
|
||||
echo "pipx is not installed"
|
||||
echo "Please install pipx in your Dockerfile/VM image before running this script"
|
||||
exit 1
|
||||
fi
|
||||
# install jupyterlab
|
||||
pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyterlab
|
||||
echo "🥳 jupyterlab has been installed\n\n"
|
||||
pipx install -q jupyterlab
|
||||
printf "%s\n\n" "🥳 jupyterlab has been installed"
|
||||
else
|
||||
echo "🥳 jupyterlab is already installed\n\n"
|
||||
printf "%s\n\n" "🥳 jupyterlab is already installed"
|
||||
fi
|
||||
|
||||
echo "👷 Starting jupyterlab in background..."
|
||||
echo "check logs at ${LOG_PATH}"
|
||||
$HOME/.local/bin/jupyter lab --ServerApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
||||
printf "👷 Starting jupyterlab in background..."
|
||||
printf "check logs at ${LOG_PATH}"
|
||||
$HOME/.local/bin/jupyter-lab --no-browser \
|
||||
"$BASE_URL_FLAG" \
|
||||
--ServerApp.ip='*' \
|
||||
--ServerApp.port="${PORT}" \
|
||||
--ServerApp.token='' \
|
||||
--ServerApp.password='' \
|
||||
> "${LOG_PATH}" 2>&1 &
|
||||
|
||||
kasmvnc/README.md (new Normal file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
---
|
||||
display_name: KasmVNC
|
||||
description: A modern open source VNC server
|
||||
icon: ../.icons/kasmvnc.svg
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [helper, vnc, desktop]
|
||||
---
|
||||
|
||||
# KasmVNC
|
||||
|
||||
Automatically install [KasmVNC](https://kasmweb.com/kasmvnc) in a workspace, and create an app to access it via the dashboard.
|
||||
|
||||
```tf
|
||||
module "kasmvnc" {
|
||||
source = "registry.coder.com/modules/kasmvnc/coder"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
desktop_environment = "xfce"
|
||||
}
|
||||
```
|
||||
|
||||
> **Note:** This module only works on workspaces with a pre-installed desktop environment. As an example base image, you can use the `codercom/enterprise-desktop` image.
|
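For completeness, the optional inputs and their defaults as declared in the module's main.tf below:

```tf
module "kasmvnc" {
  source              = "registry.coder.com/modules/kasmvnc/coder"
  version             = "1.0.23"
  agent_id            = coder_agent.example.id
  desktop_environment = "xfce"  # one of: xfce, kde, gnome, lxde, lxqt
  port                = 6800    # default
  kasm_version        = "1.3.2" # default
}
```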
||||
kasmvnc/main.test.ts (new Normal file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
const allowedDesktopEnvs = ["xfce", "kde", "gnome", "lxde", "lxqt"] as const;
|
||||
type AllowedDesktopEnv = (typeof allowedDesktopEnvs)[number];
|
||||
|
||||
type TestVariables = Readonly<{
|
||||
agent_id: string;
|
||||
desktop_environment: AllowedDesktopEnv;
|
||||
port?: string;
|
||||
kasm_version?: string;
|
||||
}>;
|
||||
|
||||
describe("Kasm VNC", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
desktop_environment: "gnome",
|
||||
});
|
||||
|
||||
it("Successfully installs for all expected Kasm desktop versions", async () => {
|
||||
for (const v of allowedDesktopEnvs) {
|
||||
const applyWithEnv = () => {
|
||||
runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
desktop_environment: v,
|
||||
});
|
||||
};
|
||||
|
||||
expect(applyWithEnv).not.toThrow();
|
||||
}
|
||||
});
|
||||
});
|
||||
kasmvnc/main.tf (new Normal file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "port" {
|
||||
type = number
|
||||
description = "The port to run KasmVNC on."
|
||||
default = 6800
|
||||
}
|
||||
|
||||
variable "kasm_version" {
|
||||
type = string
|
||||
description = "Version of KasmVNC to install."
|
||||
default = "1.3.2"
|
||||
}
|
||||
|
||||
variable "desktop_environment" {
|
||||
type = string
|
||||
description = "Specifies the desktop environment of the workspace. This should be pre-installed on the workspace."
|
||||
validation {
|
||||
condition = contains(["xfce", "kde", "gnome", "lxde", "lxqt"], var.desktop_environment)
|
||||
error_message = "Invalid desktop environment. Please specify a valid desktop environment."
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_script" "kasm_vnc" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "KasmVNC"
|
||||
icon = "/icon/kasmvnc.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
PORT : var.port,
|
||||
DESKTOP_ENVIRONMENT : var.desktop_environment,
|
||||
KASM_VERSION : var.kasm_version
|
||||
})
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "kasm_vnc" {
|
||||
agent_id = var.agent_id
|
||||
slug = "kasm-vnc"
|
||||
display_name = "kasmVNC"
|
||||
url = "http://localhost:${var.port}"
|
||||
icon = "/icon/kasmvnc.svg"
|
||||
subdomain = true
|
||||
share = "owner"
|
||||
healthcheck {
|
||||
url = "http://localhost:${var.port}/app"
|
||||
interval = 5
|
||||
threshold = 5
|
||||
}
|
||||
}
|
||||
kasmvnc/run.sh (new Normal file, 235 lines)
@@ -0,0 +1,235 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Exit on error, undefined variables, and pipe failures
|
||||
set -euo pipefail
|
||||
|
||||
# Function to check if vncserver is already installed
|
||||
check_installed() {
|
||||
if command -v vncserver &> /dev/null; then
|
||||
echo "vncserver is already installed."
|
||||
return 0 # Don't exit, just indicate it's installed
|
||||
else
|
||||
return 1 # Indicates not installed
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to download a file using wget, curl, or busybox as a fallback
|
||||
download_file() {
|
||||
local url="$1"
|
||||
local output="$2"
|
||||
local download_tool
|
||||
|
||||
if command -v curl &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
download_tool=(curl -fsSL)
|
||||
elif command -v wget &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
download_tool=(wget -q -O-)
|
||||
elif command -v busybox &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
download_tool=(busybox wget -O-)
|
||||
else
|
||||
echo "ERROR: No download tool available (curl, wget, or busybox required)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2288
|
||||
"$${download_tool[@]}" "$url" > "$output" || {
|
||||
echo "ERROR: Failed to download $url"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
# Function to install kasmvncserver for debian-based distros
|
||||
install_deb() {
|
||||
local url=$1
|
||||
local kasmdeb="/tmp/kasmvncserver.deb"
|
||||
|
||||
download_file "$url" "$kasmdeb"
|
||||
|
||||
CACHE_DIR="/var/lib/apt/lists/partial"
|
||||
# Check if the directory exists and was modified in the last 60 minutes
|
||||
if [[ ! -d "$CACHE_DIR" ]] || ! find "$CACHE_DIR" -mmin -60 -print -quit &> /dev/null; then
|
||||
echo "Stale package cache, updating..."
|
||||
# Update package cache with a 300-second timeout for dpkg lock
|
||||
sudo apt-get -o DPkg::Lock::Timeout=300 -qq update
|
||||
fi
|
||||
|
||||
DEBIAN_FRONTEND=noninteractive sudo apt-get -o DPkg::Lock::Timeout=300 install --yes -qq --no-install-recommends --no-install-suggests "$kasmdeb"
|
||||
rm "$kasmdeb"
|
||||
}
|
||||
|
||||
# Function to install kasmvncserver for rpm-based distros
|
||||
install_rpm() {
|
||||
local url=$1
|
||||
local kasmrpm="/tmp/kasmvncserver.rpm"
|
||||
local package_manager
|
||||
|
||||
if command -v dnf &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
package_manager=(dnf localinstall -y)
|
||||
elif command -v zypper &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
package_manager=(zypper install -y)
|
||||
elif command -v yum &> /dev/null; then
|
||||
# shellcheck disable=SC2034
|
||||
package_manager=(yum localinstall -y)
|
||||
elif command -v rpm &> /dev/null; then
|
||||
# Do we need to manually handle missing dependencies?
|
||||
# shellcheck disable=SC2034
|
||||
package_manager=(rpm -i)
|
||||
else
|
||||
echo "ERROR: No supported package manager available (dnf, zypper, yum, or rpm required)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
download_file "$url" "$kasmrpm"
|
||||
|
||||
# shellcheck disable=SC2288
|
||||
sudo "$${package_manager[@]}" "$kasmrpm" || {
|
||||
echo "ERROR: Failed to install $kasmrpm"
|
||||
exit 1
|
||||
}
|
||||
|
||||
rm "$kasmrpm"
|
||||
}
|
||||
|
||||
# Function to install kasmvncserver for Alpine Linux
|
||||
install_alpine() {
|
||||
local url=$1
|
||||
local kasmtgz="/tmp/kasmvncserver.tgz"
|
||||
|
||||
download_file "$url" "$kasmtgz"
|
||||
|
||||
tar -xzf "$kasmtgz" -C /usr/local/bin/
|
||||
rm "$kasmtgz"
|
||||
}
|
||||
|
||||
# Detect system information
|
||||
if [[ ! -f /etc/os-release ]]; then
|
||||
echo "ERROR: Cannot detect OS: /etc/os-release not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC1091
|
||||
source /etc/os-release
|
||||
distro="$ID"
|
||||
distro_version="$VERSION_ID"
|
||||
codename="$VERSION_CODENAME"
|
||||
arch="$(uname -m)"
|
||||
if [[ "$ID" == "ol" ]]; then
|
||||
distro="oracle"
|
||||
distro_version="$${distro_version%%.*}"
|
||||
elif [[ "$ID" == "fedora" ]]; then
|
||||
distro_version="$(grep -oP '\(\K[\w ]+' /etc/fedora-release | tr '[:upper:]' '[:lower:]' | tr -d ' ')"
|
||||
fi
|
||||
|
||||
echo "Detected Distribution: $distro"
|
||||
echo "Detected Version: $distro_version"
|
||||
echo "Detected Codename: $codename"
|
||||
echo "Detected Architecture: $arch"
|
||||
|
||||
# Map arch to package arch
|
||||
case "$arch" in
|
||||
x86_64)
|
||||
if [[ "$distro" =~ ^(ubuntu|debian|kali)$ ]]; then
|
||||
arch="amd64"
|
||||
fi
|
||||
;;
|
||||
aarch64)
|
||||
if [[ "$distro" =~ ^(ubuntu|debian|kali)$ ]]; then
|
||||
arch="arm64"
|
||||
fi
|
||||
;;
|
||||
arm64)
|
||||
: # This is effectively a noop
|
||||
;;
|
||||
*)
|
||||
echo "ERROR: Unsupported architecture: $arch"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Check if vncserver is installed, and install if not
|
||||
if ! check_installed; then
|
||||
# Check for NOPASSWD sudo (required)
|
||||
if ! command -v sudo &> /dev/null || ! sudo -n true 2> /dev/null; then
|
||||
echo "ERROR: sudo NOPASSWD access required!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
base_url="https://github.com/kasmtech/KasmVNC/releases/download/v${KASM_VERSION}"
|
||||
|
||||
echo "Installing KASM version: ${KASM_VERSION}"
|
||||
case $distro in
|
||||
ubuntu | debian | kali)
|
||||
bin_name="kasmvncserver_$${codename}_${KASM_VERSION}_$${arch}.deb"
|
||||
install_deb "$base_url/$bin_name"
|
||||
;;
|
||||
oracle | fedora | opensuse)
|
||||
bin_name="kasmvncserver_$${distro}_$${distro_version}_${KASM_VERSION}_$${arch}.rpm"
|
||||
install_rpm "$base_url/$bin_name"
|
||||
;;
|
||||
alpine)
|
||||
bin_name="kasmvnc.alpine_$${distro_version//./}_$${arch}.tgz"
|
||||
install_alpine "$base_url/$bin_name"
|
||||
;;
|
||||
*)
|
||||
echo "Unsupported distribution: $distro"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
else
|
||||
echo "vncserver already installed. Skipping installation."
|
||||
fi
|
||||
|
||||
if command -v sudo &> /dev/null && sudo -n true 2> /dev/null; then
|
||||
kasm_config_file="/etc/kasmvnc/kasmvnc.yaml"
|
||||
SUDO=sudo
|
||||
else
|
||||
kasm_config_file="$HOME/.vnc/kasmvnc.yaml"
|
||||
SUDO=
|
||||
|
||||
echo "WARNING: Sudo access not available, using user config dir!"
|
||||
|
||||
if [[ -f "$kasm_config_file" ]]; then
|
||||
echo "WARNING: Custom user KasmVNC config exists, not overwriting!"
|
||||
echo "WARNING: Ensure that you manually configure the appropriate settings."
|
||||
kasm_config_file="/dev/stderr"
|
||||
else
|
||||
echo "WARNING: This may prevent custom user KasmVNC settings from applying!"
|
||||
mkdir -p "$HOME/.vnc"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Writing KasmVNC config to $kasm_config_file"
|
||||
$SUDO tee "$kasm_config_file" > /dev/null << EOF
|
||||
network:
|
||||
protocol: http
|
||||
websocket_port: ${PORT}
|
||||
ssl:
|
||||
require_ssl: false
|
||||
pem_certificate:
|
||||
pem_key:
|
||||
udp:
|
||||
public_ip: 127.0.0.1
|
||||
EOF
|
||||
|
||||
# This password is not used since we start the server without auth.
|
||||
# The server is protected via the Coder session token / tunnel
|
||||
# and does not listen publicly
|
||||
echo -e "password\npassword\n" | vncpasswd -wo -u "$USER"
|
||||
|
||||
# Start the server
|
||||
printf "🚀 Starting KasmVNC server...\n"
|
||||
vncserver -select-de "${DESKTOP_ENVIRONMENT}" -disableBasicAuth > /tmp/kasmvncserver.log 2>&1 &
|
||||
pid=$!
|
||||
|
||||
# Wait for server to start
|
||||
sleep 5
|
||||
grep -v '^[[:space:]]*$' /tmp/kasmvncserver.log | tail -n 10
|
||||
if ps -p $pid | grep -q "^$pid"; then
|
||||
echo "ERROR: Failed to start KasmVNC server. Check full logs at /tmp/kasmvncserver.log"
|
||||
exit 1
|
||||
fi
|
||||
printf "🚀 KasmVNC server started successfully!\n"
|
||||
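A sketch mirroring the coder_script resource above, showing how run.sh receives its variables; inside the script, `${PORT}`, `${DESKTOP_ENVIRONMENT}`, and `${KASM_VERSION}` are filled in by `templatefile()`, while `$${...}` sequences are template escapes that reach bash literally as `${...}`:

```tf
locals {
  kasm_script = templatefile("${path.module}/run.sh", {
    PORT                = 6800
    DESKTOP_ENVIRONMENT = "xfce"
    KASM_VERSION        = "1.3.2"
  })
}
```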