Compare commits
201 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e9238f107a | |
| | e94f70a286 | |
| | 7654140330 | |
| | bc6490f0d3 | |
| | 482ed84399 | |
| | 32b69016a0 | |
| | 6d2739131a | |
| | cbd06b1135 | |
| | 675c82367a | |
| | bf697e1fa4 | |
| | b345e62ac1 | |
| | 6597a2d547 | |
| | 5101c27c83 | |
| | 90bfbfdc40 | |
| | 57d96ca27f | |
| | f5ab7995d1 | |
| | 528a8a9fea | |
| | 87854707bc | |
| | b53554b4e4 | |
| | ce5a5b383a | |
| | 1b147ae90d | |
| | 7992d9d265 | |
| | 20d97a25dd | |
| | 8e0dfcd534 | |
| | 9752bf89a6 | |
| | 48c81c9ff4 | |
| | acd5edffe7 | |
| | 4dcab99cb0 | |
| | 50a946df0f | |
| | 8a0ac3435c | |
| | 438c904567 | |
| | bd6747f9bc | |
| | fb81c8969f | |
| | 162808760d | |
| | ad1189afff | |
| | 94e126f248 | |
| | 04535a9cd7 | |
| | 7a9f553564 | |
| | e11b19d33e | |
| | 93c4fb3a8d | |
| | 86038f8d37 | |
| | 120a0e342e | |
| | b51932d7ac | |
| | 834ffde032 | |
| | 831f64da56 | |
| | 236022f870 | |
| | 4c45d69994 | |
| | 310d0262bd | |
| | f446fbd667 | |
| | 982c75e86f | |
| | 523ad9fe23 | |
| | 096cd214ce | |
| | 6a87fd18e5 | |
| | fa4b84e8d1 | |
| | 7e0eacf1f4 | |
| | cbe48aa072 | |
| | 89bb023fa5 | |
| | 66472b0105 | |
| | cd010baac8 | |
| | f7fa145855 | |
| | f7f9c8b7ef | |
| | 889186d553 | |
| | 352577b833 | |
| | 4e59ecc606 | |
| | a40f2b86c3 | |
| | a2c29ace0a | |
| | da4a561cb5 | |
| | d77ad8ac63 | |
| | b1f81afa7f | |
| | 883741244b | |
| | c3eee866d1 | |
| | bf175a1247 | |
| | 8fd54e0e78 | |
| | e8ee02c044 | |
| | aebdc9b434 | |
| | d98bfcb20b | |
| | 894e507bb3 | |
| | 3f8f6181e0 | |
| | b23d85327c | |
| | a8580fe6b9 | |
| | 49f060549e | |
| | b4153a6aaa | |
| | 13a8877791 | |
| | fd2f91c043 | |
| | c59eb0c0cc | |
| | a381c3ee29 | |
| | d9d1be08a3 | |
| | 7a8483d816 | |
| | ec2c8edfb2 | |
| | 78f91a542a | |
| | 78c948094d | |
| | 16f96d3693 | |
| | 8262b29063 | |
| | 4ab72575ac | |
| | f369697112 | |
| | f82c7fd7a1 | |
| | 05a20a9e1f | |
| | 90e15cd90c | |
| | 5869eb86d4 | |
| | 25c90001f4 | |
| | 6409ee2bba | |
| | 7d366ff92a | |
| | de00f6334f | |
| | 264584e673 | |
| | 83ecba2293 | |
| | b2807640aa | |
| | 33d44fdf17 | |
| | f335cd343d | |
| | aebf095075 | |
| | b283ac3129 | |
| | 5f418c3253 | |
| | b09c4cb084 | |
| | 8aff87fdf7 | |
| | f3c30abeb4 | |
| | a9a75b675f | |
| | ef4c87e48e | |
| | 1a0a8659cc | |
| | c7a4fced4c | |
| | 5ec1b207d1 | |
| | 702271133f | |
| | 652fc6b84f | |
| | 8195cf4453 | |
| | d5cfadb4e7 | |
| | fba0f842a9 | |
| | 14e3fc5b6b | |
| | 0b6975c266 | |
| | d530d68b12 | |
| | 047ccd67ca | |
| | c7aa8253e3 | |
| | 452f41aa86 | |
| | 29209d546e | |
| | aab5e55663 | |
| | ff96b3f653 | |
| | 20795aa2b6 | |
| | 45456ab394 | |
| | c652dbe320 | |
| | 4021d856ba | |
| | 72eaf8a9e1 | |
| | 249cb2fe9e | |
| | 49cff4b2aa | |
| | c6b457e7fe | |
| | beaa33b682 | |
| | 0d7bc37f9c | |
| | dcd605c52e | |
| | f5d41520cf | |
| | cd0c730c95 | |
| | 873207fddf | |
| | 282e1f8c57 | |
| | c068082e6b | |
| | 85e73c2071 | |
| | 4bdb428244 | |
| | daed803530 | |
| | a239212f0b | |
| | 67fef297da | |
| | aced7547bc | |
| | 36fa871e7b | |
| | 46bf422d61 | |
| | 180e10c3ee | |
| | a45706ad3a | |
| | 5030fcb988 | |
| | cff60c4a7e | |
| | 5a33af28ac | |
| | 428f386c4c | |
| | 2e43788584 | |
| | e8ce194ff7 | |
| | 1273378ca8 | |
| | edc163b5f2 | |
| | c9e418aaf5 | |
| | 9062b4c004 | |
| | b2e87ef038 | |
| | d4db52017d | |
| | c36f4e03d7 | |
| | 443485a2d7 | |
| | b686f2dbd5 | |
| | 76c60e9971 | |
| | b0d6224e23 | |
| | c50c4259d9 | |
| | 5f312ced5e | |
| | fd985bedac | |
| | b0c14be846 | |
| | 18efe83b89 | |
| | b93471a381 | |
| | 33dbae6ea0 | |
| | f14e6838e4 | |
| | 2a30982d1a | |
| | 47e995f636 | |
| | 56fdf096c1 | |
| | 49df203bd6 | |
| | d8102e62ec | |
| | 53083a5718 | |
| | 7de78d2ef5 | |
| | 89135671b2 | |
| | ac648cc0a9 | |
| | 748a180ac3 | |
| | ec922c7c3d | |
| | 9f8eee55b2 | |
| | 0e7644b284 | |
| | bf06e8d3ac | |
| | 12fd16f701 | |
| | 1197e6bf0d | |
| | c5c521fabd | |

**`.github/dependabot.yml`** (new file, +6 lines, vendored)

```yaml
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
```

**`.github/scripts/check.sh`** (new executable file, +203 lines, vendored)

```bash
#!/usr/bin/env bash
set -o pipefail
set -u

VERBOSE="${VERBOSE:-0}"
if [[ "${VERBOSE}" -ne "0" ]]; then
  set -x
fi

# List of required environment variables
required_vars=(
  "INSTATUS_API_KEY"
  "INSTATUS_PAGE_ID"
  "INSTATUS_COMPONENT_ID"
  "VERCEL_API_KEY"
)

# Check if each required variable is set
for var in "${required_vars[@]}"; do
  if [[ -z "${!var:-}" ]]; then
    echo "Error: Environment variable '$var' is not set."
    exit 1
  fi
done

REGISTRY_BASE_URL="${REGISTRY_BASE_URL:-https://registry.coder.com}"

status=0
declare -a modules=()
declare -a failures=()

# Collect all module directories containing a main.tf file
for path in $(find . -maxdepth 2 -not -path '*/.*' -type f -name main.tf | cut -d '/' -f 2 | sort -u); do
  modules+=("${path}")
done

echo "Checking modules: ${modules[*]}"

# Function to update the component status on Instatus
update_component_status() {
  local component_status=$1
  # see https://instatus.com/help/api/components
  (curl -X PUT "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/components/$INSTATUS_COMPONENT_ID" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" \
    -d "{\"status\": \"$component_status\"}")
}

# Function to create an incident
create_incident() {
  local incident_name="Degraded Service"
  local message="The following modules are experiencing issues:\n"
  for i in "${!failures[@]}"; do
    message+="$((i + 1)). ${failures[$i]}\n"
  done

  component_status="PARTIALOUTAGE"
  if (( ${#failures[@]} == ${#modules[@]} )); then
    component_status="MAJOROUTAGE"
  fi
  # see https://instatus.com/help/api/incidents
  incident_id=$(curl -s -X POST "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" \
    -d "{
      \"name\": \"$incident_name\",
      \"message\": \"$message\",
      \"components\": [\"$INSTATUS_COMPONENT_ID\"],
      \"status\": \"INVESTIGATING\",
      \"notify\": true,
      \"statuses\": [
        {
          \"id\": \"$INSTATUS_COMPONENT_ID\",
          \"status\": \"PARTIALOUTAGE\"
        }
      ]
    }" | jq -r '.id')

  echo "Created incident with ID: $incident_id"
}

# Function to check for existing unresolved incidents
check_existing_incident() {
  # Fetch the latest incidents with status not equal to "RESOLVED"
  local unresolved_incidents=$(curl -s -X GET "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
    -H "Authorization: Bearer $INSTATUS_API_KEY" \
    -H "Content-Type: application/json" | jq -r '.incidents[] | select(.status != "RESOLVED") | .id')

  if [[ -n "$unresolved_incidents" ]]; then
    echo "Unresolved incidents found: $unresolved_incidents"
    return 0 # Indicate that there are unresolved incidents
  else
    echo "No unresolved incidents found."
    return 1 # Indicate that no unresolved incidents exist
  fi
}

force_redeploy_registry () {
  # These are not secret values; safe to just expose directly in script
  local VERCEL_TEAM_SLUG="codercom"
  local VERCEL_TEAM_ID="team_tGkWfhEGGelkkqUUm9nXq17r"
  local VERCEL_APP="registry"

  local latest_res
  latest_res=$(curl "https://api.vercel.com/v6/deployments?app=$VERCEL_APP&limit=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID&target=production&state=BUILDING,INITIALIZING,QUEUED,READY" \
    --fail \
    --silent \
    --header "Authorization: Bearer $VERCEL_API_KEY" \
    --header "Content-Type: application/json"
  )

  # If we have zero deployments, something is VERY wrong. Make the whole
  # script exit with a non-zero status code
  local latest_id
  latest_id=$(echo "${latest_res}" | jq -r '.deployments[0].uid')
  if [[ "${latest_id}" = "null" ]]; then
    echo "Unable to pull any previous deployments for redeployment"
    echo "Please redeploy the latest deployment manually in Vercel."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  local latest_date_ts_seconds
  latest_date_ts_seconds=$(echo "${latest_res}" | jq -r '.deployments[0].createdAt/1000|floor')
  local current_date_ts_seconds
  current_date_ts_seconds="$(date +%s)"
  local max_redeploy_interval_seconds=7200 # 2 hours
  if (( current_date_ts_seconds - latest_date_ts_seconds < max_redeploy_interval_seconds )); then
    echo "The registry was deployed less than 2 hours ago."
    echo "Not automatically re-deploying the regitstry."
    echo "A human reading this message should decide if a redeployment is necessary."
    echo "Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  local latest_deployment_state
  latest_deployment_state="$(echo "${latest_res}" | jq -r '.deployments[0].state')"
  if [[ "${latest_deployment_state}" != "READY" ]]; then
    echo "Last deployment was not in READY state. Skipping redeployment."
    echo "A human reading this message should decide if a redeployment is necessary."
    echo "Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi

  echo "============================================================="
  echo "!!! Redeploying registry with deployment ID: ${latest_id} !!!"
  echo "============================================================="

  if ! curl -X POST "https://api.vercel.com/v13/deployments?forceNew=1&skipAutoDetectionConfirmation=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID" \
    --fail \
    --header "Authorization: Bearer $VERCEL_API_KEY" \
    --header "Content-Type: application/json" \
    --data-raw "{ \"deploymentId\": \"${latest_id}\", \"name\": \"${VERCEL_APP}\", \"target\": \"production\" }"; then
    echo "DEPLOYMENT FAILED! Please check the Vercel dashboard for more information."
    echo "https://vercel.com/codercom/registry/deployments"
    exit 1
  fi
}

# Check each module's accessibility
for module in "${modules[@]}"; do
  # Trim leading/trailing whitespace from module name
  module=$(echo "${module}" | xargs)
  url="${REGISTRY_BASE_URL}/modules/${module}"
  printf "=== Checking module %s at %s\n" "${module}" "${url}"
  status_code=$(curl --output /dev/null --head --silent --fail --location "${url}" --retry 3 --write-out "%{http_code}")
  if (( status_code != 200 )); then
    printf "==> FAIL(%s)\n" "${status_code}"
    status=1
    failures+=("${module}")
  else
    printf "==> OK(%s)\n" "${status_code}"
  fi
done

# Determine overall status and update Instatus component
if (( status == 0 )); then
  echo "All modules are operational."
  # set to
  update_component_status "OPERATIONAL"
else
  echo "The following modules have issues: ${failures[*]}"
  # check if all modules are down
  if (( ${#failures[@]} == ${#modules[@]} )); then
    update_component_status "MAJOROUTAGE"
  else
    update_component_status "PARTIALOUTAGE"
  fi

  # Check if there is an existing incident before creating a new one
  if ! check_existing_incident; then
    create_incident
  fi

  # If a module is down, force a reployment to try getting things back online
  # ASAP
  # EDIT: registry.coder.com is no longer hosted on vercel
  #force_redeploy_registry
fi

exit "${status}"
```

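The health check above is driven entirely by environment variables, so it can also be exercised outside of CI. A rough local-run sketch (the exported values are placeholders, not real credentials; `REGISTRY_BASE_URL` and `VERBOSE` are optional and shown with the script's own defaults):

```shell
# Placeholder values; the Instatus/Vercel calls will not succeed without real keys,
# but the per-module availability checks against the registry still run.
export INSTATUS_API_KEY="placeholder"
export INSTATUS_PAGE_ID="placeholder"
export INSTATUS_COMPONENT_ID="placeholder"
export VERCEL_API_KEY="placeholder"

# Optional overrides (these match the script's defaults).
export REGISTRY_BASE_URL="https://registry.coder.com"
VERBOSE=1 ./.github/scripts/check.sh
```
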
**`.github/workflows/check.yaml`** (new file, +23 lines, vendored)

```yaml
name: Health
# Check modules health on registry.coder.com
on:
  schedule:
    - cron: "0,15,30,45 * * * *" # Runs every 15 minutes
  workflow_dispatch: # Allows manual triggering of the workflow if needed

jobs:
  run-script:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run check.sh
        run: |
          ./.github/scripts/check.sh
        env:
          INSTATUS_API_KEY: ${{ secrets.INSTATUS_API_KEY }}
          INSTATUS_PAGE_ID: ${{ secrets.INSTATUS_PAGE_ID }}
          INSTATUS_COMPONENT_ID: ${{ secrets.INSTATUS_COMPONENT_ID }}
          VERCEL_API_KEY: ${{ secrets.VERCEL_API_KEY }}
```

**`.github/workflows/ci.yaml`** (21 changed lines, vendored)

```
@@ -17,7 +17,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v1
- uses: coder/coder/.github/actions/setup-tf@main
- uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Setup
@@ -27,7 +28,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v1
with:
fetch-depth: 0 # Needed to get tags
- uses: coder/coder/.github/actions/setup-tf@main
- uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Setup
@@ -38,3 +42,16 @@
uses: crate-ci/typos@v1.17.2
- name: Lint
run: bun lint
- name: Check version
shell: bash
run: |
# check for version changes
./update-version.sh
# Check if any changes were made in README.md files
if [[ -n "$(git status --porcelain -- '**/README.md')" ]]; then
echo "Version mismatch detected. Please run ./update-version.sh and commit the updated README.md files."
git diff -- '**/README.md'
exit 1
else
echo "No version mismatch detected. All versions are up to date."
fi
```

**`.github/workflows/deploy-registry.yaml`** (new file, +37 lines, vendored)

```yaml
name: deploy-registry

on:
  push:
    branches:
      - main

jobs:
  deploy:
    runs-on: ubuntu-latest

    # Set id-token permission for gcloud
    # Adding a comment because retriggering the build manually hung? I am the lord of devops and you will bend?
    permissions:
      contents: read
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Authenticate to Google Cloud
        uses: google-github-actions/auth@6fc4af4b145ae7821d527454aa9bd537d1f2dc5f
        with:
          workload_identity_provider: projects/309789351055/locations/global/workloadIdentityPools/github-actions/providers/github
          service_account: registry-v2-github@coder-registry-1.iam.gserviceaccount.com

      - name: Set up Google Cloud SDK
        uses: google-github-actions/setup-gcloud@6189d56e4096ee891640bb02ac264be376592d6a

      # For the time being, let's have the first couple merges to main in modules deploy a new version
      # to *dev*. Once we review and make sure everything's working, we can deploy a new version to *main*.
      # Maybe in the future we could automate this based on the result of E2E tests.
      - name: Deploy to dev.registry.coder.com
        run: |
          gcloud builds triggers run 29818181-126d-4f8a-a937-f228b27d3d34 --branch dev
```

**`.github/workflows/update-readme.yaml`** (deleted file, 42 lines removed, vendored)

```yaml
name: Update README on Tag

on:
  workflow_dispatch:
  push:
    tags:
      - 'v*'

jobs:
  update-readme:
    permissions:
      contents: write
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get the latest tag
        id: get-latest-tag
        run: echo "TAG=$(git describe --tags --abbrev=0 | sed 's/^v//')" >> $GITHUB_OUTPUT

      - name: Run update script
        run: ./update-version.sh

      - name: Create Pull Request
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          commit-message: 'chore: bump version to ${{ env.TAG }} in README.md files'
          title: 'chore: bump version to ${{ env.TAG }} in README.md files'
          body: 'This is an auto-generated PR to update README.md files of all modules with the new tag ${{ env.TAG }}'
          branch: 'update-readme-branch'
          base: 'main'
        env:
          TAG: ${{ steps.get-latest-tag.outputs.TAG }}

      - name: Auto-approve
        uses: hmarr/auto-approve-action@v4
        if: github.ref == 'refs/heads/update-readme-branch'
```

**`.gitignore`** (5 changed lines, vendored)

```
@@ -1,4 +1,7 @@
.terraform*
node_modules
*.tfstate
*.tfstate.lock.info
*.tfstate.lock.info

# Ignore generated credentials from google-github-actions/auth
gha-creds-*.json
```

**`.icons/airflow.svg`** (new file, +19 lines, SVG image, 15 KiB)

**`.icons/cursor.svg`** (new file, +1 line, SVG image, 1.5 MiB)

**`.icons/dcv.svg`** (new file, +1 line, SVG image, 1.7 KiB)

```xml
<svg width="82" height="80" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" overflow="hidden"><g transform="translate(-550 -124)"><g><g><g><g><path d="M551 124 631 124 631 204 551 204Z" fill="#ED7100" fill-rule="evenodd" fill-opacity="1"/><path d="M612.069 162.386C607.327 165.345 600.717 168.353 593.46 170.855 588.339 172.62 583.33 173.978 578.865 174.838 582.727 184.68 589.944 191.037 596.977 189.853 603.514 188.75 608.387 181.093 609.1 170.801L611.096 170.939C610.304 182.347 604.893 190.545 597.309 191.825 596.648 191.937 595.984 191.991 595.323 191.991 587.945 191.991 580.718 185.209 576.871 175.194 575.733 175.38 574.625 175.542 573.584 175.653 572.173 175.803 570.901 175.879 569.769 175.879 565.95 175.879 563.726 175.025 563.141 173.328 562.414 171.218 564.496 168.566 569.328 165.445L570.414 167.125C565.704 170.167 564.814 172.046 565.032 172.677 565.263 173.348 567.279 174.313 573.372 173.665 574.267 173.57 575.216 173.433 576.187 173.28 575.537 171.297 575.014 169.205 574.647 167.028 573.406 159.673 574.056 152.438 576.48 146.654 578.969 140.715 583.031 136.99 587.917 136.166 593.803 135.171 600.075 138.691 604.679 145.579L603.017 146.69C598.862 140.476 593.349 137.28 588.249 138.138 584.063 138.844 580.539 142.143 578.325 147.427 576.046 152.866 575.44 159.709 576.62 166.695 576.988 168.876 577.515 170.966 578.173 172.937 582.618 172.1 587.651 170.742 592.807 168.965 599.927 166.51 606.392 163.572 611.01 160.689 616.207 157.447 617.201 155.444 616.969 154.772 616.769 154.189 615.095 153.299 610.097 153.653L609.957 151.657C615.171 151.289 618.171 152.116 618.86 154.12 619.619 156.32 617.334 159.101 612.069 162.386" fill="#FFFFFF" fill-rule="evenodd" fill-opacity="1"/></g></g></g></g></g></svg>
```

**`.icons/desktop.svg`** (new file, +5 lines, SVG image, 540 B)

```xml
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M31 6V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V6C1 4.35 2.35 3 4 3H28C29.65 3 31 4.35 31 6Z" fill="#2197F3"/>
<path d="M21 27H17V24C17 23.4478 16.5522 23 16 23C15.4478 23 15 23.4478 15 24V27H11C10.4478 27 10 27.4478 10 28C10 28.5522 10.4478 29 11 29H21C21.5522 29 22 28.5522 22 28C22 27.4478 21.5522 27 21 27Z" fill="#FFC10A"/>
<path d="M31 17V22C31 23.65 29.65 25 28 25H4C2.35 25 1 23.65 1 22V17H31Z" fill="#3F51B5"/>
</svg>
```

**`.icons/github.svg`** (new file, +1 line, SVG image, 960 B)

```xml
<svg width="98" height="96" xmlns="http://www.w3.org/2000/svg"><path fill-rule="evenodd" clip-rule="evenodd" d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z" fill="#fff"/></svg>
```

**`.images/airflow.png`** (new binary file, 603 KiB)

**`.images/amazon-dcv-windows.png`** (new binary file, 3.3 MiB)

````
@@ -13,6 +13,7 @@ tags: [helper]

```tf
module "MODULE_NAME" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/MODULE_NAME/coder"
version = "1.0.2"
}
@@ -28,6 +29,7 @@ Install the Dracula theme from [OpenVSX](https://open-vsx.org/):

```tf
module "MODULE_NAME" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/MODULE_NAME/coder"
version = "1.0.2"
agent_id = coder_agent.example.id
@@ -45,6 +47,7 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte

```tf
module "MODULE_NAME" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/MODULE_NAME/coder"
version = "1.0.2"
agent_id = coder_agent.example.id
````

````
@@ -1,28 +1,75 @@
# Contributing

To create a new module, clone this repository and run:
## Getting started

This repo uses the [Bun runtime](https://bun.sh/) to to run all code and tests. To install Bun, you can run this command on Linux/MacOS:

```shell
./new.sh MODULE_NAME
curl -fsSL https://bun.sh/install | bash
```

Or this command on Windows:

```shell
powershell -c "irm bun.sh/install.ps1 | iex"
```

Follow the instructions to ensure that Bun is available globally. Once Bun has been installed, clone this repository. From there, run this script to create a new module:

```shell
./new.sh NAME_OF_NEW_MODULE
```

## Testing a Module

> **Note:** It is the responsibility of the module author to implement tests for their module. The author must test the module locally before submitting a PR.

A suite of test-helpers exists to run `terraform apply` on modules with variables, and test script output against containers.

Reference existing `*.test.ts` files for implementation.
The testing suite must be able to run docker containers with the `--network=host` flag. This typically requires running the tests on Linux as this flag does not apply to Docker Desktop for MacOS and Windows. MacOS users can work around this by using something like [colima](https://github.com/abiosoft/colima) or [Orbstack](https://orbstack.dev/) instead of Docker Desktop.

Reference the existing `*.test.ts` files to get an idea for how to set up tests.

You can run all tests in a specific file with this command:

```shell
# Run tests for a specific module!
$ bun test -t '<module>'
```

Or run all tests by running this command:

```shell
$ bun test
```

You can test a module locally by updating the source as follows

```tf
module "example" {
source = "git::https://github.com/<USERNAME>/<REPO>.git//<MODULE-NAME>?ref=<BRANCH-NAME>"
# You may need to remove the 'version' field, it is incompatible with some sources.
}
```

> **Note:** This is the responsibility of the module author to implement tests for their module. and test the module locally before submitting a PR.
## Releases

> [!WARNING]
> When creating a new release, make sure that your new version number is fully accurate. If a version number is incorrect or does not exist, we may end up serving incorrect/old data for our various tools and providers.

Much of our release process is automated. To cut a new release:

1. Navigate to [GitHub's Releases page](https://github.com/coder/modules/releases)
2. Click "Draft a new release"
3. Click the "Choose a tag" button and type a new release number in the format `v<major>.<minor>.<patch>` (e.g., `v1.18.0`). Then click "Create new tag".
4. Click the "Generate release notes" button, and clean up the resulting README. Be sure to remove any notes that would not be relevant to end-users (e.g., bumping dependencies).
5. Once everything looks good, click the "Publish release" button.

Once the release has been cut, a script will run to check whether there are any modules that will require that the new release number be published to Terraform. If there are any, a new pull request will automatically be generated. Be sure to approve this PR and merge it into the `main` branch.

Following that, our automated processes will handle publishing new data for [`registry.coder.com`](https://github.com/coder/registry.coder.com/):

1. Publishing new versions to Coder's [Terraform Registry](https://registry.terraform.io/providers/coder/coder/latest)
2. Publishing new data to the [Coder Registry](https://registry.coder.com)

> [!NOTE]
> Some data in `registry.coder.com` is fetched on demand from the Module repo's main branch. This data should be updated almost immediately after a new release, but other changes will take some time to propagate.
````

````
@@ -3,19 +3,21 @@
Modules
</h1>

[Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)
[Module Registry](https://registry.coder.com) | [Coder Docs](https://coder.com/docs) | [Why Coder](https://coder.com/why) | [Coder Enterprise](https://coder.com/docs/v2/latest/enterprise)

[](https://discord.gg/coder)
[](./LICENSE)
[](https://github.com/coder/modules/actions/workflows/check.yaml)

</div>

Modules extend Templates to create reusable components for your development environment.
Modules extend Coder Templates to create reusable components for your development environment.

e.g.

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.2"
agent_id = coder_agent.main.id
````

**`amazon-dcv-windows/README.md`** (new file, +49 lines)

````
---
display_name: Amazon DCV Windows
description: Amazon DCV Server and Web Client for Windows
icon: ../.icons/dcv.svg
maintainer_github: coder
verified: true
tags: [windows, amazon, dcv, web, desktop]
---

# Amazon DCV Windows

Amazon DCV is high performance remote display protocol that provides a secure way to deliver remote desktop and application streaming from any cloud or data center to any device, over varying network conditions.


Enable DCV Server and Web Client on Windows workspaces.

```tf
module "dcv" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/amazon-dcv-windows/coder"
  version  = "1.0.24"
  agent_id = resource.coder_agent.main.id
}


resource "coder_metadata" "dcv" {
  count       = data.coder_workspace.me.start_count
  resource_id = aws_instance.dev.id # id of the instance resource

  item {
    key   = "DCV client instructions"
    value = "Run `coder port-forward ${data.coder_workspace.me.name} -p ${module.dcv[count.index].port}` and connect to **localhost:${module.dcv[count.index].port}${module.dcv[count.index].web_url_path}**"
  }
  item {
    key   = "username"
    value = module.dcv[count.index].username
  }
  item {
    key       = "password"
    value     = module.dcv[count.index].password
    sensitive = true
  }
}
```

## License

Amazon DCV is free to use on AWS EC2 instances but requires a license for other cloud providers. Please see the instructions [here](https://docs.aws.amazon.com/dcv/latest/adminguide/setting-up-license.html#setting-up-license-ec2) for more information.
````

**`amazon-dcv-windows/install-dcv.ps1`** (new file, +170 lines)

```powershell
# Terraform variables
$adminPassword = "${admin_password}"
$port = "${port}"
$webURLPath = "${web_url_path}"

function Set-LocalAdminUser {
  Write-Output "[INFO] Starting Set-LocalAdminUser function"
  $securePassword = ConvertTo-SecureString $adminPassword -AsPlainText -Force
  Write-Output "[DEBUG] Secure password created"
  Get-LocalUser -Name Administrator | Set-LocalUser -Password $securePassword
  Write-Output "[INFO] Administrator password set"
  Get-LocalUser -Name Administrator | Enable-LocalUser
  Write-Output "[INFO] User Administrator enabled successfully"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

function Get-VirtualDisplayDriverRequired {
  Write-Output "[INFO] Starting Get-VirtualDisplayDriverRequired function"
  $token = Invoke-RestMethod -Headers @{'X-aws-ec2-metadata-token-ttl-seconds' = '21600'} -Method PUT -Uri http://169.254.169.254/latest/api/token
  Write-Output "[DEBUG] Token acquired: $token"
  $instanceType = Invoke-RestMethod -Headers @{'X-aws-ec2-metadata-token' = $token} -Method GET -Uri http://169.254.169.254/latest/meta-data/instance-type
  Write-Output "[DEBUG] Instance type: $instanceType"
  $OSVersion = ((Get-ItemProperty -Path "Microsoft.PowerShell.Core\Registry::\HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion" -Name ProductName).ProductName) -replace "[^0-9]", ''
  Write-Output "[DEBUG] OS version: $OSVersion"

  # Force boolean result
  $result = (($OSVersion -ne "2019") -and ($OSVersion -ne "2022") -and ($OSVersion -ne "2025")) -and (($instanceType[0] -ne 'g') -and ($instanceType[0] -ne 'p'))
  Write-Output "[INFO] VirtualDisplayDriverRequired result: $result"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
  return [bool]$result
}

function Download-DCV {
  param (
    [bool]$VirtualDisplayDriverRequired
  )
  Write-Output "[INFO] Starting Download-DCV function"

  $downloads = @(
    @{
      Name = "DCV Display Driver"
      Required = $VirtualDisplayDriverRequired
      Path = "C:\Windows\Temp\DCVDisplayDriver.msi"
      Uri = "https://d1uj6qtbmh3dt5.cloudfront.net/nice-dcv-virtual-display-x64-Release.msi"
    },
    @{
      Name = "DCV Server"
      Required = $true
      Path = "C:\Windows\Temp\DCVServer.msi"
      Uri = "https://d1uj6qtbmh3dt5.cloudfront.net/nice-dcv-server-x64-Release.msi"
    }
  )

  foreach ($download in $downloads) {
    if ($download.Required -and -not (Test-Path $download.Path)) {
      try {
        Write-Output "[INFO] Downloading $($download.Name)"

        # Display progress manually (no events)
        $progressActivity = "Downloading $($download.Name)"
        $progressStatus = "Starting download..."
        Write-Progress -Activity $progressActivity -Status $progressStatus -PercentComplete 0

        # Synchronously download the file
        $webClient = New-Object System.Net.WebClient
        $webClient.DownloadFile($download.Uri, $download.Path)

        # Update progress
        Write-Progress -Activity $progressActivity -Status "Completed" -PercentComplete 100

        Write-Output "[INFO] $($download.Name) downloaded successfully."
      } catch {
        Write-Output "[ERROR] Failed to download $($download.Name): $_"
        throw
      }
    } else {
      Write-Output "[INFO] $($download.Name) already exists. Skipping download."
    }
  }

  Write-Output "[INFO] All downloads completed"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

function Install-DCV {
  param (
    [bool]$VirtualDisplayDriverRequired
  )
  Write-Output "[INFO] Starting Install-DCV function"

  if (-not (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue)) {
    if ($VirtualDisplayDriverRequired) {
      Write-Output "[INFO] Installing DCV Display Driver"
      Start-Process "C:\Windows\System32\msiexec.exe" -ArgumentList "/I C:\Windows\Temp\DCVDisplayDriver.msi /quiet /norestart" -Wait
    } else {
      Write-Output "[INFO] DCV Display Driver installation skipped (not required)."
    }
    Write-Output "[INFO] Installing DCV Server"
    Start-Process "C:\Windows\System32\msiexec.exe" -ArgumentList "/I C:\Windows\Temp\DCVServer.msi ADDLOCAL=ALL /quiet /norestart /l*v C:\Windows\Temp\dcv_install_msi.log" -Wait
  } else {
    Write-Output "[INFO] DCV Server already installed, skipping installation."
  }

  # Wait for the service to appear with a timeout
  $timeout = 10 # seconds
  $elapsed = 0
  while (-not (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue) -and ($elapsed -lt $timeout)) {
    Start-Sleep -Seconds 1
    $elapsed++
  }

  if ($elapsed -ge $timeout) {
    Write-Output "[WARNING] Timeout waiting for dcvserver service. A restart is required to complete installation."
    Restart-SystemForDCV
  } else {
    Write-Output "[INFO] dcvserver service detected successfully."
  }
}

function Restart-SystemForDCV {
  Write-Output "[INFO] The system will restart in 10 seconds to finalize DCV installation."
  Start-Sleep -Seconds 10

  # Initiate restart
  Restart-Computer -Force

  # Exit the script after initiating restart
  Write-Output "[INFO] Please wait for the system to restart..."

  Exit 1
}


function Configure-DCV {
  Write-Output "[INFO] Starting Configure-DCV function"
  $dcvPath = "Microsoft.PowerShell.Core\Registry::\HKEY_USERS\S-1-5-18\Software\GSettings\com\nicesoftware\dcv"

  # Create the required paths
  @("$dcvPath\connectivity", "$dcvPath\session-management", "$dcvPath\session-management\automatic-console-session", "$dcvPath\display") | ForEach-Object {
    if (-not (Test-Path $_)) {
      New-Item -Path $_ -Force | Out-Null
    }
  }

  # Set registry keys
  New-ItemProperty -Path "$dcvPath\session-management" -Name create-session -PropertyType DWORD -Value 1 -Force
  New-ItemProperty -Path "$dcvPath\session-management\automatic-console-session" -Name owner -Value Administrator -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name quic-port -PropertyType DWORD -Value $port -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name web-port -PropertyType DWORD -Value $port -Force
  New-ItemProperty -Path "$dcvPath\connectivity" -Name web-url-path -PropertyType String -Value $webURLPath -Force

  # Attempt to restart service
  if (Get-Service -Name "dcvserver" -ErrorAction SilentlyContinue) {
    Restart-Service -Name "dcvserver"
  } else {
    Write-Output "[WARNING] dcvserver service not found. Ensure the system was restarted properly."
  }

  Write-Output "[INFO] DCV configuration completed"
  Read-Host "[DEBUG] Press Enter to proceed to the next step"
}

# Main Script Execution
Write-Output "[INFO] Starting script"
$VirtualDisplayDriverRequired = [bool](Get-VirtualDisplayDriverRequired)
Set-LocalAdminUser
Download-DCV -VirtualDisplayDriverRequired $VirtualDisplayDriverRequired
Install-DCV -VirtualDisplayDriverRequired $VirtualDisplayDriverRequired
Configure-DCV
Write-Output "[INFO] Script completed"
```

**`amazon-dcv-windows/main.tf`** (new file, +85 lines)

```tf
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.17"
    }
  }
}

variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "admin_password" {
  type      = string
  default   = "coderDCV!"
  sensitive = true
}

variable "port" {
  type        = number
  description = "The port number for the DCV server."
  default     = 8443
}

variable "subdomain" {
  type        = bool
  description = "Whether to use a subdomain for the DCV server."
  default     = true
}

variable "slug" {
  type        = string
  description = "The slug of the web-dcv coder_app resource."
  default     = "web-dcv"
}

resource "coder_app" "web-dcv" {
  agent_id     = var.agent_id
  slug         = var.slug
  display_name = "Web DCV"
  url          = "https://localhost:${var.port}${local.web_url_path}?username=${local.admin_username}&password=${var.admin_password}"
  icon         = "/icon/dcv.svg"
  subdomain    = var.subdomain
}

resource "coder_script" "install-dcv" {
  agent_id     = var.agent_id
  display_name = "Install DCV"
  icon         = "/icon/dcv.svg"
  run_on_start = true
  script = templatefile("${path.module}/install-dcv.ps1", {
    admin_password : var.admin_password,
    port : var.port,
    web_url_path : local.web_url_path
  })
}

data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

locals {
  web_url_path   = var.subdomain ? "/" : format("/@%s/%s/apps/%s", data.coder_workspace_owner.me.name, data.coder_workspace.me.name, var.slug)
  admin_username = "Administrator"
}

output "web_url_path" {
  value = local.web_url_path
}

output "username" {
  value = local.admin_username
}

output "password" {
  value     = var.admin_password
  sensitive = true
}

output "port" {
  value = var.port
}
```

**`apache-airflow/README.md`** (new file, +24 lines)

````
---
display_name: airflow
description: A module that adds Apache Airflow in your Coder template
icon: ../.icons/airflow.svg
maintainer_github: coder
partner_github: nataindata
verified: true
tags: [airflow, idea, web, helper]
---

# airflow

A module that adds Apache Airflow in your Coder template.

```tf
module "airflow" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/apache-airflow/coder"
  version  = "1.0.13"
  agent_id = coder_agent.main.id
}
```

````

**`apache-airflow/main.tf`** (new file, +65 lines)

```tf
terraform {
  required_version = ">= 1.0"

  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 0.17"
    }
  }
}

# Add required variables for your modules and remove any unneeded variables
variable "agent_id" {
  type        = string
  description = "The ID of a Coder agent."
}

variable "log_path" {
  type        = string
  description = "The path to log airflow to."
  default     = "/tmp/airflow.log"
}

variable "port" {
  type        = number
  description = "The port to run airflow on."
  default     = 8080
}

variable "share" {
  type    = string
  default = "owner"
  validation {
    condition     = var.share == "owner" || var.share == "authenticated" || var.share == "public"
    error_message = "Incorrect value. Please set either 'owner', 'authenticated', or 'public'."
  }
}

variable "order" {
  type        = number
  description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
  default     = null
}

resource "coder_script" "airflow" {
  agent_id     = var.agent_id
  display_name = "airflow"
  icon         = "/icon/apache-guacamole.svg"
  script = templatefile("${path.module}/run.sh", {
    LOG_PATH : var.log_path,
    PORT : var.port
  })
  run_on_start = true
}

resource "coder_app" "airflow" {
  agent_id     = var.agent_id
  slug         = "airflow"
  display_name = "airflow"
  url          = "http://localhost:${var.port}"
  icon         = "/icon/apache-guacamole.svg"
  subdomain    = true
  share        = var.share
  order        = var.order
}
```

**`apache-airflow/run.sh`** (new file, +19 lines)

```sh
#!/usr/bin/env sh

BOLD='\033[0;1m'

PATH=$PATH:~/.local/bin
pip install --upgrade apache-airflow

filename=~/airflow/airflow.db
if ! [ -f $filename ] || ! [ -s $filename ]; then
  airflow db init
fi

export AIRFLOW__CORE__LOAD_EXAMPLES=false

airflow webserver > ${LOG_PATH} 2>&1 &

airflow scheduler >> /tmp/airflow_scheduler.log 2>&1 &

airflow users create -u admin -p admin -r Admin -e admin@admin.com -f Coder -l User
```

````
@@ -16,8 +16,9 @@ Customize the preselected parameter value:

```tf
module "aws-region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/aws-region/coder"
version = "1.0.10"
version = "1.0.12"
default = "us-east-1"
}

@@ -36,8 +37,9 @@ Change the display name and icon for a region using the corresponding maps:

```tf
module "aws-region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/aws-region/coder"
version = "1.0.10"
version = "1.0.12"
default = "ap-south-1"

custom_names = {
@@ -62,8 +64,9 @@ Hide the Asia Pacific regions Seoul and Osaka:

```tf
module "aws-region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/aws-region/coder"
version = "1.0.10"
version = "1.0.12"
exclude = ["ap-northeast-2", "ap-northeast-3"]
}
````

```
@@ -1,6 +1,5 @@
import { describe, expect, it } from "bun:test";
import {
executeScriptInContainer,
runTerraformApply,
runTerraformInit,
testRequiredVariables,
```

````
@@ -13,8 +13,9 @@ This module adds a parameter with all Azure regions, allowing developers to sele

```tf
module "azure_region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/azure-region/coder"
version = "1.0.2"
version = "1.0.12"
default = "eastus"
}

@@ -33,8 +34,9 @@ Change the display name and icon for a region using the corresponding maps:

```tf
module "azure-region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/azure-region/coder"
version = "1.0.2"
version = "1.0.12"
custom_names = {
"australia" : "Go Australia!"
}
@@ -56,8 +58,9 @@ Hide all regions in Australia except australiacentral:

```tf
module "azure-region" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/azure-region/coder"
version = "1.0.2"
version = "1.0.12"
exclude = [
"australia",
"australiacentral2",
````

```
@@ -1,6 +1,5 @@
import { describe, expect, it } from "bun:test";
import {
executeScriptInContainer,
runTerraformApply,
runTerraformInit,
testRequiredVariables,
```

````
@@ -13,8 +13,9 @@ Automatically install [code-server](https://github.com/coder/code-server) in a w

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
}
```
@@ -27,8 +28,9 @@ module "code-server" {

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
install_version = "4.8.3"
}
@@ -40,8 +42,9 @@ Install the Dracula theme from [OpenVSX](https://open-vsx.org/):

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
extensions = [
"dracula-theme.theme-dracula"
@@ -57,8 +60,9 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
extensions = ["dracula-theme.theme-dracula"]
settings = {
@@ -73,8 +77,9 @@ Just run code-server in the background, don't fetch it from GitHub:

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
}
@@ -88,8 +93,9 @@ Run an existing copy of code-server if found, otherwise download from GitHub:

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
use_cached = true
extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
@@ -100,8 +106,9 @@ Just run code-server in the background, don't fetch it from GitHub:

```tf
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
version = "1.0.10"
version = "1.0.26"
agent_id = coder_agent.example.id
offline = true
}
````

```
@@ -39,7 +39,7 @@ variable "slug" {
}

variable "settings" {
type = map(string)
type = any
description = "A map of settings to apply to code-server."
default = {}
}
@@ -95,12 +95,33 @@ variable "use_cached" {
default = false
}

variable "use_cached_extensions" {
type = bool
description = "Uses cached copy of extensions, otherwise do a forced upgrade"
default = false
}

variable "extensions_dir" {
type = string
description = "Override the directory to store extensions in."
default = ""
}

variable "auto_install_extensions" {
type = bool
description = "Automatically install recommended extensions when code-server starts."
default = false
}

variable "subdomain" {
type = bool
description = <<-EOT
Determines whether the app will be accessed via it's own subdomain or whether it will be accessed via a path on Coder.
If wildcards have not been setup by the administrator then apps with "subdomain" set to true will not be accessible.
EOT
default = false
}

resource "coder_script" "code-server" {
agent_id = var.agent_id
display_name = "code-server"
@@ -116,7 +137,10 @@ resource "coder_script" "code-server" {
SETTINGS : replace(jsonencode(var.settings), "\"", "\\\""),
OFFLINE : var.offline,
USE_CACHED : var.use_cached,
USE_CACHED_EXTENSIONS : var.use_cached_extensions,
EXTENSIONS_DIR : var.extensions_dir,
FOLDER : var.folder,
AUTO_INSTALL_EXTENSIONS : var.auto_install_extensions,
})
run_on_start = true

@@ -139,7 +163,7 @@ resource "coder_app" "code-server" {
display_name = var.display_name
url = "http://localhost:${var.port}/${var.folder != "" ? "?folder=${urlencode(var.folder)}" : ""}"
icon = "/icon/code.svg"
subdomain = false
subdomain = var.subdomain
share = var.share
order = var.order
```

```
@@ -10,6 +10,7 @@ CODE_SERVER="${INSTALL_PREFIX}/bin/code-server"
EXTENSION_ARG=""
if [ -n "${EXTENSIONS_DIR}" ]; then
EXTENSION_ARG="--extensions-dir=${EXTENSIONS_DIR}"
mkdir -p "${EXTENSIONS_DIR}"
fi

function run_code_server() {
@@ -25,36 +26,53 @@ if [ ! -f ~/.local/share/code-server/User/settings.json ]; then
echo "${SETTINGS}" > ~/.local/share/code-server/User/settings.json
fi

# Check if code-server is already installed for offline or cached mode
if [ -f "$CODE_SERVER" ]; then
if [ "${OFFLINE}" = true ] || [ "${USE_CACHED}" = true ]; then
# Check if code-server is already installed for offline
if [ "${OFFLINE}" = true ]; then
if [ -f "$CODE_SERVER" ]; then
echo "🥳 Found a copy of code-server"
run_code_server
exit 0
fi
fi
# Offline mode always expects a copy of code-server to be present
if [ "${OFFLINE}" = true ]; then
# Offline mode always expects a copy of code-server to be present
echo "Failed to find a copy of code-server"
exit 1
fi

printf "$${BOLD}Installing code-server!\n"
# If there is no cached install OR we don't want to use a cached install
if [ ! -f "$CODE_SERVER" ] || [ "${USE_CACHED}" != true ]; then
printf "$${BOLD}Installing code-server!\n"

ARGS=(
"--method=standalone"
"--prefix=${INSTALL_PREFIX}"
)
if [ -n "${VERSION}" ]; then
ARGS+=("--version=${VERSION}")
ARGS=(
"--method=standalone"
"--prefix=${INSTALL_PREFIX}"
)
if [ -n "${VERSION}" ]; then
ARGS+=("--version=${VERSION}")
fi

output=$(curl -fsSL https://code-server.dev/install.sh | sh -s -- "$${ARGS[@]}")
if [ $? -ne 0 ]; then
echo "Failed to install code-server: $output"
exit 1
fi
printf "🥳 code-server has been installed in ${INSTALL_PREFIX}\n\n"
fi

output=$(curl -fsSL https://code-server.dev/install.sh | sh -s -- "$${ARGS[@]}")
if [ $? -ne 0 ]; then
echo "Failed to install code-server: $output"
exit 1
fi
printf "🥳 code-server has been installed in ${INSTALL_PREFIX}\n\n"
# Get the list of installed extensions...
LIST_EXTENSIONS=$($CODE_SERVER --list-extensions $EXTENSION_ARG)
readarray -t EXTENSIONS_ARRAY <<< "$LIST_EXTENSIONS"
function extension_installed() {
if [ "${USE_CACHED_EXTENSIONS}" != true ]; then
return 1
fi
for _extension in "$${EXTENSIONS_ARRAY[@]}"; do
if [ "$_extension" == "$1" ]; then
echo "Extension $1 was already installed."
return 0
fi
done
return 1
}

# Install each extension...
IFS=',' read -r -a EXTENSIONLIST <<< "$${EXTENSIONS}"
@@ -62,12 +80,38 @@ for extension in "$${EXTENSIONLIST[@]}"; do
if [ -z "$extension" ]; then
continue
fi
if extension_installed "$extension"; then
continue
fi
printf "🧩 Installing extension $${CODE}$extension$${RESET}...\n"
output=$($CODE_SERVER "$EXTENSION_ARG" --install-extension "$extension")
output=$($CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension")
if [ $? -ne 0 ]; then
echo "Failed to install extension: $extension: $output"
exit 1
fi
done

if [ "${AUTO_INSTALL_EXTENSIONS}" = true ]; then
if ! command -v jq > /dev/null; then
echo "jq is required to install extensions from a workspace file."
exit 0
fi

WORKSPACE_DIR="$HOME"
if [ -n "${FOLDER}" ]; then
WORKSPACE_DIR="${FOLDER}"
fi

if [ -f "$WORKSPACE_DIR/.vscode/extensions.json" ]; then
printf "🧩 Installing extensions from %s/.vscode/extensions.json...\n" "$WORKSPACE_DIR"
extensions=$(jq -r '.recommendations[]' "$WORKSPACE_DIR"/.vscode/extensions.json)
for extension in $extensions; do
if extension_installed "$extension"; then
continue
fi
$CODE_SERVER "$EXTENSION_ARG" --force --install-extension "$extension"
done
fi
fi

run_code_server
```

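The new `auto_install_extensions` path above reads VS Code's standard workspace recommendations file via `jq -r '.recommendations[]'`. A minimal sketch of seeding such a file in a workspace folder (the directory and extension IDs below are only examples, not part of this change):

```shell
# Example only: create .vscode/extensions.json so that a workspace started with
# auto_install_extensions = true picks up these recommended extensions.
mkdir -p /home/coder/project/.vscode
cat > /home/coder/project/.vscode/extensions.json << 'EOF'
{
  "recommendations": ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
}
EOF
```
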
````
@@ -13,8 +13,9 @@ Automatically logs the user into Coder when creating their workspace.

```tf
module "coder-login" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/coder-login/coder"
version = "1.0.2"
version = "1.0.15"
agent_id = coder_agent.example.id
}
```
````

```
@@ -1,10 +1,5 @@
import { describe, expect, it } from "bun:test";
import {
executeScriptInContainer,
runTerraformApply,
runTerraformInit,
testRequiredVariables,
} from "../test";
import { describe } from "bun:test";
import { runTerraformInit, testRequiredVariables } from "../test";

describe("coder-login", async () => {
await runTerraformInit(import.meta.dir);
```

```
@@ -4,7 +4,7 @@ terraform {
required_providers {
coder = {
source = "coder/coder"
version = ">= 0.12"
version = ">= 0.23"
}
}
}
@@ -15,11 +15,12 @@ variable "agent_id" {
}

data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

resource "coder_script" "coder-login" {
agent_id = var.agent_id
script = templatefile("${path.module}/run.sh", {
CODER_USER_TOKEN : data.coder_workspace.me.owner_session_token,
CODER_USER_TOKEN : data.coder_workspace_owner.me.session_token,
CODER_DEPLOYMENT_URL : data.coder_workspace.me.access_url
})
display_name = "Coder Login"
```

**`cursor/README.md`** (new file, +37 lines)

````
---
display_name: Cursor IDE
description: Add a one-click button to launch Cursor IDE
icon: ../.icons/cursor.svg
maintainer_github: coder
verified: true
tags: [ide, cursor, helper]
---

# Cursor IDE

Add a button to open any workspace with a single click in Cursor IDE.

Uses the [Coder Remote VS Code Extension](https://github.com/coder/cursor-coder).

```tf
module "cursor" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/cursor/coder"
  version  = "1.0.19"
  agent_id = coder_agent.example.id
}
```

## Examples

### Open in a specific directory

```tf
module "cursor" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/cursor/coder"
  version  = "1.0.19"
  agent_id = coder_agent.example.id
  folder   = "/home/coder/project"
}
```
````

88
cursor/main.test.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("cursor", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("default output", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "cursor",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBeNull();
|
||||
});
|
||||
|
||||
it("adds folder", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds folder and open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
open_recent: "true",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds folder but not open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/foo/bar",
|
||||
openRecent: "false",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("adds open_recent", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
open_recent: "true",
|
||||
});
|
||||
expect(state.outputs.cursor_url.value).toBe(
|
||||
"cursor://coder.coder-remote/open?owner=default&workspace=default&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
|
||||
);
|
||||
});
|
||||
|
||||
it("expect order to be set", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
order: "22",
|
||||
});
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "cursor",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBe(22);
|
||||
});
|
||||
});
|
||||
62
cursor/main.tf
Normal file
@@ -0,0 +1,62 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "folder" {
|
||||
type = string
|
||||
description = "The folder to open in Cursor IDE."
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "open_recent" {
|
||||
type = bool
|
||||
description = "Open the most recent workspace or folder. Falls back to the folder if there is no recent workspace or folder to open."
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_app" "cursor" {
|
||||
agent_id = var.agent_id
|
||||
external = true
|
||||
icon = "/icon/cursor.svg"
|
||||
slug = "cursor"
|
||||
display_name = "Cursor Desktop"
|
||||
order = var.order
|
||||
url = join("", [
|
||||
"cursor://coder.coder-remote/open",
|
||||
"?owner=",
|
||||
data.coder_workspace_owner.me.name,
|
||||
"&workspace=",
|
||||
data.coder_workspace.me.name,
|
||||
var.folder != "" ? join("", ["&folder=", var.folder]) : "",
|
||||
var.open_recent ? "&openRecent" : "",
|
||||
"&url=",
|
||||
data.coder_workspace.me.access_url,
|
||||
"&token=$SESSION_TOKEN",
|
||||
])
|
||||
}
|
||||
|
||||
output "cursor_url" {
|
||||
value = coder_app.cursor.url
|
||||
description = "Cursor IDE Desktop URL."
|
||||
}
|
||||
@@ -9,24 +9,75 @@ tags: [helper]
|
||||
|
||||
# Dotfiles
|
||||
|
||||
Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io)! Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
|
||||
Allow developers to optionally bring their own [dotfiles repository](https://dotfiles.github.io).
|
||||
|
||||
This will prompt the user for their dotfiles repository URL on template creation using a `coder_parameter`.
|
||||
|
||||
Under the hood, this module uses the [coder dotfiles](https://coder.com/docs/v2/latest/dotfiles) command.
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
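Under the hood the personalize step simply shells out to the Coder CLI; a minimal sketch of the command it runs inside the workspace (the repository URL is only an example):

```bash
# Clone and apply a dotfiles repository non-interactively, keeping a log of the run.
coder dotfiles "https://github.com/your-user/dotfiles" -y 2>&1 | tee -a ~/.dotfiles.log
```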
## Examples
|
||||
|
||||
### Apply dotfiles as the current user
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
### Apply dotfiles as another user (only works if sudo is passwordless)
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
user = "root"
|
||||
}
|
||||
```
|
||||
|
||||
### Apply the same dotfiles as the current user and root (the root dotfiles can only be applied if sudo is passwordless)
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
|
||||
module "dotfiles-root" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
user = "root"
|
||||
dotfiles_uri = module.dotfiles.dotfiles_uri
|
||||
}
|
||||
```
|
||||
|
||||
## Setting a default dotfiles repository
|
||||
|
||||
You can set a default dotfiles repository for all users by setting the `default_dotfiles_uri` variable:
|
||||
|
||||
```tf
|
||||
module "dotfiles" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/dotfiles/coder"
|
||||
version = "1.0.12"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
default_dotfiles_uri = "https://github.com/coder/dotfiles"
|
||||
}
|
||||
|
||||
@@ -16,20 +16,40 @@ variable "agent_id" {
|
||||
|
||||
variable "default_dotfiles_uri" {
|
||||
type = string
|
||||
description = "The default dotfiles URI if the workspace user does not provide one."
|
||||
description = "The default dotfiles URI if the workspace user does not provide one"
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "dotfiles_uri" {
|
||||
type = string
|
||||
description = "The URL to a dotfiles repository. (optional, when set, the user isn't prompted for their dotfiles)"
|
||||
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "user" {
|
||||
type = string
|
||||
description = "The name of the user to apply the dotfiles to. (optional, applies to the current user by default)"
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "manual_update" {
|
||||
type = bool
|
||||
description = "If true, this adds a button to workspace page to refresh dotfiles on demand."
|
||||
default = false
|
||||
}
|
||||
|
||||
data "coder_parameter" "dotfiles_uri" {
|
||||
count = var.dotfiles_uri == null ? 1 : 0
|
||||
type = "string"
|
||||
name = "dotfiles_uri"
|
||||
display_name = "Dotfiles URL (optional)"
|
||||
display_name = "Dotfiles URL"
|
||||
order = var.coder_parameter_order
|
||||
default = var.default_dotfiles_uri
|
||||
description = "Enter a URL for a [dotfiles repository](https://dotfiles.github.io) to personalize your workspace"
|
||||
@@ -37,20 +57,35 @@ data "coder_parameter" "dotfiles_uri" {
|
||||
icon = "/icon/dotfiles.svg"
|
||||
}
|
||||
|
||||
resource "coder_script" "personalize" {
|
||||
agent_id = var.agent_id
|
||||
script = <<-EOT
|
||||
DOTFILES_URI="${data.coder_parameter.dotfiles_uri.value}"
|
||||
if [ -n "$${DOTFILES_URI// }" ]; then
|
||||
coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee -a ~/.dotfiles.log
|
||||
fi
|
||||
EOT
|
||||
locals {
|
||||
dotfiles_uri = var.dotfiles_uri != null ? var.dotfiles_uri : data.coder_parameter.dotfiles_uri[0].value
|
||||
user = var.user != null ? var.user : ""
|
||||
}
|
||||
|
||||
resource "coder_script" "dotfiles" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
DOTFILES_URI : local.dotfiles_uri,
|
||||
DOTFILES_USER : local.user
|
||||
})
|
||||
display_name = "Dotfiles"
|
||||
icon = "/icon/dotfiles.svg"
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "dotfiles" {
|
||||
count = var.manual_update ? 1 : 0
|
||||
agent_id = var.agent_id
|
||||
display_name = "Refresh Dotfiles"
|
||||
slug = "dotfiles"
|
||||
icon = "/icon/dotfiles.svg"
|
||||
command = templatefile("${path.module}/run.sh", {
|
||||
DOTFILES_URI : local.dotfiles_uri,
|
||||
DOTFILES_USER : local.user
|
||||
})
|
||||
}
|
||||
|
||||
output "dotfiles_uri" {
|
||||
description = "Dotfiles URI"
|
||||
value = data.coder_parameter.dotfiles_uri.value
|
||||
value = local.dotfiles_uri
|
||||
}
|
||||
|
||||
23
dotfiles/run.sh
Normal file
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
DOTFILES_URI="${DOTFILES_URI}"
DOTFILES_USER="${DOTFILES_USER}"

if [ -n "$${DOTFILES_URI// }" ]; then
  if [ -z "$DOTFILES_USER" ]; then
    DOTFILES_USER="$USER"
  fi

  echo "✨ Applying dotfiles for user $DOTFILES_USER"

  if [ "$DOTFILES_USER" = "$USER" ]; then
    coder dotfiles "$DOTFILES_URI" -y 2>&1 | tee ~/.dotfiles.log
  else
    # The `eval echo ~"$DOTFILES_USER"` part is used to dynamically get the home directory of the user, see https://superuser.com/a/484280
    # eval echo ~coder -> "/home/coder"
    # eval echo ~root  -> "/root"

    CODER_BIN=$(which coder)
    DOTFILES_USER_HOME=$(eval echo ~"$DOTFILES_USER")
    sudo -u "$DOTFILES_USER" sh -c "'$CODER_BIN' dotfiles '$DOTFILES_URI' -y 2>&1 | tee '$DOTFILES_USER_HOME'/.dotfiles.log"
  fi
fi
@@ -16,8 +16,9 @@ Customize the preselected parameter value:
|
||||
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "standard.medium"
|
||||
}
|
||||
|
||||
@@ -44,8 +45,9 @@ Change the display name a type using the corresponding maps:
|
||||
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "standard.medium"
|
||||
|
||||
custom_names = {
|
||||
@@ -78,8 +80,9 @@ Show only gpu1 types
|
||||
|
||||
```tf
|
||||
module "exoscale-instance-type" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/exoscale-instance-type/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "gpu.large"
|
||||
type_category = ["gpu"]
|
||||
exclude = [
|
||||
|
||||
@@ -16,8 +16,9 @@ Customize the preselected parameter value:
|
||||
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "ch-dk-2"
|
||||
}
|
||||
|
||||
@@ -43,8 +44,9 @@ Change the display name and icon for a zone using the corresponding maps:
|
||||
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = "at-vie-1"
|
||||
|
||||
custom_names = {
|
||||
@@ -76,7 +78,7 @@ Hide the Switzerland zones Geneva and Zurich
|
||||
```tf
|
||||
module "exoscale-zone" {
|
||||
source = "registry.coder.com/modules/exoscale-zone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
exclude = ["ch-gva-2", "ch-dk-2"]
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
executeScriptInContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
|
||||
@@ -13,8 +13,9 @@ A file browser for your workspace.
|
||||
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.8"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
@@ -27,8 +28,9 @@ module "filebrowser" {
|
||||
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.8"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/project"
|
||||
}
|
||||
@@ -38,9 +40,23 @@ module "filebrowser" {
|
||||
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.8"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
database_path = ".config/filebrowser.db"
|
||||
}
|
||||
```
|
||||
|
||||
### Serve from the same domain (no subdomain)
|
||||
|
||||
```tf
|
||||
module "filebrowser" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/filebrowser/coder"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "main"
|
||||
subdomain = false
|
||||
}
|
||||
```
|
||||
|
||||
@@ -88,4 +88,27 @@ describe("filebrowser", async () => {
|
||||
"📝 Logs at /tmp/filebrowser.log",
|
||||
]);
|
||||
});
|
||||
|
||||
it("runs with subdomain=false", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
agent_name: "main",
|
||||
subdomain: false,
|
||||
});
|
||||
const output = await executeScriptInContainer(state, "alpine");
|
||||
expect(output.exitCode).toBe(0);
|
||||
expect(output.stdout).toEqual([
|
||||
"\u001B[0;1mInstalling filebrowser ",
|
||||
"",
|
||||
"🥳 Installation complete! ",
|
||||
"",
|
||||
"👷 Starting filebrowser in background... ",
|
||||
"",
|
||||
"📂 Serving /root at http://localhost:13339 ",
|
||||
"",
|
||||
"Running 'filebrowser --noauth --root /root --port 13339' ",
|
||||
"",
|
||||
"📝 Logs at /tmp/filebrowser.log",
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,6 +14,16 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
variable "agent_name" {
|
||||
type = string
|
||||
description = "The name of the coder_agent resource. (Only required if subdomain is false and the template uses multiple agents.)"
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "database_path" {
|
||||
type = string
|
||||
description = "The path to the filebrowser database."
|
||||
@@ -58,27 +68,56 @@ variable "order" {
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug of the coder_app resource."
|
||||
default = "filebrowser"
|
||||
}
|
||||
|
||||
variable "subdomain" {
|
||||
type = bool
|
||||
description = <<-EOT
|
||||
Determines whether the app will be accessed via its own subdomain or whether it will be accessed via a path on Coder.
If wildcards have not been set up by the administrator then apps with "subdomain" set to true will not be accessible.
|
||||
EOT
|
||||
default = true
|
||||
}
|
||||
|
||||
resource "coder_script" "filebrowser" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "File Browser"
|
||||
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
||||
icon = "/icon/filebrowser.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
LOG_PATH : var.log_path,
|
||||
PORT : var.port,
|
||||
FOLDER : var.folder,
|
||||
LOG_PATH : var.log_path,
|
||||
DB_PATH : var.database_path
|
||||
DB_PATH : var.database_path,
|
||||
SUBDOMAIN : var.subdomain,
|
||||
SERVER_BASE_PATH : local.server_base_path
|
||||
})
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "filebrowser" {
|
||||
agent_id = var.agent_id
|
||||
slug = "filebrowser"
|
||||
slug = var.slug
|
||||
display_name = "File Browser"
|
||||
url = "http://localhost:${var.port}"
|
||||
icon = "https://raw.githubusercontent.com/filebrowser/logo/master/icon_raw.svg"
|
||||
subdomain = true
|
||||
url = local.url
|
||||
icon = "/icon/filebrowser.svg"
|
||||
subdomain = var.subdomain
|
||||
share = var.share
|
||||
order = var.order
|
||||
|
||||
healthcheck {
|
||||
url = local.healthcheck_url
|
||||
interval = 5
|
||||
threshold = 6
|
||||
}
|
||||
}
|
||||
|
||||
locals {
|
||||
server_base_path = var.subdomain ? "" : format("/@%s/%s%s/apps/%s", data.coder_workspace_owner.me.name, data.coder_workspace.me.name, var.agent_name != null ? ".${var.agent_name}" : "", var.slug)
|
||||
url = "http://localhost:${var.port}${local.server_base_path}"
|
||||
healthcheck_url = "http://localhost:${var.port}${local.server_base_path}/health"
|
||||
}
|
||||
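For reference, when `subdomain = false` the `server_base_path` local above resolves to Coder's path-based app route. A sketch with hypothetical values (owner `alice`, workspace `dev`, agent `main`, default slug and port):

```bash
SERVER_BASE_PATH="/@alice/dev.main/apps/filebrowser" # format: /@<owner>/<workspace>.<agent>/apps/<slug>
URL="http://localhost:13339${SERVER_BASE_PATH}"
HEALTHCHECK_URL="${URL}/health"
echo "$URL"             # http://localhost:13339/@alice/dev.main/apps/filebrowser
echo "$HEALTHCHECK_URL" # http://localhost:13339/@alice/dev.main/apps/filebrowser/health
```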
@@ -1,9 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
printf "$${BOLD}Installing filebrowser \n\n"
|
||||
|
||||
curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash
|
||||
# Check if filebrowser is installed
|
||||
if ! command -v filebrowser &> /dev/null; then
|
||||
curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash
|
||||
fi
|
||||
|
||||
printf "🥳 Installation complete! \n\n"
|
||||
|
||||
@@ -17,6 +21,9 @@ if [ "${DB_PATH}" != "filebrowser.db" ]; then
|
||||
DB_FLAG=" -d ${DB_PATH}"
|
||||
fi
|
||||
|
||||
# Set the baseurl so the app can run when subdomain=false; when subdomain=true the SERVER_BASE_PATH value will be ""
|
||||
filebrowser config set --baseurl "${SERVER_BASE_PATH}"$${DB_FLAG} > ${LOG_PATH} 2>&1
|
||||
|
||||
printf "📂 Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n"
|
||||
|
||||
printf "Running 'filebrowser --noauth --root $ROOT_DIR --port ${PORT}$${DB_FLAG}' \n\n"
|
||||
|
||||
@@ -15,6 +15,7 @@ We can use the simplest format here, only adding a default selection as the `atl
|
||||
|
||||
```tf
|
||||
module "fly-region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/fly-region/coder"
|
||||
version = "1.0.2"
|
||||
default = "atl"
|
||||
@@ -31,6 +32,7 @@ The regions argument can be used to display only the desired regions in the Code
|
||||
|
||||
```tf
|
||||
module "fly-region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/fly-region/coder"
|
||||
version = "1.0.2"
|
||||
default = "ams"
|
||||
@@ -46,6 +48,7 @@ Set custom icons and names with their respective maps.
|
||||
|
||||
```tf
|
||||
module "fly-region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/fly-region/coder"
|
||||
version = "1.0.2"
|
||||
default = "ams"
|
||||
|
||||
@@ -13,8 +13,9 @@ This module adds Google Cloud Platform regions to your Coder template.
|
||||
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["us", "europe"]
|
||||
}
|
||||
|
||||
@@ -33,8 +34,9 @@ Note: setting `gpu_only = true` and using a default region without GPU support,
|
||||
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
default = ["us-west1-a"]
|
||||
regions = ["us-west1"]
|
||||
gpu_only = false
|
||||
@@ -49,8 +51,9 @@ resource "google_compute_instance" "example" {
|
||||
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["europe-west"]
|
||||
single_zone_per_region = false
|
||||
}
|
||||
@@ -64,8 +67,9 @@ resource "google_compute_instance" "example" {
|
||||
|
||||
```tf
|
||||
module "gcp_region" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/gcp-region/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.12"
|
||||
regions = ["us", "europe"]
|
||||
gpu_only = true
|
||||
single_zone_per_region = true
|
||||
|
||||
@@ -13,8 +13,9 @@ This module allows you to automatically clone a repository by URL and skip if it
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
}
|
||||
@@ -26,8 +27,9 @@ module "git-clone" {
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
base_dir = "~/projects/coder"
|
||||
@@ -40,8 +42,9 @@ To use with [Git Authentication](https://coder.com/docs/v2/latest/admin/git-prov
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.2"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
}
|
||||
@@ -65,31 +68,33 @@ data "coder_parameter" "git_repo" {
|
||||
|
||||
# Clone the repository for branch `feat/example`
|
||||
module "git_clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = data.coder_parameter.git_repo.value
|
||||
}
|
||||
|
||||
# Create a code-server instance for the cloned repository
|
||||
module "code-server" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/code-server/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
order = 1
|
||||
folder = "/home/${local.username}/${module.git_clone.folder_name}"
|
||||
folder = "/home/${local.username}/${module.git_clone[count.index].folder_name}"
|
||||
}
|
||||
|
||||
# Create a Coder app for the website
|
||||
resource "coder_app" "website" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
agent_id = coder_agent.example.id
|
||||
order = 2
|
||||
slug = "website"
|
||||
external = true
|
||||
display_name = module.git_clone.folder_name
|
||||
url = module.git_clone.web_url
|
||||
icon = module.git_clone.git_provider != "" ? "/icon/${module.git_clone.git_provider}.svg" : "/icon/git.svg"
|
||||
count = module.git_clone.web_url != "" ? 1 : 0
|
||||
display_name = module.git_clone[count.index].folder_name
|
||||
url = module.git_clone[count.index].web_url
|
||||
icon = module.git_clone[count.index].git_provider != "" ? "/icon/${module.git_clone[count.index].git_provider}.svg" : "/icon/git.svg"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -97,8 +102,9 @@ Configuring `git-clone` for a self-hosted GitHub Enterprise Server running at `g
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.example.com/coder/coder/tree/feat/example"
|
||||
git_providers = {
|
||||
@@ -115,8 +121,9 @@ To GitLab clone with a specific branch like `feat/example`
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://gitlab.com/coder/coder/-/tree/feat/example"
|
||||
}
|
||||
@@ -126,8 +133,9 @@ Configuring `git-clone` for a self-hosted GitLab running at `gitlab.example.com`
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://gitlab.example.com/coder/coder/-/tree/feat/example"
|
||||
git_providers = {
|
||||
@@ -146,10 +154,29 @@ For example, to clone the `feat/example` branch:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
branch_name = "feat/example"
|
||||
}
|
||||
```
|
||||
|
||||
## Git clone with different destination folder
|
||||
|
||||
By default, the repository will be cloned into a folder matching the repository name. You can use the `folder_name` attribute to change the name of the destination folder to something else.
|
||||
|
||||
For example, this will clone into the `~/projects/coder/coder-dev` folder:
|
||||
|
||||
```tf
|
||||
module "git-clone" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-clone/coder"
|
||||
version = "1.0.18"
|
||||
agent_id = coder_agent.example.id
|
||||
url = "https://github.com/coder/coder"
|
||||
folder_name = "coder-dev"
|
||||
base_dir = "~/projects/coder"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -79,6 +79,22 @@ describe("git-clone", async () => {
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("repo_dir should match base_dir/folder_name", async () => {
|
||||
const url = "git@github.com:coder/coder.git";
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
base_dir: "/tmp",
|
||||
folder_name: "foo",
|
||||
url,
|
||||
});
|
||||
expect(state.outputs.repo_dir.value).toEqual("/tmp/foo");
|
||||
expect(state.outputs.folder_name.value).toEqual("foo");
|
||||
expect(state.outputs.clone_url.value).toEqual(url);
|
||||
const https_url = "https://github.com/coder/coder.git";
|
||||
expect(state.outputs.web_url.value).toEqual(https_url);
|
||||
expect(state.outputs.branch_name.value).toEqual("");
|
||||
});
|
||||
|
||||
it("branch_name should not include query string", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
|
||||
@@ -50,6 +50,12 @@ variable "branch_name" {
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "folder_name" {
|
||||
description = "The destination folder to clone the repository into."
|
||||
type = string
|
||||
default = ""
|
||||
}
|
||||
|
||||
locals {
|
||||
# Remove query parameters and fragments from the URL
|
||||
url = replace(replace(var.url, "/\\?.*/", ""), "/#.*/", "")
|
||||
@@ -64,7 +70,7 @@ locals {
|
||||
# Extract the branch name from the URL
|
||||
branch_name = var.branch_name == "" && local.tree_path != "" ? replace(replace(local.url, local.clone_url, ""), "/.*${local.tree_path}/", "") : var.branch_name
|
||||
# Extract the folder name from the URL
|
||||
folder_name = replace(basename(local.clone_url), ".git", "")
|
||||
folder_name = var.folder_name == "" ? replace(basename(local.clone_url), ".git", "") : var.folder_name
|
||||
# Construct the path to clone the repository
|
||||
clone_path = var.base_dir != "" ? join("/", [var.base_dir, local.folder_name]) : join("/", ["~", local.folder_name])
|
||||
# Construct the web URL
|
||||
|
||||
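To make the locals above concrete, here is how the folder name and clone path resolve for one example URL, mirrored in shell (values inferred from the expressions, not taken from module output):

```bash
URL="https://github.com/coder/coder.git" # example clone URL
FOLDER_NAME="$(basename "$URL" .git)"    # -> "coder" (var.folder_name overrides this when set)
BASE_DIR="$HOME/projects"
echo "$BASE_DIR/$FOLDER_NAME"            # -> /home/<user>/projects/coder
```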
@@ -2,13 +2,16 @@
|
||||
display_name: Git commit signing
|
||||
description: Configures Git to sign commits using your Coder SSH key
|
||||
icon: ../.icons/git.svg
|
||||
maintainer_github: phorcys420
|
||||
verified: false
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [helper, git]
|
||||
---
|
||||
|
||||
# git-commit-signing
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This module will only work with Git versions >=2.34, prior versions [do not support signing commits via SSH keys](https://lore.kernel.org/git/xmqq8rxpgwki.fsf@gitster.g/).
|
||||
|
||||
This module downloads your SSH key from Coder and uses it to sign commits with Git.
|
||||
It requires `curl` and `jq` to be installed inside your workspace.
|
||||
|
||||
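For context, SSH-based commit signing in Git >= 2.34 comes down to a few `git config` settings. A minimal sketch of that configuration, not necessarily the exact commands this module runs (the key path is an assumption):

```bash
# Use an SSH key instead of GPG for signing (requires Git >= 2.34).
git config --global gpg.format ssh
# Point Git at the private key downloaded from Coder (example path).
git config --global user.signingkey ~/.ssh/coder_signing_key
# Sign every commit by default.
git config --global commit.gpgsign true
```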
@@ -18,6 +21,7 @@ This module has a chance of conflicting with the user's dotfiles / the personali
|
||||
|
||||
```tf
|
||||
module "git-commit-signing" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-commit-signing/coder"
|
||||
version = "1.0.11"
|
||||
agent_id = coder_agent.example.id
|
||||
|
||||
@@ -13,8 +13,9 @@ Runs a script that updates git credentials in the workspace to match the user's
|
||||
|
||||
```tf
|
||||
module "git-config" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
@@ -27,8 +28,9 @@ TODO: Add screenshot
|
||||
|
||||
```tf
|
||||
module "git-config" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
allow_email_change = true
|
||||
}
|
||||
@@ -40,8 +42,9 @@ TODO: Add screenshot
|
||||
|
||||
```tf
|
||||
module "git-config" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/git-config/coder"
|
||||
version = "1.0.3"
|
||||
version = "1.0.15"
|
||||
agent_id = coder_agent.example.id
|
||||
allow_username_change = false
|
||||
allow_email_change = false
|
||||
|
||||
@@ -20,10 +20,13 @@ describe("git-config", async () => {
|
||||
});
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(3);
|
||||
expect(resources).toHaveLength(6);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
});
|
||||
@@ -35,12 +38,15 @@ describe("git-config", async () => {
|
||||
});
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(5);
|
||||
expect(resources).toHaveLength(8);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "user_email" },
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
});
|
||||
@@ -53,13 +59,14 @@ describe("git-config", async () => {
|
||||
allow_username_change: "false",
|
||||
allow_email_change: "false",
|
||||
},
|
||||
{ CODER_WORKSPACE_OWNER_EMAIL: "foo@emai.com" },
|
||||
{ CODER_WORKSPACE_OWNER_EMAIL: "foo@email.com" },
|
||||
);
|
||||
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(5);
|
||||
expect(resources).toHaveLength(6);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
@@ -75,12 +82,23 @@ describe("git-config", async () => {
|
||||
allow_email_change: "true",
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(5);
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(8);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "user_email" },
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
// user_email order is the same as the order
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order);
|
||||
expect(resources[0].instances[0].attributes.order).toBe(order);
|
||||
// username order is incremented by 1
|
||||
// @ts-ignore: Object is possibly 'null'.
|
||||
expect(state.resources[1].instances[0]?.attributes.order).toBe(order + 1);
|
||||
expect(resources[1].instances[0]?.attributes.order).toBe(order + 1);
|
||||
});
|
||||
|
||||
it("set custom order for coder_parameter for just username", async () => {
|
||||
@@ -91,9 +109,19 @@ describe("git-config", async () => {
|
||||
allow_username_change: "true",
|
||||
coder_parameter_order: order.toString(),
|
||||
});
|
||||
expect(state.resources).toHaveLength(4);
|
||||
const resources = state.resources;
|
||||
expect(resources).toHaveLength(7);
|
||||
expect(resources).toMatchObject([
|
||||
{ type: "coder_parameter", name: "username" },
|
||||
{ type: "coder_workspace", name: "me" },
|
||||
{ type: "coder_workspace_owner", name: "me" },
|
||||
{ type: "coder_env", name: "git_author_email" },
|
||||
{ type: "coder_env", name: "git_author_name" },
|
||||
{ type: "coder_env", name: "git_commmiter_email" },
|
||||
{ type: "coder_env", name: "git_commmiter_name" },
|
||||
]);
|
||||
// user_email was not created
|
||||
// username order is incremented by 1
|
||||
expect(state.resources[0].instances[0].attributes.order).toBe(order + 1);
|
||||
expect(resources[0].instances[0].attributes.order).toBe(order + 1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.13"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -33,6 +33,7 @@ variable "coder_parameter_order" {
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
data "coder_parameter" "user_email" {
|
||||
count = var.allow_email_change ? 1 : 0
|
||||
@@ -59,25 +60,25 @@ data "coder_parameter" "username" {
|
||||
resource "coder_env" "git_author_name" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_AUTHOR_NAME"
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||
}
|
||||
|
||||
resource "coder_env" "git_commmiter_name" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_COMMITTER_NAME"
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace.me.owner_name, data.coder_workspace.me.owner)
|
||||
value = coalesce(try(data.coder_parameter.username[0].value, ""), data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||
}
|
||||
|
||||
resource "coder_env" "git_author_email" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_AUTHOR_EMAIL"
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
|
||||
count = data.coder_workspace.me.owner_email != "" ? 1 : 0
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
|
||||
count = data.coder_workspace_owner.me.email != "" ? 1 : 0
|
||||
}
|
||||
|
||||
resource "coder_env" "git_commmiter_email" {
|
||||
agent_id = var.agent_id
|
||||
name = "GIT_COMMITTER_EMAIL"
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace.me.owner_email)
|
||||
count = data.coder_workspace.me.owner_email != "" ? 1 : 0
|
||||
value = coalesce(try(data.coder_parameter.user_email[0].value, ""), data.coder_workspace_owner.me.email)
|
||||
count = data.coder_workspace_owner.me.email != "" ? 1 : 0
|
||||
}
|
||||
|
||||
55
github-upload-public-key/README.md
Normal file
@@ -0,0 +1,55 @@
---
display_name: Github Upload Public Key
description: Automates uploading Coder public key to Github so users don't have to.
icon: ../.icons/github.svg
maintainer_github: coder
verified: true
tags: [helper, git]
---

# github-upload-public-key

Templates that utilize Github External Auth can automatically ensure that the Coder public key is uploaded to Github so that users can clone repositories without needing to upload the public key themselves.

```tf
module "github-upload-public-key" {
  count    = data.coder_workspace.me.start_count
  source   = "registry.coder.com/modules/github-upload-public-key/coder"
  version  = "1.0.15"
  agent_id = coder_agent.example.id
}
```

# Requirements

This module requires `curl` and `jq` to be installed inside your workspace.

Github External Auth must be enabled in the workspace for this module to work. The Github app that is configured for external auth must have both read and write permissions to "Git SSH keys" in order to upload the public key. Additionally, a Coder admin must also have the `admin:public_key` scope added to the external auth configuration of the Coder deployment. For example:

```
CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
CODER_EXTERNAL_AUTH_0_SCOPES="repo,workflow,admin:public_key"
```

Note that the default scopes, if not provided, are `repo,workflow`. If the module is failing to complete after updating the external auth configuration, instruct users of the module to "Unlink" and "Link" their Github account in the External Auth user settings page to get the new scopes.

# Example

Using a Coder GitHub external auth with a non-default ID (the default is `github`):

```tf
data "coder_external_auth" "github" {
  id = "myauthid"
}

module "github-upload-public-key" {
  count            = data.coder_workspace.me.start_count
  source           = "registry.coder.com/modules/github-upload-public-key/coder"
  version          = "1.0.15"
  agent_id         = coder_agent.example.id
  external_auth_id = data.coder_external_auth.github.id
}
```
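For context, the run script for this module works in two steps: it obtains a GitHub token through Coder's external auth and then talks to the GitHub SSH keys API. A rough standalone sketch (the external auth ID and API URL are the module defaults):

```bash
# Obtain a GitHub token via Coder external auth (ID "github" is the module default).
GITHUB_TOKEN=$(coder external-auth access-token github)

# List the SSH keys already on the GitHub account.
curl -s \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $GITHUB_TOKEN" \
  https://api.github.com/user/keys
```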
132
github-upload-public-key/main.test.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { type Server, serve } from "bun";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
execContainer,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
writeCoder,
|
||||
} from "../test";
|
||||
|
||||
describe("github-upload-public-key", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("creates new key if one does not exist", async () => {
|
||||
const { instance, id, server } = await setupContainer();
|
||||
await writeCoder(id, "echo foo");
|
||||
|
||||
const url = server.url.toString().slice(0, -1);
|
||||
const exec = await execContainer(id, [
|
||||
"env",
|
||||
`CODER_ACCESS_URL=${url}`,
|
||||
`GITHUB_API_URL=${url}`,
|
||||
"CODER_OWNER_SESSION_TOKEN=foo",
|
||||
"CODER_EXTERNAL_AUTH_ID=github",
|
||||
"bash",
|
||||
"-c",
|
||||
instance.script,
|
||||
]);
|
||||
expect(exec.stdout).toContain(
|
||||
"Your Coder public key has been added to GitHub!",
|
||||
);
|
||||
expect(exec.exitCode).toBe(0);
|
||||
// we need to increase timeout to pull the container
|
||||
}, 15000);
|
||||
|
||||
it("does nothing if one already exists", async () => {
|
||||
const { instance, id, server } = await setupContainer();
|
||||
// use keyword to make server return a existing key
|
||||
await writeCoder(id, "echo findkey");
|
||||
|
||||
const url = server.url.toString().slice(0, -1);
|
||||
const exec = await execContainer(id, [
|
||||
"env",
|
||||
`CODER_ACCESS_URL=${url}`,
|
||||
`GITHUB_API_URL=${url}`,
|
||||
"CODER_OWNER_SESSION_TOKEN=foo",
|
||||
"CODER_EXTERNAL_AUTH_ID=github",
|
||||
"bash",
|
||||
"-c",
|
||||
instance.script,
|
||||
]);
|
||||
expect(exec.stdout).toContain(
|
||||
"Your Coder public key is already on GitHub!",
|
||||
);
|
||||
expect(exec.exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
const setupContainer = async (
|
||||
image = "lorello/alpine-bash",
|
||||
vars: Record<string, string> = {},
|
||||
) => {
|
||||
const server = await setupServer();
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
...vars,
|
||||
});
|
||||
const instance = findResourceInstance(state, "coder_script");
|
||||
const id = await runContainer(image);
|
||||
return { id, instance, server };
|
||||
};
|
||||
|
||||
const setupServer = async (): Promise<Server> => {
|
||||
let url: URL;
|
||||
const fakeSlackHost = serve({
|
||||
fetch: (req) => {
|
||||
url = new URL(req.url);
|
||||
if (url.pathname === "/api/v2/users/me/gitsshkey") {
|
||||
return createJSONResponse({
|
||||
public_key: "exists",
|
||||
});
|
||||
}
|
||||
|
||||
if (url.pathname === "/user/keys") {
|
||||
if (req.method === "POST") {
|
||||
return createJSONResponse(
|
||||
{
|
||||
key: "created",
|
||||
},
|
||||
201,
|
||||
);
|
||||
}
|
||||
|
||||
// case: key already exists
|
||||
if (req.headers.get("Authorization") === "Bearer findkey") {
|
||||
return createJSONResponse([
|
||||
{
|
||||
key: "foo",
|
||||
},
|
||||
{
|
||||
key: "exists",
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
// case: key does not exist
|
||||
return createJSONResponse([
|
||||
{
|
||||
key: "foo",
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
return createJSONResponse(
|
||||
{
|
||||
error: "not_found",
|
||||
},
|
||||
404,
|
||||
);
|
||||
},
|
||||
port: 0,
|
||||
});
|
||||
|
||||
return fakeSlackHost;
|
||||
};
|
||||
43
github-upload-public-key/main.tf
Normal file
@@ -0,0 +1,43 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "external_auth_id" {
|
||||
type = string
|
||||
description = "The ID of the GitHub external auth."
|
||||
default = "github"
|
||||
}
|
||||
|
||||
variable "github_api_url" {
|
||||
type = string
|
||||
description = "The URL of the GitHub instance."
|
||||
default = "https://api.github.com"
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_script" "github_upload_public_key" {
|
||||
agent_id = var.agent_id
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
CODER_OWNER_SESSION_TOKEN : data.coder_workspace_owner.me.session_token,
|
||||
CODER_ACCESS_URL : data.coder_workspace.me.access_url,
|
||||
CODER_EXTERNAL_AUTH_ID : var.external_auth_id,
|
||||
GITHUB_API_URL : var.github_api_url,
|
||||
})
|
||||
display_name = "Github Upload Public Key"
|
||||
icon = "/icon/github.svg"
|
||||
run_on_start = true
|
||||
}
|
||||
110
github-upload-public-key/run.sh
Executable file
@@ -0,0 +1,110 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
if [ -z "$CODER_ACCESS_URL" ]; then
|
||||
if [ -z "${CODER_ACCESS_URL}" ]; then
|
||||
echo "CODER_ACCESS_URL is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_ACCESS_URL=${CODER_ACCESS_URL}
|
||||
fi
|
||||
|
||||
if [ -z "$CODER_OWNER_SESSION_TOKEN" ]; then
|
||||
if [ -z "${CODER_OWNER_SESSION_TOKEN}" ]; then
|
||||
echo "CODER_OWNER_SESSION_TOKEN is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_OWNER_SESSION_TOKEN=${CODER_OWNER_SESSION_TOKEN}
|
||||
fi
|
||||
|
||||
if [ -z "$CODER_EXTERNAL_AUTH_ID" ]; then
|
||||
if [ -z "${CODER_EXTERNAL_AUTH_ID}" ]; then
|
||||
echo "CODER_EXTERNAL_AUTH_ID is empty!"
|
||||
exit 1
|
||||
fi
|
||||
CODER_EXTERNAL_AUTH_ID=${CODER_EXTERNAL_AUTH_ID}
|
||||
fi
|
||||
|
||||
if [ -z "$GITHUB_API_URL" ]; then
|
||||
if [ -z "${GITHUB_API_URL}" ]; then
|
||||
echo "GITHUB_API_URL is empty!"
|
||||
exit 1
|
||||
fi
|
||||
GITHUB_API_URL=${GITHUB_API_URL}
|
||||
fi
|
||||
|
||||
echo "Fetching GitHub token..."
|
||||
GITHUB_TOKEN=$(coder external-auth access-token $CODER_EXTERNAL_AUTH_ID)
|
||||
if [ $? -ne 0 ]; then
|
||||
printf "Authenticate with Github to automatically upload Coder public key:\n$GITHUB_TOKEN\n"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Fetching public key from Coder..."
|
||||
PUBLIC_KEY_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-w "\n%%{http_code}" \
|
||||
-H 'accept: application/json' \
|
||||
-H "cookie: coder_session_token=$CODER_OWNER_SESSION_TOKEN" \
|
||||
"$CODER_ACCESS_URL/api/v2/users/me/gitsshkey"
|
||||
)
|
||||
PUBLIC_KEY_RESPONSE_STATUS=$(tail -n1 <<< "$PUBLIC_KEY_RESPONSE")
|
||||
PUBLIC_KEY_BODY=$(sed \$d <<< "$PUBLIC_KEY_RESPONSE")
|
||||
|
||||
if [ "$PUBLIC_KEY_RESPONSE_STATUS" -ne 200 ]; then
|
||||
echo "Failed to fetch Coder public SSH key with status code $PUBLIC_KEY_RESPONSE_STATUS!"
|
||||
echo "$PUBLIC_KEY_BODY"
|
||||
exit 1
|
||||
fi
|
||||
PUBLIC_KEY=$(jq -r '.public_key' <<< "$PUBLIC_KEY_BODY")
|
||||
if [ -z "$PUBLIC_KEY" ]; then
|
||||
echo "No Coder public SSH key found!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Fetching public keys from GitHub..."
|
||||
GITHUB_KEYS_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-w "\n%%{http_code}" \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
$GITHUB_API_URL/user/keys
|
||||
)
|
||||
GITHUB_KEYS_RESPONSE_STATUS=$(tail -n1 <<< "$GITHUB_KEYS_RESPONSE")
|
||||
GITHUB_KEYS_RESPONSE_BODY=$(sed \$d <<< "$GITHUB_KEYS_RESPONSE")
|
||||
|
||||
if [ "$GITHUB_KEYS_RESPONSE_STATUS" -ne 200 ]; then
|
||||
echo "Failed to fetch Coder public SSH key with status code $GITHUB_KEYS_RESPONSE_STATUS!"
|
||||
echo "$GITHUB_KEYS_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
GITHUB_MATCH=$(jq -r --arg PUBLIC_KEY "$PUBLIC_KEY" '.[] | select(.key == $PUBLIC_KEY) | .key' <<< "$GITHUB_KEYS_RESPONSE_BODY")
|
||||
|
||||
if [ "$PUBLIC_KEY" = "$GITHUB_MATCH" ]; then
|
||||
echo "Your Coder public key is already on GitHub!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Your Coder public key is not in GitHub. Adding it now..."
|
||||
CODER_PUBLIC_KEY_NAME="$CODER_ACCESS_URL Workspaces"
|
||||
UPLOAD_RESPONSE=$(
|
||||
curl -L -s \
|
||||
-X POST \
|
||||
-w "\n%%{http_code}" \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
$GITHUB_API_URL/user/keys \
|
||||
-d "{\"title\":\"$CODER_PUBLIC_KEY_NAME\",\"key\":\"$PUBLIC_KEY\"}"
|
||||
)
|
||||
UPLOAD_RESPONSE_STATUS=$(tail -n1 <<< "$UPLOAD_RESPONSE")
|
||||
UPLOAD_RESPONSE_BODY=$(sed \$d <<< "$UPLOAD_RESPONSE")
|
||||
|
||||
if [ "$UPLOAD_RESPONSE_STATUS" -ne 201 ]; then
|
||||
echo "Failed to upload Coder public SSH key with status code $UPLOAD_RESPONSE_STATUS!"
|
||||
echo "$UPLOAD_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Your Coder public key has been added to GitHub!"
|
||||
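A pattern worth noting in the script above: the HTTP status code is appended to the response body with `curl -w` and split off afterwards. A standalone sketch (inside the templated script the format string is written `%%{http_code}` because of Terraform's `templatefile` escaping; example URL only):

```bash
RESPONSE=$(curl -s -w "\n%{http_code}" https://example.com/api) # body, then the status code on its own line
STATUS=$(tail -n1 <<< "$RESPONSE") # last line: the HTTP status code
BODY=$(sed '$d' <<< "$RESPONSE")   # everything except the last line: the response body
echo "status=$STATUS"
```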
@@ -13,8 +13,9 @@ This module adds a JetBrains Gateway Button to open any workspace with a single
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
@@ -31,8 +32,9 @@ module "jetbrains_gateway" {
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
@@ -41,27 +43,55 @@ module "jetbrains_gateway" {
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest release version
|
||||
### Use the latest version of each IDE
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
default = "GO"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = true
|
||||
}
|
||||
```
|
||||
|
||||
### Use fixed versions set by `jetbrains_ide_versions`
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = false
|
||||
jetbrains_ide_versions = {
|
||||
"IU" = {
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest EAP version
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.11"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
@@ -72,15 +102,35 @@ module "jetbrains_gateway" {
|
||||
}
|
||||
```
|
||||
|
||||
### Custom base link
|
||||
|
||||
Because the `ide_download_link` parameter in the `jetbrains-gateway://` URL takes the highest priority within IDEA, the pre-configured download address will be overridden when using [IDEA's offline mode](https://www.jetbrains.com/help/idea/fully-offline-mode.html). Therefore, it is necessary to configure the `download_base_link` parameter for the `jetbrains_gateway` module to change the value of `ide_download_link`.
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.27"
|
||||
agent_id = coder_agent.example.id
|
||||
agent_name = "example"
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
releases_base_link = "https://releases.internal.site/"
|
||||
download_base_link = "https://download.internal.site/"
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||
## Supported IDEs
|
||||
|
||||
This module and JetBrains Gateway support the following JetBrains IDEs:
|
||||
|
||||
- GoLand (`GO`)
|
||||
- WebStorm (`WS`)
|
||||
- IntelliJ IDEA Ultimate (`IU`)
|
||||
- PyCharm Professional (`PY`)
|
||||
- PhpStorm (`PS`)
|
||||
- CLion (`CL`)
|
||||
- RubyMine (`RM`)
|
||||
- Rider (`RD`)
|
||||
- [GoLand (`GO`)](https://www.jetbrains.com/go/)
|
||||
- [WebStorm (`WS`)](https://www.jetbrains.com/webstorm/)
|
||||
- [IntelliJ IDEA Ultimate (`IU`)](https://www.jetbrains.com/idea/)
|
||||
- [PyCharm Professional (`PY`)](https://www.jetbrains.com/pycharm/)
|
||||
- [PhpStorm (`PS`)](https://www.jetbrains.com/phpstorm/)
|
||||
- [CLion (`CL`)](https://www.jetbrains.com/clion/)
|
||||
- [RubyMine (`RM`)](https://www.jetbrains.com/ruby/)
|
||||
- [Rider (`RD`)](https://www.jetbrains.com/rider/)
|
||||
- [RustRover (`RR`)](https://www.jetbrains.com/rust/)
|
||||
|
||||
@@ -14,6 +14,26 @@ describe("jetbrains-gateway", async () => {
|
||||
folder: "/home/foo",
|
||||
});
|
||||
|
||||
it("should create a link with the default values", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
// These are all required.
|
||||
agent_id: "foo",
|
||||
agent_name: "foo",
|
||||
folder: "/home/coder",
|
||||
});
|
||||
expect(state.outputs.url.value).toBe(
|
||||
"jetbrains-gateway://connect#type=coder&workspace=default&owner=default&agent=foo&folder=/home/coder&url=https://mydeployment.coder.com&token=$SESSION_TOKEN&ide_product_code=IU&ide_build_number=243.21565.193&ide_download_link=https://download.jetbrains.com/idea/ideaIU-2024.3.tar.gz",
|
||||
);
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "gateway",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBeNull();
|
||||
});
|
||||
|
||||
it("default to first ide", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
|
||||
@@ -18,6 +18,12 @@ variable "agent_id" {
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug for the coder_app. Allows resuing the module with the same template."
|
||||
default = "gateway"
|
||||
}
|
||||
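# Illustrative note (not part of this change): because the slug is now a
# variable, a template can instantiate this module more than once, e.g. one
# call with slug = "gateway" and another with slug = "gateway-data", without
# the resulting coder_app resources colliding.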
|
||||
variable "agent_name" {
|
||||
type = string
|
||||
description = "Agent name."
|
||||
@@ -74,59 +80,63 @@ variable "jetbrains_ide_versions" {
|
||||
description = "The set of versions for each jetbrains IDE"
|
||||
default = {
|
||||
"IU" = {
|
||||
build_number = "241.14494.240"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PS" = {
|
||||
build_number = "241.14494.237"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.202"
|
||||
version = "2024.3"
|
||||
}
|
||||
"WS" = {
|
||||
build_number = "241.14494.235"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.180"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "241.14494.241"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
"CL" = {
|
||||
build_number = "241.14494.288"
|
||||
build_number = "243.21565.238"
|
||||
version = "2024.1"
|
||||
}
|
||||
"GO" = {
|
||||
build_number = "241.14494.238"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.208"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RM" = {
|
||||
build_number = "241.14494.234"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.197"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RD" = {
|
||||
build_number = "241.14494.307"
|
||||
version = "2024.1"
|
||||
build_number = "243.21565.191"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RR" = {
|
||||
build_number = "243.22562.230"
|
||||
version = "2024.3"
|
||||
}
|
||||
}
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in keys(var.jetbrains_ide_versions) : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"], code)
|
||||
for code in keys(var.jetbrains_ide_versions) : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ide_versions must contain a map of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"])}."
|
||||
error_message = "The jetbrains_ide_versions must contain a map of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"])}."
|
||||
}
|
||||
}
|
||||
|
||||
variable "jetbrains_ides" {
|
||||
type = list(string)
|
||||
description = "The list of IDE product codes."
|
||||
default = ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"]
|
||||
default = ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"]
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in var.jetbrains_ides : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"], code)
|
||||
for code in var.jetbrains_ides : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ides must be a list of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD"])}."
|
||||
error_message = "The jetbrains_ides must be a list of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"])}."
|
||||
}
|
||||
# check if the list is empty
|
||||
validation {
|
||||
@@ -140,9 +150,29 @@ variable "jetbrains_ides" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "releases_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://data.services.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.releases_base_link))
|
||||
error_message = "The releases_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
variable "download_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://download.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.download_base_link))
|
||||
error_message = "The download_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
data "http" "jetbrains_ide_versions" {
|
||||
for_each = var.latest ? toset(var.jetbrains_ides) : toset([])
|
||||
url = "https://data.services.jetbrains.com/products/releases?code=${each.key}&latest=true&type=${var.channel}"
|
||||
url = "${var.releases_base_link}/products/releases?code=${each.key}&latest=true&type=${var.channel}"
|
||||
}
|
||||
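# Illustration (not part of this change): with the default releases_base_link,
# a lookup for product code "GO" resolves to
#   https://data.services.jetbrains.com/products/releases?code=GO&latest=true&type=<channel>
# and a mirror supplied via releases_base_link is substituted into the same path.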
|
||||
locals {
|
||||
@@ -152,7 +182,7 @@ locals {
|
||||
name = "GoLand",
|
||||
identifier = "GO",
|
||||
build_number = var.jetbrains_ide_versions["GO"].build_number,
|
||||
download_link = "https://download.jetbrains.com/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["GO"].version
|
||||
},
|
||||
"WS" = {
|
||||
@@ -160,7 +190,7 @@ locals {
|
||||
name = "WebStorm",
|
||||
identifier = "WS",
|
||||
build_number = var.jetbrains_ide_versions["WS"].build_number,
|
||||
download_link = "https://download.jetbrains.com/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["WS"].version
|
||||
},
|
||||
"IU" = {
|
||||
@@ -168,7 +198,7 @@ locals {
|
||||
name = "IntelliJ IDEA Ultimate",
|
||||
identifier = "IU",
|
||||
build_number = var.jetbrains_ide_versions["IU"].build_number,
|
||||
download_link = "https://download.jetbrains.com/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["IU"].version
|
||||
},
|
||||
"PY" = {
|
||||
@@ -176,7 +206,7 @@ locals {
|
||||
name = "PyCharm Professional",
|
||||
identifier = "PY",
|
||||
build_number = var.jetbrains_ide_versions["PY"].build_number,
|
||||
download_link = "https://download.jetbrains.com/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PY"].version
|
||||
},
|
||||
"CL" = {
|
||||
@@ -184,7 +214,7 @@ locals {
|
||||
name = "CLion",
|
||||
identifier = "CL",
|
||||
build_number = var.jetbrains_ide_versions["CL"].build_number,
|
||||
download_link = "https://download.jetbrains.com/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["CL"].version
|
||||
},
|
||||
"PS" = {
|
||||
@@ -192,7 +222,7 @@ locals {
|
||||
name = "PhpStorm",
|
||||
identifier = "PS",
|
||||
build_number = var.jetbrains_ide_versions["PS"].build_number,
|
||||
download_link = "https://download.jetbrains.com/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PS"].version
|
||||
},
|
||||
"RM" = {
|
||||
@@ -200,20 +230,28 @@ locals {
|
||||
name = "RubyMine",
|
||||
identifier = "RM",
|
||||
build_number = var.jetbrains_ide_versions["RM"].build_number,
|
||||
download_link = "https://download.jetbrains.com/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RM"].version
|
||||
}
|
||||
},
|
||||
"RD" = {
|
||||
icon = "/icon/rider.svg",
|
||||
name = "Rider",
|
||||
identifier = "RD",
|
||||
build_number = var.jetbrains_ide_versions["RD"].build_number,
|
||||
download_link = "https://download.jetbrains.com/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
|
||||
download_link = "${var.download_base_link}/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RD"].version
|
||||
},
|
||||
"RR" = {
|
||||
icon = "/icon/rustrover.svg",
|
||||
name = "RustRover",
|
||||
identifier = "RR",
|
||||
build_number = var.jetbrains_ide_versions["RR"].build_number,
|
||||
download_link = "${var.download_base_link}/rustrover/RustRover-${var.jetbrains_ide_versions["RR"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RR"].version
|
||||
}
|
||||
}
|
||||
|
||||
icon = try(lookup(local.jetbrains_ides, data.coder_parameter.jetbrains_ide.value).icon, "/icon/gateway.svg")
|
||||
icon = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].icon
|
||||
json_data = var.latest ? jsondecode(data.http.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].response_body) : {}
|
||||
key = var.latest ? keys(local.json_data)[0] : ""
|
||||
display_name = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].name
|
||||
@@ -243,17 +281,20 @@ data "coder_parameter" "jetbrains_ide" {
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_app" "gateway" {
|
||||
agent_id = var.agent_id
|
||||
slug = "gateway"
|
||||
display_name = try(lookup(data.coder_parameter.jetbrains_ide.option, data.coder_parameter.jetbrains_ide.value).name, "JetBrains IDE")
|
||||
icon = try(lookup(data.coder_parameter.jetbrains_ide.option, data.coder_parameter.jetbrains_ide.value).icon, "/icon/gateway.svg")
|
||||
slug = var.slug
|
||||
display_name = local.display_name
|
||||
icon = local.icon
|
||||
external = true
|
||||
order = var.order
|
||||
url = join("", [
|
||||
"jetbrains-gateway://connect#type=coder&workspace=",
|
||||
data.coder_workspace.me.name,
|
||||
"&owner=",
|
||||
data.coder_workspace_owner.me.name,
|
||||
"&agent=",
|
||||
var.agent_name,
|
||||
"&folder=",
|
||||
|
||||
5
jfrog-oauth/.npmrc.tftpl
Normal file
@@ -0,0 +1,5 @@
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
%{ for REPO in REPOS ~}
|
||||
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
|
||||
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
|
||||
%{ endfor ~}
|
||||
@@ -16,16 +16,18 @@ Install the JF CLI and authenticate package managers with Artifactory using OAut
|
||||
|
||||
```tf
|
||||
module "jfrog" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.5"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
|
||||
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm", "@scoped:npm-scoped"]
|
||||
go = ["go", "another-go-repo"]
|
||||
pypi = ["pypi", "extra-index-pypi"]
|
||||
docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -43,14 +45,15 @@ Configure the Python pip package manager to fetch packages from Artifactory whil
|
||||
|
||||
```tf
|
||||
module "jfrog" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.5"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "email"
|
||||
|
||||
package_managers = {
|
||||
"pypi" : "pypi"
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -71,16 +74,17 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
|
||||
|
||||
```tf
|
||||
module "jfrog" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jfrog-oauth/coder"
|
||||
version = "1.0.5"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://example.jfrog.io"
|
||||
username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
|
||||
configure_code_server = true # Add JFrog extension configuration for code-server
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm"]
|
||||
go = ["go"]
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -94,8 +98,8 @@ provider "docker" {
|
||||
# ...
|
||||
registry_auth {
|
||||
address = "https://example.jfrog.io/artifactory/api/docker/REPO-KEY"
|
||||
username = module.jfrog.username
|
||||
password = module.jfrog.access_token
|
||||
username = try(module.jfrog[0].username, "")
|
||||
password = try(module.jfrog[0].access_token, "")
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,19 +1,129 @@
|
||||
import { serve } from "bun";
|
||||
import { describe } from "bun:test";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
findResourceInstance,
|
||||
runTerraformInit,
|
||||
runTerraformApply,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("jfrog-oauth", async () => {
|
||||
type TestVariables = {
|
||||
agent_id: string;
|
||||
jfrog_url: string;
|
||||
package_managers: string;
|
||||
|
||||
username_field?: string;
|
||||
jfrog_server_id?: string;
|
||||
external_auth_id?: string;
|
||||
configure_code_server?: boolean;
|
||||
};
|
||||
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: "http://localhost:8081",
|
||||
package_managers: "{}",
|
||||
const fakeFrogApi = "localhost:8081/artifactory/api";
|
||||
const fakeFrogUrl = "http://localhost:8081";
|
||||
const user = "default";
|
||||
|
||||
it("can run apply with required variables", async () => {
|
||||
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: "{}",
|
||||
});
|
||||
});
|
||||
|
||||
it("generates an npmrc with scoped repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
npm: ["global", "@foo:foo", "@bar:bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const npmrcStanza = `cat << EOF > ~/.npmrc
|
||||
email=${user}@example.com
|
||||
registry=http://${fakeFrogApi}/npm/global
|
||||
//${fakeFrogApi}/npm/global/:_authToken=
|
||||
@foo:registry=http://${fakeFrogApi}/npm/foo
|
||||
//${fakeFrogApi}/npm/foo/:_authToken=
|
||||
@bar:registry=http://${fakeFrogApi}/npm/bar
|
||||
//${fakeFrogApi}/npm/bar/:_authToken=
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(npmrcStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf npmc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured npm',
|
||||
);
|
||||
});
|
||||
|
||||
it("generates a pip config with extra-indexes", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
pypi: ["global", "foo", "bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const pipStanza = `cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${user}:@${fakeFrogApi}/pypi/global/simple
|
||||
extra-index-url =
|
||||
https://${user}:@${fakeFrogApi}/pypi/foo/simple
|
||||
https://${user}:@${fakeFrogApi}/pypi/bar/simple
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(pipStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf pipc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured pypi',
|
||||
);
|
||||
});
|
||||
|
||||
it("registers multiple docker repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const dockerStanza = ["foo", "bar", "baz"]
|
||||
.map((r) => `register_docker "${r}.jfrog.io"`)
|
||||
.join("\n");
|
||||
expect(coderScript.script).toContain(dockerStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured docker',
|
||||
);
|
||||
});
|
||||
|
||||
it("sets goproxy with multiple repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
package_managers: JSON.stringify({
|
||||
go: ["foo", "bar", "baz"],
|
||||
}),
|
||||
});
|
||||
const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
|
||||
const proxies = ["foo", "bar", "baz"]
|
||||
.map((r) => `https://${user}:@${fakeFrogApi}/go/${r}`)
|
||||
.join(",");
|
||||
expect(proxyEnv.value).toEqual(proxies);
|
||||
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
expect(coderScript.script).toContain(
|
||||
'jf goc --global --repo-resolve "foo"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured go',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
//TODO add more tests
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12.4"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -53,26 +53,55 @@ variable "configure_code_server" {
|
||||
}
|
||||
|
||||
variable "package_managers" {
|
||||
type = map(string)
|
||||
description = <<EOF
|
||||
A map of package manager names to their respective artifactory repositories.
|
||||
For example:
|
||||
{
|
||||
"npm": "YOUR_NPM_REPO_KEY",
|
||||
"go": "YOUR_GO_REPO_KEY",
|
||||
"pypi": "YOUR_PYPI_REPO_KEY",
|
||||
"docker": "YOUR_DOCKER_REPO_KEY"
|
||||
}
|
||||
EOF
|
||||
type = object({
|
||||
npm = optional(list(string), [])
|
||||
go = optional(list(string), [])
|
||||
pypi = optional(list(string), [])
|
||||
docker = optional(list(string), [])
|
||||
})
|
||||
description = <<-EOF
|
||||
A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
|
||||
For example:
|
||||
{
|
||||
npm = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
|
||||
go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
|
||||
pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
|
||||
docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
locals {
|
||||
# The username field to use for artifactory
|
||||
username = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
|
||||
jfrog_host = replace(var.jfrog_url, "https://", "")
|
||||
username = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
|
||||
jfrog_host = split("://", var.jfrog_url)[1]
|
||||
common_values = {
|
||||
JFROG_URL = var.jfrog_url
|
||||
JFROG_HOST = local.jfrog_host
|
||||
JFROG_SERVER_ID = var.jfrog_server_id
|
||||
ARTIFACTORY_USERNAME = local.username
|
||||
ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email
|
||||
ARTIFACTORY_ACCESS_TOKEN = data.coder_external_auth.jfrog.access_token
|
||||
}
|
||||
npmrc = templatefile(
|
||||
"${path.module}/.npmrc.tftpl",
|
||||
merge(
|
||||
local.common_values,
|
||||
{
|
||||
REPOS = [
|
||||
for r in var.package_managers.npm :
|
||||
strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
|
||||
]
|
||||
}
|
||||
)
|
||||
)
|
||||
pip_conf = templatefile(
|
||||
"${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
|
||||
)
|
||||
}
|
||||
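# Illustration (not part of this change): given package_managers.npm =
# ["global", "@foo:foo"], the for expression above yields
#   [{ SCOPE = "", NAME = "global" }, { SCOPE = "@foo:", NAME = "foo" }]
# and .npmrc.tftpl renders one registry line plus one _authToken line per entry.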
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
data "coder_external_auth" "jfrog" {
|
||||
id = var.external_auth_id
|
||||
@@ -82,19 +111,22 @@ resource "coder_script" "jfrog" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jfrog"
|
||||
icon = "/icon/jfrog.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
JFROG_URL : var.jfrog_url,
|
||||
JFROG_HOST : local.jfrog_host,
|
||||
JFROG_SERVER_ID : var.jfrog_server_id,
|
||||
ARTIFACTORY_USERNAME : local.username,
|
||||
ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
|
||||
ARTIFACTORY_ACCESS_TOKEN : data.coder_external_auth.jfrog.access_token,
|
||||
CONFIGURE_CODE_SERVER : var.configure_code_server,
|
||||
REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
|
||||
REPOSITORY_GO : lookup(var.package_managers, "go", ""),
|
||||
REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
|
||||
REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
|
||||
})
|
||||
script = templatefile("${path.module}/run.sh", merge(
|
||||
local.common_values,
|
||||
{
|
||||
CONFIGURE_CODE_SERVER = var.configure_code_server
|
||||
HAS_NPM = length(var.package_managers.npm) == 0 ? "" : "YES"
|
||||
NPMRC = local.npmrc
|
||||
REPOSITORY_NPM = try(element(var.package_managers.npm, 0), "")
|
||||
HAS_GO = length(var.package_managers.go) == 0 ? "" : "YES"
|
||||
REPOSITORY_GO = try(element(var.package_managers.go, 0), "")
|
||||
HAS_PYPI = length(var.package_managers.pypi) == 0 ? "" : "YES"
|
||||
PIP_CONF = local.pip_conf
|
||||
REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "")
|
||||
HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES"
|
||||
REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
|
||||
}
|
||||
))
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
@@ -120,10 +152,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
|
||||
}
|
||||
|
||||
resource "coder_env" "goproxy" {
|
||||
count = lookup(var.package_managers, "go", "") == "" ? 0 : 1
|
||||
count = length(var.package_managers.go) == 0 ? 0 : 1
|
||||
agent_id = var.agent_id
|
||||
name = "GOPROXY"
|
||||
value = "https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
|
||||
value = join(",", [
|
||||
for repo in var.package_managers.go :
|
||||
"https://${local.username}:${data.coder_external_auth.jfrog.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
|
||||
])
|
||||
}
|
||||
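# Illustrative result (hypothetical repos): with go = ["go", "another-go-repo"],
# GOPROXY is set to two comma-separated authenticated proxy URLs, so the Go
# toolchain falls through to the second repository when the first returns a
# not-found response.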
|
||||
output "access_token" {
|
||||
|
||||
6
jfrog-oauth/pip.conf.tftpl
Normal file
@@ -0,0 +1,6 @@
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
|
||||
extra-index-url =
|
||||
%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
|
||||
https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
|
||||
%{ endfor ~}
|
||||
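
A minimal sketch (hypothetical values, not part of this change) of how the module feeds this template: the first entry of `package_managers.pypi` becomes the `index-url` and the remaining entries become `extra-index-url` lines.

```tf
# Hypothetical values for illustration only.
locals {
  example_pip_conf = templatefile("${path.module}/pip.conf.tftpl", {
    ARTIFACTORY_USERNAME     = "default"
    ARTIFACTORY_ACCESS_TOKEN = "xxx"
    JFROG_HOST               = "example.jfrog.io"
    REPOS                    = ["pypi", "extra-index-pypi"]
  })
}
```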
@@ -2,6 +2,21 @@
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
not_configured() {
|
||||
type=$1
|
||||
echo "🤔 no $type repository is set, skipping $type configuration."
|
||||
echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
|
||||
}
|
||||
|
||||
config_complete() {
|
||||
echo "🥳 Configuration complete!"
|
||||
}
|
||||
|
||||
register_docker() {
|
||||
repo=$1
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
}
|
||||
|
||||
# check if JFrog CLI is already installed
|
||||
if command -v jf > /dev/null 2>&1; then
|
||||
echo "✅ JFrog CLI is already installed, skipping installation."
|
||||
@@ -20,52 +35,47 @@ echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFRO
|
||||
jf c use "${JFROG_SERVER_ID}"
|
||||
|
||||
# Configure npm to use the Artifactory "npm" repository.
|
||||
if [ -z "${REPOSITORY_NPM}" ]; then
|
||||
echo "🤔 no npm repository is set, skipping npm configuration."
|
||||
echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
|
||||
if [ -z "${HAS_NPM}" ]; then
|
||||
not_configured npm
|
||||
else
|
||||
echo "📦 Configuring npm..."
|
||||
jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
|
||||
cat << EOF > ~/.npmrc
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
|
||||
${NPMRC}
|
||||
EOF
|
||||
echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure the `pip` to use the Artifactory "python" repository.
|
||||
if [ -z "${REPOSITORY_PYPI}" ]; then
|
||||
echo "🤔 no pypi repository is set, skipping pip configuration."
|
||||
echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
|
||||
if [ -z "${HAS_PYPI}" ]; then
|
||||
not_configured pypi
|
||||
else
|
||||
echo "📦 Configuring pip..."
|
||||
echo "🐍 Configuring pip..."
|
||||
jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
|
||||
mkdir -p ~/.pip
|
||||
cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
|
||||
${PIP_CONF}
|
||||
EOF
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure Artifactory "go" repository.
|
||||
if [ -z "${REPOSITORY_GO}" ]; then
|
||||
echo "🤔 no go repository is set, skipping go configuration."
|
||||
echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
|
||||
if [ -z "${HAS_GO}" ]; then
|
||||
not_configured go
|
||||
else
|
||||
echo "🐹 Configuring go..."
|
||||
jf goc --global --repo-resolve "${REPOSITORY_GO}"
|
||||
config_complete
|
||||
fi
|
||||
echo "🥳 Configuration complete!"
|
||||
|
||||
# Configure the JFrog CLI to use the Artifactory "docker" repository.
|
||||
if [ -z "${REPOSITORY_DOCKER}" ]; then
|
||||
echo "🤔 no docker repository is set, skipping docker configuration."
|
||||
echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
|
||||
if [ -z "${HAS_DOCKER}" ]; then
|
||||
not_configured docker
|
||||
else
|
||||
if command -v docker > /dev/null 2>&1; then
|
||||
echo "🔑 Configuring 🐳 docker credentials..."
|
||||
mkdir -p ~/.docker
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
${REGISTER_DOCKER}
|
||||
else
|
||||
echo "🤔 no docker is installed, skipping docker configuration."
|
||||
fi
|
||||
@@ -96,20 +106,19 @@ echo "📦 Configuring JFrog CLI completion..."
|
||||
SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
|
||||
# Generate the completion script
|
||||
jf completion $SHELLNAME --install
|
||||
begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)"
|
||||
# Add the completion script to the user's shell profile
|
||||
if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
|
||||
echo "" >> ~/.bashrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.bashrc
|
||||
if ! grep -q "$begin_stanza" ~/.bashrc; then
|
||||
printf "%s\n" "$begin_stanza" >> ~/.bashrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
|
||||
echo "# END: jf CLI shell completion" >> ~/.bashrc
|
||||
else
|
||||
echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
|
||||
fi
|
||||
elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
|
||||
echo "" >> ~/.zshrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-oauth)" >> ~/.zshrc
|
||||
if ! grep -q "$begin_stanza" ~/.zshrc; then
|
||||
printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
|
||||
echo "autoload -Uz compinit" >> ~/.zshrc
|
||||
echo "compinit" >> ~/.zshrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
|
||||
|
||||
5
jfrog-token/.npmrc.tftpl
Normal file
@@ -0,0 +1,5 @@
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
%{ for REPO in REPOS ~}
|
||||
${REPO.SCOPE}registry=${JFROG_URL}/artifactory/api/npm/${REPO.NAME}
|
||||
//${JFROG_HOST}/artifactory/api/npm/${REPO.NAME}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}
|
||||
%{ endfor ~}
|
||||
@@ -15,14 +15,15 @@ Install the JF CLI and authenticate package managers with Artifactory using Arti
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.10"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm", "@scoped:npm-scoped"]
|
||||
go = ["go", "another-go-repo"]
|
||||
pypi = ["pypi", "extra-index-pypi"]
|
||||
docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -41,14 +42,14 @@ For detailed instructions, please see this [guide](https://coder.com/docs/v2/lat
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.10"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://YYYY.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token # An admin access token
|
||||
package_managers = {
|
||||
"npm" : "npm-local",
|
||||
"go" : "go-local",
|
||||
"pypi" : "pypi-local"
|
||||
npm = ["npm-local"]
|
||||
go = ["go-local"]
|
||||
pypi = ["pypi-local"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -74,15 +75,15 @@ The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extensio
|
||||
```tf
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.10"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
configure_code_server = true # Add JFrog extension configuration for code-server
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm"]
|
||||
go = ["go"]
|
||||
pypi = ["pypi"]
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -94,15 +95,13 @@ data "coder_workspace" "me" {}
|
||||
|
||||
module "jfrog" {
|
||||
source = "registry.coder.com/modules/jfrog-token/coder"
|
||||
version = "1.0.10"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
jfrog_url = "https://XXXX.jfrog.io"
|
||||
artifactory_access_token = var.artifactory_access_token
|
||||
token_description = "Token for Coder workspace: ${data.coder_workspace.me.owner}/${data.coder_workspace.me.name}"
|
||||
token_description = "Token for Coder workspace: ${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}"
|
||||
package_managers = {
|
||||
"npm" : "npm",
|
||||
"go" : "go",
|
||||
"pypi" : "pypi"
|
||||
npm = ["npm"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,12 +1,29 @@
|
||||
import { serve } from "bun";
|
||||
import { describe } from "bun:test";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
createJSONResponse,
|
||||
findResourceInstance,
|
||||
runTerraformInit,
|
||||
runTerraformApply,
|
||||
testRequiredVariables,
|
||||
} from "../test";
|
||||
|
||||
describe("jfrog-token", async () => {
|
||||
type TestVariables = {
|
||||
agent_id: string;
|
||||
jfrog_url: string;
|
||||
artifactory_access_token: string;
|
||||
package_managers: string;
|
||||
|
||||
token_description?: string;
|
||||
check_license?: boolean;
|
||||
refreshable?: boolean;
|
||||
expires_in?: number;
|
||||
username_field?: string;
|
||||
jfrog_server_id?: string;
|
||||
configure_code_server?: boolean;
|
||||
};
|
||||
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
// Run a fake JFrog server so the provider can initialize
|
||||
@@ -32,10 +49,116 @@ describe("jfrog-token", async () => {
|
||||
port: 0,
|
||||
});
|
||||
|
||||
testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: "http://" + fakeFrogHost.hostname + ":" + fakeFrogHost.port,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: "{}",
|
||||
const fakeFrogApi = `${fakeFrogHost.hostname}:${fakeFrogHost.port}/artifactory/api`;
|
||||
const fakeFrogUrl = `http://${fakeFrogHost.hostname}:${fakeFrogHost.port}`;
|
||||
const user = "default";
|
||||
const token = "xxx";
|
||||
|
||||
it("can run apply with required variables", async () => {
|
||||
testRequiredVariables<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: "{}",
|
||||
});
|
||||
});
|
||||
|
||||
it("generates an npmrc with scoped repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
npm: ["global", "@foo:foo", "@bar:bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const npmrcStanza = `cat << EOF > ~/.npmrc
|
||||
email=${user}@example.com
|
||||
registry=http://${fakeFrogApi}/npm/global
|
||||
//${fakeFrogApi}/npm/global/:_authToken=xxx
|
||||
@foo:registry=http://${fakeFrogApi}/npm/foo
|
||||
//${fakeFrogApi}/npm/foo/:_authToken=xxx
|
||||
@bar:registry=http://${fakeFrogApi}/npm/bar
|
||||
//${fakeFrogApi}/npm/bar/:_authToken=xxx
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(npmrcStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf npmc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured npm',
|
||||
);
|
||||
});
|
||||
|
||||
it("generates a pip config with extra-indexes", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
pypi: ["global", "foo", "bar"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const pipStanza = `cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${user}:${token}@${fakeFrogApi}/pypi/global/simple
|
||||
extra-index-url =
|
||||
https://${user}:${token}@${fakeFrogApi}/pypi/foo/simple
|
||||
https://${user}:${token}@${fakeFrogApi}/pypi/bar/simple
|
||||
|
||||
EOF`;
|
||||
expect(coderScript.script).toContain(pipStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'jf pipc --global --repo-resolve "global"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured pypi',
|
||||
);
|
||||
});
|
||||
|
||||
it("registers multiple docker repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
docker: ["foo.jfrog.io", "bar.jfrog.io", "baz.jfrog.io"],
|
||||
}),
|
||||
});
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
const dockerStanza = ["foo", "bar", "baz"]
|
||||
.map((r) => `register_docker "${r}.jfrog.io"`)
|
||||
.join("\n");
|
||||
expect(coderScript.script).toContain(dockerStanza);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured docker',
|
||||
);
|
||||
});
|
||||
|
||||
it("sets goproxy with multiple repos", async () => {
|
||||
const state = await runTerraformApply<TestVariables>(import.meta.dir, {
|
||||
agent_id: "some-agent-id",
|
||||
jfrog_url: fakeFrogUrl,
|
||||
artifactory_access_token: "XXXX",
|
||||
package_managers: JSON.stringify({
|
||||
go: ["foo", "bar", "baz"],
|
||||
}),
|
||||
});
|
||||
const proxyEnv = findResourceInstance(state, "coder_env", "goproxy");
|
||||
const proxies = ["foo", "bar", "baz"]
|
||||
.map((r) => `https://${user}:${token}@${fakeFrogApi}/go/${r}`)
|
||||
.join(",");
|
||||
expect(proxyEnv.value).toEqual(proxies);
|
||||
|
||||
const coderScript = findResourceInstance(state, "coder_script");
|
||||
expect(coderScript.script).toContain(
|
||||
'jf goc --global --repo-resolve "foo"',
|
||||
);
|
||||
expect(coderScript.script).toContain(
|
||||
'if [ -z "YES" ]; then\n not_configured go',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.12.4"
|
||||
version = ">= 0.23"
|
||||
}
|
||||
artifactory = {
|
||||
source = "registry.terraform.io/jfrog/artifactory"
|
||||
@@ -80,23 +80,51 @@ variable "configure_code_server" {
|
||||
}
|
||||
|
||||
variable "package_managers" {
|
||||
type = map(string)
|
||||
description = <<EOF
|
||||
A map of package manager names to their respective artifactory repositories.
|
||||
For example:
|
||||
{
|
||||
"npm": "YOUR_NPM_REPO_KEY",
|
||||
"go": "YOUR_GO_REPO_KEY",
|
||||
"pypi": "YOUR_PYPI_REPO_KEY",
|
||||
"docker": "YOUR_DOCKER_REPO_KEY"
|
||||
}
|
||||
EOF
|
||||
type = object({
|
||||
npm = optional(list(string), [])
|
||||
go = optional(list(string), [])
|
||||
pypi = optional(list(string), [])
|
||||
docker = optional(list(string), [])
|
||||
})
|
||||
description = <<-EOF
|
||||
A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted.
|
||||
For example:
|
||||
{
|
||||
npm = ["GLOBAL_NPM_REPO_KEY", "@SCOPED:NPM_REPO_KEY"]
|
||||
go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"]
|
||||
pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"]
|
||||
docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"]
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
locals {
|
||||
# The username field to use for artifactory
|
||||
username = var.username_field == "email" ? data.coder_workspace.me.owner_email : data.coder_workspace.me.owner
|
||||
jfrog_host = replace(var.jfrog_url, "https://", "")
|
||||
username = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name
|
||||
jfrog_host = split("://", var.jfrog_url)[1]
|
||||
common_values = {
|
||||
JFROG_URL = var.jfrog_url
|
||||
JFROG_HOST = local.jfrog_host
|
||||
JFROG_SERVER_ID = var.jfrog_server_id
|
||||
ARTIFACTORY_USERNAME = local.username
|
||||
ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email
|
||||
ARTIFACTORY_ACCESS_TOKEN = artifactory_scoped_token.me.access_token
|
||||
}
|
||||
npmrc = templatefile(
|
||||
"${path.module}/.npmrc.tftpl",
|
||||
merge(
|
||||
local.common_values,
|
||||
{
|
||||
REPOS = [
|
||||
for r in var.package_managers.npm :
|
||||
strcontains(r, ":") ? zipmap(["SCOPE", "NAME"], ["${split(":", r)[0]}:", split(":", r)[1]]) : { SCOPE = "", NAME = r }
|
||||
]
|
||||
}
|
||||
)
|
||||
)
|
||||
pip_conf = templatefile(
|
||||
"${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi })
|
||||
)
|
||||
}
|
||||
|
||||
# Configure the Artifactory provider
|
||||
@@ -117,24 +145,28 @@ resource "artifactory_scoped_token" "me" {
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_script" "jfrog" {
|
||||
agent_id = var.agent_id
|
||||
display_name = "jfrog"
|
||||
icon = "/icon/jfrog.svg"
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
JFROG_URL : var.jfrog_url,
|
||||
JFROG_HOST : local.jfrog_host,
|
||||
JFROG_SERVER_ID : var.jfrog_server_id,
|
||||
ARTIFACTORY_USERNAME : local.username,
|
||||
ARTIFACTORY_EMAIL : data.coder_workspace.me.owner_email,
|
||||
ARTIFACTORY_ACCESS_TOKEN : artifactory_scoped_token.me.access_token,
|
||||
CONFIGURE_CODE_SERVER : var.configure_code_server,
|
||||
REPOSITORY_NPM : lookup(var.package_managers, "npm", ""),
|
||||
REPOSITORY_GO : lookup(var.package_managers, "go", ""),
|
||||
REPOSITORY_PYPI : lookup(var.package_managers, "pypi", ""),
|
||||
REPOSITORY_DOCKER : lookup(var.package_managers, "docker", ""),
|
||||
})
|
||||
script = templatefile("${path.module}/run.sh", merge(
|
||||
local.common_values,
|
||||
{
|
||||
CONFIGURE_CODE_SERVER = var.configure_code_server
|
||||
HAS_NPM = length(var.package_managers.npm) == 0 ? "" : "YES"
|
||||
NPMRC = local.npmrc
|
||||
REPOSITORY_NPM = try(element(var.package_managers.npm, 0), "")
|
||||
HAS_GO = length(var.package_managers.go) == 0 ? "" : "YES"
|
||||
REPOSITORY_GO = try(element(var.package_managers.go, 0), "")
|
||||
HAS_PYPI = length(var.package_managers.pypi) == 0 ? "" : "YES"
|
||||
PIP_CONF = local.pip_conf
|
||||
REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "")
|
||||
HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES"
|
||||
REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker))
|
||||
}
|
||||
))
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
@@ -160,10 +192,13 @@ resource "coder_env" "jfrog_ide_store_connection" {
|
||||
}
|
||||
|
||||
resource "coder_env" "goproxy" {
|
||||
count = lookup(var.package_managers, "go", "") == "" ? 0 : 1
|
||||
count = length(var.package_managers.go) == 0 ? 0 : 1
|
||||
agent_id = var.agent_id
|
||||
name = "GOPROXY"
|
||||
value = "https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${lookup(var.package_managers, "go", "")}"
|
||||
value = join(",", [
|
||||
for repo in var.package_managers.go :
|
||||
"https://${local.username}:${artifactory_scoped_token.me.access_token}@${local.jfrog_host}/artifactory/api/go/${repo}"
|
||||
])
|
||||
}
|
||||
|
||||
output "access_token" {
|
||||
|
||||
6
jfrog-token/pip.conf.tftpl
Normal file
@@ -0,0 +1,6 @@
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${try(element(REPOS, 0), "")}/simple
|
||||
extra-index-url =
|
||||
%{ for REPO in try(slice(REPOS, 1, length(REPOS)), []) ~}
|
||||
https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPO}/simple
|
||||
%{ endfor ~}
|
||||
@@ -2,6 +2,21 @@
|
||||
|
||||
BOLD='\033[0;1m'
|
||||
|
||||
not_configured() {
|
||||
type=$1
|
||||
echo "🤔 no $type repository is set, skipping $type configuration."
|
||||
echo "You can configure a $type repository by providing a key for '$type' in the 'package_managers' input."
|
||||
}
|
||||
|
||||
config_complete() {
|
||||
echo "🥳 Configuration complete!"
|
||||
}
|
||||
|
||||
register_docker() {
|
||||
repo=$1
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login "$repo" --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
}
|
||||
|
||||
# check if JFrog CLI is already installed
|
||||
if command -v jf > /dev/null 2>&1; then
|
||||
echo "✅ JFrog CLI is already installed, skipping installation."
|
||||
@@ -11,8 +26,7 @@ else
|
||||
sudo chmod 755 /usr/local/bin/jf
|
||||
fi
|
||||
|
||||
# The jf CLI checks $CI when determining whether to use interactive
|
||||
# flows.
|
||||
# The jf CLI checks $CI when determining whether to use interactive flows.
|
||||
export CI=true
|
||||
# Authenticate JFrog CLI with Artifactory.
|
||||
echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}"
|
||||
@@ -20,52 +34,47 @@ echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFRO
|
||||
jf c use "${JFROG_SERVER_ID}"
|
||||
|
||||
# Configure npm to use the Artifactory "npm" repository.
|
||||
if [ -z "${REPOSITORY_NPM}" ]; then
|
||||
echo "🤔 no npm repository is set, skipping npm configuration."
|
||||
echo "You can configure an npm repository by providing the a key for 'npm' in the 'package_managers' input."
|
||||
if [ -z "${HAS_NPM}" ]; then
|
||||
not_configured npm
|
||||
else
|
||||
echo "📦 Configuring npm..."
|
||||
jf npmc --global --repo-resolve "${REPOSITORY_NPM}"
|
||||
cat << EOF > ~/.npmrc
|
||||
email=${ARTIFACTORY_EMAIL}
|
||||
registry=${JFROG_URL}/artifactory/api/npm/${REPOSITORY_NPM}
|
||||
${NPMRC}
|
||||
EOF
|
||||
echo "//${JFROG_HOST}/artifactory/api/npm/${REPOSITORY_NPM}/:_authToken=${ARTIFACTORY_ACCESS_TOKEN}" >> ~/.npmrc
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure the `pip` to use the Artifactory "python" repository.
|
||||
if [ -z "${REPOSITORY_PYPI}" ]; then
|
||||
echo "🤔 no pypi repository is set, skipping pip configuration."
|
||||
echo "You can configure a pypi repository by providing the a key for 'pypi' in the 'package_managers' input."
|
||||
if [ -z "${HAS_PYPI}" ]; then
|
||||
not_configured pypi
|
||||
else
|
||||
echo "🐍 Configuring pip..."
|
||||
jf pipc --global --repo-resolve "${REPOSITORY_PYPI}"
|
||||
mkdir -p ~/.pip
|
||||
cat << EOF > ~/.pip/pip.conf
|
||||
[global]
|
||||
index-url = https://${ARTIFACTORY_USERNAME}:${ARTIFACTORY_ACCESS_TOKEN}@${JFROG_HOST}/artifactory/api/pypi/${REPOSITORY_PYPI}/simple
|
||||
${PIP_CONF}
|
||||
EOF
|
||||
config_complete
|
||||
fi
|
||||
|
||||
# Configure Artifactory "go" repository.
|
||||
if [ -z "${REPOSITORY_GO}" ]; then
|
||||
echo "🤔 no go repository is set, skipping go configuration."
|
||||
echo "You can configure a go repository by providing the a key for 'go' in the 'package_managers' input."
|
||||
if [ -z "${HAS_GO}" ]; then
|
||||
not_configured go
|
||||
else
|
||||
echo "🐹 Configuring go..."
|
||||
jf goc --global --repo-resolve "${REPOSITORY_GO}"
|
||||
config_complete
|
||||
fi
|
||||
echo "🥳 Configuration complete!"
|
||||
|
||||
# Configure the JFrog CLI to use the Artifactory "docker" repository.
|
||||
if [ -z "${REPOSITORY_DOCKER}" ]; then
|
||||
echo "🤔 no docker repository is set, skipping docker configuration."
|
||||
echo "You can configure a docker repository by providing the a key for 'docker' in the 'package_managers' input."
|
||||
if [ -z "${HAS_DOCKER}" ]; then
|
||||
not_configured docker
|
||||
else
|
||||
if command -v docker > /dev/null 2>&1; then
|
||||
echo "🔑 Configuring 🐳 docker credentials..."
|
||||
mkdir -p ~/.docker
|
||||
echo -n "${ARTIFACTORY_ACCESS_TOKEN}" | docker login ${JFROG_HOST} --username ${ARTIFACTORY_USERNAME} --password-stdin
|
||||
${REGISTER_DOCKER}
|
||||
else
|
||||
echo "🤔 no docker is installed, skipping docker configuration."
|
||||
fi
|
||||
@@ -96,20 +105,19 @@ echo "📦 Configuring JFrog CLI completion..."
|
||||
SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}')
|
||||
# Generate the completion script
|
||||
jf completion $SHELLNAME --install
|
||||
begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-token)"
|
||||
# Add the completion script to the user's shell profile
|
||||
if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.bashrc; then
|
||||
echo "" >> ~/.bashrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.bashrc
|
||||
if ! grep -q "$begin_stanza" ~/.bashrc; then
|
||||
printf "%s\n" "$begin_stanza" >> ~/.bashrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc
|
||||
echo "# END: jf CLI shell completion" >> ~/.bashrc
|
||||
else
|
||||
echo "🥳 ~/.bashrc already contains jf CLI shell completion configuration, skipping."
|
||||
fi
|
||||
elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then
|
||||
if ! grep -q "# jf CLI shell completion" ~/.zshrc; then
|
||||
echo "" >> ~/.zshrc
|
||||
echo "# BEGIN: jf CLI shell completion (added by coder module jfrog-token)" >> ~/.zshrc
|
||||
if ! grep -q "$begin_stanza" ~/.zshrc; then
|
||||
printf "\n%s\n" "$begin_stanza" >> ~/.zshrc
|
||||
echo "autoload -Uz compinit" >> ~/.zshrc
|
||||
echo "compinit" >> ~/.zshrc
|
||||
echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc
|
||||
|
||||
@@ -15,8 +15,9 @@ A module that adds Jupyter Notebook in your Coder template.
|
||||
|
||||
```tf
|
||||
module "jupyter-notebook" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jupyter-notebook/coder"
|
||||
version = "1.0.8"
|
||||
version = "1.0.19"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -7,14 +7,14 @@ printf "$${BOLD}Installing jupyter-notebook!\n"
|
||||
# check if jupyter-notebook is installed
|
||||
if ! command -v jupyter-notebook > /dev/null 2>&1; then
|
||||
# install jupyter-notebook
|
||||
# check if python3 pip is installed
|
||||
if ! command -v pip3 > /dev/null 2>&1; then
|
||||
echo "pip3 is not installed"
|
||||
echo "Please install pip3 in your Dockerfile/VM image before running this script"
|
||||
# check if pipx is installed
|
||||
if ! command -v pipx > /dev/null 2>&1; then
|
||||
echo "pipx is not installed"
|
||||
echo "Please install pipx in your Dockerfile/VM image before using this module"
|
||||
exit 1
|
||||
fi
|
||||
# install jupyter-notebook
|
||||
pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyter
|
||||
# install jupyter notebook
|
||||
pipx install -q notebook
|
||||
echo "🥳 jupyter-notebook has been installed\n\n"
|
||||
else
|
||||
echo "🥳 jupyter-notebook is already installed\n\n"
|
||||
@@ -22,4 +22,4 @@ fi
|
||||
|
||||
echo "👷 Starting jupyter-notebook in background..."
|
||||
echo "check logs at ${LOG_PATH}"
|
||||
$HOME/.local/bin/jupyter notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
||||
$HOME/.local/bin/jupyter-notebook --NotebookApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
|
||||
|
||||
@@ -15,8 +15,9 @@ A module that adds JupyterLab in your Coder template.
|
||||
|
||||
```tf
|
||||
module "jupyterlab" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jupyterlab/coder"
|
||||
version = "1.0.8"
|
||||
version = "1.0.23"
|
||||
agent_id = coder_agent.example.id
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import {
|
||||
execContainer,
|
||||
executeScriptInContainer,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
runTerraformApply,
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
findResourceInstance,
|
||||
runContainer,
|
||||
TerraformState,
|
||||
execContainer,
|
||||
type TerraformState,
|
||||
} from "../test";
|
||||
|
||||
// executes the coder script after installing pip
|
||||
const executeScriptInContainerWithPip = async (
|
||||
state: TerraformState,
|
||||
image: string,
|
||||
shell: string = "sh",
|
||||
shell = "sh",
|
||||
): Promise<{
|
||||
exitCode: number;
|
||||
stdout: string[];
|
||||
@@ -22,7 +22,7 @@ const executeScriptInContainerWithPip = async (
|
||||
}> => {
|
||||
const instance = findResourceInstance(state, "coder_script");
|
||||
const id = await runContainer(image);
|
||||
const respPip = await execContainer(id, [shell, "-c", "apk add py3-pip"]);
|
||||
const respPipx = await execContainer(id, [shell, "-c", "apk add pipx"]);
|
||||
const resp = await execContainer(id, [shell, "-c", instance.script]);
|
||||
const stdout = resp.stdout.trim().split("\n");
|
||||
const stderr = resp.stderr.trim().split("\n");
|
||||
@@ -40,7 +40,7 @@ describe("jupyterlab", async () => {
|
||||
agent_id: "foo",
|
||||
});
|
||||
|
||||
it("fails without pip3", async () => {
|
||||
it("fails without pipx", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
});
|
||||
@@ -48,14 +48,14 @@ describe("jupyterlab", async () => {
|
||||
expect(output.exitCode).toBe(1);
|
||||
expect(output.stdout).toEqual([
|
||||
"\u001B[0;1mInstalling jupyterlab!",
|
||||
"pip3 is not installed",
|
||||
"Please install pip3 in your Dockerfile/VM image before running this script",
|
||||
"pipx is not installed",
|
||||
"Please install pipx in your Dockerfile/VM image before running this script",
|
||||
]);
|
||||
});
|
||||
|
||||
// TODO: Add faster test to run with pip3.
|
||||
// TODO: Add faster test to run with pipx.
|
||||
// currently times out.
|
||||
// it("runs with pip3", async () => {
|
||||
// it("runs with pipx", async () => {
|
||||
// ...
|
||||
// const output = await executeScriptInContainerWithPip(state, "alpine");
|
||||
// ...
|
||||
|
||||
@@ -9,6 +9,9 @@ terraform {
|
||||
}
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
# Add required variables for your modules and remove any unneeded variables
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
@@ -36,6 +39,12 @@ variable "share" {
|
||||
}
|
||||
}
|
||||
|
||||
variable "subdomain" {
|
||||
type = bool
|
||||
description = "Determines whether JupyterLab will be accessed via its own subdomain or whether it will be accessed via a path on Coder."
|
||||
default = true
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
@@ -49,17 +58,18 @@ resource "coder_script" "jupyterlab" {
|
||||
script = templatefile("${path.module}/run.sh", {
|
||||
LOG_PATH : var.log_path,
|
||||
PORT : var.port
|
||||
BASE_URL : var.subdomain ? "" : "/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
|
||||
})
|
||||
run_on_start = true
|
||||
}
|
||||
|
||||
resource "coder_app" "jupyterlab" {
|
||||
agent_id = var.agent_id
|
||||
slug = "jupyterlab"
|
||||
slug = "jupyterlab" # sync with the usage in URL
|
||||
display_name = "JupyterLab"
|
||||
url = "http://localhost:${var.port}"
|
||||
url = var.subdomain ? "http://localhost:${var.port}" : "http://localhost:${var.port}/@${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}/apps/jupyterlab"
|
||||
icon = "/icon/jupyter.svg"
|
||||
subdomain = true
|
||||
subdomain = var.subdomain
|
||||
share = var.share
|
||||
order = var.order
|
||||
}
|
||||
|
||||
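
For deployments without wildcard subdomains, the new `subdomain` input lets JupyterLab be served from a path on Coder instead. A hedged sketch of the corresponding module call (source and version as in the README above):

```tf
module "jupyterlab" {
  count     = data.coder_workspace.me.start_count
  source    = "registry.coder.com/modules/jupyterlab/coder"
  version   = "1.0.23"
  agent_id  = coder_agent.example.id
  subdomain = false # served at /@<owner>/<workspace>/apps/jupyterlab instead of a subdomain
}
```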
@@ -1,25 +1,35 @@
#!/usr/bin/env sh

if [ -n "${BASE_URL}" ]; then
BASE_URL_FLAG="--ServerApp.base_url=${BASE_URL}"
fi

BOLD='\033[0;1m'

printf "$${BOLD}Installing jupyterlab!\n"

# check if jupyterlab is installed
if ! command -v jupyterlab > /dev/null 2>&1; then
if ! command -v jupyter-lab > /dev/null 2>&1; then
# install jupyterlab
# check if python3 pip is installed
if ! command -v pip3 > /dev/null 2>&1; then
echo "pip3 is not installed"
echo "Please install pip3 in your Dockerfile/VM image before running this script"
# check if pipx is installed
if ! command -v pipx > /dev/null 2>&1; then
echo "pipx is not installed"
echo "Please install pipx in your Dockerfile/VM image before running this script"
exit 1
fi
# install jupyterlab
pip3 install --upgrade --no-cache-dir --no-warn-script-location jupyterlab
echo "🥳 jupyterlab has been installed\n\n"
pipx install -q jupyterlab
printf "%s\n\n" "🥳 jupyterlab has been installed"
else
echo "🥳 jupyterlab is already installed\n\n"
printf "%s\n\n" "🥳 jupyterlab is already installed"
fi

echo "👷 Starting jupyterlab in background..."
echo "check logs at ${LOG_PATH}"
$HOME/.local/bin/jupyter lab --ServerApp.ip='0.0.0.0' --ServerApp.port=${PORT} --no-browser --ServerApp.token='' --ServerApp.password='' > ${LOG_PATH} 2>&1 &
printf "👷 Starting jupyterlab in background..."
printf "check logs at ${LOG_PATH}"
$HOME/.local/bin/jupyter-lab --no-browser \
"$BASE_URL_FLAG" \
--ServerApp.ip='*' \
--ServerApp.port="${PORT}" \
--ServerApp.token='' \
--ServerApp.password='' \
> "${LOG_PATH}" 2>&1 &
24
kasmvnc/README.md
Normal file
@@ -0,0 +1,24 @@
---
display_name: KasmVNC
description: A modern open source VNC server
icon: ../.icons/kasmvnc.svg
maintainer_github: coder
verified: true
tags: [helper, vnc, desktop]
---

# KasmVNC

Automatically install [KasmVNC](https://kasmweb.com/kasmvnc) in a workspace, and create an app to access it via the dashboard.

```tf
module "kasmvnc" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/kasmvnc/coder"
version = "1.0.23"
agent_id = coder_agent.example.id
desktop_environment = "xfce"
}
```

> **Note:** This module only works on workspaces with a pre-installed desktop environment. As an example base image, you can use the `codercom/enterprise-desktop` image.
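To make the note concrete, here is a hedged sketch of pairing the module with that image in a Docker-based Coder template. It assumes the `kreuzwerker/docker` provider and a conventional agent/container layout; every name below is illustrative rather than taken from this repository:

```tf
# Sketch only: a workspace container built from an image that already ships a
# desktop environment, so the KasmVNC module has something to serve.
resource "docker_container" "workspace" {
  count   = data.coder_workspace.me.start_count
  image   = "codercom/enterprise-desktop:latest" # assumed tag
  name    = "coder-${data.coder_workspace.me.id}"
  command = ["sh", "-c", coder_agent.example.init_script]
  env     = ["CODER_AGENT_TOKEN=${coder_agent.example.token}"]
}
```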
37
kasmvnc/main.test.ts
Normal file
@@ -0,0 +1,37 @@
import { describe, expect, it } from "bun:test";
import {
runTerraformApply,
runTerraformInit,
testRequiredVariables,
} from "../test";

const allowedDesktopEnvs = ["xfce", "kde", "gnome", "lxde", "lxqt"] as const;
type AllowedDesktopEnv = (typeof allowedDesktopEnvs)[number];

type TestVariables = Readonly<{
agent_id: string;
desktop_environment: AllowedDesktopEnv;
port?: string;
kasm_version?: string;
}>;

describe("Kasm VNC", async () => {
await runTerraformInit(import.meta.dir);
testRequiredVariables<TestVariables>(import.meta.dir, {
agent_id: "foo",
desktop_environment: "gnome",
});

it("Successfully installs for all expected Kasm desktop versions", async () => {
for (const v of allowedDesktopEnvs) {
const applyWithEnv = () => {
runTerraformApply<TestVariables>(import.meta.dir, {
agent_id: "foo",
desktop_environment: v,
});
};

expect(applyWithEnv).not.toThrow();
}
});
});
63
kasmvnc/main.tf
Normal file
@@ -0,0 +1,63 @@
terraform {
required_version = ">= 1.0"

required_providers {
coder = {
source = "coder/coder"
version = ">= 0.12"
}
}
}

variable "agent_id" {
type = string
description = "The ID of a Coder agent."
}

variable "port" {
type = number
description = "The port to run KasmVNC on."
default = 6800
}

variable "kasm_version" {
type = string
description = "Version of KasmVNC to install."
default = "1.3.2"
}

variable "desktop_environment" {
type = string
description = "Specifies the desktop environment of the workspace. This should be pre-installed on the workspace."
validation {
condition = contains(["xfce", "kde", "gnome", "lxde", "lxqt"], var.desktop_environment)
error_message = "Invalid desktop environment. Please specify a valid desktop environment."
}
}

resource "coder_script" "kasm_vnc" {
agent_id = var.agent_id
display_name = "KasmVNC"
icon = "/icon/kasmvnc.svg"
script = templatefile("${path.module}/run.sh", {
PORT : var.port,
DESKTOP_ENVIRONMENT : var.desktop_environment,
KASM_VERSION : var.kasm_version
})
run_on_start = true
}

resource "coder_app" "kasm_vnc" {
agent_id = var.agent_id
slug = "kasm-vnc"
display_name = "kasmVNC"
url = "http://localhost:${var.port}"
icon = "/icon/kasmvnc.svg"
subdomain = true
share = "owner"
healthcheck {
url = "http://localhost:${var.port}/app"
interval = 5
threshold = 5
}
}
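For reference, a usage sketch that exercises the optional variables defined above; the source and version match the README earlier in this diff, while the overridden values are purely illustrative:

```tf
module "kasmvnc" {
  count               = data.coder_workspace.me.start_count
  source              = "registry.coder.com/modules/kasmvnc/coder"
  version             = "1.0.23"
  agent_id            = coder_agent.example.id
  desktop_environment = "xfce"
  port                = 6900    # override the default of 6800
  kasm_version        = "1.3.2" # pin the KasmVNC release to install
}
```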
235
kasmvnc/run.sh
Normal file
@@ -0,0 +1,235 @@
#!/usr/bin/env bash

# Exit on error, undefined variables, and pipe failures
set -euo pipefail

# Function to check if vncserver is already installed
check_installed() {
if command -v vncserver &> /dev/null; then
echo "vncserver is already installed."
return 0 # Don't exit, just indicate it's installed
else
return 1 # Indicates not installed
fi
}

# Function to download a file using wget, curl, or busybox as a fallback
download_file() {
local url="$1"
local output="$2"
local download_tool

if command -v curl &> /dev/null; then
# shellcheck disable=SC2034
download_tool=(curl -fsSL)
elif command -v wget &> /dev/null; then
# shellcheck disable=SC2034
download_tool=(wget -q -O-)
elif command -v busybox &> /dev/null; then
# shellcheck disable=SC2034
download_tool=(busybox wget -O-)
else
echo "ERROR: No download tool available (curl, wget, or busybox required)"
exit 1
fi

# shellcheck disable=SC2288
"$${download_tool[@]}" "$url" > "$output" || {
echo "ERROR: Failed to download $url"
exit 1
}
}

# Function to install kasmvncserver for debian-based distros
install_deb() {
local url=$1
local kasmdeb="/tmp/kasmvncserver.deb"

download_file "$url" "$kasmdeb"

CACHE_DIR="/var/lib/apt/lists/partial"
# Check if the directory exists and was modified in the last 60 minutes
if [[ ! -d "$CACHE_DIR" ]] || ! find "$CACHE_DIR" -mmin -60 -print -quit &> /dev/null; then
echo "Stale package cache, updating..."
# Update package cache with a 300-second timeout for dpkg lock
sudo apt-get -o DPkg::Lock::Timeout=300 -qq update
fi

DEBIAN_FRONTEND=noninteractive sudo apt-get -o DPkg::Lock::Timeout=300 install --yes -qq --no-install-recommends --no-install-suggests "$kasmdeb"
rm "$kasmdeb"
}

# Function to install kasmvncserver for rpm-based distros
install_rpm() {
local url=$1
local kasmrpm="/tmp/kasmvncserver.rpm"
local package_manager

if command -v dnf &> /dev/null; then
# shellcheck disable=SC2034
package_manager=(dnf localinstall -y)
elif command -v zypper &> /dev/null; then
# shellcheck disable=SC2034
package_manager=(zypper install -y)
elif command -v yum &> /dev/null; then
# shellcheck disable=SC2034
package_manager=(yum localinstall -y)
elif command -v rpm &> /dev/null; then
# Do we need to manually handle missing dependencies?
# shellcheck disable=SC2034
package_manager=(rpm -i)
else
echo "ERROR: No supported package manager available (dnf, zypper, yum, or rpm required)"
exit 1
fi

download_file "$url" "$kasmrpm"

# shellcheck disable=SC2288
sudo "$${package_manager[@]}" "$kasmrpm" || {
echo "ERROR: Failed to install $kasmrpm"
exit 1
}

rm "$kasmrpm"
}

# Function to install kasmvncserver for Alpine Linux
install_alpine() {
local url=$1
local kasmtgz="/tmp/kasmvncserver.tgz"

download_file "$url" "$kasmtgz"

tar -xzf "$kasmtgz" -C /usr/local/bin/
rm "$kasmtgz"
}

# Detect system information
if [[ ! -f /etc/os-release ]]; then
echo "ERROR: Cannot detect OS: /etc/os-release not found"
exit 1
fi

# shellcheck disable=SC1091
source /etc/os-release
distro="$ID"
distro_version="$VERSION_ID"
codename="$VERSION_CODENAME"
arch="$(uname -m)"
if [[ "$ID" == "ol" ]]; then
distro="oracle"
distro_version="$${distro_version%%.*}"
elif [[ "$ID" == "fedora" ]]; then
distro_version="$(grep -oP '\(\K[\w ]+' /etc/fedora-release | tr '[:upper:]' '[:lower:]' | tr -d ' ')"
fi

echo "Detected Distribution: $distro"
echo "Detected Version: $distro_version"
echo "Detected Codename: $codename"
echo "Detected Architecture: $arch"

# Map arch to package arch
case "$arch" in
x86_64)
if [[ "$distro" =~ ^(ubuntu|debian|kali)$ ]]; then
arch="amd64"
fi
;;
aarch64)
if [[ "$distro" =~ ^(ubuntu|debian|kali)$ ]]; then
arch="arm64"
fi
;;
arm64)
: # This is effectively a noop
;;
*)
echo "ERROR: Unsupported architecture: $arch"
exit 1
;;
esac

# Check if vncserver is installed, and install if not
if ! check_installed; then
# Check for NOPASSWD sudo (required)
if ! command -v sudo &> /dev/null || ! sudo -n true 2> /dev/null; then
echo "ERROR: sudo NOPASSWD access required!"
exit 1
fi

base_url="https://github.com/kasmtech/KasmVNC/releases/download/v${KASM_VERSION}"

echo "Installing KASM version: ${KASM_VERSION}"
case $distro in
ubuntu | debian | kali)
bin_name="kasmvncserver_$${codename}_${KASM_VERSION}_$${arch}.deb"
install_deb "$base_url/$bin_name"
;;
oracle | fedora | opensuse)
bin_name="kasmvncserver_$${distro}_$${distro_version}_${KASM_VERSION}_$${arch}.rpm"
install_rpm "$base_url/$bin_name"
;;
alpine)
bin_name="kasmvnc.alpine_$${distro_version//./}_$${arch}.tgz"
install_alpine "$base_url/$bin_name"
;;
*)
echo "Unsupported distribution: $distro"
exit 1
;;
esac
else
echo "vncserver already installed. Skipping installation."
fi

if command -v sudo &> /dev/null && sudo -n true 2> /dev/null; then
kasm_config_file="/etc/kasmvnc/kasmvnc.yaml"
SUDO=sudo
else
kasm_config_file="$HOME/.vnc/kasmvnc.yaml"
SUDO=

echo "WARNING: Sudo access not available, using user config dir!"

if [[ -f "$kasm_config_file" ]]; then
echo "WARNING: Custom user KasmVNC config exists, not overwriting!"
echo "WARNING: Ensure that you manually configure the appropriate settings."
kasm_config_file="/dev/stderr"
else
echo "WARNING: This may prevent custom user KasmVNC settings from applying!"
mkdir -p "$HOME/.vnc"
fi
fi

echo "Writing KasmVNC config to $kasm_config_file"
$SUDO tee "$kasm_config_file" > /dev/null << EOF
network:
protocol: http
websocket_port: ${PORT}
ssl:
require_ssl: false
pem_certificate:
pem_key:
udp:
public_ip: 127.0.0.1
EOF

# This password is not used since we start the server without auth.
# The server is protected via the Coder session token / tunnel
# and does not listen publicly
echo -e "password\npassword\n" | vncpasswd -wo -u "$USER"

# Start the server
printf "🚀 Starting KasmVNC server...\n"
vncserver -select-de "${DESKTOP_ENVIRONMENT}" -disableBasicAuth > /tmp/kasmvncserver.log 2>&1 &
pid=$!

# Wait for server to start
sleep 5
grep -v '^[[:space:]]*$' /tmp/kasmvncserver.log | tail -n 10
if ps -p $pid | grep -q "^$pid"; then
echo "ERROR: Failed to start KasmVNC server. Check full logs at /tmp/kasmvncserver.log"
exit 1
fi
printf "🚀 KasmVNC server started successfully!\n"
15
lint.ts
@@ -5,14 +5,15 @@ import grayMatter from "gray-matter";

const files = await readdir(".", { withFileTypes: true });
const dirs = files.filter(
(f) => f.isDirectory() && !f.name.startsWith(".") && f.name !== "node_modules"
(f) =>
f.isDirectory() && !f.name.startsWith(".") && f.name !== "node_modules",
);

let badExit = false;

// error reports an error to the console and sets badExit to true
// so that the process will exit with a non-zero exit code.
const error = (...data: any[]) => {
const error = (...data: unknown[]) => {
console.error(...data);
badExit = true;
};
@@ -22,7 +23,7 @@ const verifyCodeBlocks = (
res = {
codeIsTF: false,
codeIsHCL: false,
}
},
) => {
for (const token of tokens) {
// Check in-depth.
@@ -30,7 +31,12 @@ const verifyCodeBlocks = (
verifyCodeBlocks(token.items, res);
continue;
}

if (token.type === "list_item") {
if (token.tokens === undefined) {
throw new Error("Tokens are missing for type list_item");
}

verifyCodeBlocks(token.tokens, res);
continue;
}
@@ -80,8 +86,9 @@ for (const dir of dirs) {
if (!data.maintainer_github) {
error(dir.name, "missing maintainer_github");
}

try {
await stat(path.join(".", dir.name, data.icon));
await stat(path.join(".", dir.name, data.icon ?? ""));
} catch (ex) {
error(dir.name, "icon does not exist", data.icon);
}
@@ -13,6 +13,7 @@ Automatically installs [Node.js](https://github.com/nodejs/node) via [nvm](https

```tf
module "nodejs" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/nodejs/coder"
version = "1.0.10"
agent_id = coder_agent.example.id
@@ -25,6 +26,7 @@ This installs multiple versions of Node.js:

```tf
module "nodejs" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/nodejs/coder"
version = "1.0.10"
agent_id = coder_agent.example.id
@@ -43,6 +45,7 @@ An example with all available options:

```tf
module "nodejs" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/nodejs/coder"
version = "1.0.10"
agent_id = coder_agent.example.id
@@ -1,4 +1,4 @@
import { describe, expect, it } from "bun:test";
import { describe } from "bun:test";
import { runTerraformInit, testRequiredVariables } from "../test";

describe("nodejs", async () => {
@@ -8,14 +8,15 @@
"update-version": "./update-version.sh"
},
"devDependencies": {
"bun-types": "^1.0.18",
"bun-types": "^1.1.23",
"gray-matter": "^4.0.3",
"marked": "^12.0.0",
"marked": "^12.0.2",
"prettier": "^3.3.3",
"prettier-plugin-sh": "^0.13.1",
"prettier-plugin-terraform-formatter": "^1.2.1"
},
"peerDependencies": {
"typescript": "^5.3.3"
"typescript": "^5.5.4"
},
"prettier": {
"plugins": [
@@ -23,4 +24,4 @@
"prettier-plugin-terraform-formatter"
]
}
}
}
@@ -13,6 +13,7 @@ Run a script on workspace start that allows developers to run custom commands to

```tf
module "personalize" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/personalize/coder"
version = "1.0.2"
agent_id = coder_agent.example.id
@@ -1,13 +1,9 @@
import { readableStreamToText, spawn } from "bun";
import { describe, expect, it } from "bun:test";
import {
executeScriptInContainer,
runTerraformApply,
runTerraformInit,
testRequiredVariables,
runContainer,
execContainer,
findResourceInstance,
} from "../test";

describe("personalize", async () => {
@@ -56,6 +56,7 @@ slackme npm run long-build

```tf
module "slackme" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/slackme/coder"
version = "1.0.2"
agent_id = coder_agent.example.id
@@ -72,6 +73,7 @@ slackme npm run long-build

```tf
module "slackme" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/slackme/coder"
version = "1.0.2"
agent_id = coder_agent.example.id
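The module blocks above are truncated by the diff context. As a hedged sketch of a fuller invocation, the `slack_message` attribute below is inferred from the test suite later in this diff (which passes `slack_message` through `runTerraformApply`); treat the exact attribute set as an assumption rather than the module's documented interface:

```tf
module "slackme" {
  count         = data.coder_workspace.me.start_count
  source        = "registry.coder.com/modules/slackme/coder"
  version       = "1.0.2"
  agent_id      = coder_agent.example.id
  slack_message = "$COMMAND took $DURATION" # format string, per the tests below
}
```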
@@ -8,6 +8,7 @@ import {
runTerraformApply,
runTerraformInit,
testRequiredVariables,
writeCoder,
} from "../test";

describe("slackme", async () => {
@@ -71,7 +72,7 @@ executed`,
it("formats execution with milliseconds", async () => {
await assertSlackMessage({
command: "echo test",
format: `$COMMAND took $DURATION`,
format: "$COMMAND took $DURATION",
durationMS: 150,
output: "echo test took 150ms",
});
@@ -80,7 +81,7 @@ executed`,
it("formats execution with seconds", async () => {
await assertSlackMessage({
command: "echo test",
format: `$COMMAND took $DURATION`,
format: "$COMMAND took $DURATION",
durationMS: 15000,
output: "echo test took 15.0s",
});
@@ -89,7 +90,7 @@ executed`,
it("formats execution with minutes", async () => {
await assertSlackMessage({
command: "echo test",
format: `$COMMAND took $DURATION`,
format: "$COMMAND took $DURATION",
durationMS: 120000,
output: "echo test took 2m 0.0s",
});
@@ -98,7 +99,7 @@ executed`,
it("formats execution with hours", async () => {
await assertSlackMessage({
command: "echo test",
format: `$COMMAND took $DURATION`,
format: "$COMMAND took $DURATION",
durationMS: 60000 * 60,
output: "echo test took 1hr 0m 0.0s",
});
@@ -119,22 +120,16 @@ const setupContainer = async (
return { id, instance };
};

const writeCoder = async (id: string, script: string) => {
const exec = await execContainer(id, [
"sh",
"-c",
`echo '${script}' > /usr/bin/coder && chmod +x /usr/bin/coder`,
]);
expect(exec.exitCode).toBe(0);
};

const assertSlackMessage = async (opts: {
command: string;
format?: string;
durationMS?: number;
output: string;
}) => {
let url: URL;
// Have to use non-null assertion because TS can't tell when the fetch
// function will run
let url!: URL;

const fakeSlackHost = serve({
fetch: (req) => {
url = new URL(req.url);
@@ -146,15 +141,16 @@ const assertSlackMessage = async (opts: {
},
port: 0,
});

const { instance, id } = await setupContainer(
"alpine/curl",
opts.format && {
slack_message: opts.format,
},
opts.format ? { slack_message: opts.format } : undefined,
);

await writeCoder(id, "echo 'token'");
let exec = await execContainer(id, ["sh", "-c", instance.script]);
expect(exec.exitCode).toBe(0);

exec = await execContainer(id, [
"sh",
"-c",
@@ -162,6 +158,7 @@ const assertSlackMessage = async (opts: {
fakeSlackHost.hostname
}:${fakeSlackHost.port}" slackme ${opts.command}`,
]);

expect(exec.stderr.trim()).toBe("");
expect(url.pathname).toEqual("/api/chat.postMessage");
expect(url.searchParams.get("channel")).toEqual("token");
@@ -4,25 +4,25 @@ set -euo pipefail

# Function to run terraform init and validate in a directory
run_terraform() {
local dir="$1"
echo "Running terraform init and validate in $dir"
pushd "$dir"
terraform init -upgrade
terraform validate
popd
local dir="$1"
echo "Running terraform init and validate in $dir"
pushd "$dir"
terraform init -upgrade
terraform validate
popd
}

# Main script
main() {
# Get the directory of the script
script_dir=$(dirname "$(readlink -f "$0")")
# Get the directory of the script
script_dir=$(dirname "$(readlink -f "$0")")

# Get all subdirectories in the repository
subdirs=$(find "$script_dir" -mindepth 1 -maxdepth 1 -type d -not -name ".*" | sort)
# Get all subdirectories in the repository
subdirs=$(find "$script_dir" -mindepth 1 -maxdepth 1 -type d -not -name ".*" | sort)

for dir in $subdirs; do
run_terraform "$dir"
done
for dir in $subdirs; do
run_terraform "$dir"
done
}

# Run the main script
151
test.ts
@@ -1,6 +1,6 @@
import { readableStreamToText, spawn } from "bun";
import { afterEach, expect, it } from "bun:test";
import { readFile, unlink } from "fs/promises";
import { expect, it } from "bun:test";
import { readFile, unlink } from "node:fs/promises";

export const runContainer = async (
image: string,
@@ -21,7 +21,8 @@ export const runContainer = async (
"-c",
init,
]);
let containerID = await readableStreamToText(proc.stdout);

const containerID = await readableStreamToText(proc.stdout);
const exitCode = await proc.exited;
if (exitCode !== 0) {
throw new Error(containerID);
@@ -29,12 +30,14 @@ export const runContainer = async (
return containerID.trim();
};

// executeScriptInContainer finds the only "coder_script"
// resource in the given state and runs it in a container.
/**
 * Finds the only "coder_script" resource in the given state and runs it in a
 * container.
 */
export const executeScriptInContainer = async (
state: TerraformState,
image: string,
shell: string = "sh",
shell = "sh",
): Promise<{
exitCode: number;
stdout: string[];
@@ -76,46 +79,56 @@ export const execContainer = async (
};
};

export interface TerraformState {
outputs: {
[key: string]: {
type: string;
value: any;
};
}
resources: [
type JsonValue =
| string
| number
| boolean
| null
| JsonValue[]
| { [key: string]: JsonValue };

type TerraformStateResource = {
type: string;
name: string;
provider: string;

instances: [
{
type: string;
name: string;
provider: string;
instances: [
{
attributes: {
[key: string]: any;
};
},
];
attributes: Record<string, JsonValue>;
},
];
};

type TerraformOutput = {
type: string;
value: JsonValue;
};

export interface TerraformState {
outputs: Record<string, TerraformOutput>;
resources: [TerraformStateResource, ...TerraformStateResource[]];
}

type TerraformVariables = Record<string, JsonValue>;

export interface CoderScriptAttributes {
script: string;
agent_id: string;
url: string;
}

// findResourceInstance finds the first instance of the given resource
// type in the given state. If name is specified, it will only find
// the instance with the given name.
export const findResourceInstance = <T extends "coder_script" | string>(
export type ResourceInstance<T extends string = string> =
T extends "coder_script" ? CoderScriptAttributes : Record<string, string>;

/**
 * finds the first instance of the given resource type in the given state. If
 * name is specified, it will only find the instance with the given name.
 */
export const findResourceInstance = <T extends string>(
state: TerraformState,
type: T,
name?: string,
// if type is "coder_script" return CoderScriptAttributes
): T extends "coder_script"
? CoderScriptAttributes
: Record<string, string> => {
): ResourceInstance<T> => {
const resource = state.resources.find(
(resource) =>
resource.type === type && (name ? resource.name === name : true),
@@ -128,33 +141,41 @@ export const findResourceInstance = <T extends "coder_script" | string>(
`Resource ${type} has ${resource.instances.length} instances`,
);
}
return resource.instances[0].attributes as any;

return resource.instances[0].attributes as ResourceInstance<T>;
};

// testRequiredVariables creates a test-case
// for each variable provided and ensures that
// the apply fails without it.
export const testRequiredVariables = (
/**
 * Creates a test-case for each variable provided and ensures that the apply
 * fails without it.
 */
export const testRequiredVariables = <TVars extends TerraformVariables>(
dir: string,
vars: Record<string, string>,
vars: Readonly<TVars>,
) => {
// Ensures that all required variables are provided.
it("required variables", async () => {
await runTerraformApply(dir, vars);
});

const varNames = Object.keys(vars);
varNames.forEach((varName) => {
for (const varName of varNames) {
// Ensures that every variable provided is required!
it("missing variable " + varName, async () => {
const localVars = {};
varNames.forEach((otherVarName) => {
it(`missing variable: ${varName}`, async () => {
const localVars: TerraformVariables = {};
for (const otherVarName of varNames) {
if (otherVarName !== varName) {
localVars[otherVarName] = vars[otherVarName];
}
});
}

try {
await runTerraformApply(dir, localVars);
} catch (ex) {
if (!(ex instanceof Error)) {
throw new Error("Unknown error generated");
}

expect(ex.message).toContain(
`input variable \"${varName}\" is not set`,
);
@@ -162,19 +183,26 @@ export const testRequiredVariables = (
}
throw new Error(`${varName} is not a required variable!`);
});
});
}
};

// runTerraformApply runs terraform apply in the given directory
// with the given variables. It is fine to run in parallel with
// other instances of this function, as it uses a random state file.
export const runTerraformApply = async (
/**
 * Runs terraform apply in the given directory with the given variables. It is
 * fine to run in parallel with other instances of this function, as it uses a
 * random state file.
 */
export const runTerraformApply = async <TVars extends TerraformVariables>(
dir: string,
vars: Record<string, string>,
env: Record<string, string> = {},
vars: Readonly<TVars>,
env?: Record<string, string>,
): Promise<TerraformState> => {
const stateFile = `${dir}/${crypto.randomUUID()}.tfstate`;
Object.keys(vars).forEach((key) => (env[`TF_VAR_${key}`] = vars[key]));

const combinedEnv = env === undefined ? {} : { ...env };
for (const [key, value] of Object.entries(vars)) {
combinedEnv[`TF_VAR_${key}`] = String(value);
}

const proc = spawn(
[
"terraform",
@@ -188,22 +216,26 @@ export const runTerraformApply = async (
],
{
cwd: dir,
env,
env: combinedEnv,
stderr: "pipe",
stdout: "pipe",
},
);

const text = await readableStreamToText(proc.stderr);
const exitCode = await proc.exited;
if (exitCode !== 0) {
throw new Error(text);
}

const content = await readFile(stateFile, "utf8");
await unlink(stateFile);
return JSON.parse(content);
};

// runTerraformInit runs terraform init in the given directory.
/**
 * Runs terraform init in the given directory.
 */
export const runTerraformInit = async (dir: string) => {
const proc = spawn(["terraform", "init"], {
cwd: dir,
@@ -221,5 +253,14 @@ export const createJSONResponse = (obj: object, statusCode = 200): Response => {
"Content-Type": "application/json",
},
status: statusCode,
})
}
});
};

export const writeCoder = async (id: string, script: string) => {
const exec = await execContainer(id, [
"sh",
"-c",
`echo '${script}' > /usr/bin/coder && chmod +x /usr/bin/coder`,
]);
expect(exec.exitCode).toBe(0);
};
@@ -1,9 +1,14 @@
{
"compilerOptions": {
"target": "esnext",
"module": "esnext",
// If we were only compiling for the tests, we could safely target ESNext at
// all times, but because we've started adding more runtime logic files to
// some of the modules, we err on the side of caution with an older
// compilation target.
"target": "ES6",
"module": "ESNext",
"strict": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "nodenext",
"moduleResolution": "node",
"types": ["bun-types"]
}
}
@@ -1,29 +1,65 @@
#!/usr/bin/env bash

# This script updates the version number in the README.md files of all modules
# to the latest tag in the repository. It is intended to be run from the root
# This script increments the version number in the README.md files of all modules
# by 1 patch version. It is intended to be run from the root
# of the repository or by using the `bun update-version` command.

set -euo pipefail

current_tag=$(git describe --tags --abbrev=0)
previous_tag=$(git describe --tags --abbrev=0 $current_tag^)
mapfile -t changed_dirs < <(git diff --name-only "$previous_tag"..."$current_tag" -- ':!**/README.md' ':!**/*.test.ts' | xargs dirname | grep -v '^\.' | sort -u)

LATEST_TAG=$(git describe --abbrev=0 --tags | sed 's/^v//') || exit $?
# Increment the patch version
LATEST_TAG=$(echo "$current_tag" | sed 's/^v//' | awk -F. '{print $1"."$2"."$3+1}') || exit $?

# List directories with changes that are not README.md or test files
mapfile -t changed_dirs < <(git diff --name-only "$current_tag" -- ':!**/README.md' ':!**/*.test.ts' | xargs dirname | grep -v '^\.' | sort -u)

echo "Directories with changes: ${changed_dirs[*]}"

# Iterate over directories and update version in README.md
for dir in "${changed_dirs[@]}"; do
if [[ -f "$dir/README.md" ]]; then
echo "Bumping version in $dir/README.md"
file="$dir/README.md"
tmpfile=$(mktemp /tmp/tempfile.XXXXXX)
awk -v tag="$LATEST_TAG" '{
if ($1 == "version" && $2 == "=") {
sub(/"[^"]*"/, "\"" tag "\"")
print
} else {
awk -v tag="$LATEST_TAG" '
BEGIN { in_code_block = 0; in_nested_block = 0 }
{
# Detect the start and end of Markdown code blocks.
if ($0 ~ /^```/) {
in_code_block = !in_code_block
# Reset nested block tracking when exiting a code block.
if (!in_code_block) {
in_nested_block = 0
}
}

# Handle nested blocks within a code block.
if (in_code_block) {
# Detect the start of a nested block (skipping "module" blocks).
if ($0 ~ /{/ && !($1 == "module" || $1 ~ /^[a-zA-Z0-9_]+$/)) {
in_nested_block++
}

# Detect the end of a nested block.
if ($0 ~ /}/ && in_nested_block > 0) {
in_nested_block--
}

# Update "version" only if not in a nested block.
if (!in_nested_block && $1 == "version" && $2 == "=") {
sub(/"[^"]*"/, "\"" tag "\"")
}
}

print
}
}' "$file" > "$tmpfile" && mv "$tmpfile" "$file"
' "$file" > "$tmpfile" && mv "$tmpfile" "$file"

# Check if the README.md file has changed
if ! git diff --quiet -- "$dir/README.md"; then
echo "Bumping version in $dir/README.md from $current_tag to $LATEST_TAG (incremented)"
else
echo "Version in $dir/README.md is already up to date"
fi
fi
done
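For illustration, the intended effect of the new awk pass on a module README code block, assuming the most recent tag is `v1.2.3` (all values below are hypothetical):

```tf
# Before running `bun update-version`:
module "example" {
  source  = "registry.coder.com/modules/example/coder"
  version = "1.2.3"
}

# After: only top-level version attributes inside Markdown code blocks are
# rewritten, bumped to the next patch release (1.2.4); version lines inside
# nested blocks are left untouched.
module "example" {
  source  = "registry.coder.com/modules/example/coder"
  version = "1.2.4"
}
```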
@@ -14,6 +14,7 @@ This module lets you authenticate with [Hashicorp Vault](https://www.vaultprojec

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-github/coder"
version = "1.0.7"
agent_id = coder_agent.example.id
@@ -45,6 +46,7 @@ To configure the Vault module, you must set up a Vault GitHub auth method. See t

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-github/coder"
version = "1.0.7"
agent_id = coder_agent.example.id
@@ -57,6 +59,7 @@ module "vault" {

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-github/coder"
version = "1.0.7"
agent_id = coder_agent.example.id
@@ -70,6 +73,7 @@ module "vault" {

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-github/coder"
version = "1.0.7"
agent_id = coder_agent.example.id

81
vault-jwt/README.md
Normal file
@@ -0,0 +1,81 @@
---
display_name: Hashicorp Vault Integration (JWT)
description: Authenticates with Vault using a JWT from Coder's OIDC provider
icon: ../.icons/vault.svg
maintainer_github: coder
partner_github: hashicorp
verified: true
tags: [helper, integration, vault, jwt, oidc]
---

# Hashicorp Vault Integration (JWT)

This module lets you authenticate with [Hashicorp Vault](https://www.vaultproject.io/) in your Coder workspaces by reusing the [OIDC](https://coder.com/docs/admin/auth#openid-connect) access token from Coder's OIDC authentication method. This requires configuring the Vault [JWT/OIDC](https://developer.hashicorp.com/vault/docs/auth/jwt#configuration) auth method.

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
version = "1.0.20"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_role = "coder" # The Vault role to use for authentication
}
```

Then you can use the Vault CLI in your workspaces to fetch secrets from Vault:

```shell
vault kv get -namespace=coder -mount=secrets coder
```

or using the Vault API:

```shell
curl -H "X-Vault-Token: ${VAULT_TOKEN}" -X GET "${VAULT_ADDR}/v1/coder/secrets/data/coder"
```

## Examples

### Configure Vault integration with a non-standard auth path (default is "jwt")

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
version = "1.0.20"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_auth_path = "oidc"
vault_jwt_role = "coder" # The Vault role to use for authentication
}
```

### Map workspace owner's group to a Vault role

```tf
data "coder_workspace_owner" "me" {}

module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
version = "1.0.20"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_role = data.coder_workspace_owner.me.groups[0]
}
```

### Install a specific version of the Vault CLI

```tf
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
version = "1.0.20"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_role = "coder" # The Vault role to use for authentication
vault_cli_version = "1.17.5"
}
```

12
vault-jwt/main.test.ts
Normal file
@@ -0,0 +1,12 @@
import { describe } from "bun:test";
import { runTerraformInit, testRequiredVariables } from "../test";

describe("vault-jwt", async () => {
await runTerraformInit(import.meta.dir);

testRequiredVariables(import.meta.dir, {
agent_id: "foo",
vault_addr: "foo",
vault_jwt_role: "foo",
});
});