diff --git a/.claude/docs/TROUBLESHOOTING.md b/.claude/docs/TROUBLESHOOTING.md index 28851b5b640f0..1788d5df84a94 100644 --- a/.claude/docs/TROUBLESHOOTING.md +++ b/.claude/docs/TROUBLESHOOTING.md @@ -91,6 +91,9 @@ ## Systematic Debugging Approach +YOU MUST ALWAYS find the root cause of any issue you are debugging +YOU MUST NEVER fix a symptom or add a workaround instead of finding a root cause, even if it is faster. + ### Multi-Issue Problem Solving When facing multiple failing tests or complex integration issues: @@ -98,16 +101,21 @@ When facing multiple failing tests or complex integration issues: 1. **Identify Root Causes**: - Run failing tests individually to isolate issues - Use LSP tools to trace through call chains - - Check both compilation and runtime errors + - Read Error Messages Carefully: Check both compilation and runtime errors + - Reproduce Consistently: Ensure you can reliably reproduce the issue before investigating + - Check Recent Changes: What changed that could have caused this? Git diff, recent commits, etc. + - When You Don't Know: Say "I don't understand X" rather than pretending to know 2. **Fix in Logical Order**: - Address compilation issues first (imports, syntax) - Fix authorization and RBAC issues next - Resolve business logic and validation issues - Handle edge cases and race conditions last + - IF your first fix doesn't work, STOP and re-analyze rather than adding more fixes 3. **Verification Strategy**: - - Test each fix individually before moving to next issue + - Always Test each fix individually before moving to next issue + - Verify Before Continuing: Did your test work? 
If not, form new hypothesis - don't add more fixes - Use `make lint` and `make gen` after database changes - Verify RFC compliance with actual specifications - Run comprehensive test suites before considering complete diff --git a/.claude/docs/WORKFLOWS.md b/.claude/docs/WORKFLOWS.md index 8fc43002bba7d..4e9dfb78599ee 100644 --- a/.claude/docs/WORKFLOWS.md +++ b/.claude/docs/WORKFLOWS.md @@ -40,11 +40,15 @@ - Use proper error types - Pattern: `xerrors.Errorf("failed to X: %w", err)` -### Naming Conventions +## Naming Conventions -- Use clear, descriptive names -- Abbreviate only when obvious +- Names MUST tell what code does, not how it's implemented or its history - Follow Go and TypeScript naming conventions +- When changing code, never document the old behavior or the behavior change +- NEVER use implementation details in names (e.g., "ZodValidator", "MCPWrapper", "JSONParser") +- NEVER use temporal/historical context in names (e.g., "LegacyHandler", "UnifiedTool", "ImprovedInterface", "EnhancedParser") +- NEVER use pattern names unless they add clarity (e.g., prefer "Tool" over "ToolFactory") +- Abbreviate only when obvious ### Comments diff --git a/.devcontainer/scripts/post_create.sh b/.devcontainer/scripts/post_create.sh index a1b774f98d2ca..ab5be4ba1bc74 100755 --- a/.devcontainer/scripts/post_create.sh +++ b/.devcontainer/scripts/post_create.sh @@ -10,8 +10,12 @@ install_devcontainer_cli() { install_ssh_config() { echo "πŸ”‘ Installing SSH configuration..." - rsync -a /mnt/home/coder/.ssh/ ~/.ssh/ - chmod 0700 ~/.ssh + if [ -d /mnt/home/coder/.ssh ]; then + rsync -a /mnt/home/coder/.ssh/ ~/.ssh/ + chmod 0700 ~/.ssh + else + echo "⚠️ SSH directory not found." 
+ fi } install_git_config() { diff --git a/.github/.linkspector.yml b/.github/.linkspector.yml index f5f99caf57708..cd052c53b251e 100644 --- a/.github/.linkspector.yml +++ b/.github/.linkspector.yml @@ -26,5 +26,6 @@ ignorePatterns: - pattern: "claude.ai" - pattern: "splunk.com" - pattern: "stackoverflow.com/questions" + - pattern: "developer.hashicorp.com/terraform/language" aliveStatusCodes: - 200 diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index 097a1b6cfd119..02b54830cdf61 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.24.6" + default: "1.24.10" use-preinstalled-go: description: "Whether to use preinstalled Go." default: "false" diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml index 67d1f1342dcaf..fbf713d16b5bd 100644 --- a/.github/dependabot.yaml +++ b/.github/dependabot.yaml @@ -80,6 +80,9 @@ updates: mui: patterns: - "@mui*" + radix: + patterns: + - "@radix-ui/*" react: patterns: - "react" @@ -104,6 +107,7 @@ updates: - dependency-name: "*" update-types: - version-update:semver-major + - dependency-name: "@playwright/test" open-pull-requests-limit: 15 - package-ecosystem: "terraform" diff --git a/.github/fly-wsproxies/sao-paulo-coder.toml b/.github/fly-wsproxies/sao-paulo-coder.toml deleted file mode 100644 index b6c9b964631ef..0000000000000 --- a/.github/fly-wsproxies/sao-paulo-coder.toml +++ /dev/null @@ -1,34 +0,0 @@ -app = "sao-paulo-coder" -primary_region = "gru" - -[experimental] - entrypoint = ["/bin/sh", "-c", "CODER_DERP_SERVER_RELAY_URL=\"http://[${FLY_PRIVATE_IP}]:3000\" /opt/coder wsproxy server"] - auto_rollback = true - -[build] - image = "ghcr.io/coder/coder-preview:main" - -[env] - CODER_ACCESS_URL = "https://sao-paulo.fly.dev.coder.com" - CODER_HTTP_ADDRESS = "0.0.0.0:3000" - CODER_PRIMARY_ACCESS_URL = "https://dev.coder.com" - 
CODER_WILDCARD_ACCESS_URL = "*--apps.sao-paulo.fly.dev.coder.com" - CODER_VERBOSE = "true" - -[http_service] - internal_port = 3000 - force_https = true - auto_stop_machines = true - auto_start_machines = true - min_machines_running = 0 - -# Ref: https://fly.io/docs/reference/configuration/#http_service-concurrency -[http_service.concurrency] - type = "requests" - soft_limit = 50 - hard_limit = 100 - -[[vm]] - cpu_kind = "shared" - cpus = 2 - memory_mb = 512 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2d41883287e54..96d0ce23953cf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -4,6 +4,7 @@ on: push: branches: - main + - release/* pull_request: workflow_dispatch: @@ -180,7 +181,7 @@ jobs: echo "LINT_CACHE_DIR=$dir" >> "$GITHUB_ENV" - name: golangci-lint cache - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: | ${{ env.LINT_CACHE_DIR }} @@ -190,7 +191,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@85f62a8a84f939ae994ab3763f01a0296d61a7ee # v1.36.2 + uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1 with: config: .github/workflows/typos.toml @@ -375,13 +376,6 @@ jobs: id: go-paths uses: ./.github/actions/setup-go-paths - - name: Download Go Build Cache - id: download-go-build-cache - uses: ./.github/actions/test-cache/download - with: - key-prefix: test-go-build-${{ runner.os }}-${{ runner.arch }} - cache-path: ${{ steps.go-paths.outputs.cached-dirs }} - - name: Setup Go uses: ./.github/actions/setup-go with: @@ -389,8 +383,7 @@ jobs: # download the toolchain configured in go.mod, so we don't # need to reinstall it. It's faster on Windows runners. 
use-preinstalled-go: ${{ runner.os == 'Windows' }} - # Cache is already downloaded above - use-cache: false + use-cache: true - name: Setup Terraform uses: ./.github/actions/setup-tf @@ -499,17 +492,11 @@ jobs: make test - name: Upload failed test db dumps - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: failed-test-db-dump-${{matrix.os}} path: "**/*.test.sql" - - name: Upload Go Build Cache - uses: ./.github/actions/test-cache/upload - with: - cache-key: ${{ steps.download-go-build-cache.outputs.cache-key }} - cache-path: ${{ steps.go-paths.outputs.cached-dirs }} - - name: Upload Test Cache uses: ./.github/actions/test-cache/upload with: @@ -761,7 +748,7 @@ jobs: - name: Upload Playwright Failed Tests if: always() && github.actor != 'dependabot[bot]' && runner.os == 'Linux' && !github.event.pull_request.head.repo.fork - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: failed-test-videos${{ matrix.variant.premium && '-premium' || '' }} path: ./site/test-results/**/*.webm @@ -769,7 +756,7 @@ jobs: - name: Upload pprof dumps if: always() && github.actor != 'dependabot[bot]' && runner.os == 'Linux' && !github.event.pull_request.head.repo.fork - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: debug-pprof-dumps${{ matrix.variant.premium && '-premium' || '' }} path: ./site/test-results/**/debug-pprof-*.txt @@ -805,7 +792,7 @@ jobs: # the check to pass. This is desired in PRs, but not in mainline. 
- name: Publish to Chromatic (non-mainline) if: github.ref != 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@20c7e42e1b2f6becd5d188df9acb02f3e2f51519 # v13.2.0 + uses: chromaui/action@bc2d84ad2b60813a67d995c5582d696104a19383 # v13.3.2 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -837,7 +824,7 @@ jobs: # infinitely "in progress" in mainline unless we re-review each build. - name: Publish to Chromatic (mainline) if: github.ref == 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@20c7e42e1b2f6becd5d188df9acb02f3e2f51519 # v13.2.0 + uses: chromaui/action@bc2d84ad2b60813a67d995c5582d696104a19383 # v13.3.2 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -919,6 +906,7 @@ jobs: required: runs-on: ubuntu-latest needs: + - changes - fmt - lint - gen @@ -942,6 +930,7 @@ jobs: - name: Ensure required checks run: | # zizmor: ignore[template-injection] We're just reading needs.x.result here, no risk of injection echo "Checking required checks" + echo "- changes: ${{ needs.changes.result }}" echo "- fmt: ${{ needs.fmt.result }}" echo "- lint: ${{ needs.lint.result }}" echo "- gen: ${{ needs.gen.result }}" @@ -967,7 +956,7 @@ jobs: needs: changes # We always build the dylibs on Go changes to verify we're not merging unbuildable code, # but they need only be signed and uploaded on coder/coder main. 
- if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' + if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') runs-on: ${{ github.repository_owner == 'coder' && 'depot-macos-latest' || 'macos-latest' }} steps: # Harden Runner doesn't work on macOS @@ -995,7 +984,7 @@ jobs: uses: ./.github/actions/setup-go - name: Install rcodesign - if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }} + if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }} run: | set -euo pipefail wget -O /tmp/rcodesign.tar.gz https://github.com/indygreg/apple-platform-rs/releases/download/apple-codesign%2F0.22.0/apple-codesign-0.22.0-macos-universal.tar.gz @@ -1006,7 +995,7 @@ jobs: rm /tmp/rcodesign.tar.gz - name: Setup Apple Developer certificate and API key - if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }} + if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }} run: | set -euo pipefail touch /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8} @@ -1027,13 +1016,13 @@ jobs: make gen/mark-fresh make build/coder-dylib env: - CODER_SIGN_DARWIN: ${{ github.ref == 'refs/heads/main' && '1' || '0' }} + CODER_SIGN_DARWIN: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && '1' || '0' }} AC_CERTIFICATE_FILE: /tmp/apple_cert.p12 AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt - name: Upload build artifacts - if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || 
startsWith(github.ref, 'refs/heads/release/')) }} + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: dylibs path: | @@ -1042,7 +1031,7 @@ jobs: retention-days: 7 - name: Delete Apple Developer certificate and API key - if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }} + if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }} run: rm -f /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8} check-build: @@ -1092,7 +1081,7 @@ jobs: needs: - changes - build-dylib - if: github.ref == 'refs/heads/main' && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork + if: (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-22.04' }} permissions: # Necessary to push docker images to ghcr.io. 
@@ -1120,7 +1109,7 @@ jobs: persist-credentials: false - name: GHCR Login - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -1198,7 +1187,7 @@ jobs: uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1 - name: Download dylibs - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: dylibs path: ./build @@ -1245,40 +1234,45 @@ jobs: id: build-docker env: CODER_IMAGE_BASE: ghcr.io/coder/coder-preview - CODER_IMAGE_TAG_PREFIX: main DOCKER_CLI_EXPERIMENTAL: "enabled" run: | set -euxo pipefail # build Docker images for each architecture version="$(./scripts/version.sh)" - tag="main-${version//+/-}" + tag="${version//+/-}" echo "tag=$tag" >> "$GITHUB_OUTPUT" # build images for each architecture # note: omitting the -j argument to avoid race conditions when pushing make build/coder_"$version"_linux_{amd64,arm64,armv7}.tag - # only push if we are on main branch - if [ "${GITHUB_REF}" == "refs/heads/main" ]; then + # only push if we are on main branch or release branch + if [[ "${GITHUB_REF}" == "refs/heads/main" || "${GITHUB_REF}" == refs/heads/release/* ]]; then # build and push multi-arch manifest, this depends on the other images # being pushed so will automatically push them # note: omitting the -j argument to avoid race conditions when pushing make push/build/coder_"$version"_linux_{amd64,arm64,armv7}.tag # Define specific tags - tags=("$tag" "main" "latest") + tags=("$tag") + if [ "${GITHUB_REF}" == "refs/heads/main" ]; then + tags+=("main" "latest") + elif [[ "${GITHUB_REF}" == refs/heads/release/* ]]; then + tags+=("release-${GITHUB_REF#refs/heads/release/}") + fi # Create and push a multi-arch manifest for each tag # we are adding `latest` tag 
and keeping `main` for backward # compatibality for t in "${tags[@]}"; do - # shellcheck disable=SC2046 - ./scripts/build_docker_multiarch.sh \ - --push \ - --target "ghcr.io/coder/coder-preview:$t" \ - --version "$version" \ - $(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag) + echo "Pushing multi-arch manifest for tag: $t" + # shellcheck disable=SC2046 + ./scripts/build_docker_multiarch.sh \ + --push \ + --target "ghcr.io/coder/coder-preview:$t" \ + --version "$version" \ + $(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag) done fi @@ -1460,7 +1454,7 @@ jobs: - name: Upload build artifacts if: github.ref == 'refs/heads/main' - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: coder path: | @@ -1469,112 +1463,28 @@ jobs: ./build/*.deb retention-days: 7 + # Deploy is handled in deploy.yaml so we can apply concurrency limits. deploy: - name: "deploy" - runs-on: ubuntu-latest - timeout-minutes: 30 needs: - changes - build if: | - github.ref == 'refs/heads/main' && !github.event.pull_request.head.repo.fork + (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && needs.changes.outputs.docs-only == 'false' + && !github.event.pull_request.head.repo.fork + uses: ./.github/workflows/deploy.yaml + with: + image: ${{ needs.build.outputs.IMAGE }} permissions: contents: read id-token: write - steps: - - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 - with: - egress-policy: audit - - - name: Checkout - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - fetch-depth: 0 - persist-credentials: false - - - name: Authenticate to Google Cloud - uses: google-github-actions/auth@7c6bc770dae815cd3e89ee6cdf493a5fab2cc093 # v3.0.0 - with: - workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }} - service_account: ${{ 
vars.GCP_SERVICE_ACCOUNT }} - - - name: Set up Google Cloud SDK - uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1 - - - name: Set up Flux CLI - uses: fluxcd/flux2/action@6bf37f6a560fd84982d67f853162e4b3c2235edb # v2.6.4 - with: - # Keep this and the github action up to date with the version of flux installed in dogfood cluster - version: "2.5.1" - - - name: Get Cluster Credentials - uses: google-github-actions/get-gke-credentials@3da1e46a907576cefaa90c484278bb5b259dd395 # v3.0.0 - with: - cluster_name: dogfood-v2 - location: us-central1-a - project_id: coder-dogfood-v2 - - - name: Reconcile Flux - run: | - set -euxo pipefail - flux --namespace flux-system reconcile source git flux-system - flux --namespace flux-system reconcile source git coder-main - flux --namespace flux-system reconcile kustomization flux-system - flux --namespace flux-system reconcile kustomization coder - flux --namespace flux-system reconcile source chart coder-coder - flux --namespace flux-system reconcile source chart coder-coder-provisioner - flux --namespace coder reconcile helmrelease coder - flux --namespace coder reconcile helmrelease coder-provisioner - - # Just updating Flux is usually not enough. The Helm release may get - # redeployed, but unless something causes the Deployment to update the - # pods won't be recreated. It's important that the pods get recreated, - # since we use `imagePullPolicy: Always` to ensure we're running the - # latest image. 
- - name: Rollout Deployment - run: | - set -euxo pipefail - kubectl --namespace coder rollout restart deployment/coder - kubectl --namespace coder rollout status deployment/coder - kubectl --namespace coder rollout restart deployment/coder-provisioner - kubectl --namespace coder rollout status deployment/coder-provisioner - kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged - kubectl --namespace coder rollout status deployment/coder-provisioner-tagged - - deploy-wsproxies: - runs-on: ubuntu-latest - needs: build - if: github.ref == 'refs/heads/main' && !github.event.pull_request.head.repo.fork - steps: - - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 - with: - egress-policy: audit - - - name: Checkout - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - fetch-depth: 0 - persist-credentials: false - - - name: Setup flyctl - uses: superfly/flyctl-actions/setup-flyctl@fc53c09e1bc3be6f54706524e3b82c4f462f77be # v1.5 - - - name: Deploy workspace proxies - run: | - flyctl deploy --image "$IMAGE" --app paris-coder --config ./.github/fly-wsproxies/paris-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_PARIS" --yes - flyctl deploy --image "$IMAGE" --app sydney-coder --config ./.github/fly-wsproxies/sydney-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SYDNEY" --yes - flyctl deploy --image "$IMAGE" --app sao-paulo-coder --config ./.github/fly-wsproxies/sao-paulo-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SAO_PAULO" --yes - flyctl deploy --image "$IMAGE" --app jnb-coder --config ./.github/fly-wsproxies/jnb-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_JNB" --yes - env: - FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} - IMAGE: ${{ needs.build.outputs.IMAGE }} - TOKEN_PARIS: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }} - TOKEN_SYDNEY: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }} - TOKEN_SAO_PAULO: ${{ 
secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }} - TOKEN_JNB: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }} + packages: write # to retag image as dogfood + secrets: + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + FLY_PARIS_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }} + FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }} + FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }} + FLY_JNB_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }} # sqlc-vet runs a postgres docker container, runs Coder migrations, and then # runs sqlc-vet to ensure all queries are valid. This catches any mistakes @@ -1613,7 +1523,7 @@ jobs: steps: - name: Send Slack notification run: | - ESCAPED_PROMPT=$(printf "%s" "<@U08TJ4YNCA3> $BLINK_CI_FAILURE_PROMPT" | jq -Rsa .) + ESCAPED_PROMPT=$(printf "%s" "<@U09LQ75AHKR> $BLINK_CI_FAILURE_PROMPT" | jq -Rsa .) curl -X POST -H 'Content-type: application/json' \ --data '{ "blocks": [ diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml new file mode 100644 index 0000000000000..6ea750a11caac --- /dev/null +++ b/.github/workflows/deploy.yaml @@ -0,0 +1,172 @@ +name: deploy + +on: + # Via workflow_call, called from ci.yaml + workflow_call: + inputs: + image: + description: "Image and tag to potentially deploy. Current branch will be validated against should-deploy check." + required: true + type: string + secrets: + FLY_API_TOKEN: + required: true + FLY_PARIS_CODER_PROXY_SESSION_TOKEN: + required: true + FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN: + required: true + FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN: + required: true + FLY_JNB_CODER_PROXY_SESSION_TOKEN: + required: true + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }} # no per-branch concurrency + cancel-in-progress: false + +jobs: + # Determines if the given branch should be deployed to dogfood. 
+ should-deploy: + name: should-deploy + runs-on: ubuntu-latest + outputs: + verdict: ${{ steps.check.outputs.verdict }} # DEPLOY or NOOP + steps: + - name: Harden Runner + uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + with: + egress-policy: audit + + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Check if deploy is enabled + id: check + run: | + set -euo pipefail + verdict="$(./scripts/should_deploy.sh)" + echo "verdict=$verdict" >> "$GITHUB_OUTPUT" + + deploy: + name: "deploy" + runs-on: ubuntu-latest + timeout-minutes: 30 + needs: should-deploy + if: needs.should-deploy.outputs.verdict == 'DEPLOY' + permissions: + contents: read + id-token: write + packages: write # to retag image as dogfood + steps: + - name: Harden Runner + uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + with: + egress-policy: audit + + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + persist-credentials: false + + - name: GHCR Login + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@7c6bc770dae815cd3e89ee6cdf493a5fab2cc093 # v3.0.0 + with: + workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_SERVICE_ACCOUNT }} + + - name: Set up Google Cloud SDK + uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1 + + - name: Set up Flux CLI + uses: fluxcd/flux2/action@4a15fa6a023259353ef750acf1c98fe88407d4d0 # v2.7.2 + with: + # Keep this and the github action up to date with the version of flux installed in dogfood cluster + version: "2.7.0" + + - name: Get Cluster 
Credentials + uses: google-github-actions/get-gke-credentials@3da1e46a907576cefaa90c484278bb5b259dd395 # v3.0.0 + with: + cluster_name: dogfood-v2 + location: us-central1-a + project_id: coder-dogfood-v2 + + # Retag image as dogfood while maintaining the multi-arch manifest + - name: Tag image as dogfood + run: docker buildx imagetools create --tag "ghcr.io/coder/coder-preview:dogfood" "$IMAGE" + env: + IMAGE: ${{ inputs.image }} + + - name: Reconcile Flux + run: | + set -euxo pipefail + flux --namespace flux-system reconcile source git flux-system + flux --namespace flux-system reconcile source git coder-main + flux --namespace flux-system reconcile kustomization flux-system + flux --namespace flux-system reconcile kustomization coder + flux --namespace flux-system reconcile source chart coder-coder + flux --namespace flux-system reconcile source chart coder-coder-provisioner + flux --namespace coder reconcile helmrelease coder + flux --namespace coder reconcile helmrelease coder-provisioner + flux --namespace coder reconcile helmrelease coder-provisioner-tagged + flux --namespace coder reconcile helmrelease coder-provisioner-tagged-prebuilds + + # Just updating Flux is usually not enough. The Helm release may get + # redeployed, but unless something causes the Deployment to update the + # pods won't be recreated. It's important that the pods get recreated, + # since we use `imagePullPolicy: Always` to ensure we're running the + # latest image. 
+ - name: Rollout Deployment + run: | + set -euxo pipefail + kubectl --namespace coder rollout restart deployment/coder + kubectl --namespace coder rollout status deployment/coder + kubectl --namespace coder rollout restart deployment/coder-provisioner + kubectl --namespace coder rollout status deployment/coder-provisioner + kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged + kubectl --namespace coder rollout status deployment/coder-provisioner-tagged + kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged-prebuilds + kubectl --namespace coder rollout status deployment/coder-provisioner-tagged-prebuilds + + deploy-wsproxies: + runs-on: ubuntu-latest + needs: deploy + steps: + - name: Harden Runner + uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + with: + egress-policy: audit + + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Setup flyctl + uses: superfly/flyctl-actions/setup-flyctl@fc53c09e1bc3be6f54706524e3b82c4f462f77be # v1.5 + + - name: Deploy workspace proxies + run: | + flyctl deploy --image "$IMAGE" --app paris-coder --config ./.github/fly-wsproxies/paris-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_PARIS" --yes + flyctl deploy --image "$IMAGE" --app sydney-coder --config ./.github/fly-wsproxies/sydney-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SYDNEY" --yes + flyctl deploy --image "$IMAGE" --app jnb-coder --config ./.github/fly-wsproxies/jnb-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_JNB" --yes + env: + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + IMAGE: ${{ inputs.image }} + TOKEN_PARIS: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }} + TOKEN_SYDNEY: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }} + TOKEN_JNB: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }} diff --git a/.github/workflows/docker-base.yaml 
b/.github/workflows/docker-base.yaml index a62d43d0b6a6c..2998aae1b5a79 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -48,7 +48,7 @@ jobs: persist-credentials: false - name: Docker login - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: registry: ghcr.io username: ${{ github.actor }} diff --git a/.github/workflows/docs-ci.yaml b/.github/workflows/docs-ci.yaml index eb64a35ffa3f4..749bdce9b25c3 100644 --- a/.github/workflows/docs-ci.yaml +++ b/.github/workflows/docs-ci.yaml @@ -30,7 +30,7 @@ jobs: - name: Setup Node uses: ./.github/actions/setup-node - - uses: tj-actions/changed-files@4563c729c555b4141fac99c80f699f571219b836 # v45.0.7 + - uses: tj-actions/changed-files@dbf178ceecb9304128c8e0648591d71208c6e2c9 # v45.0.7 id: changed-files with: files: | diff --git a/.github/workflows/dogfood.yaml b/.github/workflows/dogfood.yaml index 5793b64616703..e1d4a7a22787a 100644 --- a/.github/workflows/dogfood.yaml +++ b/.github/workflows/dogfood.yaml @@ -36,11 +36,11 @@ jobs: persist-credentials: false - name: Setup Nix - uses: nixbuild/nix-quick-install-action@1f095fee853b33114486cfdeae62fa099cda35a9 # v33 + uses: nixbuild/nix-quick-install-action@2c9db80fb984ceb1bcaa77cdda3fdf8cfba92035 # v34 with: # Pinning to 2.28 here, as Nix gets a "error: [json.exception.type_error.302] type must be array, but is string" # on version 2.29 and above. 
- nix_version: "2.28.4" + nix_version: "2.28.5" - uses: nix-community/cache-nix-action@135667ec418502fa5a3598af6fb9eb733888ce6a # v6.1.3 with: @@ -82,7 +82,7 @@ jobs: - name: Login to DockerHub if: github.ref == 'refs/heads/main' - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_PASSWORD }} diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index 6000a0f79e4b0..f27e703800086 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -170,7 +170,7 @@ jobs: steps: - name: Send Slack notification run: | - ESCAPED_PROMPT=$(printf "%s" "<@U08TJ4YNCA3> $BLINK_CI_FAILURE_PROMPT" | jq -Rsa .) + ESCAPED_PROMPT=$(printf "%s" "<@U09LQ75AHKR> $BLINK_CI_FAILURE_PROMPT" | jq -Rsa .) curl -X POST -H 'Content-type: application/json' \ --data '{ "blocks": [ diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index 1fd4351503b9c..eb0eb296923c3 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -189,7 +189,7 @@ jobs: egress-policy: audit - name: Find Comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 + uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0 id: fc with: issue-number: ${{ needs.get_info.outputs.PR_NUMBER }} @@ -199,7 +199,7 @@ jobs: - name: Comment on PR id: comment_id - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 + uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5.0.0 with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ needs.get_info.outputs.PR_NUMBER }} @@ -248,7 +248,7 @@ jobs: uses: ./.github/actions/setup-sqlc - name: GHCR Login - uses: 
docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -491,7 +491,7 @@ jobs: PASSWORD: ${{ steps.setup_deployment.outputs.password }} - name: Find Comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 + uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0 id: fc with: issue-number: ${{ env.PR_NUMBER }} @@ -500,7 +500,7 @@ jobs: direction: last - name: Comment on PR - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 + uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5.0.0 env: STATUS: ${{ needs.get_info.outputs.NEW == 'true' && 'Created' || 'Updated' }} with: diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index cdd2ae96ffcb4..7c06701836714 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -131,7 +131,7 @@ jobs: AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt - name: Upload build artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: dylibs path: | @@ -239,7 +239,7 @@ jobs: cat "$CODER_RELEASE_NOTES_FILE" - name: Docker Login - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -327,7 +327,7 @@ jobs: uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1 - name: Download dylibs - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: dylibs path: ./build @@ 
-761,7 +761,7 @@ jobs: - name: Upload artifacts to actions (if dry-run) if: ${{ inputs.dry_run }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: release-artifacts path: | @@ -777,7 +777,7 @@ jobs: - name: Upload latest sbom artifact to actions (if dry-run) if: inputs.dry_run && steps.build_docker.outputs.created_latest_tag == 'true' - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: latest-sbom-artifact path: ./coder_latest_sbom.spdx.json @@ -785,7 +785,7 @@ jobs: - name: Send repository-dispatch event if: ${{ !inputs.dry_run }} - uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0 + uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0 with: token: ${{ secrets.CDRCI_GITHUB_TOKEN }} repository: coder/packages diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index ba366fb72428c..c18b2d09a8233 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -30,7 +30,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 with: results_file: results.sarif results_format: sarif @@ -39,7 +39,7 @@ jobs: # Upload the results as artifacts. - name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: SARIF file path: results.sarif @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@192325c86100d080feab897ff886c34abd4c83a3 # v3.29.5 + uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index c9debc6c87f66..21452b0b89f6f 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -40,7 +40,7 @@ jobs: uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.29.5 + uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 with: languages: go, javascript @@ -50,7 +50,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.29.5 + uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 - name: Send Slack notification on failure if: ${{ failure() }} @@ -154,13 +154,13 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@192325c86100d080feab897ff886c34abd4c83a3 # v3.29.5 + uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 with: sarif_file: trivy-results.sarif category: "Trivy" - name: Upload Trivy scan results as an artifact - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: trivy path: trivy-results.sarif diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 15b3996ab61eb..75fb201bd5753 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -23,7 +23,7 @@ jobs: egress-policy: audit - name: stale - uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 + uses: 
actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 with: stale-issue-label: "stale" stale-pr-label: "stale" @@ -125,7 +125,7 @@ jobs: egress-policy: audit - name: Delete PR Cleanup workflow runs - uses: Mattraks/delete-workflow-runs@39f0bbed25d76b34de5594dceab824811479e5de # v2.0.6 + uses: Mattraks/delete-workflow-runs@ab482449ba468316e9a8801e092d0405715c5e6d # v2.1.0 with: token: ${{ github.token }} repository: ${{ github.repository }} @@ -134,7 +134,7 @@ jobs: delete_workflow_pattern: pr-cleanup.yaml - name: Delete PR Deploy workflow skipped runs - uses: Mattraks/delete-workflow-runs@39f0bbed25d76b34de5594dceab824811479e5de # v2.0.6 + uses: Mattraks/delete-workflow-runs@ab482449ba468316e9a8801e092d0405715c5e6d # v2.1.0 with: token: ${{ github.token }} repository: ${{ github.repository }} diff --git a/.github/workflows/traiage.yaml b/.github/workflows/traiage.yaml index 615c5bec19e7f..8560af091d348 100644 --- a/.github/workflows/traiage.yaml +++ b/.github/workflows/traiage.yaml @@ -1,6 +1,9 @@ name: AI Triage Automation on: + issues: + types: + - labeled workflow_dispatch: inputs: issue_url: @@ -10,50 +13,122 @@ on: template_name: description: "Coder template to use for workspace" required: true - default: "traiage" + default: "coder" type: string template_preset: description: "Template preset to use" required: true - default: "Default" + default: "none" type: string prefix: description: "Prefix for workspace name" required: false default: "traiage" type: string - cleanup: - description: "Cleanup workspace after triage." 
- required: false - default: false - type: boolean jobs: traiage: name: Triage GitHub Issue with Claude Code runs-on: ubuntu-latest + if: github.event.label.name == 'traiage' || github.event_name == 'workflow_dispatch' timeout-minutes: 30 env: CODER_URL: ${{ secrets.TRAIAGE_CODER_URL }} CODER_SESSION_TOKEN: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }} - TEMPLATE_NAME: ${{ inputs.template_name }} permissions: contents: read issues: write actions: write steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - fetch-depth: 0 + # This is only required for testing locally using nektos/act, so leaving commented out. + # An alternative is to use a larger or custom image. + # - name: Install Github CLI + # id: install-gh + # run: | + # (type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \ + # && sudo mkdir -p -m 755 /etc/apt/keyrings \ + # && out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + # && cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \ + # && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ + # && sudo mkdir -p -m 755 /etc/apt/sources.list.d \ + # && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + # && sudo apt update \ + # && sudo apt install gh -y + + - name: Determine Inputs + id: determine-inputs + if: always() + env: + GITHUB_ACTOR: ${{ github.actor }} + GITHUB_EVENT_ISSUE_HTML_URL: ${{ github.event.issue.html_url }} + GITHUB_EVENT_NAME: ${{ github.event_name }} + GITHUB_EVENT_USER_ID: ${{ github.event.sender.id }} + GITHUB_EVENT_USER_LOGIN: ${{ github.event.sender.login }} + INPUTS_ISSUE_URL: ${{ inputs.issue_url }} + INPUTS_TEMPLATE_NAME: ${{ inputs.template_name || 'coder' }} + INPUTS_TEMPLATE_PRESET: ${{ inputs.template_preset 
|| 'none'}} + INPUTS_PREFIX: ${{ inputs.prefix || 'traiage' }} + GH_TOKEN: ${{ github.token }} + run: | + echo "Using template name: ${INPUTS_TEMPLATE_NAME}" + echo "template_name=${INPUTS_TEMPLATE_NAME}" >> "${GITHUB_OUTPUT}" + + echo "Using template preset: ${INPUTS_TEMPLATE_PRESET}" + echo "template_preset=${INPUTS_TEMPLATE_PRESET}" >> "${GITHUB_OUTPUT}" + + echo "Using prefix: ${INPUTS_PREFIX}" + echo "prefix=${INPUTS_PREFIX}" >> "${GITHUB_OUTPUT}" + + # For workflow_dispatch, use the actor who triggered it + # For issues events, use the issue author. + if [[ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]]; then + if ! GITHUB_USER_ID=$(gh api "users/${GITHUB_ACTOR}" --jq '.id'); then + echo "::error::Failed to get GitHub user ID for actor ${GITHUB_ACTOR}" + exit 1 + fi + echo "Using workflow_dispatch actor: ${GITHUB_ACTOR} (ID: ${GITHUB_USER_ID})" + echo "github_user_id=${GITHUB_USER_ID}" >> "${GITHUB_OUTPUT}" + echo "github_username=${GITHUB_ACTOR}" >> "${GITHUB_OUTPUT}" + + echo "Using issue URL: ${INPUTS_ISSUE_URL}" + echo "issue_url=${INPUTS_ISSUE_URL}" >> "${GITHUB_OUTPUT}" + + exit 0 + elif [[ "${GITHUB_EVENT_NAME}" == "issues" ]]; then + GITHUB_USER_ID=${GITHUB_EVENT_USER_ID} + echo "Using issue author: ${GITHUB_EVENT_USER_LOGIN} (ID: ${GITHUB_USER_ID})" + echo "github_user_id=${GITHUB_USER_ID}" >> "${GITHUB_OUTPUT}" + echo "github_username=${GITHUB_EVENT_USER_LOGIN}" >> "${GITHUB_OUTPUT}" + + echo "Using issue URL: ${GITHUB_EVENT_ISSUE_HTML_URL}" + echo "issue_url=${GITHUB_EVENT_ISSUE_HTML_URL}" >> "${GITHUB_OUTPUT}" + + exit 0 + else + echo "::error::Unsupported event type: ${GITHUB_EVENT_NAME}" + exit 1 + fi + + - name: Verify push access + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GH_TOKEN: ${{ github.token }} + GITHUB_USERNAME: ${{ steps.determine-inputs.outputs.github_username }} + GITHUB_USER_ID: ${{ steps.determine-inputs.outputs.github_user_id }} + run: | + # Query the actor’s permission on this repo + can_push="$(gh api 
"/repos/${GITHUB_REPOSITORY}/collaborators/${GITHUB_USERNAME}/permission" --jq '.user.permissions.push')" + if [[ "${can_push}" != "true" ]]; then + echo "::error title=Access Denied::${GITHUB_USERNAME} does not have push access to ${GITHUB_REPOSITORY}" + exit 1 + fi - name: Extract context key from issue id: extract-context env: - ISSUE_URL: ${{ inputs.issue_url }} - GITHUB_TOKEN: ${{ github.token }} + ISSUE_URL: ${{ steps.determine-inputs.outputs.issue_url }} + GH_TOKEN: ${{ github.token }} run: | issue_number="$(gh issue view "${ISSUE_URL}" --json number --jq '.number')" context_key="gh-${issue_number}" @@ -82,11 +157,9 @@ jobs: id: get-coder-username env: CODER_SESSION_TOKEN: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }} - GITHUB_USER_ID: ${{ - (github.event_name == 'workflow_dispatch' && github.actor_id) - }} + GH_TOKEN: ${{ github.token }} + GITHUB_USER_ID: ${{ steps.determine-inputs.outputs.github_user_id }} run: | - [[ -z "${GITHUB_USER_ID}" || "${GITHUB_USER_ID}" == "null" ]] && echo "No GitHub actor ID found" && exit 1 user_json=$( coder users list --github-user-id="${GITHUB_USER_ID}" --output=json ) @@ -94,29 +167,39 @@ jobs: [[ -z "${coder_username}" || "${coder_username}" == "null" ]] && echo "No Coder user with GitHub user ID ${GITHUB_USER_ID} found" && exit 1 echo "coder_username=${coder_username}" >> "${GITHUB_OUTPUT}" + - name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + fetch-depth: 0 + # TODO(Cian): this is a good use-case for 'recipes' - name: Create Coder task id: create-task env: CODER_USERNAME: ${{ steps.get-coder-username.outputs.coder_username }} CONTEXT_KEY: ${{ steps.extract-context.outputs.context_key }} - GITHUB_TOKEN: ${{ github.token }} - ISSUE_URL: ${{ inputs.issue_url }} - PREFIX: ${{ inputs.prefix }} + GH_TOKEN: ${{ github.token }} + GITHUB_REPOSITORY: ${{ github.repository }} + ISSUE_URL: ${{ steps.determine-inputs.outputs.issue_url }} + PREFIX: 
${{ steps.determine-inputs.outputs.prefix }} RUN_ID: ${{ github.run_id }} + TEMPLATE_NAME: ${{ steps.determine-inputs.outputs.template_name }} TEMPLATE_PARAMETERS: ${{ secrets.TRAIAGE_TEMPLATE_PARAMETERS }} - TEMPLATE_PRESET: ${{ inputs.template_preset }} + TEMPLATE_PRESET: ${{ steps.determine-inputs.outputs.template_preset }} run: | # Fetch issue description using `gh` CLI - issue_description=$(gh issue view "${ISSUE_URL}") + #shellcheck disable=SC2016 # The template string should not be subject to shell expansion + issue_description=$(gh issue view "${ISSUE_URL}" \ + --json 'title,body,comments' \ + --template '{{printf "%s\n\n%s\n\nComments:\n" .title .body}}{{range $k, $v := .comments}} - {{index $v.author "login"}}: {{printf "%s\n" $v.body}}{{end}}') # Write a prompt to PROMPT_FILE PROMPT=$(cat <> "${GITHUB_OUTPUT}" - echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_ENV}" - - - name: Create and upload archive - id: create-archive - if: inputs.cleanup - env: - BUCKET_PREFIX: "gs://coder-traiage-outputs/traiage" - run: | - echo "Creating archive for workspace: $TASK_NAME" - ./scripts/traiage.sh archive - echo "archive_url=${BUCKET_PREFIX%%/}/$TASK_NAME.tar.gz" >> "${GITHUB_OUTPUT}" - - - name: Generate a summary of the changes and post a comment on GitHub. 
- id: generate-summary - if: inputs.cleanup - env: - ARCHIVE_URL: ${{ steps.create-archive.outputs.archive_url }} - BUCKET_PREFIX: "gs://coder-traiage-outputs/traiage" - CONTEXT_KEY: ${{ steps.extract-context.outputs.context_key }} - GITHUB_TOKEN: ${{ github.token }} - GITHUB_REPOSITORY: ${{ github.repository }} - ISSUE_URL: ${{ inputs.issue_url }} - TASK_NAME: ${{ steps.create-task.outputs.TASK_NAME }} - run: | - SUMMARY_FILE=$(mktemp) - trap 'rm -f "${SUMMARY_FILE}"' EXIT - AUTO_SUMMARY=$(./scripts/traiage.sh summary) - { - echo "## TrAIage Results" - echo "- **Issue URL:** ${ISSUE_URL}" - echo "- **Context Key:** ${CONTEXT_KEY}" - echo "- **Workspace:** ${TASK_NAME}" - echo "- **Archive URL:** ${ARCHIVE_URL}" - echo - echo "${AUTO_SUMMARY}" - echo - echo "To fetch the output to your own workspace:" - echo - echo '```bash' - echo "BUCKET_PREFIX=${BUCKET_PREFIX} TASK_NAME=${TASK_NAME} ./scripts/traiage.sh resume" - echo '```' - echo - } >> "${SUMMARY_FILE}" - if [[ "${ISSUE_URL}" == "https://github.com/${GITHUB_REPOSITORY}"* ]]; then - gh issue comment "${ISSUE_URL}" --body-file "${SUMMARY_FILE}" --create-if-none --edit-last + gh issue comment "${ISSUE_URL}" --body "Task created: https://dev.coder.com/tasks/${CODER_USERNAME}/${TASK_NAME}" --create-if-none --edit-last else echo "Skipping comment on other repo." 
fi - cat "${SUMMARY_FILE}" >> "${GITHUB_STEP_SUMMARY}" - - - name: Cleanup task - if: inputs.cleanup && steps.create-task.outputs.TASK_NAME != '' && steps.create-archive.outputs.archive_url != '' - run: | - echo "Cleaning up task: $TASK_NAME" - ./scripts/traiage.sh delete || true + echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_OUTPUT}" + echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_ENV}" diff --git a/.github/workflows/weekly-docs.yaml b/.github/workflows/weekly-docs.yaml index 85e5ce58e0ee0..a7ae448902d0c 100644 --- a/.github/workflows/weekly-docs.yaml +++ b/.github/workflows/weekly-docs.yaml @@ -31,7 +31,7 @@ jobs: persist-credentials: false - name: Check Markdown links - uses: umbrelladocs/action-linkspector@874d01cae9fd488e3077b08952093235bd626977 # v1.3.7 + uses: umbrelladocs/action-linkspector@652f85bc57bb1e7d4327260decc10aa68f7694c3 # v1.4.0 id: markdown-link-check # checks all markdown files from /docs including all subfolders with: diff --git a/.github/zizmor.yml b/.github/zizmor.yml new file mode 100644 index 0000000000000..e125592cfdc6a --- /dev/null +++ b/.github/zizmor.yml @@ -0,0 +1,4 @@ +rules: + cache-poisoning: + ignore: + - "ci.yaml:184" diff --git a/.gitignore b/.gitignore index 5aa08b2512527..9b1edcec2d8f9 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,9 @@ node_modules/ vendor/ yarn-error.log +# Test output files +test-output/ + # VSCode settings. **/.vscode/* # Allow VSCode recommendations and default settings in project root. @@ -86,3 +89,5 @@ result __debug_bin* **/.claude/settings.local.json + +/.env diff --git a/.golangci.yaml b/.golangci.yaml index aeebaf47e29a6..f03007f81e847 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -169,6 +169,16 @@ linters-settings: - name: var-declaration - name: var-naming - name: waitgroup-by-value + usetesting: + # Only os-setenv is enabled because we migrated to usetesting from another linter that + # only covered os-setenv. 
+ os-setenv: true + os-create-temp: false + os-mkdir-temp: false + os-temp-dir: false + os-chdir: false + context-background: false + context-todo: false # irrelevant as of Go v1.22: https://go.dev/blog/loopvar-preview govet: @@ -252,7 +262,6 @@ linters: # - wastedassign - staticcheck - - tenv # In Go, it's possible for a package to test it's internal functionality # without testing any exported functions. This is enabled to promote # decomposing a package before testing it's internals. A function caller @@ -265,4 +274,5 @@ linters: - typecheck - unconvert - unused + - usetesting - dupl diff --git a/.vscode/settings.json b/.vscode/settings.json index 6057824039b6d..762ed91595ded 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -61,5 +61,6 @@ "typos.config": ".github/workflows/typos.toml", "[markdown]": { "editor.defaultFormatter": "DavidAnson.vscode-markdownlint" - } + }, + "biome.lsp.bin": "site/node_modules/.bin/biome" } diff --git a/CLAUDE.md b/CLAUDE.md index 3de33a5466054..e6d8f0bcf9a29 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,11 +1,41 @@ # Coder Development Guidelines +You are an experienced, pragmatic software engineer. You don't over-engineer a solution when a simple one is possible. +Rule #1: If you want exception to ANY rule, YOU MUST STOP and get explicit permission first. BREAKING THE LETTER OR SPIRIT OF THE RULES IS FAILURE. + +## Foundational rules + +- Doing it right is better than doing it fast. You are not in a rush. NEVER skip steps or take shortcuts. +- Tedious, systematic work is often the correct solution. Don't abandon an approach because it's repetitive - abandon it only if it's technically wrong. +- Honesty is a core value. + +## Our relationship + +- Act as a critical peer reviewer. Your job is to disagree with me when I'm wrong, not to please me. Prioritize accuracy and reasoning over agreement. 
+- YOU MUST speak up immediately when you don't know something or we're in over our heads +- YOU MUST call out bad ideas, unreasonable expectations, and mistakes - I depend on this +- NEVER be agreeable just to be nice - I NEED your HONEST technical judgment +- NEVER write the phrase "You're absolutely right!" You are not a sycophant. We're working together because I value your opinion. Do not agree with me unless you can justify it with evidence or reasoning. +- YOU MUST ALWAYS STOP and ask for clarification rather than making assumptions. +- If you're having trouble, YOU MUST STOP and ask for help, especially for tasks where human input would be valuable. +- When you disagree with my approach, YOU MUST push back. Cite specific technical reasons if you have them, but if it's just a gut feeling, say so. +- If you're uncomfortable pushing back out loud, just say "Houston, we have a problem". I'll know what you mean. +- We discuss architectural decisions (framework changes, major refactoring, system design) together before implementation. Routine fixes and clear implementations don't need discussion. + +## Proactiveness + +When asked to do something, just do it - including obvious follow-up actions needed to complete the task properly.
+Only pause to ask for confirmation when: + +- Multiple valid approaches exist and the choice matters +- The action would delete or significantly restructure existing code +- You genuinely don't understand what's being asked +- Your partner asked a question (answer the question, don't jump to implementation) + @.claude/docs/WORKFLOWS.md -@.cursorrules -@README.md @package.json -## πŸš€ Essential Commands +## Essential Commands | Task | Command | Notes | |-------------------|--------------------------|----------------------------------| @@ -21,22 +51,13 @@ | **Format** | `make fmt` | Auto-format code | | **Clean** | `make clean` | Clean build artifacts | -### Frontend Commands (site directory) - -- `pnpm build` - Build frontend -- `pnpm dev` - Run development server -- `pnpm check` - Run code checks -- `pnpm format` - Format frontend code -- `pnpm lint` - Lint frontend code -- `pnpm test` - Run frontend tests - ### Documentation Commands - `pnpm run format-docs` - Format markdown tables in docs - `pnpm run lint-docs` - Lint and fix markdown files - `pnpm run storybook` - Run Storybook (from site directory) -## πŸ”§ Critical Patterns +## Critical Patterns ### Database Changes (ALWAYS FOLLOW) @@ -78,7 +99,7 @@ app, err := api.Database.GetOAuth2ProviderAppByClientID(dbauthz.AsSystemRestrict app, err := api.Database.GetOAuth2ProviderAppByClientID(ctx, clientID) ``` -## πŸ“‹ Quick Reference +## Quick Reference ### Full workflows available in imported WORKFLOWS.md @@ -88,14 +109,14 @@ app, err := api.Database.GetOAuth2ProviderAppByClientID(ctx, clientID) - [ ] Check if feature touches database - you'll need migrations - [ ] Check if feature touches audit logs - update `enterprise/audit/table.go` -## πŸ—οΈ Architecture +## Architecture - **coderd**: Main API service - **provisionerd**: Infrastructure provisioning - **Agents**: Workspace services (SSH, port forwarding) - **Database**: PostgreSQL with `dbauthz` authorization -## πŸ§ͺ Testing +## Testing ### Race Condition 
Prevention @@ -112,21 +133,21 @@ app, err := api.Database.GetOAuth2ProviderAppByClientID(ctx, clientID) NEVER use `time.Sleep` to mitigate timing issues. If an issue seems like it should use `time.Sleep`, read through https://github.com/coder/quartz and specifically the [README](https://github.com/coder/quartz/blob/main/README.md) to better understand how to handle timing issues. -## 🎯 Code Style +## Code Style ### Detailed guidelines in imported WORKFLOWS.md - Follow [Uber Go Style Guide](https://github.com/uber-go/guide/blob/master/style.md) - Commit format: `type(scope): message` -## πŸ“š Detailed Development Guides +## Detailed Development Guides @.claude/docs/OAUTH2.md @.claude/docs/TESTING.md @.claude/docs/TROUBLESHOOTING.md @.claude/docs/DATABASE.md -## 🚨 Common Pitfalls +## Common Pitfalls 1. **Audit table errors** β†’ Update `enterprise/audit/table.go` 2. **OAuth2 errors** β†’ Return RFC-compliant format diff --git a/CODEOWNERS b/CODEOWNERS index fde24a9d874ed..a3889d27bf16d 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -18,18 +18,6 @@ coderd/rbac/ @Emyrk scripts/apitypings/ @Emyrk scripts/gensite/ @aslilac -site/ @aslilac @Parkreiner -site/src/hooks/ @Parkreiner -# These rules intentionally do not specify any owners. More specific rules -# override less specific rules, so these files are "ignored" by the site/ rule. -site/e2e/google/protobuf/timestampGenerated.ts -site/e2e/provisionerGenerated.ts -site/src/api/countriesGenerated.ts -site/src/api/rbacresourcesGenerated.ts -site/src/api/typesGenerated.ts -site/src/testHelpers/entities.ts -site/CLAUDE.md - # The blood and guts of the autostop algorithm, which is quite complex and # requires elite ball knowledge of most of the scheduling code to make changes # without inadvertently affecting other parts of the codebase. 
diff --git a/Makefile b/Makefile index 8f7bc9d17eb28..7ecb64975e548 100644 --- a/Makefile +++ b/Makefile @@ -636,8 +636,8 @@ TAILNETTEST_MOCKS := \ tailnet/tailnettest/subscriptionmock.go AIBRIDGED_MOCKS := \ - enterprise/x/aibridged/aibridgedmock/clientmock.go \ - enterprise/x/aibridged/aibridgedmock/poolmock.go + enterprise/aibridged/aibridgedmock/clientmock.go \ + enterprise/aibridged/aibridgedmock/poolmock.go GEN_FILES := \ tailnet/proto/tailnet.pb.go \ @@ -645,7 +645,7 @@ GEN_FILES := \ provisionersdk/proto/provisioner.pb.go \ provisionerd/proto/provisionerd.pb.go \ vpn/vpn.pb.go \ - enterprise/x/aibridged/proto/aibridged.pb.go \ + enterprise/aibridged/proto/aibridged.pb.go \ $(DB_GEN_FILES) \ $(SITE_GEN_FILES) \ coderd/rbac/object_gen.go \ @@ -676,6 +676,7 @@ gen/db: $(DB_GEN_FILES) .PHONY: gen/db gen/golden-files: \ + agent/unit/testdata/.gen-golden \ cli/testdata/.gen-golden \ coderd/.gen-golden \ coderd/notifications/.gen-golden \ @@ -696,7 +697,7 @@ gen/mark-fresh: provisionersdk/proto/provisioner.pb.go \ provisionerd/proto/provisionerd.pb.go \ vpn/vpn.pb.go \ - enterprise/x/aibridged/proto/aibridged.pb.go \ + enterprise/aibridged/proto/aibridged.pb.go \ coderd/database/dump.sql \ $(DB_GEN_FILES) \ site/src/api/typesGenerated.ts \ @@ -767,8 +768,8 @@ codersdk/workspacesdk/agentconnmock/agentconnmock.go: codersdk/workspacesdk/agen go generate ./codersdk/workspacesdk/agentconnmock/ touch "$@" -$(AIBRIDGED_MOCKS): enterprise/x/aibridged/client.go enterprise/x/aibridged/pool.go - go generate ./enterprise/x/aibridged/aibridgedmock/ +$(AIBRIDGED_MOCKS): enterprise/aibridged/client.go enterprise/aibridged/pool.go + go generate ./enterprise/aibridged/aibridgedmock/ touch "$@" agent/agentcontainers/dcspec/dcspec_gen.go: \ @@ -821,13 +822,13 @@ vpn/vpn.pb.go: vpn/vpn.proto --go_opt=paths=source_relative \ ./vpn/vpn.proto -enterprise/x/aibridged/proto/aibridged.pb.go: enterprise/x/aibridged/proto/aibridged.proto +enterprise/aibridged/proto/aibridged.pb.go: 
enterprise/aibridged/proto/aibridged.proto protoc \ --go_out=. \ --go_opt=paths=source_relative \ --go-drpc_out=. \ --go-drpc_opt=paths=source_relative \ - ./enterprise/x/aibridged/proto/aibridged.proto + ./enterprise/aibridged/proto/aibridged.proto site/src/api/typesGenerated.ts: site/node_modules/.installed $(wildcard scripts/apitypings/*) $(shell find ./codersdk $(FIND_EXCLUSIONS) -type f -name '*.go') # -C sets the directory for the go run command @@ -952,6 +953,10 @@ clean/golden-files: -type f -name '*.golden' -delete .PHONY: clean/golden-files +agent/unit/testdata/.gen-golden: $(wildcard agent/unit/testdata/*.golden) $(GO_SRC_FILES) $(wildcard agent/unit/*_test.go) + TZ=UTC go test ./agent/unit -run="TestGraph" -update + touch "$@" + cli/testdata/.gen-golden: $(wildcard cli/testdata/*.golden) $(wildcard cli/*.tpl) $(GO_SRC_FILES) $(wildcard cli/*_test.go) TZ=UTC go test ./cli -run="Test(CommandHelp|ServerYAML|ErrorExamples|.*Golden)" -update touch "$@" @@ -1020,19 +1025,11 @@ endif TEST_PACKAGES ?= ./... -warm-go-cache-db-cleaner: - # ensure Go's build cache for the cleanercmd is fresh so that tests don't have to build from scratch. This - # could take some time and counts against the test's timeout, which can lead to flakes. - # c.f. https://github.com/coder/internal/issues/1026 - mkdir -p build - $(GIT_FLAGS) go build -o ./build/cleaner github.com/coder/coder/v2/coderd/database/dbtestutil/cleanercmd -.PHONY: warm-go-cache-db-cleaner - -test: warm-go-cache-db-cleaner +test: $(GIT_FLAGS) gotestsum --format standard-quiet $(GOTESTSUM_RETRY_FLAGS) --packages="$(TEST_PACKAGES)" -- $(GOTEST_FLAGS) .PHONY: test -test-cli: warm-go-cache-db-cleaner +test-cli: $(MAKE) test TEST_PACKAGES="./cli..." .PHONY: test-cli @@ -1185,3 +1182,8 @@ endif dogfood/coder/nix.hash: flake.nix flake.lock sha256sum flake.nix flake.lock >./dogfood/coder/nix.hash + +# Count the number of test databases created per test package. 
+count-test-databases: + PGPASSWORD=postgres psql -h localhost -U postgres -d coder_testing -P pager=off -c 'SELECT test_package, count(*) as count from test_databases GROUP BY test_package ORDER BY count DESC' +.PHONY: count-test-databases diff --git a/agent/agent.go b/agent/agent.go index aed6652de612c..ab882a80efa4a 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -781,11 +781,15 @@ func (a *agent) reportConnectionsLoop(ctx context.Context, aAPI proto.DRPCAgentC logger.Debug(ctx, "reporting connection") _, err := aAPI.ReportConnection(ctx, payload) if err != nil { - return xerrors.Errorf("failed to report connection: %w", err) + // Do not fail the loop if we fail to report a connection, just + // log a warning. + // Related to https://github.com/coder/coder/issues/20194 + logger.Warn(ctx, "failed to report connection to server", slog.Error(err)) + // keep going, we still need to remove it from the slice + } else { + logger.Debug(ctx, "successfully reported connection") } - logger.Debug(ctx, "successfully reported connection") - // Remove the payload we sent. a.reportConnectionsMu.Lock() a.reportConnections[0] = nil // Release the pointer from the underlying array. @@ -816,6 +820,13 @@ func (a *agent) reportConnection(id uuid.UUID, connectionType proto.Connection_T ip = host } + // If the IP is "localhost" (which it can be in some cases), set it to + // 127.0.0.1 instead. + // Related to https://github.com/coder/coder/issues/20194 + if ip == "localhost" { + ip = "127.0.0.1" + } + a.reportConnectionsMu.Lock() defer a.reportConnectionsMu.Unlock() diff --git a/agent/agent_test.go b/agent/agent_test.go index e8b3b99a95387..d4d40b56bb92e 100644 --- a/agent/agent_test.go +++ b/agent/agent_test.go @@ -1807,11 +1807,12 @@ func TestAgent_ReconnectingPTY(t *testing.T) { //nolint:dogsled conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0) + idConnectionReport := uuid.New() id := uuid.New() // Test that the connection is reported. 
This must be tested in the // first connection because we care about verifying all of these. - netConn0, err := conn.ReconnectingPTY(ctx, id, 80, 80, "bash --norc") + netConn0, err := conn.ReconnectingPTY(ctx, idConnectionReport, 80, 80, "bash --norc") require.NoError(t, err) _ = netConn0.Close() assertConnectionReport(t, agentClient, proto.Connection_RECONNECTING_PTY, 0, "") @@ -2027,7 +2028,8 @@ func runSubAgentMain() int { ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() req = req.WithContext(ctx) - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) if err != nil { _, _ = fmt.Fprintf(os.Stderr, "agent connection failed: %v\n", err) return 11 @@ -3460,11 +3462,7 @@ func TestAgent_Metrics_SSH(t *testing.T) { registry := prometheus.NewRegistry() //nolint:dogsled - conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{ - // Make sure we always get a DERP connection for - // currently_reachable_peers. - DisableDirectConnections: true, - }, 0, func(_ *agenttest.Client, o *agent.Options) { + conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) { o.PrometheusRegistry = registry }) @@ -3479,16 +3477,31 @@ func TestAgent_Metrics_SSH(t *testing.T) { err = session.Shell() require.NoError(t, err) - expected := []*proto.Stats_Metric{ + expected := []struct { + Name string + Type proto.Stats_Metric_Type + CheckFn func(float64) error + Labels []*proto.Stats_Metric_Label + }{ { - Name: "agent_reconnecting_pty_connections_total", - Type: proto.Stats_Metric_COUNTER, - Value: 0, + Name: "agent_reconnecting_pty_connections_total", + Type: proto.Stats_Metric_COUNTER, + CheckFn: func(v float64) error { + if v == 0 { + return nil + } + return xerrors.Errorf("expected 0, got %f", v) + }, }, { - Name: "agent_sessions_total", - Type: proto.Stats_Metric_COUNTER, - Value: 1, + Name: "agent_sessions_total", + Type: proto.Stats_Metric_COUNTER, + CheckFn: 
func(v float64) error { + if v == 1 { + return nil + } + return xerrors.Errorf("expected 1, got %f", v) + }, Labels: []*proto.Stats_Metric_Label{ { Name: "magic_type", @@ -3501,24 +3514,44 @@ func TestAgent_Metrics_SSH(t *testing.T) { }, }, { - Name: "agent_ssh_server_failed_connections_total", - Type: proto.Stats_Metric_COUNTER, - Value: 0, + Name: "agent_ssh_server_failed_connections_total", + Type: proto.Stats_Metric_COUNTER, + CheckFn: func(v float64) error { + if v == 0 { + return nil + } + return xerrors.Errorf("expected 0, got %f", v) + }, }, { - Name: "agent_ssh_server_sftp_connections_total", - Type: proto.Stats_Metric_COUNTER, - Value: 0, + Name: "agent_ssh_server_sftp_connections_total", + Type: proto.Stats_Metric_COUNTER, + CheckFn: func(v float64) error { + if v == 0 { + return nil + } + return xerrors.Errorf("expected 0, got %f", v) + }, }, { - Name: "agent_ssh_server_sftp_server_errors_total", - Type: proto.Stats_Metric_COUNTER, - Value: 0, + Name: "agent_ssh_server_sftp_server_errors_total", + Type: proto.Stats_Metric_COUNTER, + CheckFn: func(v float64) error { + if v == 0 { + return nil + } + return xerrors.Errorf("expected 0, got %f", v) + }, }, { - Name: "coderd_agentstats_currently_reachable_peers", - Type: proto.Stats_Metric_GAUGE, - Value: 1, + Name: "coderd_agentstats_currently_reachable_peers", + Type: proto.Stats_Metric_GAUGE, + CheckFn: func(float64) error { + // We can't reliably ping a peer here, and networking is out of + // scope of this test, so we just test that the metric exists + // with the correct labels. 
+ return nil + }, Labels: []*proto.Stats_Metric_Label{ { Name: "connection_type", @@ -3527,9 +3560,11 @@ func TestAgent_Metrics_SSH(t *testing.T) { }, }, { - Name: "coderd_agentstats_currently_reachable_peers", - Type: proto.Stats_Metric_GAUGE, - Value: 0, + Name: "coderd_agentstats_currently_reachable_peers", + Type: proto.Stats_Metric_GAUGE, + CheckFn: func(float64) error { + return nil + }, Labels: []*proto.Stats_Metric_Label{ { Name: "connection_type", @@ -3538,9 +3573,20 @@ func TestAgent_Metrics_SSH(t *testing.T) { }, }, { - Name: "coderd_agentstats_startup_script_seconds", - Type: proto.Stats_Metric_GAUGE, - Value: 1, + Name: "coderd_agentstats_startup_script_seconds", + Type: proto.Stats_Metric_GAUGE, + CheckFn: func(f float64) error { + if f >= 0 { + return nil + } + return xerrors.Errorf("expected >= 0, got %f", f) + }, + Labels: []*proto.Stats_Metric_Label{ + { + Name: "success", + Value: "true", + }, + }, }, } @@ -3562,11 +3608,10 @@ func TestAgent_Metrics_SSH(t *testing.T) { for _, m := range mf.GetMetric() { assert.Equal(t, expected[i].Name, mf.GetName()) assert.Equal(t, expected[i].Type.String(), mf.GetType().String()) - // Value is max expected if expected[i].Type == proto.Stats_Metric_GAUGE { - assert.GreaterOrEqualf(t, expected[i].Value, m.GetGauge().GetValue(), "expected %s to be greater than or equal to %f, got %f", expected[i].Name, expected[i].Value, m.GetGauge().GetValue()) + assert.NoError(t, expected[i].CheckFn(m.GetGauge().GetValue()), "check fn for %s failed", expected[i].Name) } else if expected[i].Type == proto.Stats_Metric_COUNTER { - assert.GreaterOrEqualf(t, expected[i].Value, m.GetCounter().GetValue(), "expected %s to be greater than or equal to %f, got %f", expected[i].Name, expected[i].Value, m.GetCounter().GetValue()) + assert.NoError(t, expected[i].CheckFn(m.GetCounter().GetValue()), "check fn for %s failed", expected[i].Name) } for j, lbl := range expected[i].Labels { assert.Equal(t, m.GetLabel()[j], &promgo.LabelPair{ diff 
--git a/agent/agentcontainers/api.go b/agent/agentcontainers/api.go index d77d4209cb245..b5a6ed74d597a 100644 --- a/agent/agentcontainers/api.go +++ b/agent/agentcontainers/api.go @@ -682,8 +682,6 @@ func (api *API) updaterLoop() { } else { prevErr = nil } - default: - api.logger.Debug(api.ctx, "updater loop ticker skipped, update in progress") } return nil // Always nil to keep the ticker going. diff --git a/agent/apphealth.go b/agent/apphealth.go index 1c4e1d126902c..4fb551077a30f 100644 --- a/agent/apphealth.go +++ b/agent/apphealth.go @@ -63,6 +63,7 @@ func NewAppHealthReporterWithClock( // run a ticker for each app health check. var mu sync.RWMutex failures := make(map[uuid.UUID]int, 0) + client := &http.Client{} for _, nextApp := range apps { if !shouldStartTicker(nextApp) { continue @@ -91,7 +92,7 @@ func NewAppHealthReporterWithClock( if err != nil { return err } - res, err := http.DefaultClient.Do(req) + res, err := client.Do(req) if err != nil { return err } diff --git a/agent/reconnectingpty/screen.go b/agent/reconnectingpty/screen.go index 04e1861eade94..ffab2f7d5bab8 100644 --- a/agent/reconnectingpty/screen.go +++ b/agent/reconnectingpty/screen.go @@ -25,6 +25,7 @@ import ( // screenReconnectingPTY provides a reconnectable PTY via `screen`. type screenReconnectingPTY struct { + logger slog.Logger execer agentexec.Execer command *pty.Cmd @@ -62,6 +63,7 @@ type screenReconnectingPTY struct { // own which causes it to spawn with the specified size. 
func newScreen(ctx context.Context, logger slog.Logger, execer agentexec.Execer, cmd *pty.Cmd, options *Options) *screenReconnectingPTY { rpty := &screenReconnectingPTY{ + logger: logger, execer: execer, command: cmd, metrics: options.Metrics, @@ -173,6 +175,7 @@ func (rpty *screenReconnectingPTY) Attach(ctx context.Context, _ string, conn ne ptty, process, err := rpty.doAttach(ctx, conn, height, width, logger) if err != nil { + logger.Debug(ctx, "unable to attach to screen reconnecting pty", slog.Error(err)) if errors.Is(err, context.Canceled) { // Likely the process was too short-lived and canceled the version command. // TODO: Is it worth distinguishing between that and a cancel from the @@ -182,6 +185,7 @@ func (rpty *screenReconnectingPTY) Attach(ctx context.Context, _ string, conn ne } return err } + logger.Debug(ctx, "attached to screen reconnecting pty") defer func() { // Log only for debugging since the process might have already exited on its @@ -403,6 +407,7 @@ func (rpty *screenReconnectingPTY) Wait() { } func (rpty *screenReconnectingPTY) Close(err error) { + rpty.logger.Debug(context.Background(), "closing screen reconnecting pty", slog.Error(err)) // The closing state change will be handled by the lifecycle. rpty.state.setState(StateClosing, err) } diff --git a/agent/unit/graph.go b/agent/unit/graph.go new file mode 100644 index 0000000000000..3d8a6703addf2 --- /dev/null +++ b/agent/unit/graph.go @@ -0,0 +1,174 @@ +package unit + +import ( + "fmt" + "sync" + + "golang.org/x/xerrors" + "gonum.org/v1/gonum/graph/encoding/dot" + "gonum.org/v1/gonum/graph/simple" + "gonum.org/v1/gonum/graph/topo" +) + +// Graph provides a bidirectional interface over gonum's directed graph implementation. +// While the underlying gonum graph is directed, we overlay bidirectional semantics +// by distinguishing between forward and reverse edges. 
Wanting and being wanted by +// other units are related but different concepts that have different graph traversal +// implications when Units update their status. +// +// The graph stores edge types to represent different relationships between units, +// allowing for domain-specific semantics beyond simple connectivity. +type Graph[EdgeType, VertexType comparable] struct { + mu sync.RWMutex + // The underlying gonum graph. It stores vertices and edges without knowing about the types of the vertices and edges. + gonumGraph *simple.DirectedGraph + // Maps vertices to their IDs so that a gonum vertex ID can be used to lookup the vertex type. + vertexToID map[VertexType]int64 + // Maps vertex IDs to their types so that a vertex type can be used to lookup the gonum vertex ID. + idToVertex map[int64]VertexType + // The next ID to assign to a vertex. + nextID int64 + // Store edge types by "fromID->toID" key. This is used to lookup the edge type for a given edge. + edgeTypes map[string]EdgeType +} + +// Edge is a convenience type for representing an edge in the graph. +// It encapsulates the from and to vertices and the edge type itself. +type Edge[EdgeType, VertexType comparable] struct { + From VertexType + To VertexType + Edge EdgeType +} + +// AddEdge adds an edge to the graph. It initializes the graph and metadata on first use, +// checks for cycles, and adds the edge to the gonum graph. 
+func (g *Graph[EdgeType, VertexType]) AddEdge(from, to VertexType, edge EdgeType) error { + g.mu.Lock() + defer g.mu.Unlock() + + if g.gonumGraph == nil { + g.gonumGraph = simple.NewDirectedGraph() + g.vertexToID = make(map[VertexType]int64) + g.idToVertex = make(map[int64]VertexType) + g.edgeTypes = make(map[string]EdgeType) + g.nextID = 1 + } + + fromID := g.getOrCreateVertexID(from) + toID := g.getOrCreateVertexID(to) + + if g.canReach(to, from) { + return xerrors.Errorf("adding edge (%v -> %v) would create a cycle", from, to) + } + + g.gonumGraph.SetEdge(simple.Edge{F: simple.Node(fromID), T: simple.Node(toID)}) + + edgeKey := fmt.Sprintf("%d->%d", fromID, toID) + g.edgeTypes[edgeKey] = edge + + return nil +} + +// GetForwardAdjacentVertices returns all the edges that originate from the given vertex. +func (g *Graph[EdgeType, VertexType]) GetForwardAdjacentVertices(from VertexType) []Edge[EdgeType, VertexType] { + g.mu.RLock() + defer g.mu.RUnlock() + + fromID, exists := g.vertexToID[from] + if !exists { + return []Edge[EdgeType, VertexType]{} + } + + edges := []Edge[EdgeType, VertexType]{} + toNodes := g.gonumGraph.From(fromID) + for toNodes.Next() { + toID := toNodes.Node().ID() + to := g.idToVertex[toID] + + // Get the edge type + edgeKey := fmt.Sprintf("%d->%d", fromID, toID) + edgeType := g.edgeTypes[edgeKey] + + edges = append(edges, Edge[EdgeType, VertexType]{From: from, To: to, Edge: edgeType}) + } + + return edges +} + +// GetReverseAdjacentVertices returns all the edges that terminate at the given vertex. 
+func (g *Graph[EdgeType, VertexType]) GetReverseAdjacentVertices(to VertexType) []Edge[EdgeType, VertexType] { + g.mu.RLock() + defer g.mu.RUnlock() + + toID, exists := g.vertexToID[to] + if !exists { + return []Edge[EdgeType, VertexType]{} + } + + edges := []Edge[EdgeType, VertexType]{} + fromNodes := g.gonumGraph.To(toID) + for fromNodes.Next() { + fromID := fromNodes.Node().ID() + from := g.idToVertex[fromID] + + // Get the edge type + edgeKey := fmt.Sprintf("%d->%d", fromID, toID) + edgeType := g.edgeTypes[edgeKey] + + edges = append(edges, Edge[EdgeType, VertexType]{From: from, To: to, Edge: edgeType}) + } + + return edges +} + +// getOrCreateVertexID returns the ID for a vertex, creating it if it doesn't exist. +func (g *Graph[EdgeType, VertexType]) getOrCreateVertexID(vertex VertexType) int64 { + if id, exists := g.vertexToID[vertex]; exists { + return id + } + + id := g.nextID + g.nextID++ + g.vertexToID[vertex] = id + g.idToVertex[id] = vertex + + // Add the node to the gonum graph + g.gonumGraph.AddNode(simple.Node(id)) + + return id +} + +// canReach checks if there is a path from the start vertex to the end vertex. 
+func (g *Graph[EdgeType, VertexType]) canReach(start, end VertexType) bool { + if start == end { + return true + } + + startID, startExists := g.vertexToID[start] + endID, endExists := g.vertexToID[end] + + if !startExists || !endExists { + return false + } + + // Use gonum's built-in path existence check + return topo.PathExistsIn(g.gonumGraph, simple.Node(startID), simple.Node(endID)) +} + +// ToDOT exports the graph to DOT format for visualization +func (g *Graph[EdgeType, VertexType]) ToDOT(name string) (string, error) { + g.mu.RLock() + defer g.mu.RUnlock() + + if g.gonumGraph == nil { + return "", xerrors.New("graph is not initialized") + } + + // Marshal the graph to DOT format + dotBytes, err := dot.Marshal(g.gonumGraph, name, "", " ") + if err != nil { + return "", xerrors.Errorf("failed to marshal graph to DOT: %w", err) + } + + return string(dotBytes), nil +} diff --git a/agent/unit/graph_test.go b/agent/unit/graph_test.go new file mode 100644 index 0000000000000..3c76756aee88c --- /dev/null +++ b/agent/unit/graph_test.go @@ -0,0 +1,454 @@ +// Package unit_test provides tests for the unit package. +// +// DOT Graph Testing: +// The graph tests use golden files for DOT representation verification. +// To update the golden files: +// make gen/golden-files +// +// The golden files contain the expected DOT representation and can be easily +// inspected, version controlled, and updated when the graph structure changes. 
+package unit_test + +import ( + "bytes" + "flag" + "fmt" + "os" + "path/filepath" + "sync" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/cryptorand" +) + +type testGraphEdge string + +const ( + testEdgeStarted testGraphEdge = "started" + testEdgeCompleted testGraphEdge = "completed" +) + +type testGraphVertex struct { + Name string +} + +type ( + testGraph = unit.Graph[testGraphEdge, *testGraphVertex] + testEdge = unit.Edge[testGraphEdge, *testGraphVertex] +) + +// randInt generates a random integer in the range [0, limit). +func randInt(limit int) int { + if limit <= 0 { + return 0 + } + n, err := cryptorand.Int63n(int64(limit)) + if err != nil { + return 0 + } + return int(n) +} + +// UpdateGoldenFiles indicates golden files should be updated. +// To update the golden files: +// make gen/golden-files +var UpdateGoldenFiles = flag.Bool("update", false, "update .golden files") + +// assertDOTGraph requires that the graph's DOT representation matches the golden file +func assertDOTGraph(t *testing.T, graph *testGraph, goldenName string) { + t.Helper() + + dot, err := graph.ToDOT(goldenName) + require.NoError(t, err) + + goldenFile := filepath.Join("testdata", goldenName+".golden") + if *UpdateGoldenFiles { + t.Logf("update golden file for: %q: %s", goldenName, goldenFile) + err := os.MkdirAll(filepath.Dir(goldenFile), 0o755) + require.NoError(t, err, "want no error creating golden file directory") + err = os.WriteFile(goldenFile, []byte(dot), 0o600) + require.NoError(t, err, "update golden file") + } + + expected, err := os.ReadFile(goldenFile) + require.NoError(t, err, "read golden file, run \"make gen/golden-files\" and commit the changes") + + // Normalize line endings for cross-platform compatibility + expected = normalizeLineEndings(expected) + normalizedDot := normalizeLineEndings([]byte(dot)) + + 
assert.Empty(t, cmp.Diff(string(expected), string(normalizedDot)), "golden file mismatch (-want +got): %s, run \"make gen/golden-files\", verify and commit the changes", goldenFile) +} + +// normalizeLineEndings ensures that all line endings are normalized to \n. +// Required for Windows compatibility. +func normalizeLineEndings(content []byte) []byte { + content = bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n")) + content = bytes.ReplaceAll(content, []byte("\r"), []byte("\n")) + return content +} + +func TestGraph(t *testing.T) { + t.Parallel() + + testFuncs := map[string]func(t *testing.T) *unit.Graph[testGraphEdge, *testGraphVertex]{ + "ForwardAndReverseEdges": func(t *testing.T) *unit.Graph[testGraphEdge, *testGraphVertex] { + graph := &unit.Graph[testGraphEdge, *testGraphVertex]{} + unit1 := &testGraphVertex{Name: "unit1"} + unit2 := &testGraphVertex{Name: "unit2"} + unit3 := &testGraphVertex{Name: "unit3"} + err := graph.AddEdge(unit1, unit2, testEdgeCompleted) + require.NoError(t, err) + err = graph.AddEdge(unit1, unit3, testEdgeStarted) + require.NoError(t, err) + + // Check for forward edge + vertices := graph.GetForwardAdjacentVertices(unit1) + require.Len(t, vertices, 2) + // Unit 1 depends on the completion of Unit2 + require.Contains(t, vertices, testEdge{ + From: unit1, + To: unit2, + Edge: testEdgeCompleted, + }) + // Unit 1 depends on the start of Unit3 + require.Contains(t, vertices, testEdge{ + From: unit1, + To: unit3, + Edge: testEdgeStarted, + }) + + // Check for reverse edges + unit2ReverseEdges := graph.GetReverseAdjacentVertices(unit2) + require.Len(t, unit2ReverseEdges, 1) + // Unit 2 must be completed before Unit 1 can start + require.Contains(t, unit2ReverseEdges, testEdge{ + From: unit1, + To: unit2, + Edge: testEdgeCompleted, + }) + + unit3ReverseEdges := graph.GetReverseAdjacentVertices(unit3) + require.Len(t, unit3ReverseEdges, 1) + // Unit 3 must be started before Unit 1 can complete + require.Contains(t, unit3ReverseEdges, 
testEdge{ + From: unit1, + To: unit3, + Edge: testEdgeStarted, + }) + + return graph + }, + "SelfReference": func(t *testing.T) *testGraph { + graph := &testGraph{} + unit1 := &testGraphVertex{Name: "unit1"} + err := graph.AddEdge(unit1, unit1, testEdgeCompleted) + require.Error(t, err) + require.ErrorContains(t, err, fmt.Sprintf("adding edge (%v -> %v) would create a cycle", unit1, unit1)) + + return graph + }, + "Cycle": func(t *testing.T) *testGraph { + graph := &testGraph{} + unit1 := &testGraphVertex{Name: "unit1"} + unit2 := &testGraphVertex{Name: "unit2"} + err := graph.AddEdge(unit1, unit2, testEdgeCompleted) + require.NoError(t, err) + err = graph.AddEdge(unit2, unit1, testEdgeStarted) + require.Error(t, err) + require.ErrorContains(t, err, fmt.Sprintf("adding edge (%v -> %v) would create a cycle", unit2, unit1)) + + return graph + }, + "MultipleDependenciesSameStatus": func(t *testing.T) *testGraph { + graph := &testGraph{} + unit1 := &testGraphVertex{Name: "unit1"} + unit2 := &testGraphVertex{Name: "unit2"} + unit3 := &testGraphVertex{Name: "unit3"} + unit4 := &testGraphVertex{Name: "unit4"} + + // Unit1 depends on completion of both unit2 and unit3 (same status type) + err := graph.AddEdge(unit1, unit2, testEdgeCompleted) + require.NoError(t, err) + err = graph.AddEdge(unit1, unit3, testEdgeCompleted) + require.NoError(t, err) + + // Unit1 also depends on starting of unit4 (different status type) + err = graph.AddEdge(unit1, unit4, testEdgeStarted) + require.NoError(t, err) + + // Check that unit1 has 3 forward dependencies + forwardEdges := graph.GetForwardAdjacentVertices(unit1) + require.Len(t, forwardEdges, 3) + + // Verify all expected dependencies exist + expectedDependencies := []testEdge{ + {From: unit1, To: unit2, Edge: testEdgeCompleted}, + {From: unit1, To: unit3, Edge: testEdgeCompleted}, + {From: unit1, To: unit4, Edge: testEdgeStarted}, + } + + for _, expected := range expectedDependencies { + require.Contains(t, forwardEdges, expected) + 
} + + // Check reverse dependencies + unit2ReverseEdges := graph.GetReverseAdjacentVertices(unit2) + require.Len(t, unit2ReverseEdges, 1) + require.Contains(t, unit2ReverseEdges, testEdge{ + From: unit1, To: unit2, Edge: testEdgeCompleted, + }) + + unit3ReverseEdges := graph.GetReverseAdjacentVertices(unit3) + require.Len(t, unit3ReverseEdges, 1) + require.Contains(t, unit3ReverseEdges, testEdge{ + From: unit1, To: unit3, Edge: testEdgeCompleted, + }) + + unit4ReverseEdges := graph.GetReverseAdjacentVertices(unit4) + require.Len(t, unit4ReverseEdges, 1) + require.Contains(t, unit4ReverseEdges, testEdge{ + From: unit1, To: unit4, Edge: testEdgeStarted, + }) + + return graph + }, + } + + for testName, testFunc := range testFuncs { + var graph *testGraph + t.Run(testName, func(t *testing.T) { + t.Parallel() + graph = testFunc(t) + assertDOTGraph(t, graph, testName) + }) + } +} + +func TestGraphThreadSafety(t *testing.T) { + t.Parallel() + + t.Run("ConcurrentReadWrite", func(t *testing.T) { + t.Parallel() + + graph := &testGraph{} + var wg sync.WaitGroup + const numWriters = 50 + const numReaders = 100 + const operationsPerWriter = 1000 + const operationsPerReader = 2000 + + barrier := make(chan struct{}) + // Launch writers + for i := 0; i < numWriters; i++ { + wg.Add(1) + go func(writerID int) { + defer wg.Done() + <-barrier + for j := 0; j < operationsPerWriter; j++ { + from := &testGraphVertex{Name: fmt.Sprintf("writer-%d-%d", writerID, j)} + to := &testGraphVertex{Name: fmt.Sprintf("writer-%d-%d", writerID, j+1)} + graph.AddEdge(from, to, testEdgeCompleted) + } + }(i) + } + + // Launch readers + readerResults := make([]struct { + panicked bool + readCount int + }, numReaders) + + for i := 0; i < numReaders; i++ { + wg.Add(1) + go func(readerID int) { + defer wg.Done() + <-barrier + defer func() { + if r := recover(); r != nil { + readerResults[readerID].panicked = true + } + }() + + readCount := 0 + for j := 0; j < operationsPerReader; j++ { + // Create a test 
vertex and read + testUnit := &testGraphVertex{Name: fmt.Sprintf("test-reader-%d-%d", readerID, j)} + forwardEdges := graph.GetForwardAdjacentVertices(testUnit) + reverseEdges := graph.GetReverseAdjacentVertices(testUnit) + + // Just verify no panics (results may be nil for non-existent vertices) + _ = forwardEdges + _ = reverseEdges + readCount++ + } + readerResults[readerID].readCount = readCount + }(i) + } + + close(barrier) + wg.Wait() + + // Verify no panics occurred in readers + for i, result := range readerResults { + require.False(t, result.panicked, "reader %d panicked", i) + require.Equal(t, operationsPerReader, result.readCount, "reader %d should have performed expected reads", i) + } + }) + + t.Run("ConcurrentCycleDetection", func(t *testing.T) { + t.Parallel() + + graph := &testGraph{} + + // Pre-create chain: Aβ†’Bβ†’Cβ†’D + unitA := &testGraphVertex{Name: "A"} + unitB := &testGraphVertex{Name: "B"} + unitC := &testGraphVertex{Name: "C"} + unitD := &testGraphVertex{Name: "D"} + + err := graph.AddEdge(unitA, unitB, testEdgeCompleted) + require.NoError(t, err) + err = graph.AddEdge(unitB, unitC, testEdgeCompleted) + require.NoError(t, err) + err = graph.AddEdge(unitC, unitD, testEdgeCompleted) + require.NoError(t, err) + + barrier := make(chan struct{}) + var wg sync.WaitGroup + const numGoroutines = 50 + cycleErrors := make([]error, numGoroutines) + + // Launch goroutines trying to add Dβ†’A (creates cycle) + for i := 0; i < numGoroutines; i++ { + wg.Add(1) + go func(goroutineID int) { + defer wg.Done() + <-barrier + err := graph.AddEdge(unitD, unitA, testEdgeCompleted) + cycleErrors[goroutineID] = err + }(i) + } + + close(barrier) + wg.Wait() + + // Verify all attempts correctly returned cycle error + for i, err := range cycleErrors { + require.Error(t, err, "goroutine %d should have detected cycle", i) + require.Contains(t, err.Error(), "would create a cycle") + } + + // Verify graph remains valid (original chain intact) + dot, err := 
graph.ToDOT("test") + require.NoError(t, err) + require.NotEmpty(t, dot) + }) + + t.Run("ConcurrentToDOT", func(t *testing.T) { + t.Parallel() + + graph := &testGraph{} + + // Pre-populate graph + for i := 0; i < 20; i++ { + from := &testGraphVertex{Name: fmt.Sprintf("dot-unit-%d", i)} + to := &testGraphVertex{Name: fmt.Sprintf("dot-unit-%d", i+1)} + err := graph.AddEdge(from, to, testEdgeCompleted) + require.NoError(t, err) + } + + barrier := make(chan struct{}) + var wg sync.WaitGroup + const numReaders = 100 + const numWriters = 20 + dotResults := make([]string, numReaders) + + // Launch readers calling ToDOT + dotErrors := make([]error, numReaders) + for i := 0; i < numReaders; i++ { + wg.Add(1) + go func(readerID int) { + defer wg.Done() + <-barrier + dot, err := graph.ToDOT(fmt.Sprintf("test-%d", readerID)) + dotErrors[readerID] = err + if err == nil { + dotResults[readerID] = dot + } + }(i) + } + + // Launch writers adding edges + for i := 0; i < numWriters; i++ { + wg.Add(1) + go func(writerID int) { + defer wg.Done() + <-barrier + from := &testGraphVertex{Name: fmt.Sprintf("writer-dot-%d", writerID)} + to := &testGraphVertex{Name: fmt.Sprintf("writer-dot-target-%d", writerID)} + graph.AddEdge(from, to, testEdgeCompleted) + }(i) + } + + close(barrier) + wg.Wait() + + // Verify no errors occurred during DOT generation + for i, err := range dotErrors { + require.NoError(t, err, "DOT generation error at index %d", i) + } + + // Verify all DOT results are valid + for i, dot := range dotResults { + require.NotEmpty(t, dot, "DOT result %d should not be empty", i) + } + }) +} + +func BenchmarkGraph_ConcurrentMixedOperations(b *testing.B) { + graph := &testGraph{} + var wg sync.WaitGroup + const numGoroutines = 200 + + b.ResetTimer() + for i := 0; i < b.N; i++ { + // Launch goroutines performing random operations + for j := 0; j < numGoroutines; j++ { + wg.Add(1) + go func(goroutineID int) { + defer wg.Done() + operationCount := 0 + + for operationCount < 50 { + 
operation := float32(randInt(100)) / 100.0 + + if operation < 0.6 { // 60% reads + // Read operation + testUnit := &testGraphVertex{Name: fmt.Sprintf("bench-read-%d-%d", goroutineID, operationCount)} + forwardEdges := graph.GetForwardAdjacentVertices(testUnit) + reverseEdges := graph.GetReverseAdjacentVertices(testUnit) + + // Just verify no panics (results may be nil for non-existent vertices) + _ = forwardEdges + _ = reverseEdges + } else { // 40% writes + // Write operation + from := &testGraphVertex{Name: fmt.Sprintf("bench-write-%d-%d", goroutineID, operationCount)} + to := &testGraphVertex{Name: fmt.Sprintf("bench-write-target-%d-%d", goroutineID, operationCount)} + graph.AddEdge(from, to, testEdgeCompleted) + } + + operationCount++ + } + }(j) + } + + wg.Wait() + } +} diff --git a/agent/unit/testdata/Cycle.golden b/agent/unit/testdata/Cycle.golden new file mode 100644 index 0000000000000..6fb842460101c --- /dev/null +++ b/agent/unit/testdata/Cycle.golden @@ -0,0 +1,8 @@ +strict digraph Cycle { + // Node definitions. + 1; + 2; + + // Edge definitions. + 1 -> 2; +} \ No newline at end of file diff --git a/agent/unit/testdata/ForwardAndReverseEdges.golden b/agent/unit/testdata/ForwardAndReverseEdges.golden new file mode 100644 index 0000000000000..36cf2218fbbc2 --- /dev/null +++ b/agent/unit/testdata/ForwardAndReverseEdges.golden @@ -0,0 +1,10 @@ +strict digraph ForwardAndReverseEdges { + // Node definitions. + 1; + 2; + 3; + + // Edge definitions. + 1 -> 2; + 1 -> 3; +} \ No newline at end of file diff --git a/agent/unit/testdata/MultipleDependenciesSameStatus.golden b/agent/unit/testdata/MultipleDependenciesSameStatus.golden new file mode 100644 index 0000000000000..af7cbb71e0e22 --- /dev/null +++ b/agent/unit/testdata/MultipleDependenciesSameStatus.golden @@ -0,0 +1,12 @@ +strict digraph MultipleDependenciesSameStatus { + // Node definitions. + 1; + 2; + 3; + 4; + + // Edge definitions. 
+ 1 -> 2; + 1 -> 3; + 1 -> 4; +} \ No newline at end of file diff --git a/agent/unit/testdata/SelfReference.golden b/agent/unit/testdata/SelfReference.golden new file mode 100644 index 0000000000000..d0d036d6fb66a --- /dev/null +++ b/agent/unit/testdata/SelfReference.golden @@ -0,0 +1,4 @@ +strict digraph SelfReference { + // Node definitions. + 1; +} \ No newline at end of file diff --git a/biome.jsonc b/biome.jsonc index ae81184cdca0c..42a920eeeaf77 100644 --- a/biome.jsonc +++ b/biome.jsonc @@ -6,10 +6,7 @@ "defaultBranch": "main" }, "files": { - "includes": [ - "**", - "!**/pnpm-lock.yaml" - ], + "includes": ["**", "!**/pnpm-lock.yaml"], "ignoreUnknown": true }, "linter": { @@ -48,13 +45,14 @@ "options": { "paths": { "@mui/material": "Use @mui/material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", - "@mui/icons-material": "Use @mui/icons-material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", "@mui/material/Avatar": "Use components/Avatar/Avatar instead.", "@mui/material/Alert": "Use components/Alert/Alert instead.", "@mui/material/Popover": "Use components/Popover/Popover instead.", "@mui/material/Typography": "Use native HTML elements instead. Eg: ,

,

, etc.", "@mui/material/Box": "Use a
instead.", + "@mui/material/Button": "Use a components/Button/Button instead.", "@mui/material/styles": "Import from @emotion/react instead.", + "@mui/material/Table*": "Import from components/Table/Table instead.", "lodash": "Use lodash/ instead." } } @@ -69,11 +67,7 @@ "noConsole": { "level": "error", "options": { - "allow": [ - "error", - "info", - "warn" - ] + "allow": ["error", "info", "warn"] } } }, @@ -82,5 +76,5 @@ } } }, - "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json" + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json" } diff --git a/cli/allowlistflag.go b/cli/allowlistflag.go new file mode 100644 index 0000000000000..208bf24b3ed30 --- /dev/null +++ b/cli/allowlistflag.go @@ -0,0 +1,78 @@ +package cli + +import ( + "encoding/csv" + "strings" + + "github.com/spf13/pflag" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/codersdk" +) + +var ( + _ pflag.SliceValue = &AllowListFlag{} + _ pflag.Value = &AllowListFlag{} +) + +// AllowListFlag implements pflag.SliceValue for codersdk.APIAllowListTarget entries. 
+type AllowListFlag []codersdk.APIAllowListTarget + +func AllowListFlagOf(al *[]codersdk.APIAllowListTarget) *AllowListFlag { + return (*AllowListFlag)(al) +} + +func (a AllowListFlag) String() string { + return strings.Join(a.GetSlice(), ",") +} + +func (a AllowListFlag) Value() []codersdk.APIAllowListTarget { + return []codersdk.APIAllowListTarget(a) +} + +func (AllowListFlag) Type() string { return "allow-list" } + +func (a *AllowListFlag) Set(set string) error { + values, err := csv.NewReader(strings.NewReader(set)).Read() + if err != nil { + return xerrors.Errorf("parse allow list entries as csv: %w", err) + } + for _, v := range values { + if err := a.Append(v); err != nil { + return err + } + } + return nil +} + +func (a *AllowListFlag) Append(value string) error { + value = strings.TrimSpace(value) + if value == "" { + return xerrors.New("allow list entry cannot be empty") + } + var target codersdk.APIAllowListTarget + if err := target.UnmarshalText([]byte(value)); err != nil { + return err + } + + *a = append(*a, target) + return nil +} + +func (a *AllowListFlag) Replace(items []string) error { + *a = []codersdk.APIAllowListTarget{} + for _, item := range items { + if err := a.Append(item); err != nil { + return err + } + } + return nil +} + +func (a *AllowListFlag) GetSlice() []string { + out := make([]string, len(*a)) + for i, entry := range *a { + out[i] = entry.String() + } + return out +} diff --git a/cli/cliui/agent.go b/cli/cliui/agent.go index 3bb6fee7be769..b6262bdf631fe 100644 --- a/cli/cliui/agent.go +++ b/cli/cliui/agent.go @@ -53,6 +53,9 @@ func Agent(ctx context.Context, writer io.Writer, agentID uuid.UUID, opts AgentO t := time.NewTimer(0) defer t.Stop() + startTime := time.Now() + baseInterval := opts.FetchInterval + for { select { case <-ctx.Done(): @@ -68,7 +71,11 @@ func Agent(ctx context.Context, writer io.Writer, agentID uuid.UUID, opts AgentO return } fetchedAgent <- fetchAgent{agent: agent} - t.Reset(opts.FetchInterval) + + // Adjust 
the interval based on how long we've been waiting. + elapsed := time.Since(startTime) + currentInterval := GetProgressiveInterval(baseInterval, elapsed) + t.Reset(currentInterval) } } }() @@ -293,6 +300,24 @@ func safeDuration(sw *stageWriter, a, b *time.Time) time.Duration { return a.Sub(*b) } +// GetProgressiveInterval returns an interval that increases over time. +// The interval starts at baseInterval and increases to +// a maximum of baseInterval * 16 over time. +func GetProgressiveInterval(baseInterval time.Duration, elapsed time.Duration) time.Duration { + switch { + case elapsed < 60*time.Second: + return baseInterval // 500ms for first 60 seconds + case elapsed < 2*time.Minute: + return baseInterval * 2 // 1s for next 1 minute + case elapsed < 5*time.Minute: + return baseInterval * 4 // 2s for next 3 minutes + case elapsed < 10*time.Minute: + return baseInterval * 8 // 4s for next 5 minutes + default: + return baseInterval * 16 // 8s after 10 minutes + } +} + type closeFunc func() error func (c closeFunc) Close() error { diff --git a/cli/cliui/agent_test.go b/cli/cliui/agent_test.go index 7c3b71a204c3d..7e5ea692f7821 100644 --- a/cli/cliui/agent_test.go +++ b/cli/cliui/agent_test.go @@ -866,3 +866,31 @@ func TestConnDiagnostics(t *testing.T) { }) } } + +func TestGetProgressiveInterval(t *testing.T) { + t.Parallel() + + baseInterval := 500 * time.Millisecond + + testCases := []struct { + name string + elapsed time.Duration + expected time.Duration + }{ + {"first_minute", 30 * time.Second, baseInterval}, + {"second_minute", 90 * time.Second, baseInterval * 2}, + {"third_to_fifth_minute", 3 * time.Minute, baseInterval * 4}, + {"sixth_to_tenth_minute", 7 * time.Minute, baseInterval * 8}, + {"after_ten_minutes", 15 * time.Minute, baseInterval * 16}, + {"boundary_first_minute", 59 * time.Second, baseInterval}, + {"boundary_second_minute", 61 * time.Second, baseInterval * 2}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + 
t.Parallel() + result := cliui.GetProgressiveInterval(baseInterval, tc.elapsed) + require.Equal(t, tc.expected, result) + }) + } +} diff --git a/cli/cliui/table.go b/cli/cliui/table.go index 478bbe2260f91..c82854802224d 100644 --- a/cli/cliui/table.go +++ b/cli/cliui/table.go @@ -296,22 +296,23 @@ func renderTable(out any, sort string, headers table.Row, filterColumns []string // returned. If the table tag is malformed, an error is returned. // // The returned name is transformed from "snake_case" to "normal text". -func parseTableStructTag(field reflect.StructField) (name string, defaultSort, noSortOpt, recursive, skipParentName bool, err error) { +func parseTableStructTag(field reflect.StructField) (name string, defaultSort, noSortOpt, recursive, skipParentName, emptyNil bool, err error) { tags, err := structtag.Parse(string(field.Tag)) if err != nil { - return "", false, false, false, false, xerrors.Errorf("parse struct field tag %q: %w", string(field.Tag), err) + return "", false, false, false, false, false, xerrors.Errorf("parse struct field tag %q: %w", string(field.Tag), err) } tag, err := tags.Get("table") if err != nil || tag.Name == "-" { // tags.Get only returns an error if the tag is not found. - return "", false, false, false, false, nil + return "", false, false, false, false, false, nil } defaultSortOpt := false noSortOpt = false recursiveOpt := false skipParentNameOpt := false + emptyNilOpt := false for _, opt := range tag.Options { switch opt { case "default_sort": @@ -326,12 +327,14 @@ func parseTableStructTag(field reflect.StructField) (name string, defaultSort, n // make sure the child name is unique across all nested structs in the parent. 
recursiveOpt = true skipParentNameOpt = true + case "empty_nil": + emptyNilOpt = true default: - return "", false, false, false, false, xerrors.Errorf("unknown option %q in struct field tag", opt) + return "", false, false, false, false, false, xerrors.Errorf("unknown option %q in struct field tag", opt) } } - return strings.ReplaceAll(tag.Name, "_", " "), defaultSortOpt, noSortOpt, recursiveOpt, skipParentNameOpt, nil + return strings.ReplaceAll(tag.Name, "_", " "), defaultSortOpt, noSortOpt, recursiveOpt, skipParentNameOpt, emptyNilOpt, nil } func isStructOrStructPointer(t reflect.Type) bool { @@ -358,7 +361,7 @@ func typeToTableHeaders(t reflect.Type, requireDefault bool) ([]string, string, noSortOpt := false for i := 0; i < t.NumField(); i++ { field := t.Field(i) - name, defaultSort, noSort, recursive, skip, err := parseTableStructTag(field) + name, defaultSort, noSort, recursive, skip, _, err := parseTableStructTag(field) if err != nil { return nil, "", xerrors.Errorf("parse struct tags for field %q in type %q: %w", field.Name, t.String(), err) } @@ -435,7 +438,7 @@ func valueToTableMap(val reflect.Value) (map[string]any, error) { for i := 0; i < val.NumField(); i++ { field := val.Type().Field(i) fieldVal := val.Field(i) - name, _, _, recursive, skip, err := parseTableStructTag(field) + name, _, _, recursive, skip, emptyNil, err := parseTableStructTag(field) if err != nil { return nil, xerrors.Errorf("parse struct tags for field %q in type %T: %w", field.Name, val, err) } @@ -443,8 +446,14 @@ func valueToTableMap(val reflect.Value) (map[string]any, error) { continue } - // Recurse if it's a struct. fieldType := field.Type + + // If empty_nil is set and this is a nil pointer, use a zero value. + if emptyNil && fieldVal.Kind() == reflect.Pointer && fieldVal.IsNil() { + fieldVal = reflect.New(fieldType.Elem()) + } + + // Recurse if it's a struct. 
if recursive { if !isStructOrStructPointer(fieldType) { return nil, xerrors.Errorf("field %q in type %q is marked as recursive but does not contain a struct or a pointer to a struct", field.Name, fieldType.String()) @@ -467,7 +476,7 @@ func valueToTableMap(val reflect.Value) (map[string]any, error) { } // Otherwise, we just use the field value. - row[name] = val.Field(i).Interface() + row[name] = fieldVal.Interface() } return row, nil diff --git a/cli/cliui/table_test.go b/cli/cliui/table_test.go index 4e82707f3fec8..424b9c9a7d6f3 100644 --- a/cli/cliui/table_test.go +++ b/cli/cliui/table_test.go @@ -400,6 +400,78 @@ foo 10 [a, b, c] foo1 11 foo2 12 fo }) }) }) + + t.Run("EmptyNil", func(t *testing.T) { + t.Parallel() + + type emptyNilTest struct { + Name string `table:"name,default_sort"` + EmptyOnNil *string `table:"empty_on_nil,empty_nil"` + NormalBehavior *string `table:"normal_behavior"` + } + + value := "value" + in := []emptyNilTest{ + { + Name: "has_value", + EmptyOnNil: &value, + NormalBehavior: &value, + }, + { + Name: "has_nil", + EmptyOnNil: nil, + NormalBehavior: nil, + }, + } + + expected := ` +NAME EMPTY ON NIL NORMAL BEHAVIOR +has_nil +has_value value value + ` + + out, err := cliui.DisplayTable(in, "", nil) + log.Println("rendered table:\n" + out) + require.NoError(t, err) + compareTables(t, expected, out) + }) + + t.Run("EmptyNilWithRecursiveInline", func(t *testing.T) { + t.Parallel() + + type nestedData struct { + Name string `table:"name"` + } + + type inlineTest struct { + Nested *nestedData `table:"ignored,recursive_inline,empty_nil"` + Count int `table:"count,default_sort"` + } + + in := []inlineTest{ + { + Nested: &nestedData{ + Name: "alice", + }, + Count: 1, + }, + { + Nested: nil, + Count: 2, + }, + } + + expected := ` +NAME COUNT +alice 1 + 2 + ` + + out, err := cliui.DisplayTable(in, "", nil) + log.Println("rendered table:\n" + out) + require.NoError(t, err) + compareTables(t, expected, out) + }) } // compareTables normalizes the 
incoming table lines diff --git a/cli/delete_test.go b/cli/delete_test.go index 2e550d74849ab..271f5342ea91c 100644 --- a/cli/delete_test.go +++ b/cli/delete_test.go @@ -185,9 +185,6 @@ func TestDelete(t *testing.T) { t.Run("WarnNoProvisioners", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -228,9 +225,6 @@ func TestDelete(t *testing.T) { t.Run("Prebuilt workspace delete permissions", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } // Setup db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) diff --git a/cli/exp_scaletest.go b/cli/exp_scaletest.go index 4a8852cf8a4fc..559ffbebd165d 100644 --- a/cli/exp_scaletest.go +++ b/cli/exp_scaletest.go @@ -33,6 +33,7 @@ import ( "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/workspacesdk" "github.com/coder/coder/v2/scaletest/agentconn" + "github.com/coder/coder/v2/scaletest/autostart" "github.com/coder/coder/v2/scaletest/createusers" "github.com/coder/coder/v2/scaletest/createworkspaces" "github.com/coder/coder/v2/scaletest/dashboard" @@ -57,9 +58,13 @@ func (r *RootCmd) scaletestCmd() *serpent.Command { Children: []*serpent.Command{ r.scaletestCleanup(), r.scaletestDashboard(), + r.scaletestDynamicParameters(), r.scaletestCreateWorkspaces(), r.scaletestWorkspaceUpdates(), r.scaletestWorkspaceTraffic(), + r.scaletestAutostart(), + r.scaletestNotifications(), + r.scaletestSMTP(), }, } @@ -1682,6 +1687,239 @@ func (r *RootCmd) scaletestDashboard() *serpent.Command { return cmd } +const ( + autostartTestName = "autostart" +) + +func (r *RootCmd) scaletestAutostart() *serpent.Command { + var ( + workspaceCount int64 + workspaceJobTimeout time.Duration + autostartDelay time.Duration + autostartTimeout time.Duration + template string 
+ noCleanup bool + + parameterFlags workspaceParameterFlags + tracingFlags = &scaletestTracingFlags{} + timeoutStrategy = &timeoutFlags{} + cleanupStrategy = newScaletestCleanupStrategy() + output = &scaletestOutputFlags{} + prometheusFlags = &scaletestPrometheusFlags{} + ) + + cmd := &serpent.Command{ + Use: "autostart", + Short: "Replicate a thundering herd of autostarting workspaces", + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + client, err := r.InitClient(inv) + if err != nil { + return err + } + + notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...) // Checked later. + defer stop() + ctx = notifyCtx + + me, err := requireAdmin(ctx, client) + if err != nil { + return err + } + + client.HTTPClient = &http.Client{ + Transport: &codersdk.HeaderTransport{ + Transport: http.DefaultTransport, + Header: map[string][]string{ + codersdk.BypassRatelimitHeader: {"true"}, + }, + }, + } + + if workspaceCount <= 0 { + return xerrors.Errorf("--workspace-count must be greater than zero") + } + + outputs, err := output.parse() + if err != nil { + return xerrors.Errorf("could not parse --output flags") + } + + tpl, err := parseTemplate(ctx, client, me.OrganizationIDs, template) + if err != nil { + return xerrors.Errorf("parse template: %w", err) + } + + cliRichParameters, err := asWorkspaceBuildParameters(parameterFlags.richParameters) + if err != nil { + return xerrors.Errorf("can't parse given parameter values: %w", err) + } + + richParameters, err := prepWorkspaceBuild(inv, client, prepWorkspaceBuildArgs{ + Action: WorkspaceCreate, + TemplateVersionID: tpl.ActiveVersionID, + + RichParameterFile: parameterFlags.richParameterFile, + RichParameters: cliRichParameters, + }) + if err != nil { + return xerrors.Errorf("prepare build: %w", err) + } + + tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx) + if err != nil { + return xerrors.Errorf("create tracer provider: %w", err) + } + tracer := 
tracerProvider.Tracer(scaletestTracerName) + + reg := prometheus.NewRegistry() + metrics := autostart.NewMetrics(reg) + + setupBarrier := new(sync.WaitGroup) + setupBarrier.Add(int(workspaceCount)) + + th := harness.NewTestHarness(timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), cleanupStrategy.toStrategy()) + for i := range workspaceCount { + id := strconv.Itoa(int(i)) + config := autostart.Config{ + User: createusers.Config{ + OrganizationID: me.OrganizationIDs[0], + }, + Workspace: workspacebuild.Config{ + OrganizationID: me.OrganizationIDs[0], + Request: codersdk.CreateWorkspaceRequest{ + TemplateID: tpl.ID, + RichParameterValues: richParameters, + }, + }, + WorkspaceJobTimeout: workspaceJobTimeout, + AutostartDelay: autostartDelay, + AutostartTimeout: autostartTimeout, + Metrics: metrics, + SetupBarrier: setupBarrier, + } + if err := config.Validate(); err != nil { + return xerrors.Errorf("validate config: %w", err) + } + var runner harness.Runnable = autostart.NewRunner(client, config) + if tracingEnabled { + runner = &runnableTraceWrapper{ + tracer: tracer, + spanName: fmt.Sprintf("%s/%s", autostartTestName, id), + runner: runner, + } + } + th.AddRun(autostartTestName, id, runner) + } + + logger := inv.Logger + prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus") + defer prometheusSrvClose() + + defer func() { + _, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...") + if err := closeTracing(ctx); err != nil { + _, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err) + } + // Wait for prometheus metrics to be scraped + _, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait) + <-time.After(prometheusFlags.Wait) + }() + + _, _ = fmt.Fprintln(inv.Stderr, "Running autostart load test...") + testCtx, testCancel := timeoutStrategy.toContext(ctx) + defer testCancel() + err = th.Run(testCtx) + if 
err != nil { + return xerrors.Errorf("run test harness (harness failure, not a test failure): %w", err) + } + + // If the command was interrupted, skip stats. + if notifyCtx.Err() != nil { + return notifyCtx.Err() + } + + res := th.Results() + for _, o := range outputs { + err = o.write(res, inv.Stdout) + if err != nil { + return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err) + } + } + + if !noCleanup { + _, _ = fmt.Fprintln(inv.Stderr, "\nCleaning up...") + cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx) + defer cleanupCancel() + err = th.Cleanup(cleanupCtx) + if err != nil { + return xerrors.Errorf("cleanup tests: %w", err) + } + } + + if res.TotalFail > 0 { + return xerrors.New("load test failed, see above for more details") + } + + return nil + }, + } + + cmd.Options = serpent.OptionSet{ + { + Flag: "workspace-count", + FlagShorthand: "c", + Env: "CODER_SCALETEST_WORKSPACE_COUNT", + Description: "Required: Total number of workspaces to create.", + Value: serpent.Int64Of(&workspaceCount), + Required: true, + }, + { + Flag: "workspace-job-timeout", + Env: "CODER_SCALETEST_WORKSPACE_JOB_TIMEOUT", + Default: "5m", + Description: "Timeout for workspace jobs (e.g. 
build, start).", + Value: serpent.DurationOf(&workspaceJobTimeout), + }, + { + Flag: "autostart-delay", + Env: "CODER_SCALETEST_AUTOSTART_DELAY", + Default: "2m", + Description: "How long after all the workspaces have been stopped to schedule them to be started again.", + Value: serpent.DurationOf(&autostartDelay), + }, + { + Flag: "autostart-timeout", + Env: "CODER_SCALETEST_AUTOSTART_TIMEOUT", + Default: "5m", + Description: "Timeout for the autostart build to be initiated after the scheduled start time.", + Value: serpent.DurationOf(&autostartTimeout), + }, + { + Flag: "template", + FlagShorthand: "t", + Env: "CODER_SCALETEST_TEMPLATE", + Description: "Required: Name or ID of the template to use for workspaces.", + Value: serpent.StringOf(&template), + Required: true, + }, + { + Flag: "no-cleanup", + Env: "CODER_SCALETEST_NO_CLEANUP", + Description: "Do not clean up resources after the test completes.", + Value: serpent.BoolOf(&noCleanup), + }, + } + + cmd.Options = append(cmd.Options, parameterFlags.cliParameters()...) 
+ tracingFlags.attach(&cmd.Options) + timeoutStrategy.attach(&cmd.Options) + cleanupStrategy.attach(&cmd.Options) + output.attach(&cmd.Options) + prometheusFlags.attach(&cmd.Options) + return cmd +} + type runnableTraceWrapper struct { tracer trace.Tracer spanName string @@ -1691,8 +1929,9 @@ type runnableTraceWrapper struct { } var ( - _ harness.Runnable = &runnableTraceWrapper{} - _ harness.Cleanable = &runnableTraceWrapper{} + _ harness.Runnable = &runnableTraceWrapper{} + _ harness.Cleanable = &runnableTraceWrapper{} + _ harness.Collectable = &runnableTraceWrapper{} ) func (r *runnableTraceWrapper) Run(ctx context.Context, id string, logs io.Writer) error { @@ -1734,6 +1973,14 @@ func (r *runnableTraceWrapper) Cleanup(ctx context.Context, id string, logs io.W return c.Cleanup(ctx, id, logs) } +func (r *runnableTraceWrapper) GetMetrics() map[string]any { + c, ok := r.runner.(harness.Collectable) + if !ok { + return nil + } + return c.GetMetrics() +} + func getScaletestWorkspaces(ctx context.Context, client *codersdk.Client, owner, template string) ([]codersdk.Workspace, int, error) { var ( pageNumber = 0 diff --git a/cli/exp_scaletest_dynamicparameters.go b/cli/exp_scaletest_dynamicparameters.go new file mode 100644 index 0000000000000..31b6766ac6acf --- /dev/null +++ b/cli/exp_scaletest_dynamicparameters.go @@ -0,0 +1,181 @@ +//go:build !slim + +package cli + +import ( + "fmt" + "net/http" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/serpent" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/dynamicparameters" + "github.com/coder/coder/v2/scaletest/harness" +) + +const ( + dynamicParametersTestName = "dynamic-parameters" +) + +func (r *RootCmd) scaletestDynamicParameters() *serpent.Command { + var ( + templateName string + provisionerTags []string + numEvals 
int64 + tracingFlags = &scaletestTracingFlags{} + prometheusFlags = &scaletestPrometheusFlags{} + // This test requires unlimited concurrency + timeoutStrategy = &timeoutFlags{} + ) + orgContext := NewOrganizationContext() + output := &scaletestOutputFlags{} + + cmd := &serpent.Command{ + Use: "dynamic-parameters", + Short: "Generates load on the Coder server evaluating dynamic parameters", + Long: `It is recommended that all rate limits are disabled on the server before running this scaletest. This test generates many login events which will be rate limited against the (most likely single) IP.`, + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + + outputs, err := output.parse() + if err != nil { + return xerrors.Errorf("could not parse --output flags") + } + + client, err := r.InitClient(inv) + if err != nil { + return err + } + if templateName == "" { + return xerrors.Errorf("template cannot be empty") + } + + tags, err := ParseProvisionerTags(provisionerTags) + if err != nil { + return err + } + + org, err := orgContext.Selected(inv, client) + if err != nil { + return err + } + + _, err = requireAdmin(ctx, client) + if err != nil { + return err + } + + client.HTTPClient = &http.Client{ + Transport: &codersdk.HeaderTransport{ + Transport: http.DefaultTransport, + Header: map[string][]string{ + codersdk.BypassRatelimitHeader: {"true"}, + }, + }, + } + + reg := prometheus.NewRegistry() + metrics := dynamicparameters.NewMetrics(reg, "concurrent_evaluations") + + logger := slog.Make(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) + prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus") + defer prometheusSrvClose() + + tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx) + if err != nil { + return xerrors.Errorf("create tracer provider: %w", err) + } + defer func() { + // Allow time for traces to flush even if command context is + // 
canceled. This is a no-op if tracing is not enabled. + _, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...") + if err := closeTracing(ctx); err != nil { + _, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err) + } + // Wait for prometheus metrics to be scraped + _, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait) + <-time.After(prometheusFlags.Wait) + }() + tracer := tracerProvider.Tracer(scaletestTracerName) + + partitions, err := dynamicparameters.SetupPartitions(ctx, client, org.ID, templateName, tags, numEvals, logger) + if err != nil { + return xerrors.Errorf("setup dynamic parameters partitions: %w", err) + } + + th := harness.NewTestHarness( + timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), + // there is no cleanup since it's just a connection that we sever. + nil) + + for i, part := range partitions { + for j := range part.ConcurrentEvaluations { + cfg := dynamicparameters.Config{ + TemplateVersion: part.TemplateVersion.ID, + Metrics: metrics, + MetricLabelValues: []string{fmt.Sprintf("%d", part.ConcurrentEvaluations)}, + } + var runner harness.Runnable = dynamicparameters.NewRunner(client, cfg) + if tracingEnabled { + runner = &runnableTraceWrapper{ + tracer: tracer, + spanName: fmt.Sprintf("%s/%d/%d", dynamicParametersTestName, i, j), + runner: runner, + } + } + th.AddRun(dynamicParametersTestName, fmt.Sprintf("%d/%d", j, i), runner) + } + } + + testCtx, testCancel := timeoutStrategy.toContext(ctx) + defer testCancel() + err = th.Run(testCtx) + if err != nil { + return xerrors.Errorf("run test harness: %w", err) + } + + res := th.Results() + for _, o := range outputs { + err = o.write(res, inv.Stdout) + if err != nil { + return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err) + } + } + + return nil + }, + } + + cmd.Options = serpent.OptionSet{ + { + Flag: "template", + Description: "Name of the template to use. 
If it does not exist, it will be created.", + Default: "scaletest-dynamic-parameters", + Value: serpent.StringOf(&templateName), + }, + { + Flag: "concurrent-evaluations", + Description: "Number of concurrent dynamic parameter evaluations to perform.", + Default: "100", + Value: serpent.Int64Of(&numEvals), + }, + { + Flag: "provisioner-tag", + Description: "Specify a set of tags to target provisioner daemons.", + Value: serpent.StringArrayOf(&provisionerTags), + }, + } + orgContext.AttachOptions(cmd) + output.attach(&cmd.Options) + tracingFlags.attach(&cmd.Options) + prometheusFlags.attach(&cmd.Options) + timeoutStrategy.attach(&cmd.Options) + return cmd +} diff --git a/cli/exp_scaletest_notifications.go b/cli/exp_scaletest_notifications.go new file mode 100644 index 0000000000000..1ea47858933f1 --- /dev/null +++ b/cli/exp_scaletest_notifications.go @@ -0,0 +1,447 @@ +//go:build !slim + +package cli + +import ( + "context" + "fmt" + "net/http" + "os/signal" + "strconv" + "strings" + "sync" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "golang.org/x/xerrors" + + "cdr.dev/slog" + + notificationsLib "github.com/coder/coder/v2/coderd/notifications" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/notifications" + "github.com/coder/serpent" +) + +func (r *RootCmd) scaletestNotifications() *serpent.Command { + var ( + userCount int64 + ownerUserPercentage float64 + notificationTimeout time.Duration + dialTimeout time.Duration + noCleanup bool + smtpAPIURL string + + tracingFlags = &scaletestTracingFlags{} + + // This test requires unlimited concurrency. 
+ timeoutStrategy = &timeoutFlags{} + cleanupStrategy = newScaletestCleanupStrategy() + output = &scaletestOutputFlags{} + prometheusFlags = &scaletestPrometheusFlags{} + ) + + cmd := &serpent.Command{ + Use: "notifications", + Short: "Simulate notification delivery by creating many users listening to notifications.", + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + client, err := r.InitClient(inv) + if err != nil { + return err + } + + notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...) + defer stop() + ctx = notifyCtx + + me, err := requireAdmin(ctx, client) + if err != nil { + return err + } + + client.HTTPClient = &http.Client{ + Transport: &codersdk.HeaderTransport{ + Transport: http.DefaultTransport, + Header: map[string][]string{ + codersdk.BypassRatelimitHeader: {"true"}, + }, + }, + } + + if userCount <= 0 { + return xerrors.Errorf("--user-count must be greater than 0") + } + + if ownerUserPercentage < 0 || ownerUserPercentage > 100 { + return xerrors.Errorf("--owner-user-percentage must be between 0 and 100") + } + + if smtpAPIURL != "" && !strings.HasPrefix(smtpAPIURL, "http://") && !strings.HasPrefix(smtpAPIURL, "https://") { + return xerrors.Errorf("--smtp-api-url must start with http:// or https://") + } + + ownerUserCount := int64(float64(userCount) * ownerUserPercentage / 100) + if ownerUserCount == 0 && ownerUserPercentage > 0 { + ownerUserCount = 1 + } + regularUserCount := userCount - ownerUserCount + + _, _ = fmt.Fprintf(inv.Stderr, "Distribution plan:\n") + _, _ = fmt.Fprintf(inv.Stderr, " Total users: %d\n", userCount) + _, _ = fmt.Fprintf(inv.Stderr, " Owner users: %d (%.1f%%)\n", ownerUserCount, ownerUserPercentage) + _, _ = fmt.Fprintf(inv.Stderr, " Regular users: %d (%.1f%%)\n", regularUserCount, 100.0-ownerUserPercentage) + + outputs, err := output.parse() + if err != nil { + return xerrors.Errorf("could not parse --output flags") + } + + tracerProvider, closeTracing, tracingEnabled, err := 
tracingFlags.provider(ctx) + if err != nil { + return xerrors.Errorf("create tracer provider: %w", err) + } + tracer := tracerProvider.Tracer(scaletestTracerName) + + reg := prometheus.NewRegistry() + metrics := notifications.NewMetrics(reg) + + logger := inv.Logger + prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus") + defer prometheusSrvClose() + + defer func() { + _, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...") + if err := closeTracing(ctx); err != nil { + _, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err) + } + // Wait for prometheus metrics to be scraped + _, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait) + <-time.After(prometheusFlags.Wait) + }() + + _, _ = fmt.Fprintln(inv.Stderr, "Creating users...") + + dialBarrier := &sync.WaitGroup{} + ownerWatchBarrier := &sync.WaitGroup{} + dialBarrier.Add(int(userCount)) + ownerWatchBarrier.Add(int(ownerUserCount)) + + expectedNotificationIDs := map[uuid.UUID]struct{}{ + notificationsLib.TemplateUserAccountCreated: {}, + notificationsLib.TemplateUserAccountDeleted: {}, + } + + triggerTimes := make(map[uuid.UUID]chan time.Time, len(expectedNotificationIDs)) + for id := range expectedNotificationIDs { + triggerTimes[id] = make(chan time.Time, 1) + } + + configs := make([]notifications.Config, 0, userCount) + for range ownerUserCount { + config := notifications.Config{ + User: createusers.Config{ + OrganizationID: me.OrganizationIDs[0], + }, + Roles: []string{codersdk.RoleOwner}, + NotificationTimeout: notificationTimeout, + DialTimeout: dialTimeout, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: ownerWatchBarrier, + ExpectedNotificationsIDs: expectedNotificationIDs, + Metrics: metrics, + SMTPApiURL: smtpAPIURL, + } + if err := config.Validate(); err != nil { + return xerrors.Errorf("validate config: %w", err) + } + configs = append(configs, 
config) + } + for range regularUserCount { + config := notifications.Config{ + User: createusers.Config{ + OrganizationID: me.OrganizationIDs[0], + }, + Roles: []string{}, + NotificationTimeout: notificationTimeout, + DialTimeout: dialTimeout, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: ownerWatchBarrier, + Metrics: metrics, + SMTPApiURL: smtpAPIURL, + } + if err := config.Validate(); err != nil { + return xerrors.Errorf("validate config: %w", err) + } + configs = append(configs, config) + } + + go triggerUserNotifications( + ctx, + logger, + client, + me.OrganizationIDs[0], + dialBarrier, + dialTimeout, + triggerTimes, + ) + + th := harness.NewTestHarness(timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), cleanupStrategy.toStrategy()) + + for i, config := range configs { + id := strconv.Itoa(i) + name := fmt.Sprintf("notifications-%s", id) + var runner harness.Runnable = notifications.NewRunner(client, config) + if tracingEnabled { + runner = &runnableTraceWrapper{ + tracer: tracer, + spanName: name, + runner: runner, + } + } + + th.AddRun(name, id, runner) + } + + _, _ = fmt.Fprintln(inv.Stderr, "Running notification delivery scaletest...") + testCtx, testCancel := timeoutStrategy.toContext(ctx) + defer testCancel() + err = th.Run(testCtx) + if err != nil { + return xerrors.Errorf("run test harness (harness failure, not a test failure): %w", err) + } + + // If the command was interrupted, skip stats. 
+ if notifyCtx.Err() != nil { + return notifyCtx.Err() + } + + res := th.Results() + + if err := computeNotificationLatencies(ctx, logger, triggerTimes, res, metrics); err != nil { + return xerrors.Errorf("compute notification latencies: %w", err) + } + + for _, o := range outputs { + err = o.write(res, inv.Stdout) + if err != nil { + return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err) + } + } + + if !noCleanup { + _, _ = fmt.Fprintln(inv.Stderr, "\nCleaning up...") + cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx) + defer cleanupCancel() + err = th.Cleanup(cleanupCtx) + if err != nil { + return xerrors.Errorf("cleanup tests: %w", err) + } + } + + if res.TotalFail > 0 { + return xerrors.New("load test failed, see above for more details") + } + + return nil + }, + } + + cmd.Options = serpent.OptionSet{ + { + Flag: "user-count", + FlagShorthand: "c", + Env: "CODER_SCALETEST_NOTIFICATION_USER_COUNT", + Description: "Required: Total number of users to create.", + Value: serpent.Int64Of(&userCount), + Required: true, + }, + { + Flag: "owner-user-percentage", + Env: "CODER_SCALETEST_NOTIFICATION_OWNER_USER_PERCENTAGE", + Default: "20.0", + Description: "Percentage of users to assign Owner role to (0-100).", + Value: serpent.Float64Of(&ownerUserPercentage), + }, + { + Flag: "notification-timeout", + Env: "CODER_SCALETEST_NOTIFICATION_TIMEOUT", + Default: "5m", + Description: "How long to wait for notifications after triggering.", + Value: serpent.DurationOf(¬ificationTimeout), + }, + { + Flag: "dial-timeout", + Env: "CODER_SCALETEST_DIAL_TIMEOUT", + Default: "2m", + Description: "Timeout for dialing the notification websocket endpoint.", + Value: serpent.DurationOf(&dialTimeout), + }, + { + Flag: "no-cleanup", + Env: "CODER_SCALETEST_NO_CLEANUP", + Description: "Do not clean up resources after the test completes.", + Value: serpent.BoolOf(&noCleanup), + }, + { + Flag: "smtp-api-url", + Env: "CODER_SCALETEST_SMTP_API_URL", + Description: 
"SMTP mock HTTP API address.", + Value: serpent.StringOf(&smtpAPIURL), + }, + } + + tracingFlags.attach(&cmd.Options) + timeoutStrategy.attach(&cmd.Options) + cleanupStrategy.attach(&cmd.Options) + output.attach(&cmd.Options) + prometheusFlags.attach(&cmd.Options) + return cmd +} + +func computeNotificationLatencies( + ctx context.Context, + logger slog.Logger, + expectedNotifications map[uuid.UUID]chan time.Time, + results harness.Results, + metrics *notifications.Metrics, +) error { + triggerTimes := make(map[uuid.UUID]time.Time) + for notificationID, triggerTimeChan := range expectedNotifications { + select { + case triggerTime := <-triggerTimeChan: + triggerTimes[notificationID] = triggerTime + logger.Info(ctx, "received trigger time", + slog.F("notification_id", notificationID), + slog.F("trigger_time", triggerTime)) + default: + logger.Warn(ctx, "no trigger time received for notification", + slog.F("notification_id", notificationID)) + } + } + + if len(triggerTimes) == 0 { + logger.Warn(ctx, "no trigger times available, skipping latency computation") + return nil + } + + var totalLatencies int + for runID, runResult := range results.Runs { + if runResult.Error != nil { + logger.Debug(ctx, "skipping failed run for latency computation", + slog.F("run_id", runID)) + continue + } + + if runResult.Metrics == nil { + continue + } + + // Process websocket notifications. 
+ if wsReceiptTimes, ok := runResult.Metrics[notifications.WebsocketNotificationReceiptTimeMetric].(map[uuid.UUID]time.Time); ok { + for notificationID, receiptTime := range wsReceiptTimes { + if triggerTime, ok := triggerTimes[notificationID]; ok { + latency := receiptTime.Sub(triggerTime) + metrics.RecordLatency(latency, notificationID.String(), notifications.NotificationTypeWebsocket) + totalLatencies++ + logger.Debug(ctx, "computed websocket latency", + slog.F("run_id", runID), + slog.F("notification_id", notificationID), + slog.F("latency", latency)) + } + } + } + + // Process SMTP notifications + if smtpReceiptTimes, ok := runResult.Metrics[notifications.SMTPNotificationReceiptTimeMetric].(map[uuid.UUID]time.Time); ok { + for notificationID, receiptTime := range smtpReceiptTimes { + if triggerTime, ok := triggerTimes[notificationID]; ok { + latency := receiptTime.Sub(triggerTime) + metrics.RecordLatency(latency, notificationID.String(), notifications.NotificationTypeSMTP) + totalLatencies++ + logger.Debug(ctx, "computed SMTP latency", + slog.F("run_id", runID), + slog.F("notification_id", notificationID), + slog.F("latency", latency)) + } + } + } + } + + logger.Info(ctx, "finished computing notification latencies", + slog.F("total_runs", results.TotalRuns), + slog.F("total_latencies_computed", totalLatencies)) + + return nil +} + +// triggerUserNotifications waits for all test users to connect, +// then creates and deletes a test user to trigger notification events for testing. 
+func triggerUserNotifications( + ctx context.Context, + logger slog.Logger, + client *codersdk.Client, + orgID uuid.UUID, + dialBarrier *sync.WaitGroup, + dialTimeout time.Duration, + expectedNotifications map[uuid.UUID]chan time.Time, +) { + logger.Info(ctx, "waiting for all users to connect") + + // Wait for all users to connect + waitCtx, cancel := context.WithTimeout(ctx, dialTimeout+30*time.Second) + defer cancel() + + done := make(chan struct{}) + go func() { + dialBarrier.Wait() + close(done) + }() + + select { + case <-done: + logger.Info(ctx, "all users connected") + case <-waitCtx.Done(): + if waitCtx.Err() == context.DeadlineExceeded { + logger.Error(ctx, "timeout waiting for users to connect") + } else { + logger.Info(ctx, "context canceled while waiting for users") + } + return + } + + const ( + triggerUsername = "scaletest-trigger-user" + triggerEmail = "scaletest-trigger@example.com" + ) + + logger.Info(ctx, "creating test user to test notifications", + slog.F("username", triggerUsername), + slog.F("email", triggerEmail), + slog.F("org_id", orgID)) + + testUser, err := client.CreateUserWithOrgs(ctx, codersdk.CreateUserRequestWithOrgs{ + OrganizationIDs: []uuid.UUID{orgID}, + Username: triggerUsername, + Email: triggerEmail, + Password: "test-password-123", + }) + if err != nil { + logger.Error(ctx, "create test user", slog.Error(err)) + return + } + expectedNotifications[notificationsLib.TemplateUserAccountCreated] <- time.Now() + + err = client.DeleteUser(ctx, testUser.ID) + if err != nil { + logger.Error(ctx, "delete test user", slog.Error(err)) + return + } + expectedNotifications[notificationsLib.TemplateUserAccountDeleted] <- time.Now() + close(expectedNotifications[notificationsLib.TemplateUserAccountCreated]) + close(expectedNotifications[notificationsLib.TemplateUserAccountDeleted]) +} diff --git a/cli/exp_scaletest_smtp.go b/cli/exp_scaletest_smtp.go new file mode 100644 index 0000000000000..3713005de56dc --- /dev/null +++ 
b/cli/exp_scaletest_smtp.go @@ -0,0 +1,112 @@ +//go:build !slim + +package cli + +import ( + "fmt" + "os/signal" + "time" + + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/coder/v2/scaletest/smtpmock" + "github.com/coder/serpent" +) + +func (*RootCmd) scaletestSMTP() *serpent.Command { + var ( + hostAddress string + smtpPort int64 + apiPort int64 + purgeAtCount int64 + ) + cmd := &serpent.Command{ + Use: "smtp", + Short: "Start a mock SMTP server for testing", + Long: `Start a mock SMTP server with an HTTP API server that can be used to purge +messages and get messages by email.`, + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...) + defer stop() + ctx = notifyCtx + + logger := slog.Make(sloghuman.Sink(inv.Stderr)).Leveled(slog.LevelInfo) + config := smtpmock.Config{ + HostAddress: hostAddress, + SMTPPort: int(smtpPort), + APIPort: int(apiPort), + Logger: logger, + } + srv := new(smtpmock.Server) + + if err := srv.Start(ctx, config); err != nil { + return xerrors.Errorf("start mock SMTP server: %w", err) + } + defer func() { + _ = srv.Stop() + }() + + _, _ = fmt.Fprintf(inv.Stdout, "Mock SMTP server started on %s\n", srv.SMTPAddress()) + _, _ = fmt.Fprintf(inv.Stdout, "HTTP API server started on %s\n", srv.APIAddress()) + if purgeAtCount > 0 { + _, _ = fmt.Fprintf(inv.Stdout, " Auto-purge when message count reaches %d\n", purgeAtCount) + } + + ticker := time.NewTicker(10 * time.Second) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + _, _ = fmt.Fprintf(inv.Stdout, "\nTotal messages received since last purge: %d\n", srv.MessageCount()) + return nil + case <-ticker.C: + count := srv.MessageCount() + if count > 0 { + _, _ = fmt.Fprintf(inv.Stdout, "Messages received: %d\n", count) + } + + if purgeAtCount > 0 && int64(count) >= purgeAtCount { + _, _ = fmt.Fprintf(inv.Stdout, "Message count (%d) reached threshold (%d). 
Purging...\n", count, purgeAtCount) + srv.Purge() + continue + } + } + } + }, + } + + cmd.Options = []serpent.Option{ + { + Flag: "host-address", + Env: "CODER_SCALETEST_SMTP_HOST_ADDRESS", + Default: "localhost", + Description: "Host address to bind the mock SMTP and API servers.", + Value: serpent.StringOf(&hostAddress), + }, + { + Flag: "smtp-port", + Env: "CODER_SCALETEST_SMTP_PORT", + Description: "Port for the mock SMTP server. Uses a random port if not specified.", + Value: serpent.Int64Of(&smtpPort), + }, + { + Flag: "api-port", + Env: "CODER_SCALETEST_SMTP_API_PORT", + Description: "Port for the HTTP API server. Uses a random port if not specified.", + Value: serpent.Int64Of(&apiPort), + }, + { + Flag: "purge-at-count", + Env: "CODER_SCALETEST_SMTP_PURGE_AT_COUNT", + Default: "100000", + Description: "Maximum number of messages to keep before auto-purging. Set to 0 to disable.", + Value: serpent.Int64Of(&purgeAtCount), + }, + } + + return cmd +} diff --git a/cli/exp_task_create.go b/cli/exp_task_create.go index 7aeea13a44a7f..b506d679eba3f 100644 --- a/cli/exp_task_create.go +++ b/cli/exp_task_create.go @@ -29,6 +29,28 @@ func (r *RootCmd) taskCreate() *serpent.Command { cmd := &serpent.Command{ Use: "create [input]", Short: "Create an experimental task", + Long: FormatExamples( + Example{ + Description: "Create a task with direct input", + Command: "coder exp task create \"Add authentication to the user service\"", + }, + Example{ + Description: "Create a task with stdin input", + Command: "echo \"Add authentication to the user service\" | coder exp task create", + }, + Example{ + Description: "Create a task with a specific name", + Command: "coder exp task create --name task1 \"Add authentication to the user service\"", + }, + Example{ + Description: "Create a task from a specific template / preset", + Command: "coder exp task create --template backend-dev --preset \"My Preset\" \"Add authentication to the user service\"", + }, + Example{ + Description: 
"Create a task for another user (requires appropriate permissions)", + Command: "coder exp task create --owner user@example.com \"Add authentication to the user service\"", + }, + ), Middleware: serpent.Chain( serpent.RequireRangeArgs(0, 1), ), diff --git a/cli/exp_task_delete.go b/cli/exp_task_delete.go index 361a0cfe7c576..1611e4196e6c0 100644 --- a/cli/exp_task_delete.go +++ b/cli/exp_task_delete.go @@ -5,7 +5,6 @@ import ( "strings" "time" - "github.com/google/uuid" "golang.org/x/xerrors" "github.com/coder/pretty" @@ -19,6 +18,20 @@ func (r *RootCmd) taskDelete() *serpent.Command { cmd := &serpent.Command{ Use: "delete [ ...]", Short: "Delete experimental tasks", + Long: FormatExamples( + Example{ + Description: "Delete a single task.", + Command: "$ coder exp task delete task1", + }, + Example{ + Description: "Delete multiple tasks.", + Command: "$ coder exp task delete task1 task2 task3", + }, + Example{ + Description: "Delete a task without confirmation.", + Command: "$ coder exp task delete task4 --yes", + }, + ), Middleware: serpent.Chain( serpent.RequireRangeArgs(1, -1), ), @@ -33,43 +46,19 @@ func (r *RootCmd) taskDelete() *serpent.Command { } exp := codersdk.NewExperimentalClient(client) - type toDelete struct { - ID uuid.UUID - Owner string - Display string - } - - var items []toDelete + var tasks []codersdk.Task for _, identifier := range inv.Args { - identifier = strings.TrimSpace(identifier) - if identifier == "" { - return xerrors.New("task identifier cannot be empty or whitespace") - } - - // Check task identifier, try UUID first. - if id, err := uuid.Parse(identifier); err == nil { - task, err := exp.TaskByID(ctx, id) - if err != nil { - return xerrors.Errorf("resolve task %q: %w", identifier, err) - } - display := fmt.Sprintf("%s/%s", task.OwnerName, task.Name) - items = append(items, toDelete{ID: id, Display: display, Owner: task.OwnerName}) - continue - } - - // Non-UUID, treat as a workspace identifier (name or owner/name). 
- ws, err := namedWorkspace(ctx, client, identifier) + task, err := exp.TaskByIdentifier(ctx, identifier) if err != nil { return xerrors.Errorf("resolve task %q: %w", identifier, err) } - display := ws.FullName() - items = append(items, toDelete{ID: ws.ID, Display: display, Owner: ws.OwnerName}) + tasks = append(tasks, task) } // Confirm deletion of the tasks. var displayList []string - for _, it := range items { - displayList = append(displayList, it.Display) + for _, task := range tasks { + displayList = append(displayList, fmt.Sprintf("%s/%s", task.OwnerName, task.Name)) } _, err = cliui.Prompt(inv, cliui.PromptOptions{ Text: fmt.Sprintf("Delete these tasks: %s?", pretty.Sprint(cliui.DefaultStyles.Code, strings.Join(displayList, ", "))), @@ -80,12 +69,13 @@ func (r *RootCmd) taskDelete() *serpent.Command { return err } - for _, item := range items { - if err := exp.DeleteTask(ctx, item.Owner, item.ID); err != nil { - return xerrors.Errorf("delete task %q: %w", item.Display, err) + for i, task := range tasks { + display := displayList[i] + if err := exp.DeleteTask(ctx, task.OwnerName, task.ID); err != nil { + return xerrors.Errorf("delete task %q: %w", display, err) } _, _ = fmt.Fprintln( - inv.Stdout, "Deleted task "+pretty.Sprint(cliui.DefaultStyles.Keyword, item.Display)+" at "+cliui.Timestamp(time.Now()), + inv.Stdout, "Deleted task "+pretty.Sprint(cliui.DefaultStyles.Keyword, display)+" at "+cliui.Timestamp(time.Now()), ) } diff --git a/cli/exp_task_delete_test.go b/cli/exp_task_delete_test.go index 0b288c4ca3379..e90ee8c5b19ba 100644 --- a/cli/exp_task_delete_test.go +++ b/cli/exp_task_delete_test.go @@ -56,12 +56,18 @@ func TestExpTaskDelete(t *testing.T) { taskID := uuid.MustParse(id1) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/v2/users/me/workspace/exists": + case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": 
c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - Name: "exists", - OwnerName: "me", + httpapi.Write(r.Context(), w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: taskID, + Name: "exists", + OwnerName: "me", + }}, + Count: 1, }) case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id1: c.deleteCalls.Add(1) @@ -104,12 +110,18 @@ func TestExpTaskDelete(t *testing.T) { firstID := uuid.MustParse(id3) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/v2/users/me/workspace/first": + case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Workspace{ - ID: firstID, - Name: "first", - OwnerName: "me", + httpapi.Write(r.Context(), w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: firstID, + Name: "first", + OwnerName: "me", + }}, + Count: 1, }) case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks/me/"+id4: httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Task{ @@ -139,8 +151,14 @@ func TestExpTaskDelete(t *testing.T) { buildHandler: func(_ *testCounters) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/v2/users/me/workspace/doesnotexist": - httpapi.ResourceNotFound(w) + case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": + httpapi.Write(r.Context(), w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{}, + Count: 0, + }) default: httpapi.InternalServerError(w, 
xerrors.New("unwanted path: "+r.Method+" "+r.URL.Path)) } @@ -156,12 +174,18 @@ func TestExpTaskDelete(t *testing.T) { taskID := uuid.MustParse(id5) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/v2/users/me/workspace/bad": + case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - Name: "bad", - OwnerName: "me", + httpapi.Write(r.Context(), w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: taskID, + Name: "bad", + OwnerName: "me", + }}, + Count: 1, }) case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id5: httpapi.InternalServerError(w, xerrors.New("boom")) diff --git a/cli/exp_task_list.go b/cli/exp_task_list.go index 18b2bec95db91..89b313a1f49c5 100644 --- a/cli/exp_task_list.go +++ b/cli/exp_task_list.go @@ -8,6 +8,7 @@ import ( "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/serpent" ) @@ -67,8 +68,30 @@ func (r *RootCmd) taskList() *serpent.Command { ) cmd := &serpent.Command{ - Use: "list", - Short: "List experimental tasks", + Use: "list", + Short: "List experimental tasks", + Long: FormatExamples( + Example{ + Description: "List tasks for the current user.", + Command: "coder exp task list", + }, + Example{ + Description: "List tasks for a specific user.", + Command: "coder exp task list --user someone-else", + }, + Example{ + Description: "List all tasks you can view.", + Command: "coder exp task list --all", + }, + Example{ + Description: "List all your running tasks.", + Command: "coder exp task list --status running", + }, + Example{ + Description: "As above, but only show IDs.", + Command: 
"coder exp task list --status running --quiet", + }, + ), Aliases: []string{"ls"}, Middleware: serpent.Chain( serpent.RequireNArgs(0), @@ -76,10 +99,10 @@ func (r *RootCmd) taskList() *serpent.Command { Options: serpent.OptionSet{ { Name: "status", - Description: "Filter by task status (e.g. running, failed, etc).", + Description: "Filter by task status.", Flag: "status", Default: "", - Value: serpent.StringOf(&statusFilter), + Value: serpent.EnumOf(&statusFilter, slice.ToStrings(codersdk.AllTaskStatuses())...), }, { Name: "all", @@ -121,7 +144,7 @@ func (r *RootCmd) taskList() *serpent.Command { tasks, err := exp.Tasks(ctx, &codersdk.TasksFilter{ Owner: targetUser, - Status: statusFilter, + Status: codersdk.TaskStatus(statusFilter), }) if err != nil { return xerrors.Errorf("list tasks: %w", err) diff --git a/cli/exp_task_list_test.go b/cli/exp_task_list_test.go index 2761588a3859e..d297310dc4fc3 100644 --- a/cli/exp_task_list_test.go +++ b/cli/exp_task_list_test.go @@ -22,6 +22,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/pty/ptytest" @@ -29,7 +30,7 @@ import ( ) // makeAITask creates an AI-task workspace. -func makeAITask(t *testing.T, db database.Store, orgID, adminID, ownerID uuid.UUID, transition database.WorkspaceTransition, prompt string) (workspace database.WorkspaceTable) { +func makeAITask(t *testing.T, db database.Store, orgID, adminID, ownerID uuid.UUID, transition database.WorkspaceTransition, prompt string) database.Task { t.Helper() tv := dbfake.TemplateVersion(t, db). 
@@ -91,7 +92,27 @@ func makeAITask(t *testing.T, db database.Store, orgID, adminID, ownerID uuid.UU ) require.NoError(t, err) - return build.Workspace + // Create a task record in the tasks table for the new data model. + task := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: orgID, + OwnerID: ownerID, + Name: build.Workspace.Name, + WorkspaceID: uuid.NullUUID{UUID: build.Workspace.ID, Valid: true}, + TemplateVersionID: tv.TemplateVersion.ID, + TemplateParameters: []byte("{}"), + Prompt: prompt, + CreatedAt: dbtime.Now(), + }) + + // Link the task to the workspace app. + dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{ + TaskID: task.ID, + WorkspaceBuildNumber: build.Build.BuildNumber, + WorkspaceAgentID: uuid.NullUUID{UUID: agentID, Valid: true}, + WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, + }) + + return task } func TestExpTaskList(t *testing.T) { @@ -128,7 +149,7 @@ func TestExpTaskList(t *testing.T) { memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) wantPrompt := "build me a web app" - ws := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, wantPrompt) + task := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, wantPrompt) inv, root := clitest.New(t, "exp", "task", "list", "--column", "id,name,status,initial prompt") clitest.SetupConfig(t, memberClient, root) @@ -140,8 +161,8 @@ func TestExpTaskList(t *testing.T) { require.NoError(t, err) // Validate the table includes the task and status. - pty.ExpectMatch(ws.Name) - pty.ExpectMatch("running") + pty.ExpectMatch(task.Name) + pty.ExpectMatch("initializing") pty.ExpectMatch(wantPrompt) }) @@ -154,12 +175,12 @@ func TestExpTaskList(t *testing.T) { owner := coderdtest.CreateFirstUser(t, client) memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - // Create two AI tasks: one running, one stopped. 
- running := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "keep me running") - stopped := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStop, "stop me please") + // Create two AI tasks: one initializing, one paused. + initializingTask := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "keep me initializing") + pausedTask := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStop, "stop me please") // Use JSON output to reliably validate filtering. - inv, root := clitest.New(t, "exp", "task", "list", "--status=stopped", "--output=json") + inv, root := clitest.New(t, "exp", "task", "list", "--status=paused", "--output=json") clitest.SetupConfig(t, memberClient, root) ctx := testutil.Context(t, testutil.WaitShort) @@ -173,10 +194,10 @@ func TestExpTaskList(t *testing.T) { var tasks []codersdk.Task require.NoError(t, json.Unmarshal(stdout.Bytes(), &tasks)) - // Only the stopped task is returned. + // Only the paused task is returned. 
require.Len(t, tasks, 1, "expected one task after filtering") - require.Equal(t, stopped.ID, tasks[0].ID) - require.NotEqual(t, running.ID, tasks[0].ID) + require.Equal(t, pausedTask.ID, tasks[0].ID) + require.NotEqual(t, initializingTask.ID, tasks[0].ID) }) t.Run("UserFlag_Me_Table", func(t *testing.T) { @@ -188,7 +209,7 @@ func TestExpTaskList(t *testing.T) { _, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) _ = makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "other-task") - ws := makeAITask(t, db, owner.OrganizationID, owner.UserID, owner.UserID, database.WorkspaceTransitionStart, "me-task") + task := makeAITask(t, db, owner.OrganizationID, owner.UserID, owner.UserID, database.WorkspaceTransitionStart, "me-task") inv, root := clitest.New(t, "exp", "task", "list", "--user", "me") //nolint:gocritic // Owner client is intended here smoke test the member task not showing up. @@ -200,7 +221,7 @@ func TestExpTaskList(t *testing.T) { err := inv.WithContext(ctx).Run() require.NoError(t, err) - pty.ExpectMatch(ws.Name) + pty.ExpectMatch(task.Name) }) t.Run("Quiet", func(t *testing.T) { @@ -213,7 +234,7 @@ func TestExpTaskList(t *testing.T) { memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) // Given: We have two tasks - task1 := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "keep me running") + task1 := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "keep me active") task2 := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStop, "stop me please") // Given: We add the `--quiet` flag diff --git a/cli/exp_task_logs.go b/cli/exp_task_logs.go index c7224e6185bee..d1d4a826cd9ce 100644 --- a/cli/exp_task_logs.go +++ b/cli/exp_task_logs.go @@ -3,7 +3,6 @@ package cli import ( "fmt" - 
"github.com/google/uuid" "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" @@ -26,6 +25,11 @@ func (r *RootCmd) taskLogs() *serpent.Command { cmd := &serpent.Command{ Use: "logs ", Short: "Show a task's logs", + Long: FormatExamples( + Example{ + Description: "Show logs for a given task.", + Command: "coder exp task logs task1", + }), Middleware: serpent.Chain( serpent.RequireNArgs(1), ), @@ -36,24 +40,17 @@ func (r *RootCmd) taskLogs() *serpent.Command { } var ( - ctx = inv.Context() - exp = codersdk.NewExperimentalClient(client) - task = inv.Args[0] - taskID uuid.UUID + ctx = inv.Context() + exp = codersdk.NewExperimentalClient(client) + identifier = inv.Args[0] ) - if id, err := uuid.Parse(task); err == nil { - taskID = id - } else { - ws, err := namedWorkspace(ctx, client, task) - if err != nil { - return xerrors.Errorf("resolve task %q: %w", task, err) - } - - taskID = ws.ID + task, err := exp.TaskByIdentifier(ctx, identifier) + if err != nil { + return xerrors.Errorf("resolve task %q: %w", identifier, err) } - logs, err := exp.TaskLogs(ctx, codersdk.Me, taskID) + logs, err := exp.TaskLogs(ctx, codersdk.Me, task.ID) if err != nil { return xerrors.Errorf("get task logs: %w", err) } diff --git a/cli/exp_task_logs_test.go b/cli/exp_task_logs_test.go index 5dc3a995810e2..859ff135d0d63 100644 --- a/cli/exp_task_logs_test.go +++ b/cli/exp_task_logs_test.go @@ -1,11 +1,8 @@ package cli_test import ( - "context" "encoding/json" - "fmt" "net/http" - "net/http/httptest" "strings" "testing" "time" @@ -14,7 +11,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + agentapisdk "github.com/coder/agentapi-sdk-go" + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" @@ -23,178 +23,165 @@ import ( func Test_TaskLogs(t *testing.T) { t.Parallel() - var ( - clock = time.Date(2025, 
8, 26, 12, 34, 56, 0, time.UTC) - - taskID = uuid.MustParse("11111111-1111-1111-1111-111111111111") - taskName = "task-workspace" - - taskLogs = []codersdk.TaskLogEntry{ - { - ID: 0, - Content: "What is 1 + 1?", - Type: codersdk.TaskLogTypeInput, - Time: clock, - }, - { - ID: 1, - Content: "2", - Type: codersdk.TaskLogTypeOutput, - Time: clock.Add(1 * time.Second), - }, - } - ) - - tests := []struct { - args []string - expectTable string - expectLogs []codersdk.TaskLogEntry - expectError string - handler func(t *testing.T, ctx context.Context) http.HandlerFunc - }{ - { - args: []string{taskName, "--output", "json"}, - expectLogs: taskLogs, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/v2/users/me/workspace/%s", taskName): - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - }) - case fmt.Sprintf("/api/experimental/tasks/me/%s/logs", taskID.String()): - httpapi.Write(ctx, w, http.StatusOK, codersdk.TaskLogsResponse{ - Logs: taskLogs, - }) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{taskID.String(), "--output", "json"}, - expectLogs: taskLogs, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/logs", taskID.String()): - httpapi.Write(ctx, w, http.StatusOK, codersdk.TaskLogsResponse{ - Logs: taskLogs, - }) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{taskID.String()}, - expectTable: ` -TYPE CONTENT -input What is 1 + 1? 
-output 2`, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/logs", taskID.String()): - httpapi.Write(ctx, w, http.StatusOK, codersdk.TaskLogsResponse{ - Logs: taskLogs, - }) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, + testMessages := []agentapisdk.Message{ { - args: []string{"doesnotexist"}, - expectError: httpapi.ResourceNotFoundResponse.Message, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/api/v2/users/me/workspace/doesnotexist": - httpapi.ResourceNotFound(w) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, + Id: 0, + Role: agentapisdk.RoleUser, + Content: "What is 1 + 1?", + Time: time.Now().Add(-2 * time.Minute), }, { - args: []string{uuid.Nil.String()}, // uuid does not exist - expectError: httpapi.ResourceNotFoundResponse.Message, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/logs", uuid.Nil.String()): - httpapi.ResourceNotFound(w) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, + Id: 1, + Role: agentapisdk.RoleAgent, + Content: "2", + Time: time.Now().Add(-1 * time.Minute), }, - { - args: []string{"err-fetching-logs"}, - expectError: assert.AnError.Error(), - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/api/v2/users/me/workspace/err-fetching-logs": - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - }) - case fmt.Sprintf("/api/experimental/tasks/me/%s/logs", taskID.String()): - httpapi.InternalServerError(w, assert.AnError) - default: - 
t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, + } + + t.Run("ByTaskName_JSON", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskLogsOK(testMessages)) + userClient := client // user already has access to their own workspace + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "logs", task.Name, "--output", "json") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + var logs []codersdk.TaskLogEntry + err = json.NewDecoder(strings.NewReader(stdout.String())).Decode(&logs) + require.NoError(t, err) + + require.Len(t, logs, 2) + require.Equal(t, "What is 1 + 1?", logs[0].Content) + require.Equal(t, codersdk.TaskLogTypeInput, logs[0].Type) + require.Equal(t, "2", logs[1].Content) + require.Equal(t, codersdk.TaskLogTypeOutput, logs[1].Type) + }) + + t.Run("ByTaskID_JSON", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskLogsOK(testMessages)) + userClient := client + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String(), "--output", "json") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + var logs []codersdk.TaskLogEntry + err = json.NewDecoder(strings.NewReader(stdout.String())).Decode(&logs) + require.NoError(t, err) + + require.Len(t, logs, 2) + require.Equal(t, "What is 1 + 1?", logs[0].Content) + require.Equal(t, codersdk.TaskLogTypeInput, logs[0].Type) + require.Equal(t, "2", logs[1].Content) + require.Equal(t, codersdk.TaskLogTypeOutput, logs[1].Type) + }) + + t.Run("ByTaskID_Table", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, 
fakeAgentAPITaskLogsOK(testMessages)) + userClient := client + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String()) + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + output := stdout.String() + require.Contains(t, output, "What is 1 + 1?") + require.Contains(t, output, "2") + require.Contains(t, output, "input") + require.Contains(t, output, "output") + }) + + t.Run("TaskNotFound_ByName", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "logs", "doesnotexist") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.Error(t, err) + require.ErrorContains(t, err, httpapi.ResourceNotFoundResponse.Message) + }) + + t.Run("TaskNotFound_ByID", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "logs", uuid.Nil.String()) + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.Error(t, err) + require.ErrorContains(t, err, httpapi.ResourceNotFoundResponse.Message) + }) + + t.Run("ErrorFetchingLogs", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskLogsErr(assert.AnError)) + userClient := client + + 
inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String()) + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.ErrorContains(t, err, assert.AnError.Error()) + }) +} + +func fakeAgentAPITaskLogsOK(messages []agentapisdk.Message) map[string]http.HandlerFunc { + return map[string]http.HandlerFunc{ + "/messages": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{ + "messages": messages, + }) }, } +} - for _, tt := range tests { - t.Run(strings.Join(tt.args, ","), func(t *testing.T) { - t.Parallel() - - var ( - ctx = testutil.Context(t, testutil.WaitShort) - srv = httptest.NewServer(tt.handler(t, ctx)) - client = codersdk.New(testutil.MustURL(t, srv.URL)) - args = []string{"exp", "task", "logs"} - stdout strings.Builder - err error - ) - - t.Cleanup(srv.Close) - - inv, root := clitest.New(t, append(args, tt.args...)...) - inv.Stdout = &stdout - inv.Stderr = &stdout - clitest.SetupConfig(t, client, root) - - err = inv.WithContext(ctx).Run() - if tt.expectError == "" { - assert.NoError(t, err) - } else { - assert.ErrorContains(t, err, tt.expectError) - } - - if tt.expectTable != "" { - if diff := tableDiff(tt.expectTable, stdout.String()); diff != "" { - t.Errorf("unexpected output diff (-want +got):\n%s", diff) - } - } - - if tt.expectLogs != nil { - var logs []codersdk.TaskLogEntry - err = json.NewDecoder(strings.NewReader(stdout.String())).Decode(&logs) - require.NoError(t, err) - - assert.Equal(t, tt.expectLogs, logs) - } - }) +func fakeAgentAPITaskLogsErr(err error) map[string]http.HandlerFunc { + return map[string]http.HandlerFunc{ + "/messages": func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{ + "error": err.Error(), + }) + }, } } diff --git 
a/cli/exp_task_send.go b/cli/exp_task_send.go index c30c3597db177..e8985d55d97da 100644 --- a/cli/exp_task_send.go +++ b/cli/exp_task_send.go @@ -3,7 +3,6 @@ package cli import ( "io" - "github.com/google/uuid" "golang.org/x/xerrors" "github.com/coder/coder/v2/codersdk" @@ -14,8 +13,15 @@ func (r *RootCmd) taskSend() *serpent.Command { var stdin bool cmd := &serpent.Command{ - Use: "send [ | --stdin]", - Short: "Send input to a task", + Use: "send [ | --stdin]", + Short: "Send input to a task", + Long: FormatExamples(Example{ + Description: "Send direct input to a task.", + Command: "coder exp task send task1 \"Please also add unit tests\"", + }, Example{ + Description: "Send input from stdin to a task.", + Command: "echo \"Please also add unit tests\" | coder exp task send task1 --stdin", + }), Middleware: serpent.RequireRangeArgs(1, 2), Options: serpent.OptionSet{ { @@ -32,12 +38,11 @@ func (r *RootCmd) taskSend() *serpent.Command { } var ( - ctx = inv.Context() - exp = codersdk.NewExperimentalClient(client) - task = inv.Args[0] + ctx = inv.Context() + exp = codersdk.NewExperimentalClient(client) + identifier = inv.Args[0] taskInput string - taskID uuid.UUID ) if stdin { @@ -55,18 +60,12 @@ func (r *RootCmd) taskSend() *serpent.Command { taskInput = inv.Args[1] } - if id, err := uuid.Parse(task); err == nil { - taskID = id - } else { - ws, err := namedWorkspace(ctx, client, task) - if err != nil { - return xerrors.Errorf("resolve task: %w", err) - } - - taskID = ws.ID + task, err := exp.TaskByIdentifier(ctx, identifier) + if err != nil { + return xerrors.Errorf("resolve task: %w", err) } - if err = exp.TaskSend(ctx, codersdk.Me, taskID, codersdk.TaskSendRequest{Input: taskInput}); err != nil { + if err = exp.TaskSend(ctx, codersdk.Me, task.ID, codersdk.TaskSendRequest{Input: taskInput}); err != nil { return xerrors.Errorf("send input to task: %w", err) } diff --git a/cli/exp_task_send_test.go b/cli/exp_task_send_test.go index 1d7b863f5f91e..3529cf2e0b9b5 100644 
--- a/cli/exp_task_send_test.go +++ b/cli/exp_task_send_test.go @@ -1,173 +1,171 @@ package cli_test import ( - "context" - "fmt" + "encoding/json" "net/http" - "net/http/httptest" "strings" "testing" + "time" "github.com/google/uuid" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + agentapisdk "github.com/coder/agentapi-sdk-go" "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/httpapi" - "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" ) func Test_TaskSend(t *testing.T) { t.Parallel() - var ( - taskName = "task-workspace" - taskID = uuid.MustParse("11111111-1111-1111-1111-111111111111") - ) - - tests := []struct { - args []string - stdin string - expectError string - handler func(t *testing.T, ctx context.Context) http.HandlerFunc - }{ - { - args: []string{taskName, "carry on with the task"}, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/v2/users/me/workspace/%s", taskName): - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - }) - case fmt.Sprintf("/api/experimental/tasks/me/%s/send", taskID.String()): - var req codersdk.TaskSendRequest - if !httpapi.Read(ctx, w, r, &req) { - return - } - - assert.Equal(t, "carry on with the task", req.Input) - - httpapi.Write(ctx, w, http.StatusNoContent, nil) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{taskID.String(), "carry on with the task"}, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/send", taskID.String()): - var req codersdk.TaskSendRequest - if !httpapi.Read(ctx, w, r, &req) { - return - } - - assert.Equal(t, "carry on with the task", req.Input) 
- - httpapi.Write(ctx, w, http.StatusNoContent, nil) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{taskName, "--stdin"}, - stdin: "carry on with the task", - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/v2/users/me/workspace/%s", taskName): - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: taskID, - }) - case fmt.Sprintf("/api/experimental/tasks/me/%s/send", taskID.String()): - var req codersdk.TaskSendRequest - if !httpapi.Read(ctx, w, r, &req) { - return - } - - assert.Equal(t, "carry on with the task", req.Input) - - httpapi.Write(ctx, w, http.StatusNoContent, nil) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{"doesnotexist", "some task input"}, - expectError: httpapi.ResourceNotFoundResponse.Message, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/api/v2/users/me/workspace/doesnotexist": - httpapi.ResourceNotFound(w) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{uuid.Nil.String(), "some task input"}, - expectError: httpapi.ResourceNotFoundResponse.Message, - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/send", uuid.Nil.String()): - httpapi.ResourceNotFound(w) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, + t.Run("ByTaskName_WithArgument", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskSendOK(t, "carry on with the task", "you got it")) + userClient := client + + var stdout strings.Builder + inv, root := 
clitest.New(t, "exp", "task", "send", task.Name, "carry on with the task") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + }) + + t.Run("ByTaskID_WithArgument", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskSendOK(t, "carry on with the task", "you got it")) + userClient := client + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "send", task.ID.String(), "carry on with the task") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + }) + + t.Run("ByTaskName_WithStdin", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskSendOK(t, "carry on with the task", "you got it")) + userClient := client + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "send", task.Name, "--stdin") + inv.Stdout = &stdout + inv.Stdin = strings.NewReader("carry on with the task") + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + }) + + t.Run("TaskNotFound_ByName", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "send", "doesnotexist", "some task input") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.Error(t, err) + require.ErrorContains(t, err, httpapi.ResourceNotFoundResponse.Message) + }) + + t.Run("TaskNotFound_ByID", func(t *testing.T) { + 
t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "send", uuid.Nil.String(), "some task input") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.Error(t, err) + require.ErrorContains(t, err, httpapi.ResourceNotFoundResponse.Message) + }) + + t.Run("SendError", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + userClient, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskSendErr(t, assert.AnError)) + + var stdout strings.Builder + inv, root := clitest.New(t, "exp", "task", "send", task.Name, "some task input") + inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + + err := inv.WithContext(ctx).Run() + require.ErrorContains(t, err, assert.AnError.Error()) + }) +} + +func fakeAgentAPITaskSendOK(t *testing.T, expectMessage, returnMessage string) map[string]http.HandlerFunc { + return map[string]http.HandlerFunc{ + "/status": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]string{ + "status": "stable", + }) }, - { - args: []string{uuid.Nil.String(), "some task input"}, - expectError: assert.AnError.Error(), - handler: func(t *testing.T, ctx context.Context) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case fmt.Sprintf("/api/experimental/tasks/me/%s/send", uuid.Nil.String()): - httpapi.InternalServerError(w, assert.AnError) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, + "/message": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + 
w.WriteHeader(http.StatusOK) + var msg agentapisdk.PostMessageParams + if err := json.NewDecoder(r.Body).Decode(&msg); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + assert.Equal(t, expectMessage, msg.Content) + message := agentapisdk.Message{ + Id: 999, + Role: agentapisdk.RoleAgent, + Content: returnMessage, + Time: time.Now(), + } + _ = json.NewEncoder(w).Encode(message) }, } +} - for _, tt := range tests { - t.Run(strings.Join(tt.args, ","), func(t *testing.T) { - t.Parallel() - - var ( - ctx = testutil.Context(t, testutil.WaitShort) - srv = httptest.NewServer(tt.handler(t, ctx)) - client = codersdk.New(testutil.MustURL(t, srv.URL)) - args = []string{"exp", "task", "send"} - err error - ) - - t.Cleanup(srv.Close) - - inv, root := clitest.New(t, append(args, tt.args...)...) - inv.Stdin = strings.NewReader(tt.stdin) - clitest.SetupConfig(t, client, root) - - err = inv.WithContext(ctx).Run() - if tt.expectError == "" { - assert.NoError(t, err) - } else { - assert.ErrorContains(t, err, tt.expectError) +func fakeAgentAPITaskSendErr(t *testing.T, returnErr error) map[string]http.HandlerFunc { + return map[string]http.HandlerFunc{ + "/status": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]string{ + "status": "stable", + }) + }, + "/message": func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return } - }) + w.WriteHeader(http.StatusInternalServerError) + _, _ = w.Write([]byte(returnErr.Error())) + }, } } diff --git a/cli/exp_task_status.go b/cli/exp_task_status.go index f0e1e7b865860..1bd77f5f7f5b3 100644 --- a/cli/exp_task_status.go +++ b/cli/exp_task_status.go @@ -5,7 +5,6 @@ import ( "strings" "time" - "github.com/google/uuid" "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" @@ -44,7 +43,17 @@ func (r *RootCmd) taskStatus() 
*serpent.Command { watchIntervalArg time.Duration ) cmd := &serpent.Command{ - Short: "Show the status of a task.", + Short: "Show the status of a task.", + Long: FormatExamples( + Example{ + Description: "Show the status of a given task.", + Command: "coder exp task status task1", + }, + Example{ + Description: "Watch the status of a given task until it completes (idle or stopped).", + Command: "coder exp task status task1 --watch", + }, + ), Use: "status", Aliases: []string{"stat"}, Options: serpent.OptionSet{ @@ -74,21 +83,10 @@ func (r *RootCmd) taskStatus() *serpent.Command { } ctx := i.Context() - ec := codersdk.NewExperimentalClient(client) + exp := codersdk.NewExperimentalClient(client) identifier := i.Args[0] - taskID, err := uuid.Parse(identifier) - if err != nil { - // Try to resolve the task as a named workspace - // TODO: right now tasks are still "workspaces" under the hood. - // We should update this once we have a proper task model. - ws, err := namedWorkspace(ctx, client, identifier) - if err != nil { - return err - } - taskID = ws.ID - } - task, err := ec.TaskByID(ctx, taskID) + task, err := exp.TaskByIdentifier(ctx, identifier) if err != nil { return err } @@ -109,7 +107,7 @@ func (r *RootCmd) taskStatus() *serpent.Command { // TODO: implement streaming updates instead of polling lastStatusRow := tsr for range t.C { - task, err := ec.TaskByID(ctx, taskID) + task, err := exp.TaskByID(ctx, task.ID) if err != nil { return err } @@ -142,7 +140,7 @@ func (r *RootCmd) taskStatus() *serpent.Command { } func taskWatchIsEnded(task codersdk.Task) bool { - if task.Status == codersdk.WorkspaceStatusStopped { + if task.WorkspaceStatus == codersdk.WorkspaceStatusStopped { return true } if task.WorkspaceAgentHealth == nil || !task.WorkspaceAgentHealth.Healthy { @@ -158,28 +156,21 @@ func taskWatchIsEnded(task codersdk.Task) bool { } type taskStatusRow struct { - codersdk.Task `table:"-"` - ChangedAgo string `json:"-" table:"state changed,default_sort"` - 
Timestamp time.Time `json:"-" table:"-"` - TaskStatus string `json:"-" table:"status"` - Healthy bool `json:"-" table:"healthy"` - TaskState string `json:"-" table:"state"` - Message string `json:"-" table:"message"` + codersdk.Task `table:"r,recursive_inline"` + ChangedAgo string `json:"-" table:"state changed"` + Healthy bool `json:"-" table:"healthy"` } func taskStatusRowEqual(r1, r2 taskStatusRow) bool { - return r1.TaskStatus == r2.TaskStatus && + return r1.Status == r2.Status && r1.Healthy == r2.Healthy && - r1.TaskState == r2.TaskState && - r1.Message == r2.Message + taskStateEqual(r1.CurrentState, r2.CurrentState) } func toStatusRow(task codersdk.Task) taskStatusRow { tsr := taskStatusRow{ Task: task, ChangedAgo: time.Since(task.UpdatedAt).Truncate(time.Second).String() + " ago", - Timestamp: task.UpdatedAt, - TaskStatus: string(task.Status), } tsr.Healthy = task.WorkspaceAgentHealth != nil && task.WorkspaceAgentHealth.Healthy && @@ -189,9 +180,19 @@ func toStatusRow(task codersdk.Task) taskStatusRow { if task.CurrentState != nil { tsr.ChangedAgo = time.Since(task.CurrentState.Timestamp).Truncate(time.Second).String() + " ago" - tsr.Timestamp = task.CurrentState.Timestamp - tsr.TaskState = string(task.CurrentState.State) - tsr.Message = task.CurrentState.Message } return tsr } + +func taskStateEqual(se1, se2 *codersdk.TaskStateEntry) bool { + var s1, m1, s2, m2 string + if se1 != nil { + s1 = string(se1.State) + m1 = se1.Message + } + if se2 != nil { + s2 = string(se2.State) + m2 = se2.Message + } + return s1 == s2 && m1 == m2 +} diff --git a/cli/exp_task_status_test.go b/cli/exp_task_status_test.go index be62a76476d35..f15222d51b0fb 100644 --- a/cli/exp_task_status_test.go +++ b/cli/exp_task_status_test.go @@ -36,26 +36,17 @@ func Test_TaskStatus(t *testing.T) { hf: func(ctx context.Context, _ time.Time) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case 
"/api/v2/users/me/workspace/doesnotexist": - httpapi.ResourceNotFound(w) - default: - t.Errorf("unexpected path: %s", r.URL.Path) - } - } - }, - }, - { - args: []string{"err-fetching-workspace"}, - expectError: assert.AnError.Error(), - hf: func(ctx context.Context, _ time.Time) func(w http.ResponseWriter, r *http.Request) { - return func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/api/v2/users/me/workspace/err-fetching-workspace": - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - }) - case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": - httpapi.InternalServerError(w, assert.AnError) + case "/api/experimental/tasks": + if r.URL.Query().Get("q") == "owner:\"me\"" { + httpapi.Write(ctx, w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{}, + Count: 0, + }) + return + } default: t.Errorf("unexpected path: %s", r.URL.Path) } @@ -64,21 +55,45 @@ func Test_TaskStatus(t *testing.T) { }, { args: []string{"exists"}, - expectOutput: `STATE CHANGED STATUS HEALTHY STATE MESSAGE -0s ago running true working Thinking furiously...`, + expectOutput: `STATE CHANGED STATUS HEALTHY STATE MESSAGE +0s ago active true working Thinking furiously...`, hf: func(ctx context.Context, now time.Time) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/v2/users/me/workspace/exists": - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - }) + case "/api/experimental/tasks": + if r.URL.Query().Get("q") == "owner:\"me\"" { + httpapi.Write(ctx, w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + OwnerName: 
"me", + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now, + UpdatedAt: now, + CurrentState: &codersdk.TaskStateEntry{ + State: codersdk.TaskStateWorking, + Timestamp: now, + Message: "Thinking furiously...", + }, + WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ + Healthy: true, + }, + WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusActive, + }}, + Count: 1, + }) + return + } case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusRunning, - CreatedAt: now, - UpdatedAt: now, + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now, + UpdatedAt: now, CurrentState: &codersdk.TaskStateEntry{ State: codersdk.TaskStateWorking, Timestamp: now, @@ -88,7 +103,9 @@ func Test_TaskStatus(t *testing.T) { Healthy: true, }, WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusActive, }) + return default: t.Errorf("unexpected path: %s", r.URL.Path) } @@ -97,50 +114,77 @@ func Test_TaskStatus(t *testing.T) { }, { args: []string{"exists", "--watch"}, - expectOutput: ` -STATE CHANGED STATUS HEALTHY STATE MESSAGE -4s ago running true -3s ago running true working Reticulating splines... -2s ago running true complete Splines reticulated successfully!`, + expectOutput: `STATE CHANGED STATUS HEALTHY STATE MESSAGE +5s ago pending true +4s ago initializing true +4s ago active true +3s ago active true working Reticulating splines... 
+2s ago active true complete Splines reticulated successfully!`, hf: func(ctx context.Context, now time.Time) func(http.ResponseWriter, *http.Request) { var calls atomic.Int64 return func(w http.ResponseWriter, r *http.Request) { - defer calls.Add(1) switch r.URL.Path { - case "/api/v2/users/me/workspace/exists": - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - }) + case "/api/experimental/tasks": + if r.URL.Query().Get("q") == "owner:\"me\"" { + // Return initial task state for --watch test + httpapi.Write(ctx, w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + OwnerName: "me", + WorkspaceStatus: codersdk.WorkspaceStatusPending, + CreatedAt: now.Add(-5 * time.Second), + UpdatedAt: now.Add(-5 * time.Second), + WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ + Healthy: true, + }, + WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusPending, + }}, + Count: 1, + }) + return + } case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": + defer calls.Add(1) switch calls.Load() { case 0: httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusPending, - CreatedAt: now.Add(-5 * time.Second), - UpdatedAt: now.Add(-5 * time.Second), + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + OwnerName: "me", + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now.Add(-5 * time.Second), + UpdatedAt: now.Add(-4 * time.Second), WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ Healthy: true, }, WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusInitializing, }) + return case 1: httpapi.Write(ctx, w, 
http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusRunning, - CreatedAt: now.Add(-5 * time.Second), + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now.Add(-5 * time.Second), WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ Healthy: true, }, WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), UpdatedAt: now.Add(-4 * time.Second), + Status: codersdk.TaskStatusActive, }) + return case 2: httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusRunning, - CreatedAt: now.Add(-5 * time.Second), - UpdatedAt: now.Add(-4 * time.Second), + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now.Add(-5 * time.Second), + UpdatedAt: now.Add(-4 * time.Second), WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ Healthy: true, }, @@ -150,13 +194,15 @@ STATE CHANGED STATUS HEALTHY STATE MESSAGE Timestamp: now.Add(-3 * time.Second), Message: "Reticulating splines...", }, + Status: codersdk.TaskStatusActive, }) + return case 3: httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusRunning, - CreatedAt: now.Add(-5 * time.Second), - UpdatedAt: now.Add(-4 * time.Second), + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: now.Add(-5 * time.Second), + UpdatedAt: now.Add(-4 * time.Second), WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ Healthy: true, }, @@ -166,13 +212,16 @@ STATE CHANGED STATUS HEALTHY STATE MESSAGE Timestamp: now.Add(-2 * time.Second), Message: "Splines reticulated successfully!", }, + Status: codersdk.TaskStatusActive, }) + return default: 
httpapi.InternalServerError(w, xerrors.New("too many calls!")) return } default: httpapi.InternalServerError(w, xerrors.Errorf("unexpected path: %q", r.URL.Path)) + return } } }, @@ -183,18 +232,24 @@ STATE CHANGED STATUS HEALTHY STATE MESSAGE "id": "11111111-1111-1111-1111-111111111111", "organization_id": "00000000-0000-0000-0000-000000000000", "owner_id": "00000000-0000-0000-0000-000000000000", - "owner_name": "", - "name": "", + "owner_name": "me", + "name": "exists", "template_id": "00000000-0000-0000-0000-000000000000", + "template_version_id": "00000000-0000-0000-0000-000000000000", "template_name": "", "template_display_name": "", "template_icon": "", "workspace_id": null, + "workspace_name": "", + "workspace_status": "running", "workspace_agent_id": null, - "workspace_agent_lifecycle": null, - "workspace_agent_health": null, + "workspace_agent_lifecycle": "ready", + "workspace_agent_health": { + "healthy": true + }, + "workspace_app_id": null, "initial_prompt": "", - "status": "running", + "status": "active", "current_state": { "timestamp": "2025-08-26T12:34:57Z", "state": "working", @@ -204,26 +259,52 @@ STATE CHANGED STATUS HEALTHY STATE MESSAGE "created_at": "2025-08-26T12:34:56Z", "updated_at": "2025-08-26T12:34:56Z" }`, - hf: func(ctx context.Context, _ time.Time) func(w http.ResponseWriter, r *http.Request) { + hf: func(ctx context.Context, now time.Time) func(http.ResponseWriter, *http.Request) { ts := time.Date(2025, 8, 26, 12, 34, 56, 0, time.UTC) return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/v2/users/me/workspace/exists": - httpapi.Write(ctx, w, http.StatusOK, codersdk.Workspace{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - }) + case "/api/experimental/tasks": + if r.URL.Query().Get("q") == "owner:\"me\"" { + httpapi.Write(ctx, w, http.StatusOK, struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` + }{ + Tasks: []codersdk.Task{{ + ID: 
uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + OwnerName: "me", + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: ts, + UpdatedAt: ts, + CurrentState: &codersdk.TaskStateEntry{ + State: codersdk.TaskStateWorking, + Timestamp: ts.Add(time.Second), + Message: "Thinking furiously...", + }, + WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ + Healthy: true, + }, + WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusActive, + }}, + Count: 1, + }) + return + } case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Status: codersdk.WorkspaceStatusRunning, - CreatedAt: ts, - UpdatedAt: ts, + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: ts, + UpdatedAt: ts, CurrentState: &codersdk.TaskStateEntry{ State: codersdk.TaskStateWorking, Timestamp: ts.Add(time.Second), Message: "Thinking furiously...", }, + Status: codersdk.TaskStatusActive, }) + return default: t.Errorf("unexpected path: %s", r.URL.Path) } diff --git a/cli/exp_task_test.go b/cli/exp_task_test.go new file mode 100644 index 0000000000000..d2d3728aeb280 --- /dev/null +++ b/cli/exp_task_test.go @@ -0,0 +1,425 @@ +package cli_test + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "slices" + "strings" + "sync" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/xerrors" + + agentapisdk "github.com/coder/agentapi-sdk-go" + "github.com/coder/coder/v2/agent" + "github.com/coder/coder/v2/agent/agenttest" + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/codersdk" + 
"github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/coder/v2/provisioner/echo" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/testutil" +) + +// This test performs an integration-style test for tasks functionality. +// +//nolint:tparallel // The sub-tests of this test must be run sequentially. +func Test_Tasks(t *testing.T) { + t.Parallel() + + // Given: a template configured for tasks + var ( + ctx = testutil.Context(t, testutil.WaitLong) + client = coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner = coderdtest.CreateFirstUser(t, client) + userClient, _ = coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + initMsg = agentapisdk.Message{ + Content: "test task input for " + t.Name(), + Id: 0, + Role: "user", + Time: time.Now().UTC(), + } + authToken = uuid.NewString() + echoAgentAPI = startFakeAgentAPI(t, fakeAgentAPIEcho(ctx, t, initMsg, "hello")) + taskTpl = createAITaskTemplate(t, client, owner.OrganizationID, withAgentToken(authToken), withSidebarURL(echoAgentAPI.URL())) + taskName = strings.ReplaceAll(testutil.GetRandomName(t), "_", "-") + ) + + //nolint:paralleltest // The sub-tests of this test must be run sequentially. 
+ for _, tc := range []struct { + name string + cmdArgs []string + assertFn func(stdout string, userClient *codersdk.Client) + }{ + { + name: "create task", + cmdArgs: []string{"exp", "task", "create", "test task input for " + t.Name(), "--name", taskName, "--template", taskTpl.Name}, + assertFn: func(stdout string, userClient *codersdk.Client) { + require.Contains(t, stdout, taskName, "task name should be in output") + }, + }, + { + name: "list tasks after create", + cmdArgs: []string{"exp", "task", "list", "--output", "json"}, + assertFn: func(stdout string, userClient *codersdk.Client) { + var tasks []codersdk.Task + err := json.NewDecoder(strings.NewReader(stdout)).Decode(&tasks) + require.NoError(t, err, "list output should unmarshal properly") + require.Len(t, tasks, 1, "expected one task") + require.Equal(t, taskName, tasks[0].Name, "task name should match") + require.Equal(t, initMsg.Content, tasks[0].InitialPrompt, "initial prompt should match") + require.True(t, tasks[0].WorkspaceID.Valid, "workspace should be created") + // For the next test, we need to wait for the workspace to be healthy + ws := coderdtest.MustWorkspace(t, userClient, tasks[0].WorkspaceID.UUID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(authToken)) + _ = agenttest.New(t, client.URL, authToken, func(o *agent.Options) { + o.Client = agentClient + }) + coderdtest.NewWorkspaceAgentWaiter(t, userClient, tasks[0].WorkspaceID.UUID).WithContext(ctx).WaitFor(coderdtest.AgentsReady) + }, + }, + { + name: "get task status after create", + cmdArgs: []string{"exp", "task", "status", taskName, "--output", "json"}, + assertFn: func(stdout string, userClient *codersdk.Client) { + var task codersdk.Task + require.NoError(t, json.NewDecoder(strings.NewReader(stdout)).Decode(&task), "should unmarshal task status") + require.Equal(t, task.Name, taskName, "task name should match") + require.Equal(t, 
codersdk.TaskStatusActive, task.Status, "task should be active") + }, + }, + { + name: "send task message", + cmdArgs: []string{"exp", "task", "send", taskName, "hello"}, + // Assertions for this happen in the fake agent API handler. + }, + { + name: "read task logs", + cmdArgs: []string{"exp", "task", "logs", taskName, "--output", "json"}, + assertFn: func(stdout string, userClient *codersdk.Client) { + var logs []codersdk.TaskLogEntry + require.NoError(t, json.NewDecoder(strings.NewReader(stdout)).Decode(&logs), "should unmarshal task logs") + require.Len(t, logs, 3, "should have 3 logs") + require.Equal(t, logs[0].Content, initMsg.Content, "first message should be the init message") + require.Equal(t, logs[0].Type, codersdk.TaskLogTypeInput, "first message should be an input") + require.Equal(t, logs[1].Content, "hello", "second message should be the sent message") + require.Equal(t, logs[1].Type, codersdk.TaskLogTypeInput, "second message should be an input") + require.Equal(t, logs[2].Content, "hello", "third message should be the echoed message") + require.Equal(t, logs[2].Type, codersdk.TaskLogTypeOutput, "third message should be an output") + }, + }, + { + name: "delete task", + cmdArgs: []string{"exp", "task", "delete", taskName, "--yes"}, + assertFn: func(stdout string, userClient *codersdk.Client) { + // The task should eventually no longer show up in the list of tasks + testutil.Eventually(ctx, t, func(ctx context.Context) bool { + expClient := codersdk.NewExperimentalClient(userClient) + tasks, err := expClient.Tasks(ctx, &codersdk.TasksFilter{}) + if !assert.NoError(t, err) { + return false + } + return slices.IndexFunc(tasks, func(task codersdk.Task) bool { + return task.Name == taskName + }) == -1 + }, testutil.IntervalMedium) + }, + }, + } { + t.Run(tc.name, func(t *testing.T) { + var stdout strings.Builder + inv, root := clitest.New(t, tc.cmdArgs...) 
+ inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + require.NoError(t, inv.WithContext(ctx).Run()) + if tc.assertFn != nil { + tc.assertFn(stdout.String(), userClient) + } + }) + } +} + +func fakeAgentAPIEcho(ctx context.Context, t testing.TB, initMsg agentapisdk.Message, want ...string) map[string]http.HandlerFunc { + t.Helper() + var mmu sync.RWMutex + msgs := []agentapisdk.Message{initMsg} + wantCpy := make([]string, len(want)) + copy(wantCpy, want) + t.Cleanup(func() { + mmu.Lock() + defer mmu.Unlock() + if !t.Failed() { + assert.Empty(t, wantCpy, "not all expected messages received: missing %v", wantCpy) + } + }) + writeAgentAPIError := func(w http.ResponseWriter, err error, status int) { + w.WriteHeader(status) + _ = json.NewEncoder(w).Encode(agentapisdk.ErrorModel{ + Errors: ptr.Ref([]agentapisdk.ErrorDetail{ + { + Message: ptr.Ref(err.Error()), + }, + }), + }) + } + return map[string]http.HandlerFunc{ + "/status": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(agentapisdk.GetStatusResponse{ + Status: "stable", + }) + }, + "/messages": func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + mmu.RLock() + defer mmu.RUnlock() + bs, err := json.Marshal(agentapisdk.GetMessagesResponse{ + Messages: msgs, + }) + if err != nil { + writeAgentAPIError(w, err, http.StatusBadRequest) + return + } + _, _ = w.Write(bs) + }, + "/message": func(w http.ResponseWriter, r *http.Request) { + mmu.Lock() + defer mmu.Unlock() + var params agentapisdk.PostMessageParams + w.Header().Set("Content-Type", "application/json") + err := json.NewDecoder(r.Body).Decode(¶ms) + if !assert.NoError(t, err, "decode message") { + writeAgentAPIError(w, err, http.StatusBadRequest) + return + } + + if len(wantCpy) == 0 { + assert.Fail(t, "unexpected message", "received message %v, but no more expected messages", params) + writeAgentAPIError(w, 
xerrors.New("no more expected messages"), http.StatusBadRequest) + return + } + exp := wantCpy[0] + wantCpy = wantCpy[1:] + + if !assert.Equal(t, exp, params.Content, "message content mismatch") { + writeAgentAPIError(w, xerrors.New("unexpected message content: expected "+exp+", got "+params.Content), http.StatusBadRequest) + return + } + + msgs = append(msgs, agentapisdk.Message{ + Id: int64(len(msgs) + 1), + Content: params.Content, + Role: agentapisdk.RoleUser, + Time: time.Now().UTC(), + }) + msgs = append(msgs, agentapisdk.Message{ + Id: int64(len(msgs) + 1), + Content: params.Content, + Role: agentapisdk.RoleAgent, + Time: time.Now().UTC(), + }) + assert.NoError(t, json.NewEncoder(w).Encode(agentapisdk.PostMessageResponse{ + Ok: true, + })) + }, + } +} + +// setupCLITaskTest creates a test workspace with an AI task template and agent, +// with a fake agent API configured with the provided set of handlers. +// Returns the user client and workspace. +func setupCLITaskTest(ctx context.Context, t *testing.T, agentAPIHandlers map[string]http.HandlerFunc) (*codersdk.Client, codersdk.Task) { + t.Helper() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + fakeAPI := startFakeAgentAPI(t, agentAPIHandlers) + + authToken := uuid.NewString() + template := createAITaskTemplate(t, client, owner.OrganizationID, withSidebarURL(fakeAPI.URL()), withAgentToken(authToken)) + + wantPrompt := "test prompt" + exp := codersdk.NewExperimentalClient(userClient) + task, err := exp.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: wantPrompt, + Name: "test-task", + }) + require.NoError(t, err) + + // Wait for the task's underlying workspace to be built + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + workspace, err := 
userClient.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(authToken)) + _ = agenttest.New(t, client.URL, authToken, func(o *agent.Options) { + o.Client = agentClient + }) + + coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID). + WaitFor(coderdtest.AgentsReady) + + return userClient, task +} + +// createAITaskTemplate creates a template configured for AI tasks with a sidebar app. +func createAITaskTemplate(t *testing.T, client *codersdk.Client, orgID uuid.UUID, opts ...aiTemplateOpt) codersdk.Template { + t.Helper() + + opt := aiTemplateOpts{ + authToken: uuid.NewString(), + } + for _, o := range opts { + o(&opt) + } + + taskAppID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, orgID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Name: "example", + Type: "aws_instance", + Agents: []*proto.Agent{ + { + Id: uuid.NewString(), + Name: "example", + Auth: &proto.Agent_Token{ + Token: opt.authToken, + }, + Apps: []*proto.App{ + { + Id: taskAppID.String(), + Slug: "task-sidebar", + DisplayName: "Task Sidebar", + Url: opt.appURL, + }, + }, + }, + }, + }, + }, + AiTasks: []*proto.AITask{ + { + SidebarApp: &proto.AITaskSidebarApp{ + Id: taskAppID.String(), + }, + }, + }, + }, + }, + }, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, orgID, version.ID) + + return template +} + +// fakeAgentAPI implements a fake AgentAPI HTTP server 
for testing. +type fakeAgentAPI struct { + t *testing.T + server *httptest.Server + handlers map[string]http.HandlerFunc + called map[string]bool + mu sync.Mutex +} + +// startFakeAgentAPI starts an HTTP server that implements the AgentAPI endpoints. +// handlers is a map of path -> handler function. +func startFakeAgentAPI(t *testing.T, handlers map[string]http.HandlerFunc) *fakeAgentAPI { + t.Helper() + + fake := &fakeAgentAPI{ + t: t, + handlers: handlers, + called: make(map[string]bool), + } + + mux := http.NewServeMux() + + // Register all provided handlers with call tracking + for path, handler := range handlers { + mux.HandleFunc(path, func(w http.ResponseWriter, r *http.Request) { + fake.mu.Lock() + fake.called[path] = true + fake.mu.Unlock() + handler(w, r) + }) + } + + knownEndpoints := []string{"/status", "/messages", "/message"} + for _, endpoint := range knownEndpoints { + if handlers[endpoint] == nil { + endpoint := endpoint // capture loop variable + mux.HandleFunc(endpoint, func(w http.ResponseWriter, r *http.Request) { + t.Fatalf("unexpected call to %s %s - no handler defined", r.Method, endpoint) + }) + } + } + // Default handler for unknown endpoints should cause the test to fail. 
+ mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + t.Fatalf("unexpected call to %s %s - no handler defined", r.Method, r.URL.Path) + }) + + fake.server = httptest.NewServer(mux) + + // Register cleanup to check that all defined handlers were called + t.Cleanup(func() { + fake.server.Close() + fake.mu.Lock() + for path := range handlers { + if !fake.called[path] { + t.Errorf("handler for %s was defined but never called", path) + } + } + }) + return fake +} + +func (f *fakeAgentAPI) URL() string { + return f.server.URL +} + +type aiTemplateOpts struct { + appURL string + authToken string +} + +type aiTemplateOpt func(*aiTemplateOpts) + +func withSidebarURL(url string) aiTemplateOpt { + return func(o *aiTemplateOpts) { o.appURL = url } +} + +func withAgentToken(token string) aiTemplateOpt { + return func(o *aiTemplateOpts) { o.authToken = token } +} diff --git a/cli/provisionerjobs.go b/cli/provisionerjobs.go index 3ce7da20b7dcb..ee29476ef09dd 100644 --- a/cli/provisionerjobs.go +++ b/cli/provisionerjobs.go @@ -43,8 +43,9 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command { cliui.TableFormat([]provisionerJobRow{}, []string{"created at", "id", "type", "template display name", "status", "queue", "tags"}), cliui.JSONFormat(), ) - status []string - limit int64 + status []string + limit int64 + initiator string ) cmd := &serpent.Command{ @@ -65,9 +66,18 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command { return xerrors.Errorf("current organization: %w", err) } + if initiator != "" { + user, err := client.User(ctx, initiator) + if err != nil { + return xerrors.Errorf("initiator not found: %s", initiator) + } + initiator = user.ID.String() + } + jobs, err := client.OrganizationProvisionerJobs(ctx, org.ID, &codersdk.OrganizationProvisionerJobsOptions{ - Status: slice.StringEnums[codersdk.ProvisionerJobStatus](status), - Limit: int(limit), + Status: slice.StringEnums[codersdk.ProvisionerJobStatus](status), + Limit: int(limit), + 
Initiator: initiator, }) if err != nil { return xerrors.Errorf("list provisioner jobs: %w", err) @@ -122,6 +132,13 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command { Default: "50", Value: serpent.Int64Of(&limit), }, + { + Flag: "initiator", + FlagShorthand: "i", + Env: "CODER_PROVISIONER_JOB_LIST_INITIATOR", + Description: "Filter by initiator (user ID or username).", + Value: serpent.StringOf(&initiator), + }, }...) orgContext.AttachOptions(cmd) diff --git a/cli/provisionerjobs_test.go b/cli/provisionerjobs_test.go index 4db42e8e3c9e7..57072a6156738 100644 --- a/cli/provisionerjobs_test.go +++ b/cli/provisionerjobs_test.go @@ -5,6 +5,7 @@ import ( "database/sql" "encoding/json" "fmt" + "strings" "testing" "time" @@ -26,33 +27,32 @@ import ( func TestProvisionerJobs(t *testing.T) { t.Parallel() - db, ps := dbtestutil.NewDB(t) - client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{ - IncludeProvisionerDaemon: false, - Database: db, - Pubsub: ps, - }) - owner := coderdtest.CreateFirstUser(t, client) - templateAdminClient, templateAdmin := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID)) - memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - - // These CLI tests are related to provisioner job CRUD operations and as such - // do not require the overhead of starting a provisioner. Other provisioner job - // functionalities (acquisition etc.) are tested elsewhere. 
- template := dbgen.Template(t, db, database.Template{ - OrganizationID: owner.OrganizationID, - CreatedBy: owner.UserID, - AllowUserCancelWorkspaceJobs: true, - }) - version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ - OrganizationID: owner.OrganizationID, - CreatedBy: owner.UserID, - TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, - }) - t.Run("Cancel", func(t *testing.T) { t.Parallel() + db, ps := dbtestutil.NewDB(t) + client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{ + IncludeProvisionerDaemon: false, + Database: db, + Pubsub: ps, + }) + owner := coderdtest.CreateFirstUser(t, client) + templateAdminClient, templateAdmin := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID)) + memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // These CLI tests are related to provisioner job CRUD operations and as such + // do not require the overhead of starting a provisioner. Other provisioner job + // functionalities (acquisition etc.) are tested elsewhere. + template := dbgen.Template(t, db, database.Template{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + AllowUserCancelWorkspaceJobs: true, + }) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + }) // Test helper to create a provisioner job of a given type with a given input. 
prepareJob := func(t *testing.T, jobType database.ProvisionerJobType, input json.RawMessage) database.ProvisionerJob { t.Helper() @@ -178,4 +178,148 @@ func TestProvisionerJobs(t *testing.T) { }) } }) + + t.Run("List", func(t *testing.T) { + t.Parallel() + + db, ps := dbtestutil.NewDB(t) + client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{ + IncludeProvisionerDaemon: false, + Database: db, + Pubsub: ps, + }) + owner := coderdtest.CreateFirstUser(t, client) + _, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // These CLI tests are related to provisioner job CRUD operations and as such + // do not require the overhead of starting a provisioner. Other provisioner job + // functionalities (acquisition etc.) are tested elsewhere. + template := dbgen.Template(t, db, database.Template{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + AllowUserCancelWorkspaceJobs: true, + }) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + }) + // Create some test jobs + job1 := dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{ + OrganizationID: owner.OrganizationID, + InitiatorID: owner.UserID, + Type: database.ProvisionerJobTypeTemplateVersionImport, + Input: []byte(`{"template_version_id":"` + version.ID.String() + `"}`), + Tags: database.StringMap{provisionersdk.TagScope: provisionersdk.ScopeOrganization}, + }) + + job2 := dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{ + OrganizationID: owner.OrganizationID, + InitiatorID: member.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + Input: []byte(`{"workspace_build_id":"` + uuid.New().String() + `"}`), + Tags: database.StringMap{provisionersdk.TagScope: provisionersdk.ScopeOrganization}, + }) + // Test basic list command + t.Run("Basic", func(t *testing.T) { + t.Parallel() + 
+ inv, root := clitest.New(t, "provisioner", "jobs", "list") + clitest.SetupConfig(t, client, root) + var buf bytes.Buffer + inv.Stdout = &buf + err := inv.Run() + require.NoError(t, err) + + // Should contain both jobs + output := buf.String() + assert.Contains(t, output, job1.ID.String()) + assert.Contains(t, output, job2.ID.String()) + }) + + // Test list with JSON output + t.Run("JSON", func(t *testing.T) { + t.Parallel() + + inv, root := clitest.New(t, "provisioner", "jobs", "list", "--output", "json") + clitest.SetupConfig(t, client, root) + var buf bytes.Buffer + inv.Stdout = &buf + err := inv.Run() + require.NoError(t, err) + + // Parse JSON output + var jobs []codersdk.ProvisionerJob + err = json.Unmarshal(buf.Bytes(), &jobs) + require.NoError(t, err) + + // Should contain both jobs + jobIDs := make([]uuid.UUID, len(jobs)) + for i, job := range jobs { + jobIDs[i] = job.ID + } + assert.Contains(t, jobIDs, job1.ID) + assert.Contains(t, jobIDs, job2.ID) + }) + + // Test list with limit + t.Run("Limit", func(t *testing.T) { + t.Parallel() + + inv, root := clitest.New(t, "provisioner", "jobs", "list", "--limit", "1") + clitest.SetupConfig(t, client, root) + var buf bytes.Buffer + inv.Stdout = &buf + err := inv.Run() + require.NoError(t, err) + + // Should contain at most 1 job + output := buf.String() + jobCount := 0 + if strings.Contains(output, job1.ID.String()) { + jobCount++ + } + if strings.Contains(output, job2.ID.String()) { + jobCount++ + } + assert.LessOrEqual(t, jobCount, 1) + }) + + // Test list with initiator filter + t.Run("InitiatorFilter", func(t *testing.T) { + t.Parallel() + + // Get owner user details to access username + ctx := testutil.Context(t, testutil.WaitShort) + ownerUser, err := client.User(ctx, owner.UserID.String()) + require.NoError(t, err) + + // Test filtering by initiator (using username) + inv, root := clitest.New(t, "provisioner", "jobs", "list", "--initiator", ownerUser.Username) + clitest.SetupConfig(t, client, root) + var 
buf bytes.Buffer + inv.Stdout = &buf + err = inv.Run() + require.NoError(t, err) + + // Should only contain job1 (initiated by owner) + output := buf.String() + assert.Contains(t, output, job1.ID.String()) + assert.NotContains(t, output, job2.ID.String()) + }) + + // Test list with invalid user + t.Run("InvalidUser", func(t *testing.T) { + t.Parallel() + + // Test with non-existent user + inv, root := clitest.New(t, "provisioner", "jobs", "list", "--initiator", "nonexistent-user") + clitest.SetupConfig(t, client, root) + var buf bytes.Buffer + inv.Stdout = &buf + err := inv.Run() + require.Error(t, err) + assert.Contains(t, err.Error(), "initiator not found: nonexistent-user") + }) + }) } diff --git a/cli/schedule.go b/cli/schedule.go index 15f837bc16779..a4b02d6d8be9e 100644 --- a/cli/schedule.go +++ b/cli/schedule.go @@ -176,6 +176,22 @@ func (r *RootCmd) scheduleStart() *serpent.Command { } schedStr = ptr.Ref(sched.String()) + + // Check if the template has autostart requirements that may conflict + // with the user's schedule. 
+ template, err := client.Template(inv.Context(), workspace.TemplateID) + if err != nil { + return xerrors.Errorf("get template: %w", err) + } + + if len(template.AutostartRequirement.DaysOfWeek) > 0 { + _, _ = fmt.Fprintf( + inv.Stderr, + "Warning: your workspace template restricts autostart to the following days: %s.\n"+ + "Your workspace may only autostart on these days.\n", + strings.Join(template.AutostartRequirement.DaysOfWeek, ", "), + ) + } } err = client.UpdateWorkspaceAutostart(inv.Context(), workspace.ID, codersdk.UpdateWorkspaceAutostartRequest{ diff --git a/cli/schedule_test.go b/cli/schedule_test.go index b161f41cbcebc..bc473279f7ca4 100644 --- a/cli/schedule_test.go +++ b/cli/schedule_test.go @@ -373,3 +373,67 @@ func TestScheduleOverride(t *testing.T) { }) } } + +//nolint:paralleltest // t.Setenv +func TestScheduleStart_TemplateAutostartRequirement(t *testing.T) { + t.Setenv("TZ", "UTC") + loc, err := tz.TimezoneIANA() + require.NoError(t, err) + require.Equal(t, "UTC", loc.String()) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + // Update template to have autostart requirement + // Note: In AGPL, this will be ignored and all days will be allowed (enterprise feature). 
+ template, err = client.UpdateTemplateMeta(context.Background(), template.ID, codersdk.UpdateTemplateMeta{ + AutostartRequirement: &codersdk.TemplateAutostartRequirement{ + DaysOfWeek: []string{"monday", "wednesday", "friday"}, + }, + }) + require.NoError(t, err) + + // Verify the template - in AGPL, AutostartRequirement will have all days (enterprise feature) + template, err = client.Template(context.Background(), template.ID) + require.NoError(t, err) + require.NotEmpty(t, template.AutostartRequirement.DaysOfWeek, "template should have autostart requirement days") + + workspace := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + t.Run("ShowsWarning", func(t *testing.T) { + // When: user sets autostart schedule + inv, root := clitest.New(t, + "schedule", "start", workspace.Name, "9:30AM", "Mon-Fri", + ) + clitest.SetupConfig(t, client, root) + pty := ptytest.New(t).Attach(inv) + require.NoError(t, inv.Run()) + + // Then: warning should be shown + // In AGPL, this will show all days (enterprise feature defaults to all days allowed) + pty.ExpectMatch("Warning") + pty.ExpectMatch("may only autostart") + }) + + t.Run("NoWarningWhenManual", func(t *testing.T) { + // When: user sets manual schedule + inv, root := clitest.New(t, + "schedule", "start", workspace.Name, "manual", + ) + clitest.SetupConfig(t, client, root) + + var stderrBuf bytes.Buffer + inv.Stderr = &stderrBuf + + require.NoError(t, inv.Run()) + + // Then: no warning should be shown on stderr + stderrOutput := stderrBuf.String() + require.NotContains(t, stderrOutput, "Warning") + }) +} diff --git a/cli/server.go b/cli/server.go index 6edd14b7d5f1f..b12f5e0189c47 100644 --- a/cli/server.go +++ b/cli/server.go @@ -29,6 +29,7 @@ import ( "strings" "sync" "sync/atomic" + "testing" "time" "github.com/charmbracelet/lipgloss" @@ -1377,6 +1378,7 @@ func IsLocalURL(ctx context.Context, u *url.URL) (bool, error) { } func 
shutdownWithTimeout(shutdown func(context.Context) error, timeout time.Duration) error { + // nolint:gocritic // The magic number is parameterized. ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() return shutdown(ctx) @@ -2134,50 +2136,83 @@ func startBuiltinPostgres(ctx context.Context, cfg config.Root, logger slog.Logg return "", nil, xerrors.New("The built-in PostgreSQL cannot run as the root user. Create a non-root user and run again!") } - // Ensure a password and port have been generated! - connectionURL, err := embeddedPostgresURL(cfg) - if err != nil { - return "", nil, err - } - pgPassword, err := cfg.PostgresPassword().Read() - if err != nil { - return "", nil, xerrors.Errorf("read postgres password: %w", err) - } - pgPortRaw, err := cfg.PostgresPort().Read() - if err != nil { - return "", nil, xerrors.Errorf("read postgres port: %w", err) - } - pgPort, err := strconv.ParseUint(pgPortRaw, 10, 16) - if err != nil { - return "", nil, xerrors.Errorf("parse postgres port: %w", err) - } - cachePath := filepath.Join(cfg.PostgresPath(), "cache") if customCacheDir != "" { cachePath = filepath.Join(customCacheDir, "postgres") } stdlibLogger := slog.Stdlib(ctx, logger.Named("postgres"), slog.LevelDebug) - ep := embeddedpostgres.NewDatabase( - embeddedpostgres.DefaultConfig(). - Version(embeddedpostgres.V13). - BinariesPath(filepath.Join(cfg.PostgresPath(), "bin")). - // Default BinaryRepositoryURL repo1.maven.org is flaky. - BinaryRepositoryURL("https://repo.maven.apache.org/maven2"). - DataPath(filepath.Join(cfg.PostgresPath(), "data")). - RuntimePath(filepath.Join(cfg.PostgresPath(), "runtime")). - CachePath(cachePath). - Username("coder"). - Password(pgPassword). - Database("coder"). - Encoding("UTF8"). - Port(uint32(pgPort)). - Logger(stdlibLogger.Writer()), - ) - err = ep.Start() - if err != nil { - return "", nil, xerrors.Errorf("Failed to start built-in PostgreSQL. 
Optionally, specify an external deployment with `--postgres-url`: %w", err) + + // If the port is not defined, an available port will be found dynamically. + maxAttempts := 1 + _, err = cfg.PostgresPort().Read() + retryPortDiscovery := errors.Is(err, os.ErrNotExist) && testing.Testing() + if retryPortDiscovery { + // There is no way to tell Postgres to use an ephemeral port, so in order to avoid + // flaky tests in CI we need to retry EmbeddedPostgres.Start in case of a race + // condition where the port we quickly listen on and close in embeddedPostgresURL() + // is not free by the time the embedded postgres starts up. This maximum should + cover most cases where port conflicts occur in CI and cause flaky tests. + maxAttempts = 3 + } + + var startErr error + for attempt := 0; attempt < maxAttempts; attempt++ { + // Ensure a password and port have been generated. + connectionURL, err := embeddedPostgresURL(cfg) + if err != nil { + return "", nil, err + } + pgPassword, err := cfg.PostgresPassword().Read() + if err != nil { + return "", nil, xerrors.Errorf("read postgres password: %w", err) + } + pgPortRaw, err := cfg.PostgresPort().Read() + if err != nil { + return "", nil, xerrors.Errorf("read postgres port: %w", err) + } + pgPort, err := strconv.ParseUint(pgPortRaw, 10, 16) + if err != nil { + return "", nil, xerrors.Errorf("parse postgres port: %w", err) + } + + ep := embeddedpostgres.NewDatabase( + embeddedpostgres.DefaultConfig(). + Version(embeddedpostgres.V13). + BinariesPath(filepath.Join(cfg.PostgresPath(), "bin")). + // Default BinaryRepositoryURL repo1.maven.org is flaky. + BinaryRepositoryURL("https://repo.maven.apache.org/maven2"). + DataPath(filepath.Join(cfg.PostgresPath(), "data")). + RuntimePath(filepath.Join(cfg.PostgresPath(), "runtime")). + CachePath(cachePath). + Username("coder"). + Password(pgPassword). + Database("coder"). + Encoding("UTF8"). + Port(uint32(pgPort)). 
+ Logger(stdlibLogger.Writer()), + ) + + startErr = ep.Start() + if startErr == nil { + return connectionURL, ep.Stop, nil + } + + logger.Warn(ctx, "failed to start embedded postgres", + slog.F("attempt", attempt+1), + slog.F("max_attempts", maxAttempts), + slog.F("port", pgPort), + slog.Error(startErr), + ) + + if retryPortDiscovery { + // Since a retry is needed, we wipe the port stored here at the beginning of the loop. + _ = cfg.PostgresPort().Delete() + } } - return connectionURL, ep.Stop, nil + + return "", nil, xerrors.Errorf("failed to start built-in PostgreSQL after %d attempts. "+ + "Optionally, specify an external deployment. See https://coder.com/docs/tutorials/external-database "+ + "for more details: %w", maxAttempts, startErr) } func ConfigureHTTPClient(ctx context.Context, clientCertFile, clientKeyFile string, tlsClientCAFile string) (context.Context, *http.Client, error) { @@ -2286,7 +2321,7 @@ func ConnectToPostgres(ctx context.Context, logger slog.Logger, driver string, d var err error var sqlDB *sql.DB dbNeedsClosing := true - // Try to connect for 30 seconds. + // nolint:gocritic // Try to connect for 30 seconds. ctx, cancel := context.WithTimeout(ctx, 30*time.Second) defer cancel() @@ -2382,6 +2417,7 @@ func ConnectToPostgres(ctx context.Context, logger slog.Logger, driver string, d } func pingPostgres(ctx context.Context, db *sql.DB) error { + // nolint:gocritic // This is a reasonable magic number for a ping timeout. 
ctx, cancel := context.WithTimeout(ctx, 5*time.Second) defer cancel() return db.PingContext(ctx) diff --git a/cli/server_regenerate_vapid_keypair_test.go b/cli/server_regenerate_vapid_keypair_test.go index cbaff3681df11..6c9603e00929c 100644 --- a/cli/server_regenerate_vapid_keypair_test.go +++ b/cli/server_regenerate_vapid_keypair_test.go @@ -17,9 +17,6 @@ import ( func TestRegenerateVapidKeypair(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test is only supported on postgres") - } t.Run("NoExistingVAPIDKeys", func(t *testing.T) { t.Parallel() diff --git a/cli/server_test.go b/cli/server_test.go index 18d59f7811fc3..d6278fc7669c0 100644 --- a/cli/server_test.go +++ b/cli/server_test.go @@ -348,9 +348,6 @@ func TestServer(t *testing.T) { runGitHubProviderTest := func(t *testing.T, tc testCase) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test requires postgres") - } ctx, cancelFunc := context.WithCancel(testutil.Context(t, testutil.WaitLong)) defer cancelFunc() @@ -1254,8 +1251,9 @@ func TestServer(t *testing.T) { t.Logf("error creating request: %s", err.Error()) return false } + client := &http.Client{} // nolint:bodyclose - res, err := http.DefaultClient.Do(req) + res, err := client.Do(req) if err != nil { t.Logf("error hitting prometheus endpoint: %s", err.Error()) return false @@ -1316,8 +1314,9 @@ func TestServer(t *testing.T) { t.Logf("error creating request: %s", err.Error()) return false } + client := &http.Client{} // nolint:bodyclose - res, err := http.DefaultClient.Do(req) + res, err := client.Do(req) if err != nil { t.Logf("error hitting prometheus endpoint: %s", err.Error()) return false @@ -2140,10 +2139,6 @@ func TestServerYAMLConfig(t *testing.T) { func TestConnectToPostgres(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test does not make sense without postgres") - } - t.Run("Migrate", func(t *testing.T) { t.Parallel() @@ -2254,10 +2249,6 @@ type runServerOpts 
struct { func TestServer_TelemetryDisabled_FinalReport(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - telemetryServerURL, deployment, snapshot := mockTelemetryServer(t) dbConnURL, err := dbtestutil.Open(t) require.NoError(t, err) diff --git a/cli/sharing_test.go b/cli/sharing_test.go index 71e153c64e4f6..19e185347027b 100644 --- a/cli/sharing_test.go +++ b/cli/sharing_test.go @@ -54,6 +54,7 @@ func TestSharingShare(t *testing.T) { MinimalUser: codersdk.MinimalUser{ ID: toShareWithUser.ID, Username: toShareWithUser.Username, + Name: toShareWithUser.Name, AvatarURL: toShareWithUser.AvatarURL, }, Role: codersdk.WorkspaceRole("use"), @@ -103,6 +104,7 @@ func TestSharingShare(t *testing.T) { MinimalUser: codersdk.MinimalUser{ ID: toShareWithUser1.ID, Username: toShareWithUser1.Username, + Name: toShareWithUser1.Name, AvatarURL: toShareWithUser1.AvatarURL, }, Role: codersdk.WorkspaceRoleUse, @@ -111,6 +113,7 @@ func TestSharingShare(t *testing.T) { MinimalUser: codersdk.MinimalUser{ ID: toShareWithUser2.ID, Username: toShareWithUser2.Username, + Name: toShareWithUser2.Name, AvatarURL: toShareWithUser2.AvatarURL, }, Role: codersdk.WorkspaceRoleUse, @@ -155,6 +158,7 @@ func TestSharingShare(t *testing.T) { MinimalUser: codersdk.MinimalUser{ ID: toShareWithUser.ID, Username: toShareWithUser.Username, + Name: toShareWithUser.Name, AvatarURL: toShareWithUser.AvatarURL, }, Role: codersdk.WorkspaceRoleAdmin, diff --git a/cli/ssh.go b/cli/ssh.go index 323d2913ae370..37000da1786de 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -109,6 +109,51 @@ func (r *RootCmd) ssh() *serpent.Command { } }, ), + CompletionHandler: func(inv *serpent.Invocation) []string { + client, err := r.InitClient(inv) + if err != nil { + return []string{} + } + + res, err := client.Workspaces(inv.Context(), codersdk.WorkspaceFilter{ + Owner: codersdk.Me, + }) + if err != nil { + return []string{} + } + + var mu sync.Mutex + var completions 
[]string + var wg sync.WaitGroup + for _, ws := range res.Workspaces { + wg.Add(1) + go func() { + defer wg.Done() + resources, err := client.TemplateVersionResources(inv.Context(), ws.LatestBuild.TemplateVersionID) + if err != nil { + return + } + var agents []codersdk.WorkspaceAgent + for _, resource := range resources { + agents = append(agents, resource.Agents...) + } + + mu.Lock() + defer mu.Unlock() + if len(agents) == 1 { + completions = append(completions, ws.Name) + } else { + for _, agent := range agents { + completions = append(completions, fmt.Sprintf("%s.%s", ws.Name, agent.Name)) + } + } + }() + } + wg.Wait() + + slices.Sort(completions) + return completions + }, Handler: func(inv *serpent.Invocation) (retErr error) { client, err := r.InitClient(inv) if err != nil { @@ -906,6 +951,8 @@ func GetWorkspaceAndAgent(ctx context.Context, inv *serpent.Invocation, client * return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, nil, xerrors.Errorf("start workspace with active template version: %w", err) } _, _ = fmt.Fprintln(inv.Stdout, "Unable to start the workspace with template version from last build. Your workspace has been updated to the current active template version.") + default: + return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, nil, xerrors.Errorf("start workspace with current template version: %w", err) } } else if err != nil { return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, nil, xerrors.Errorf("start workspace with current template version: %w", err) diff --git a/cli/ssh_test.go b/cli/ssh_test.go index be3166cc4d32a..7ce9d85258fa0 100644 --- a/cli/ssh_test.go +++ b/cli/ssh_test.go @@ -1242,7 +1242,8 @@ func TestSSH(t *testing.T) { // true exits the loop. 
return true } - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) if err != nil { t.Logf("HTTP GET http://localhost:8222/ %s", err) return false @@ -2446,3 +2447,99 @@ func tempDirUnixSocket(t *testing.T) string { return t.TempDir() } + +func TestSSH_Completion(t *testing.T) { + t.Parallel() + + t.Run("SingleAgent", func(t *testing.T) { + t.Parallel() + + client, workspace, agentToken := setupWorkspaceForAgent(t) + _ = agenttest.New(t, client.URL, agentToken) + coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID) + + var stdout bytes.Buffer + inv, root := clitest.New(t, "ssh", "") + inv.Stdout = &stdout + inv.Environ.Set("COMPLETION_MODE", "1") + clitest.SetupConfig(t, client, root) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitMedium) + defer cancel() + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + // For single-agent workspaces, the only completion should be the + // bare workspace name. 
+ output := stdout.String() + t.Logf("Completion output: %q", output) + require.Contains(t, output, workspace.Name) + }) + + t.Run("MultiAgent", func(t *testing.T) { + t.Parallel() + + client, store := coderdtest.NewWithDatabase(t, nil) + first := coderdtest.CreateFirstUser(t, client) + userClient, user := coderdtest.CreateAnotherUserMutators(t, client, first.OrganizationID, nil, func(r *codersdk.CreateUserRequestWithOrgs) { + r.Username = "multiuser" + }) + + r := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "multiworkspace", + OrganizationID: first.OrganizationID, + OwnerID: user.ID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + return []*proto.Agent{ + { + Name: "agent1", + Auth: &proto.Agent_Token{}, + }, + { + Name: "agent2", + Auth: &proto.Agent_Token{}, + }, + } + }).Do() + + var stdout bytes.Buffer + inv, root := clitest.New(t, "ssh", "") + inv.Stdout = &stdout + inv.Environ.Set("COMPLETION_MODE", "1") + clitest.SetupConfig(t, userClient, root) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitMedium) + defer cancel() + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + // For multi-agent workspaces, completions should include the + // workspace.agent format but NOT the bare workspace name. 
+ output := stdout.String() + t.Logf("Completion output: %q", output) + lines := strings.Split(strings.TrimSpace(output), "\n") + require.NotContains(t, lines, r.Workspace.Name) + require.Contains(t, output, r.Workspace.Name+".agent1") + require.Contains(t, output, r.Workspace.Name+".agent2") + }) + + t.Run("NetworkError", func(t *testing.T) { + t.Parallel() + + var stdout bytes.Buffer + inv, _ := clitest.New(t, "ssh", "") + inv.Stdout = &stdout + inv.Environ.Set("COMPLETION_MODE", "1") + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + output := stdout.String() + require.Empty(t, output) + }) +} diff --git a/cli/templatepush.go b/cli/templatepush.go index 7a21a0f8defad..03e1ca1cee88c 100644 --- a/cli/templatepush.go +++ b/cli/templatepush.go @@ -9,6 +9,7 @@ import ( "os" "path/filepath" "slices" + "strconv" "strings" "time" @@ -461,10 +462,14 @@ func createValidTemplateVersion(inv *serpent.Invocation, args createValidTemplat }) if err != nil { var jobErr *cliui.ProvisionerJobError - if errors.As(err, &jobErr) && !codersdk.JobIsMissingParameterErrorCode(jobErr.Code) { - return nil, err + if errors.As(err, &jobErr) { + if codersdk.JobIsMissingRequiredTemplateVariableErrorCode(jobErr.Code) { + return handleMissingTemplateVariables(inv, args, version.ID) + } + if !codersdk.JobIsMissingParameterErrorCode(jobErr.Code) { + return nil, err + } } - return nil, err } version, err = client.TemplateVersion(inv.Context(), version.ID) @@ -528,3 +533,153 @@ func prettyDirectoryPath(dir string) string { } return prettyDir } + +func handleMissingTemplateVariables(inv *serpent.Invocation, args createValidTemplateVersionArgs, failedVersionID uuid.UUID) (*codersdk.TemplateVersion, error) { + client := args.Client + + templateVariables, err := client.TemplateVersionVariables(inv.Context(), failedVersionID) + if err != nil { + return nil, xerrors.Errorf("fetch template 
variables: %w", err) + } + + existingValues := make(map[string]string) + for _, v := range args.UserVariableValues { + existingValues[v.Name] = v.Value + } + + var missingVariables []codersdk.TemplateVersionVariable + for _, variable := range templateVariables { + if !variable.Required { + continue + } + + if existingValue, exists := existingValues[variable.Name]; exists && existingValue != "" { + continue + } + + // Only prompt for variables that don't have a default value or have a redacted default + // Sensitive variables have a default value of "*redacted*" + // See: https://github.com/coder/coder/blob/a78790c632974e04babfef6de0e2ddf044787a7a/coderd/provisionerdserver/provisionerdserver.go#L3206 + if variable.DefaultValue == "" || (variable.Sensitive && variable.DefaultValue == "*redacted*") { + missingVariables = append(missingVariables, variable) + } + } + + if len(missingVariables) == 0 { + return nil, xerrors.New("no missing required variables found") + } + + _, _ = fmt.Fprintf(inv.Stderr, "Found %d missing required variables:\n", len(missingVariables)) + for _, v := range missingVariables { + _, _ = fmt.Fprintf(inv.Stderr, " - %s (%s): %s\n", v.Name, v.Type, v.Description) + } + + _, _ = fmt.Fprintln(inv.Stderr, "\nThe template requires values for the following variables:") + + var promptedValues []codersdk.VariableValue + for _, variable := range missingVariables { + value, err := promptForTemplateVariable(inv, variable) + if err != nil { + return nil, xerrors.Errorf("prompt for variable %q: %w", variable.Name, err) + } + promptedValues = append(promptedValues, codersdk.VariableValue{ + Name: variable.Name, + Value: value, + }) + } + + combinedValues := codersdk.CombineVariableValues(args.UserVariableValues, promptedValues) + + _, _ = fmt.Fprintln(inv.Stderr, "\nRetrying template build with provided variables...") + + retryArgs := args + retryArgs.UserVariableValues = combinedValues + + return createValidTemplateVersion(inv, retryArgs) +} + +func 
promptForTemplateVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) { + displayVariableInfo(inv, variable) + + switch variable.Type { + case "bool": + return promptForBoolVariable(inv, variable) + case "number": + return promptForNumberVariable(inv, variable) + default: + return promptForStringVariable(inv, variable) + } +} + +func displayVariableInfo(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) { + _, _ = fmt.Fprintf(inv.Stderr, "var.%s", cliui.Bold(variable.Name)) + if variable.Required { + _, _ = fmt.Fprint(inv.Stderr, pretty.Sprint(cliui.DefaultStyles.Error, " (required)")) + } + if variable.Sensitive { + _, _ = fmt.Fprint(inv.Stderr, pretty.Sprint(cliui.DefaultStyles.Warn, ", sensitive")) + } + _, _ = fmt.Fprintln(inv.Stderr, "") + + if variable.Description != "" { + _, _ = fmt.Fprintf(inv.Stderr, " Description: %s\n", variable.Description) + } + _, _ = fmt.Fprintf(inv.Stderr, " Type: %s\n", variable.Type) + _, _ = fmt.Fprintf(inv.Stderr, " Current value: %s\n", pretty.Sprint(cliui.DefaultStyles.Placeholder, "")) +} + +func promptForBoolVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) { + defaultValue := variable.DefaultValue + if defaultValue == "" { + defaultValue = "false" + } + + return cliui.Select(inv, cliui.SelectOptions{ + Options: []string{"true", "false"}, + Default: defaultValue, + Message: "Select value:", + }) +} + +func promptForNumberVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) { + prompt := "Enter value:" + if !variable.Required && variable.DefaultValue != "" { + prompt = fmt.Sprintf("Enter value (default: %q):", variable.DefaultValue) + } + + return cliui.Prompt(inv, cliui.PromptOptions{ + Text: prompt, + Default: variable.DefaultValue, + Validate: createVariableValidator(variable), + }) +} + +func promptForStringVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) 
(string, error) { + prompt := "Enter value:" + if !variable.Sensitive { + if !variable.Required && variable.DefaultValue != "" { + prompt = fmt.Sprintf("Enter value (default: %q):", variable.DefaultValue) + } + } + + return cliui.Prompt(inv, cliui.PromptOptions{ + Text: prompt, + Default: variable.DefaultValue, + Secret: variable.Sensitive, + Validate: createVariableValidator(variable), + }) +} + +func createVariableValidator(variable codersdk.TemplateVersionVariable) func(string) error { + return func(s string) error { + if variable.Required && s == "" && variable.DefaultValue == "" { + return xerrors.New("value is required") + } + if variable.Type == "number" && s != "" { + if _, err := strconv.ParseFloat(s, 64); err != nil { + return xerrors.Errorf("must be a valid number, got: %q", s) + } + } + return nil + } +} diff --git a/cli/templatepush_test.go b/cli/templatepush_test.go index 7c8007c96a210..28c5adc20f213 100644 --- a/cli/templatepush_test.go +++ b/cli/templatepush_test.go @@ -852,54 +852,6 @@ func TestTemplatePush(t *testing.T) { require.Equal(t, "foobar", templateVariables[1].Value) }) - t.Run("VariableIsRequiredButNotProvided", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - owner := coderdtest.CreateFirstUser(t, client) - templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) - - templateVersion := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, createEchoResponsesWithTemplateVariables(initialTemplateVariables)) - _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, templateVersion.ID) - template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, templateVersion.ID) - - // Test the cli command. 
- //nolint:gocritic - modifiedTemplateVariables := append(initialTemplateVariables, - &proto.TemplateVariable{ - Name: "second_variable", - Description: "This is the second variable.", - Type: "string", - Required: true, - }, - ) - source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(modifiedTemplateVariables)) - inv, root := clitest.New(t, "templates", "push", template.Name, "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho), "--name", "example") - clitest.SetupConfig(t, templateAdmin, root) - pty := ptytest.New(t) - inv.Stdin = pty.Input() - inv.Stdout = pty.Output() - - execDone := make(chan error) - go func() { - execDone <- inv.Run() - }() - - matches := []struct { - match string - write string - }{ - {match: "Upload", write: "yes"}, - } - for _, m := range matches { - pty.ExpectMatch(m.match) - pty.WriteLine(m.write) - } - - wantErr := <-execDone - require.Error(t, wantErr) - require.Contains(t, wantErr.Error(), "required template variables need values") - }) - t.Run("VariableIsOptionalButNotProvided", func(t *testing.T) { t.Parallel() client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) @@ -1115,6 +1067,240 @@ func TestTemplatePush(t *testing.T) { require.Len(t, templateVersions, 2) require.Equal(t, "example", templateVersions[1].Name) }) + + t.Run("PromptForDifferentRequiredTypes", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + templateVariables := []*proto.TemplateVariable{ + { + Name: "string_var", + Description: "A string variable", + Type: "string", + Required: true, + }, + { + Name: "number_var", + Description: "A number variable", + Type: "number", + Required: true, + }, + { + Name: "bool_var", + Description: "A boolean 
variable", + Type: "bool", + Required: true, + }, + { + Name: "sensitive_var", + Description: "A sensitive variable", + Type: "string", + Required: true, + Sensitive: true, + }, + } + + source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables)) + inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho)) + clitest.SetupConfig(t, templateAdmin, root) + pty := ptytest.New(t).Attach(inv) + + execDone := make(chan error) + go func() { + execDone <- inv.Run() + }() + + // Select "Yes" for the "Upload " prompt + pty.ExpectMatch("Upload") + pty.WriteLine("yes") + + pty.ExpectMatch("var.string_var") + pty.ExpectMatch("Enter value:") + pty.WriteLine("test-string") + + pty.ExpectMatch("var.number_var") + pty.ExpectMatch("Enter value:") + pty.WriteLine("42") + + // Boolean variable automatically selects the first option ("true") + pty.ExpectMatch("var.bool_var") + + pty.ExpectMatch("var.sensitive_var") + pty.ExpectMatch("Enter value:") + pty.WriteLine("secret-value") + + require.NoError(t, <-execDone) + }) + + t.Run("ValidateNumberInput", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + templateVariables := []*proto.TemplateVariable{ + { + Name: "number_var", + Description: "A number that requires validation", + Type: "number", + Required: true, + }, + } + + source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables)) + inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho)) + clitest.SetupConfig(t, templateAdmin, root) + pty := ptytest.New(t).Attach(inv) + + execDone 
:= make(chan error) + go func() { + execDone <- inv.Run() + }() + + // Select "Yes" for the "Upload " prompt + pty.ExpectMatch("Upload") + pty.WriteLine("yes") + + pty.ExpectMatch("var.number_var") + + pty.WriteLine("not-a-number") + pty.ExpectMatch("must be a valid number") + + pty.WriteLine("123.45") + + require.NoError(t, <-execDone) + }) + + t.Run("DontPromptForDefaultValues", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + templateVariables := []*proto.TemplateVariable{ + { + Name: "with_default", + Type: "string", + Required: true, + DefaultValue: "default-value", + }, + { + Name: "without_default", + Type: "string", + Required: true, + }, + } + + source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables)) + inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho)) + clitest.SetupConfig(t, templateAdmin, root) + pty := ptytest.New(t).Attach(inv) + + execDone := make(chan error) + go func() { + execDone <- inv.Run() + }() + + // Select "Yes" for the "Upload " prompt + pty.ExpectMatch("Upload") + pty.WriteLine("yes") + + pty.ExpectMatch("var.without_default") + pty.WriteLine("test-value") + + require.NoError(t, <-execDone) + }) + + t.Run("VariableSourcesPriority", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + templateVariables := []*proto.TemplateVariable{ + { + Name: "cli_flag_var", + Description: "Variable provided via CLI flag", + Type: "string", + Required: 
true, + }, + { + Name: "file_var", + Description: "Variable provided via file", + Type: "string", + Required: true, + }, + { + Name: "prompt_var", + Description: "Variable provided via prompt", + Type: "string", + Required: true, + }, + { + Name: "cli_overrides_file_var", + Description: "Variable in both CLI and file", + Type: "string", + Required: true, + }, + } + + source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables)) + + // Create a temporary variables file. + tempDir := t.TempDir() + removeTmpDirUntilSuccessAfterTest(t, tempDir) + variablesFile, err := os.CreateTemp(tempDir, "variables*.yaml") + require.NoError(t, err) + _, err = variablesFile.WriteString(`file_var: from-file +cli_overrides_file_var: from-file`) + require.NoError(t, err) + require.NoError(t, variablesFile.Close()) + + inv, root := clitest.New(t, "templates", "push", "test-template", + "--directory", source, + "--test.provisioner", string(database.ProvisionerTypeEcho), + "--variables-file", variablesFile.Name(), + "--variable", "cli_flag_var=from-cli-flag", + "--variable", "cli_overrides_file_var=from-cli-override", + ) + clitest.SetupConfig(t, templateAdmin, root) + pty := ptytest.New(t).Attach(inv) + + execDone := make(chan error) + go func() { + execDone <- inv.Run() + }() + + // Select "Yes" for the "Upload " prompt + pty.ExpectMatch("Upload") + pty.WriteLine("yes") + + // Only check for prompt_var, other variables should not prompt + pty.ExpectMatch("var.prompt_var") + pty.ExpectMatch("Enter value:") + pty.WriteLine("from-prompt") + + require.NoError(t, <-execDone) + + template, err := client.TemplateByName(context.Background(), owner.OrganizationID, "test-template") + require.NoError(t, err) + + templateVersionVars, err := client.TemplateVersionVariables(context.Background(), template.ActiveVersionID) + require.NoError(t, err) + require.Len(t, templateVersionVars, 4) + + varMap := make(map[string]string) + for _, tv := range 
templateVersionVars { + varMap[tv.Name] = tv.Value + } + + require.Equal(t, "from-cli-flag", varMap["cli_flag_var"]) + require.Equal(t, "from-file", varMap["file_var"]) + require.Equal(t, "from-prompt", varMap["prompt_var"]) + require.Equal(t, "from-cli-override", varMap["cli_overrides_file_var"]) + }) }) } diff --git a/cli/testdata/coder_list_--output_json.golden b/cli/testdata/coder_list_--output_json.golden index 82b73f7b24989..8da57536338f8 100644 --- a/cli/testdata/coder_list_--output_json.golden +++ b/cli/testdata/coder_list_--output_json.golden @@ -45,6 +45,7 @@ "queue_position": 0, "queue_size": 0, "organization_id": "===========[first org ID]===========", + "initiator_id": "==========[first user ID]===========", "input": { "workspace_build_id": "========[workspace build ID]========" }, @@ -89,6 +90,7 @@ "allow_renames": false, "favorite": false, "next_start_at": "====[timestamp]=====", - "is_prebuild": false + "is_prebuild": false, + "task_id": null } ] diff --git a/cli/testdata/coder_provisioner_jobs_list_--help.golden b/cli/testdata/coder_provisioner_jobs_list_--help.golden index 8e22f78e978f2..3a581bd880829 100644 --- a/cli/testdata/coder_provisioner_jobs_list_--help.golden +++ b/cli/testdata/coder_provisioner_jobs_list_--help.golden @@ -11,9 +11,12 @@ OPTIONS: -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. 
- -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) + -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) Columns to display in table output. + -i, --initiator string, $CODER_PROVISIONER_JOB_LIST_INITIATOR + Filter by initiator (user ID or username). + -l, --limit int, $CODER_PROVISIONER_JOB_LIST_LIMIT (default: 50) Limit the number of jobs returned. 
diff --git a/cli/testdata/coder_provisioner_jobs_list_--output_json.golden b/cli/testdata/coder_provisioner_jobs_list_--output_json.golden index 6ccf672360a55..3ee6c25e34082 100644 --- a/cli/testdata/coder_provisioner_jobs_list_--output_json.golden +++ b/cli/testdata/coder_provisioner_jobs_list_--output_json.golden @@ -15,6 +15,7 @@ "queue_position": 0, "queue_size": 0, "organization_id": "===========[first org ID]===========", + "initiator_id": "==========[first user ID]===========", "input": { "template_version_id": "============[version ID]============" }, @@ -45,6 +46,7 @@ "queue_position": 0, "queue_size": 0, "organization_id": "===========[first org ID]===========", + "initiator_id": "==========[first user ID]===========", "input": { "workspace_build_id": "========[workspace build ID]========" }, diff --git a/cli/testdata/coder_provisioner_list_--output_json.golden b/cli/testdata/coder_provisioner_list_--output_json.golden index ad25a3ec48549..32de8cbd857f4 100644 --- a/cli/testdata/coder_provisioner_list_--output_json.golden +++ b/cli/testdata/coder_provisioner_list_--output_json.golden @@ -7,7 +7,7 @@ "last_seen_at": "====[timestamp]=====", "name": "test-daemon", "version": "v0.0.0-devel", - "api_version": "1.10", + "api_version": "1.11", "provisioners": [ "echo" ], diff --git a/cli/testdata/coder_server_--help.golden b/cli/testdata/coder_server_--help.golden index 447ce1ae4fce2..7e7a7ece0d958 100644 --- a/cli/testdata/coder_server_--help.golden +++ b/cli/testdata/coder_server_--help.golden @@ -80,6 +80,41 @@ OPTIONS: Periodically check for new releases of Coder and inform the owner. The check is performed once per day. +AIBRIDGE OPTIONS: + --aibridge-anthropic-base-url string, $CODER_AIBRIDGE_ANTHROPIC_BASE_URL (default: https://api.anthropic.com/) + The base URL of the Anthropic API. + + --aibridge-anthropic-key string, $CODER_AIBRIDGE_ANTHROPIC_KEY + The key to authenticate against the Anthropic API. 
+ + --aibridge-bedrock-access-key string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY + The access key to authenticate against the AWS Bedrock API. + + --aibridge-bedrock-access-key-secret string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET + The access key secret to use with the access key to authenticate + against the AWS Bedrock API. + + --aibridge-bedrock-model string, $CODER_AIBRIDGE_BEDROCK_MODEL (default: global.anthropic.claude-sonnet-4-5-20250929-v1:0) + The model to use when making requests to the AWS Bedrock API. + + --aibridge-bedrock-region string, $CODER_AIBRIDGE_BEDROCK_REGION + The AWS Bedrock API region. + + --aibridge-bedrock-small-fastmodel string, $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL (default: global.anthropic.claude-haiku-4-5-20251001-v1:0) + The small fast model to use when making requests to the AWS Bedrock + API. Claude Code uses Haiku-class models to perform background tasks. + See + https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + + --aibridge-enabled bool, $CODER_AIBRIDGE_ENABLED (default: false) + Whether to start an in-memory aibridged instance. + + --aibridge-openai-base-url string, $CODER_AIBRIDGE_OPENAI_BASE_URL (default: https://api.openai.com/v1/) + The base URL of the OpenAI API. + + --aibridge-openai-key string, $CODER_AIBRIDGE_OPENAI_KEY + The key to authenticate against the OpenAI API. + CLIENT OPTIONS: These options change the behavior of how clients interact with the Coder. Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI. diff --git a/cli/testdata/coder_templates_init_--help.golden b/cli/testdata/coder_templates_init_--help.golden index d44db24aee27b..44be7a95293f4 100644 --- a/cli/testdata/coder_templates_init_--help.golden +++ b/cli/testdata/coder_templates_init_--help.golden @@ -6,7 +6,7 @@ USAGE: Get started with a templated template. 
OPTIONS: - --id aws-devcontainer|aws-linux|aws-windows|azure-linux|digitalocean-linux|docker|docker-devcontainer|docker-envbuilder|gcp-devcontainer|gcp-linux|gcp-vm-container|gcp-windows|kubernetes|kubernetes-devcontainer|nomad-docker|scratch + --id aws-devcontainer|aws-linux|aws-windows|azure-linux|digitalocean-linux|docker|docker-devcontainer|docker-envbuilder|gcp-devcontainer|gcp-linux|gcp-vm-container|gcp-windows|kubernetes|kubernetes-devcontainer|nomad-docker|scratch|tasks-docker Specify a given example template by ID. β€”β€”β€” diff --git a/cli/testdata/coder_tokens_--help.golden b/cli/testdata/coder_tokens_--help.golden index 7247c42a4bd1d..fb58dab8b3e69 100644 --- a/cli/testdata/coder_tokens_--help.golden +++ b/cli/testdata/coder_tokens_--help.golden @@ -16,6 +16,10 @@ USAGE: $ coder tokens ls + - Create a scoped token: + + $ coder tokens create --scope workspace:read --allow workspace: + - Remove a token by ID: $ coder tokens rm WuoWs4ZsMX @@ -24,6 +28,7 @@ SUBCOMMANDS: create Create a token list List tokens remove Delete a token + view Display detailed information about a token β€”β€”β€” Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_tokens_create_--help.golden b/cli/testdata/coder_tokens_create_--help.golden index 9399635563a11..19e9beac20060 100644 --- a/cli/testdata/coder_tokens_create_--help.golden +++ b/cli/testdata/coder_tokens_create_--help.golden @@ -6,12 +6,20 @@ USAGE: Create a token OPTIONS: + --allow allow-list + Repeatable allow-list entry (:, e.g. workspace:1234-...). + --lifetime string, $CODER_TOKEN_LIFETIME - Specify a duration for the lifetime of the token. + Duration for the token lifetime. Supports standard Go duration units + (ns, us, ms, s, m, h) plus d (days) and y (years). Examples: 8h, 30d, + 1y, 1d12h30m. -n, --name string, $CODER_TOKEN_NAME Specify a human-readable name. + --scope string-array + Repeatable scope to attach to the token (e.g. workspace:read). 
+ -u, --user string, $CODER_TOKEN_USER Specify the user to create the token for (Only works if logged in user is admin). diff --git a/cli/testdata/coder_tokens_list_--help.golden b/cli/testdata/coder_tokens_list_--help.golden index 9ad17fbafb8e6..a3c24bcd0fabe 100644 --- a/cli/testdata/coder_tokens_list_--help.golden +++ b/cli/testdata/coder_tokens_list_--help.golden @@ -12,7 +12,7 @@ OPTIONS: Specifies whether all users' tokens will be listed or not (must have Owner role to see all tokens). - -c, --column [id|name|last used|expires at|created at|owner] (default: id,name,last used,expires at,created at) + -c, --column [id|name|scopes|allow list|last used|expires at|created at|owner] (default: id,name,scopes,allow list,last used,expires at,created at) Columns to display in table output. -o, --output table|json (default: table) diff --git a/cli/testdata/coder_tokens_view_--help.golden b/cli/testdata/coder_tokens_view_--help.golden new file mode 100644 index 0000000000000..1bceac32ce52f --- /dev/null +++ b/cli/testdata/coder_tokens_view_--help.golden @@ -0,0 +1,16 @@ +coder v0.0.0-devel + +USAGE: + coder tokens view [flags] + + Display detailed information about a token + +OPTIONS: + -c, --column [id|name|scopes|allow list|last used|expires at|created at|owner] (default: id,name,scopes,allow list,last used,expires at,created at,owner) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + +β€”β€”β€” +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/coder_users_list_--help.golden b/cli/testdata/coder_users_list_--help.golden index 22c1fe172faf5..e446d63a36d7f 100644 --- a/cli/testdata/coder_users_list_--help.golden +++ b/cli/testdata/coder_users_list_--help.golden @@ -8,7 +8,7 @@ USAGE: Aliases: ls OPTIONS: - -c, --column [id|username|email|created at|updated at|status] (default: username,email,created at,status) + -c, --column [id|username|name|email|created at|updated at|status] (default: username,email,created at,status) Columns to display in table output. --github-user-id int diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index 40666c10e8394..225c240d9e761 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -714,8 +714,7 @@ workspace_prebuilds: # (default: 3, type: int) failure_hard_limit: 3 aibridge: - # Whether to start an in-memory aibridged instance ("aibridge" experiment must be - # enabled, too). + # Whether to start an in-memory aibridged instance. # (default: false, type: bool) enabled: false # The base URL of the OpenAI API. @@ -726,7 +725,25 @@ aibridge: openai_key: "" # The base URL of the Anthropic API. # (default: https://api.anthropic.com/, type: string) - base_url: https://api.anthropic.com/ + anthropic_base_url: https://api.anthropic.com/ # The key to authenticate against the Anthropic API. # (default: , type: string) - key: "" + anthropic_key: "" + # The AWS Bedrock API region. + # (default: , type: string) + bedrock_region: "" + # The access key to authenticate against the AWS Bedrock API. + # (default: , type: string) + bedrock_access_key: "" + # The access key secret to use with the access key to authenticate against the AWS + # Bedrock API. + # (default: , type: string) + bedrock_access_key_secret: "" + # The model to use when making requests to the AWS Bedrock API. 
+ # (default: global.anthropic.claude-sonnet-4-5-20250929-v1:0, type: string) + bedrock_model: global.anthropic.claude-sonnet-4-5-20250929-v1:0 + # The small fast model to use when making requests to the AWS Bedrock API. Claude + # Code uses Haiku-class models to perform background tasks. See + # https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + # (default: global.anthropic.claude-haiku-4-5-20251001-v1:0, type: string) + bedrock_small_fast_model: global.anthropic.claude-haiku-4-5-20251001-v1:0 diff --git a/cli/tokens.go b/cli/tokens.go index 5d63f2e1ae841..9316f5de14313 100644 --- a/cli/tokens.go +++ b/cli/tokens.go @@ -4,12 +4,14 @@ import ( "fmt" "os" "slices" + "sort" "strings" "time" "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/serpent" ) @@ -27,6 +29,10 @@ func (r *RootCmd) tokens() *serpent.Command { Description: "List your tokens", Command: "coder tokens ls", }, + Example{ + Description: "Create a scoped token", + Command: "coder tokens create --scope workspace:read --allow workspace:", + }, Example{ Description: "Remove a token by ID", Command: "coder tokens rm WuoWs4ZsMX", @@ -39,6 +45,7 @@ func (r *RootCmd) tokens() *serpent.Command { Children: []*serpent.Command{ r.createToken(), r.listTokens(), + r.viewToken(), r.removeToken(), }, } @@ -50,6 +57,8 @@ func (r *RootCmd) createToken() *serpent.Command { tokenLifetime string name string user string + scopes []string + allowList []codersdk.APIAllowListTarget ) cmd := &serpent.Command{ Use: "create", @@ -88,10 +97,18 @@ func (r *RootCmd) createToken() *serpent.Command { } } - res, err := client.CreateToken(inv.Context(), userID, codersdk.CreateTokenRequest{ + req := codersdk.CreateTokenRequest{ Lifetime: parsedLifetime, TokenName: name, - }) + } + if len(req.Scopes) == 0 { + req.Scopes = slice.StringEnums[codersdk.APIKeyScope](scopes) + } + if len(allowList) > 0 { 
+ req.AllowList = append([]codersdk.APIAllowListTarget(nil), allowList...) + } + + res, err := client.CreateToken(inv.Context(), userID, req) if err != nil { return xerrors.Errorf("create tokens: %w", err) } @@ -106,7 +123,7 @@ func (r *RootCmd) createToken() *serpent.Command { { Flag: "lifetime", Env: "CODER_TOKEN_LIFETIME", - Description: "Specify a duration for the lifetime of the token.", + Description: "Duration for the token lifetime. Supports standard Go duration units (ns, us, ms, s, m, h) plus d (days) and y (years). Examples: 8h, 30d, 1y, 1d12h30m.", Value: serpent.StringOf(&tokenLifetime), }, { @@ -123,6 +140,16 @@ func (r *RootCmd) createToken() *serpent.Command { Description: "Specify the user to create the token for (Only works if logged in user is admin).", Value: serpent.StringOf(&user), }, + { + Flag: "scope", + Description: "Repeatable scope to attach to the token (e.g. workspace:read).", + Value: serpent.StringArrayOf(&scopes), + }, + { + Flag: "allow", + Description: "Repeatable allow-list entry (:, e.g. 
workspace:1234-...).", + Value: AllowListFlagOf(&allowList), + }, } return cmd @@ -136,6 +163,8 @@ type tokenListRow struct { // For table format: ID string `json:"-" table:"id,default_sort"` TokenName string `json:"token_name" table:"name"` + Scopes string `json:"-" table:"scopes"` + Allow string `json:"-" table:"allow list"` LastUsed time.Time `json:"-" table:"last used"` ExpiresAt time.Time `json:"-" table:"expires at"` CreatedAt time.Time `json:"-" table:"created at"` @@ -143,20 +172,47 @@ type tokenListRow struct { } func tokenListRowFromToken(token codersdk.APIKeyWithOwner) tokenListRow { + return tokenListRowFromKey(token.APIKey, token.Username) +} + +func tokenListRowFromKey(token codersdk.APIKey, owner string) tokenListRow { return tokenListRow{ - APIKey: token.APIKey, + APIKey: token, ID: token.ID, TokenName: token.TokenName, + Scopes: joinScopes(token.Scopes), + Allow: joinAllowList(token.AllowList), LastUsed: token.LastUsed, ExpiresAt: token.ExpiresAt, CreatedAt: token.CreatedAt, - Owner: token.Username, + Owner: owner, } } +func joinScopes(scopes []codersdk.APIKeyScope) string { + if len(scopes) == 0 { + return "" + } + vals := slice.ToStrings(scopes) + sort.Strings(vals) + return strings.Join(vals, ", ") +} + +func joinAllowList(entries []codersdk.APIAllowListTarget) string { + if len(entries) == 0 { + return "" + } + vals := make([]string, len(entries)) + for i, entry := range entries { + vals[i] = entry.String() + } + sort.Strings(vals) + return strings.Join(vals, ", ") +} + func (r *RootCmd) listTokens() *serpent.Command { // we only display the 'owner' column if the --all argument is passed in - defaultCols := []string{"id", "name", "last used", "expires at", "created at"} + defaultCols := []string{"id", "name", "scopes", "allow list", "last used", "expires at", "created at"} if slices.Contains(os.Args, "-a") || slices.Contains(os.Args, "--all") { defaultCols = append(defaultCols, "owner") } @@ -226,6 +282,48 @@ func (r *RootCmd) listTokens() 
*serpent.Command { return cmd } +func (r *RootCmd) viewToken() *serpent.Command { + formatter := cliui.NewOutputFormatter( + cliui.TableFormat([]tokenListRow{}, []string{"id", "name", "scopes", "allow list", "last used", "expires at", "created at", "owner"}), + cliui.JSONFormat(), + ) + + cmd := &serpent.Command{ + Use: "view ", + Short: "Display detailed information about a token", + Middleware: serpent.Chain( + serpent.RequireNArgs(1), + ), + Handler: func(inv *serpent.Invocation) error { + client, err := r.InitClient(inv) + if err != nil { + return err + } + + tokenName := inv.Args[0] + token, err := client.APIKeyByName(inv.Context(), codersdk.Me, tokenName) + if err != nil { + maybeID := strings.Split(tokenName, "-")[0] + token, err = client.APIKeyByID(inv.Context(), codersdk.Me, maybeID) + if err != nil { + return xerrors.Errorf("fetch api key by name or id: %w", err) + } + } + + row := tokenListRowFromKey(*token, "") + out, err := formatter.Format(inv.Context(), []tokenListRow{row}) + if err != nil { + return err + } + _, err = fmt.Fprintln(inv.Stdout, out) + return err + }, + } + + formatter.AttachOptions(&cmd.Options) + return cmd +} + func (r *RootCmd) removeToken() *serpent.Command { cmd := &serpent.Command{ Use: "remove ", diff --git a/cli/tokens_test.go b/cli/tokens_test.go index 0c717bb890f9e..990516aa9ba13 100644 --- a/cli/tokens_test.go +++ b/cli/tokens_test.go @@ -4,10 +4,13 @@ import ( "bytes" "context" "encoding/json" + "fmt" "testing" "github.com/stretchr/testify/require" + "github.com/google/uuid" + "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/codersdk" @@ -46,6 +49,18 @@ func TestTokens(t *testing.T) { require.NotEmpty(t, res) id := res[:10] + allowWorkspaceID := uuid.New() + allowSpec := fmt.Sprintf("workspace:%s", allowWorkspaceID.String()) + inv, root = clitest.New(t, "tokens", "create", "--name", "scoped-token", "--scope", string(codersdk.APIKeyScopeWorkspaceRead), 
"--allow", allowSpec) + clitest.SetupConfig(t, client, root) + buf = new(bytes.Buffer) + inv.Stdout = buf + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + res = buf.String() + require.NotEmpty(t, res) + scopedTokenID := res[:10] + // Test creating a token for second user from first user's (admin) session inv, root = clitest.New(t, "tokens", "create", "--name", "token-two", "--user", secondUser.ID.String()) clitest.SetupConfig(t, client, root) @@ -67,7 +82,7 @@ func TestTokens(t *testing.T) { require.NoError(t, err) res = buf.String() require.NotEmpty(t, res) - // Result should only contain the token created for the admin user + // Result should only contain the tokens created for the admin user require.Contains(t, res, "ID") require.Contains(t, res, "EXPIRES AT") require.Contains(t, res, "CREATED AT") @@ -76,6 +91,16 @@ func TestTokens(t *testing.T) { // Result should not contain the token created for the second user require.NotContains(t, res, secondTokenID) + inv, root = clitest.New(t, "tokens", "view", "scoped-token") + clitest.SetupConfig(t, client, root) + buf = new(bytes.Buffer) + inv.Stdout = buf + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + res = buf.String() + require.Contains(t, res, string(codersdk.APIKeyScopeWorkspaceRead)) + require.Contains(t, res, allowSpec) + // Test listing tokens from the second user's session inv, root = clitest.New(t, "tokens", "ls") clitest.SetupConfig(t, secondUserClient, root) @@ -101,6 +126,14 @@ func TestTokens(t *testing.T) { // User (non-admin) should not be able to create a token for another user require.Error(t, err) + inv, root = clitest.New(t, "tokens", "create", "--name", "invalid-allow", "--allow", "badvalue") + clitest.SetupConfig(t, client, root) + buf = new(bytes.Buffer) + inv.Stdout = buf + err = inv.WithContext(ctx).Run() + require.Error(t, err) + require.Contains(t, err.Error(), "invalid allow_list entry") + inv, root = clitest.New(t, "tokens", "ls", "--output=json") 
clitest.SetupConfig(t, client, root) buf = new(bytes.Buffer) @@ -110,8 +143,17 @@ func TestTokens(t *testing.T) { var tokens []codersdk.APIKey require.NoError(t, json.Unmarshal(buf.Bytes(), &tokens)) - require.Len(t, tokens, 1) - require.Equal(t, id, tokens[0].ID) + require.Len(t, tokens, 2) + tokenByName := make(map[string]codersdk.APIKey, len(tokens)) + for _, tk := range tokens { + tokenByName[tk.TokenName] = tk + } + require.Contains(t, tokenByName, "token-one") + require.Contains(t, tokenByName, "scoped-token") + scopedToken := tokenByName["scoped-token"] + require.Contains(t, scopedToken.Scopes, codersdk.APIKeyScopeWorkspaceRead) + require.Len(t, scopedToken.AllowList, 1) + require.Equal(t, allowSpec, scopedToken.AllowList[0].String()) // Delete by name inv, root = clitest.New(t, "tokens", "rm", "token-one") @@ -135,6 +177,17 @@ func TestTokens(t *testing.T) { require.NotEmpty(t, res) require.Contains(t, res, "deleted") + // Delete scoped token by ID + inv, root = clitest.New(t, "tokens", "rm", scopedTokenID) + clitest.SetupConfig(t, client, root) + buf = new(bytes.Buffer) + inv.Stdout = buf + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + res = buf.String() + require.NotEmpty(t, res) + require.Contains(t, res, "deleted") + // Create third token inv, root = clitest.New(t, "tokens", "create", "--name", "token-three") clitest.SetupConfig(t, client, root) diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go index dbcb8ea024914..f8f72a5d0d6ea 100644 --- a/coderd/agentapi/api.go +++ b/coderd/agentapi/api.go @@ -239,6 +239,10 @@ func (a *API) Serve(ctx context.Context, l net.Listener) error { return xerrors.Errorf("create agent API server: %w", err) } + if err := a.ResourcesMonitoringAPI.InitMonitors(ctx); err != nil { + return xerrors.Errorf("initialize resource monitoring: %w", err) + } + return server.Serve(ctx, l) } diff --git a/coderd/agentapi/connectionlog.go b/coderd/agentapi/connectionlog.go index f26f835746981..bd11f9e72679e 100644 
--- a/coderd/agentapi/connectionlog.go +++ b/coderd/agentapi/connectionlog.go @@ -61,6 +61,14 @@ func (a *ConnLogAPI) ReportConnection(ctx context.Context, req *agentproto.Repor return nil, xerrors.Errorf("get workspace by agent id: %w", err) } + // Some older clients may incorrectly report "localhost" as the IP address. + // Related to https://github.com/coder/coder/issues/20194 + logIPRaw := req.GetConnection().GetIp() + if logIPRaw == "localhost" { + logIPRaw = "127.0.0.1" + } + logIP := database.ParseIP(logIPRaw) // will return null if invalid + reason := req.GetConnection().GetReason() connLogger := *a.ConnectionLogger.Load() err = connLogger.Upsert(ctx, database.UpsertConnectionLogParams{ @@ -73,7 +81,7 @@ func (a *ConnLogAPI) ReportConnection(ctx context.Context, req *agentproto.Repor AgentName: workspaceAgent.Name, Type: connectionType, Code: code, - Ip: database.ParseIP(req.GetConnection().GetIp()), + Ip: logIP, ConnectionID: uuid.NullUUID{ UUID: connectionID, Valid: true, diff --git a/coderd/agentapi/connectionlog_test.go b/coderd/agentapi/connectionlog_test.go index 4a060b8f16faf..81d969e5bad95 100644 --- a/coderd/agentapi/connectionlog_test.go +++ b/coderd/agentapi/connectionlog_test.go @@ -3,13 +3,11 @@ package agentapi_test import ( "context" "database/sql" - "net" "sync/atomic" "testing" "time" "github.com/google/uuid" - "github.com/sqlc-dev/pqtype" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/timestamppb" @@ -75,6 +73,9 @@ func TestConnectionLog(t *testing.T) { action: agentproto.Connection_CONNECT.Enum(), typ: agentproto.Connection_JETBRAINS.Enum(), time: dbtime.Now(), + // Sometimes, JetBrains clients report as localhost, see + // https://github.com/coder/coder/issues/20194 + ip: "localhost", }, { name: "Reconnecting PTY Connect", @@ -129,6 +130,12 @@ func TestConnectionLog(t *testing.T) { }, }) + expectedIPRaw := tt.ip + if expectedIPRaw == "localhost" { + expectedIPRaw = "127.0.0.1" + } 
+ expectedIP := database.ParseIP(expectedIPRaw) + require.True(t, connLogger.Contains(t, database.UpsertConnectionLogParams{ Time: dbtime.Time(tt.time).In(time.UTC), OrganizationID: workspace.OrganizationID, @@ -146,7 +153,7 @@ func TestConnectionLog(t *testing.T) { Int32: tt.status, Valid: *tt.action == agentproto.Connection_DISCONNECT, }, - Ip: pqtype.Inet{Valid: true, IPNet: net.IPNet{IP: net.ParseIP(tt.ip), Mask: net.CIDRMask(32, 32)}}, + Ip: expectedIP, Type: agentProtoConnectionTypeToConnectionLog(t, *tt.typ), DisconnectReason: sql.NullString{ String: tt.reason, diff --git a/coderd/agentapi/resources_monitoring.go b/coderd/agentapi/resources_monitoring.go index e5ee97e681a58..db0d523192280 100644 --- a/coderd/agentapi/resources_monitoring.go +++ b/coderd/agentapi/resources_monitoring.go @@ -5,6 +5,7 @@ import ( "database/sql" "errors" "fmt" + "sync" "time" "golang.org/x/xerrors" @@ -33,42 +34,60 @@ type ResourcesMonitoringAPI struct { Debounce time.Duration Config resourcesmonitor.Config + + // Cache resource monitors on first call to avoid millions of DB queries per day. + memoryMonitor database.WorkspaceAgentMemoryResourceMonitor + volumeMonitors []database.WorkspaceAgentVolumeResourceMonitor + monitorsLock sync.RWMutex } -func (a *ResourcesMonitoringAPI) GetResourcesMonitoringConfiguration(ctx context.Context, _ *proto.GetResourcesMonitoringConfigurationRequest) (*proto.GetResourcesMonitoringConfigurationResponse, error) { - memoryMonitor, memoryErr := a.Database.FetchMemoryResourceMonitorsByAgentID(ctx, a.AgentID) - if memoryErr != nil && !errors.Is(memoryErr, sql.ErrNoRows) { - return nil, xerrors.Errorf("failed to fetch memory resource monitor: %w", memoryErr) +// InitMonitors fetches resource monitors from the database and caches them. +// This must be called once after creating a ResourcesMonitoringAPI, the context should be +// the agent per-RPC connection context. 
If fetching fails with a real error (not sql.ErrNoRows), the +// connection should be torn down. +func (a *ResourcesMonitoringAPI) InitMonitors(ctx context.Context) error { + memMon, err := a.Database.FetchMemoryResourceMonitorsByAgentID(ctx, a.AgentID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("fetch memory resource monitor: %w", err) + } + // If sql.ErrNoRows, memoryMonitor stays as zero value (CreatedAt.IsZero() = true). + // Otherwise, store the fetched monitor. + if err == nil { + a.memoryMonitor = memMon } - volumeMonitors, err := a.Database.FetchVolumesResourceMonitorsByAgentID(ctx, a.AgentID) + volMons, err := a.Database.FetchVolumesResourceMonitorsByAgentID(ctx, a.AgentID) if err != nil { - return nil, xerrors.Errorf("failed to fetch volume resource monitors: %w", err) + return xerrors.Errorf("fetch volume resource monitors: %w", err) } + // 0 length is valid, indicating none configured, since the volume monitors in the DB can be many. + a.volumeMonitors = volMons + + return nil +} +func (a *ResourcesMonitoringAPI) GetResourcesMonitoringConfiguration(_ context.Context, _ *proto.GetResourcesMonitoringConfigurationRequest) (*proto.GetResourcesMonitoringConfigurationResponse, error) { return &proto.GetResourcesMonitoringConfigurationResponse{ Config: &proto.GetResourcesMonitoringConfigurationResponse_Config{ CollectionIntervalSeconds: int32(a.Config.CollectionInterval.Seconds()), NumDatapoints: a.Config.NumDatapoints, }, Memory: func() *proto.GetResourcesMonitoringConfigurationResponse_Memory { - if memoryErr != nil { + if a.memoryMonitor.CreatedAt.IsZero() { return nil } - return &proto.GetResourcesMonitoringConfigurationResponse_Memory{ - Enabled: memoryMonitor.Enabled, + Enabled: a.memoryMonitor.Enabled, } }(), Volumes: func() []*proto.GetResourcesMonitoringConfigurationResponse_Volume { - volumes := make([]*proto.GetResourcesMonitoringConfigurationResponse_Volume, 0, len(volumeMonitors)) - for _, monitor := range 
volumeMonitors { + volumes := make([]*proto.GetResourcesMonitoringConfigurationResponse_Volume, 0, len(a.volumeMonitors)) + for _, monitor := range a.volumeMonitors { volumes = append(volumes, &proto.GetResourcesMonitoringConfigurationResponse_Volume{ Enabled: monitor.Enabled, Path: monitor.Path, }) } - return volumes }(), }, nil @@ -77,6 +96,10 @@ func (a *ResourcesMonitoringAPI) GetResourcesMonitoringConfiguration(ctx context func (a *ResourcesMonitoringAPI) PushResourcesMonitoringUsage(ctx context.Context, req *proto.PushResourcesMonitoringUsageRequest) (*proto.PushResourcesMonitoringUsageResponse, error) { var err error + // Lock for the entire push operation since calls are sequential from the agent + a.monitorsLock.Lock() + defer a.monitorsLock.Unlock() + if memoryErr := a.monitorMemory(ctx, req.Datapoints); memoryErr != nil { err = errors.Join(err, xerrors.Errorf("monitor memory: %w", memoryErr)) } @@ -89,18 +112,7 @@ func (a *ResourcesMonitoringAPI) PushResourcesMonitoringUsage(ctx context.Contex } func (a *ResourcesMonitoringAPI) monitorMemory(ctx context.Context, datapoints []*proto.PushResourcesMonitoringUsageRequest_Datapoint) error { - monitor, err := a.Database.FetchMemoryResourceMonitorsByAgentID(ctx, a.AgentID) - if err != nil { - // It is valid for an agent to not have a memory monitor, so we - // do not want to treat it as an error. 
- if errors.Is(err, sql.ErrNoRows) { - return nil - } - - return xerrors.Errorf("fetch memory resource monitor: %w", err) - } - - if !monitor.Enabled { + if !a.memoryMonitor.Enabled { return nil } @@ -109,15 +121,15 @@ func (a *ResourcesMonitoringAPI) monitorMemory(ctx context.Context, datapoints [ usageDatapoints = append(usageDatapoints, datapoint.Memory) } - usageStates := resourcesmonitor.CalculateMemoryUsageStates(monitor, usageDatapoints) + usageStates := resourcesmonitor.CalculateMemoryUsageStates(a.memoryMonitor, usageDatapoints) - oldState := monitor.State + oldState := a.memoryMonitor.State newState := resourcesmonitor.NextState(a.Config, oldState, usageStates) - debouncedUntil, shouldNotify := monitor.Debounce(a.Debounce, a.Clock.Now(), oldState, newState) + debouncedUntil, shouldNotify := a.memoryMonitor.Debounce(a.Debounce, a.Clock.Now(), oldState, newState) //nolint:gocritic // We need to be able to update the resource monitor here. - err = a.Database.UpdateMemoryResourceMonitor(dbauthz.AsResourceMonitor(ctx), database.UpdateMemoryResourceMonitorParams{ + err := a.Database.UpdateMemoryResourceMonitor(dbauthz.AsResourceMonitor(ctx), database.UpdateMemoryResourceMonitorParams{ AgentID: a.AgentID, State: newState, UpdatedAt: dbtime.Time(a.Clock.Now()), @@ -127,6 +139,11 @@ func (a *ResourcesMonitoringAPI) monitorMemory(ctx context.Context, datapoints [ return xerrors.Errorf("update workspace monitor: %w", err) } + // Update cached state + a.memoryMonitor.State = newState + a.memoryMonitor.DebouncedUntil = dbtime.Time(debouncedUntil) + a.memoryMonitor.UpdatedAt = dbtime.Time(a.Clock.Now()) + if !shouldNotify { return nil } @@ -143,7 +160,7 @@ func (a *ResourcesMonitoringAPI) monitorMemory(ctx context.Context, datapoints [ notifications.TemplateWorkspaceOutOfMemory, map[string]string{ "workspace": workspace.Name, - "threshold": fmt.Sprintf("%d%%", monitor.Threshold), + "threshold": fmt.Sprintf("%d%%", a.memoryMonitor.Threshold), }, map[string]any{ // 
NOTE(DanielleMaywood): @@ -169,14 +186,9 @@ func (a *ResourcesMonitoringAPI) monitorMemory(ctx context.Context, datapoints [ } func (a *ResourcesMonitoringAPI) monitorVolumes(ctx context.Context, datapoints []*proto.PushResourcesMonitoringUsageRequest_Datapoint) error { - volumeMonitors, err := a.Database.FetchVolumesResourceMonitorsByAgentID(ctx, a.AgentID) - if err != nil { - return xerrors.Errorf("get or insert volume monitor: %w", err) - } - outOfDiskVolumes := make([]map[string]any, 0) - for _, monitor := range volumeMonitors { + for i, monitor := range a.volumeMonitors { if !monitor.Enabled { continue } @@ -219,6 +231,11 @@ func (a *ResourcesMonitoringAPI) monitorVolumes(ctx context.Context, datapoints }); err != nil { return xerrors.Errorf("update workspace monitor: %w", err) } + + // Update cached state + a.volumeMonitors[i].State = newState + a.volumeMonitors[i].DebouncedUntil = dbtime.Time(debouncedUntil) + a.volumeMonitors[i].UpdatedAt = dbtime.Time(a.Clock.Now()) } if len(outOfDiskVolumes) == 0 { diff --git a/coderd/agentapi/resources_monitoring_test.go b/coderd/agentapi/resources_monitoring_test.go index c491d3789355b..7b457dd45331a 100644 --- a/coderd/agentapi/resources_monitoring_test.go +++ b/coderd/agentapi/resources_monitoring_test.go @@ -101,6 +101,9 @@ func TestMemoryResourceMonitorDebounce(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: The monitor is given a state that will trigger NOK _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: []*agentproto.PushResourcesMonitoringUsageRequest_Datapoint{ @@ -304,6 +307,9 @@ func TestMemoryResourceMonitor(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + clock.Set(collectedAt) _, err := 
api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: datapoints, @@ -337,6 +343,8 @@ func TestMemoryResourceMonitorMissingData(t *testing.T) { State: database.WorkspaceAgentMonitorStateOK, Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) // When: A datapoint is missing, surrounded by two NOK datapoints. _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ @@ -387,6 +395,9 @@ func TestMemoryResourceMonitorMissingData(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: A datapoint is missing, surrounded by two OK datapoints. _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: []*agentproto.PushResourcesMonitoringUsageRequest_Datapoint{ @@ -466,6 +477,9 @@ func TestVolumeResourceMonitorDebounce(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: // - First monitor is in a NOK state // - Second monitor is in an OK state @@ -742,6 +756,9 @@ func TestVolumeResourceMonitor(t *testing.T) { Threshold: tt.thresholdPercent, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + clock.Set(collectedAt) _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: datapoints, @@ -780,6 +797,9 @@ func TestVolumeResourceMonitorMultiple(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: both of them move to a NOK state _, err := 
api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: []*agentproto.PushResourcesMonitoringUsageRequest_Datapoint{ @@ -832,6 +852,9 @@ func TestVolumeResourceMonitorMissingData(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: A datapoint is missing, surrounded by two NOK datapoints. _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: []*agentproto.PushResourcesMonitoringUsageRequest_Datapoint{ @@ -891,6 +914,9 @@ func TestVolumeResourceMonitorMissingData(t *testing.T) { Threshold: 80, }) + // Initialize API to fetch and cache the monitors + require.NoError(t, api.InitMonitors(context.Background())) + // When: A datapoint is missing, surrounded by two OK datapoints. _, err := api.PushResourcesMonitoringUsage(context.Background(), &agentproto.PushResourcesMonitoringUsageRequest{ Datapoints: []*agentproto.PushResourcesMonitoringUsageRequest_Datapoint{ diff --git a/coderd/aitasks.go b/coderd/aitasks.go index 31600c69443b4..bf059ea4db95e 100644 --- a/coderd/aitasks.go +++ b/coderd/aitasks.go @@ -1,27 +1,22 @@ package coderd import ( - "bytes" "context" - "database/sql" - "encoding/json" - "errors" "fmt" - "io" "net" "net/http" "net/url" - "path" "slices" "strings" "time" - "github.com/go-chi/chi/v5" "github.com/google/uuid" + "golang.org/x/xerrors" "cdr.dev/slog" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpapi/httperror" "github.com/coder/coder/v2/coderd/httpmw" @@ -29,8 +24,9 @@ import ( "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/searchquery" "github.com/coder/coder/v2/coderd/taskname" - 
"github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" + + aiagentapi "github.com/coder/agentapi-sdk-go" ) // This endpoint is experimental and not guaranteed to be stable, so we're not @@ -84,37 +80,70 @@ func (api *API) aiTasksPrompts(rw http.ResponseWriter, r *http.Request) { }) } -// This endpoint is experimental and not guaranteed to be stable, so we're not -// generating public-facing documentation for it. +// @Summary Create a new AI task +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. +// @ID create-task +// @Security CoderSessionToken +// @Tags Experimental +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param request body codersdk.CreateTaskRequest true "Create task request" +// @Success 201 {object} codersdk.Task +// @Router /api/experimental/tasks/{user} [post] +// +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. +// This endpoint creates a new task for the given user. 
func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { var ( - ctx = r.Context() - apiKey = httpmw.APIKey(r) - auditor = api.Auditor.Load() - mems = httpmw.OrganizationMembersParam(r) + ctx = r.Context() + apiKey = httpmw.APIKey(r) + auditor = api.Auditor.Load() + mems = httpmw.OrganizationMembersParam(r) + taskResourceInfo = audit.AdditionalFields{} ) + if mems.User != nil { + taskResourceInfo.WorkspaceOwner = mems.User.Username + } + + aReq, commitAudit := audit.InitRequest[database.TaskTable](rw, &audit.RequestParams{ + Audit: *auditor, + Log: api.Logger, + Request: r, + Action: database.AuditActionCreate, + AdditionalFields: taskResourceInfo, + }) + + defer commitAudit() + var req codersdk.CreateTaskRequest if !httpapi.Read(ctx, rw, r, &req) { return } - hasAITask, err := api.Database.GetTemplateVersionHasAITask(ctx, req.TemplateVersionID) + // Fetch the template version to verify access and whether or not it has an + // AI task. + templateVersion, err := api.Database.GetTemplateVersionByID(ctx, req.TemplateVersionID) if err != nil { - if errors.Is(err, sql.ErrNoRows) || rbac.IsUnauthorizedError(err) { - httpapi.ResourceNotFound(rw) + if httpapi.Is404Error(err) { + // Avoid using httpapi.ResourceNotFound() here because this is an + // input error and 404 would be confusing. 
+ httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Template version not found or you do not have access to this resource", + }) return } - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching whether the template version has an AI task.", + Message: "Internal error fetching template version.", Detail: err.Error(), }) return } - if !hasAITask { + + aReq.UpdateOrganizationID(templateVersion.OrganizationID) + + if !templateVersion.HasAITask.Valid || !templateVersion.HasAITask.Bool { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf(`Template does not have required parameter %q`, codersdk.AITaskPromptParameterName), + Message: `Template does not have a valid "coder_ai_task" resource.`, }) return } @@ -169,23 +198,12 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { } else { // A task can still be created if the caller can read the organization // member. The organization is required, which can be sourced from the - // template. + // templateVersion. // - // TODO: This code gets called twice for each workspace build request. - // This is inefficient and costs at most 2 extra RTTs to the DB. - // This can be optimized. It exists as it is now for code simplicity. - // The most common case is to create a workspace for 'Me'. Which does - // not enter this code branch. - template, err := requestTemplate(ctx, createReq, api.Database) - if err != nil { - httperror.WriteResponseError(ctx, rw, err) - return - } - // If the caller can find the organization membership in the same org // as the template, then they can continue. 
orgIndex := slices.IndexFunc(mems.Memberships, func(mem httpmw.OrganizationMember) bool { - return mem.OrganizationID == template.OrganizationID + return mem.OrganizationID == templateVersion.OrganizationID }) if orgIndex == -1 { httpapi.ResourceNotFound(rw) @@ -198,56 +216,113 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { Username: member.Username, AvatarURL: member.AvatarURL, } + + // Update workspace owner information for audit in case it changed. + taskResourceInfo.WorkspaceOwner = owner.Username } - aReq, commitAudit := audit.InitRequest[database.WorkspaceTable](rw, &audit.RequestParams{ - Audit: *auditor, - Log: api.Logger, - Request: r, - Action: database.AuditActionCreate, - AdditionalFields: audit.AdditionalFields{ - WorkspaceOwner: owner.Username, + // Track insert from preCreateInTX. + var dbTaskTable database.TaskTable + + // Ensure an audit log is created for the workspace creation event. + aReqWS, commitAuditWS := audit.InitRequest[database.WorkspaceTable](rw, &audit.RequestParams{ + Audit: *auditor, + Log: api.Logger, + Request: r, + Action: database.AuditActionCreate, + AdditionalFields: taskResourceInfo, + OrganizationID: templateVersion.OrganizationID, + }) + defer commitAuditWS() + + workspace, err := createWorkspace(ctx, aReqWS, apiKey.UserID, api, owner, createReq, r, &createWorkspaceOptions{ + // Before creating the workspace, ensure that this task can be created. + preCreateInTX: func(ctx context.Context, tx database.Store) error { + // Create task record in the database before creating the workspace so that + // we can request that the workspace be linked to it after creation. + dbTaskTable, err = tx.InsertTask(ctx, database.InsertTaskParams{ + ID: uuid.New(), + OrganizationID: templateVersion.OrganizationID, + OwnerID: owner.ID, + Name: taskName, + WorkspaceID: uuid.NullUUID{}, // Will be set after workspace creation. 
+ TemplateVersionID: templateVersion.ID, + TemplateParameters: []byte("{}"), + Prompt: req.Input, + CreatedAt: dbtime.Time(api.Clock.Now()), + }) + if err != nil { + return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error creating task.", + Detail: err.Error(), + }) + } + return nil + }, + // After the workspace is created, ensure that the task is linked to it. + postCreateInTX: func(ctx context.Context, tx database.Store, workspace database.Workspace) error { + // Update the task record with the workspace ID after creation. + dbTaskTable, err = tx.UpdateTaskWorkspaceID(ctx, database.UpdateTaskWorkspaceIDParams{ + ID: dbTaskTable.ID, + WorkspaceID: uuid.NullUUID{ + UUID: workspace.ID, + Valid: true, + }, + }) + if err != nil { + return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error updating task.", + Detail: err.Error(), + }) + } + return nil }, }) - defer commitAudit() - w, err := createWorkspace(ctx, aReq, apiKey.UserID, api, owner, createReq, r) if err != nil { httperror.WriteResponseError(ctx, rw, err) return } - task := taskFromWorkspace(w, req.Input) - httpapi.Write(ctx, rw, http.StatusCreated, task) + aReq.New = dbTaskTable + + // Fetch the task to get the additional columns from the view. + dbTask, err := api.Database.GetTaskByID(ctx, dbTaskTable.ID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusCreated, taskFromDBTaskAndWorkspace(dbTask, workspace)) } -func taskFromWorkspace(ws codersdk.Workspace, initialPrompt string) codersdk.Task { - // TODO(DanielleMaywood): - // This just picks up the first agent it discovers. - // This approach _might_ break when a task has multiple agents, - // depending on which agent was found first. 
- // - // We explicitly do not have support for running tasks - // inside of a sub agent at the moment, so we can be sure - // that any sub agents are not the agent we're looking for. - var taskAgentID uuid.NullUUID +// taskFromDBTaskAndWorkspace creates a codersdk.Task response from the task +// database record and workspace. +func taskFromDBTaskAndWorkspace(dbTask database.Task, ws codersdk.Workspace) codersdk.Task { var taskAgentLifecycle *codersdk.WorkspaceAgentLifecycle var taskAgentHealth *codersdk.WorkspaceAgentHealth - for _, resource := range ws.LatestBuild.Resources { - for _, agent := range resource.Agents { - if agent.ParentID.Valid { - continue - } - taskAgentID = uuid.NullUUID{Valid: true, UUID: agent.ID} - taskAgentLifecycle = &agent.LifecycleState - taskAgentHealth = &agent.Health - break + // If we have an agent ID from the task, find the agent details in the + // workspace. + if dbTask.WorkspaceAgentID.Valid { + findTaskAgentLoop: + for _, resource := range ws.LatestBuild.Resources { + for _, agent := range resource.Agents { + if agent.ID == dbTask.WorkspaceAgentID.UUID { + taskAgentLifecycle = &agent.LifecycleState + taskAgentHealth = &agent.Health + break findTaskAgentLoop + } + } } } - // Ignore 'latest app status' if it is older than the latest build and the latest build is a 'start' transition. - // This ensures that you don't show a stale app status from a previous build. - // For stop transitions, there is still value in showing the latest app status. + // Ignore 'latest app status' if it is older than the latest build and the + // latest build is a 'start' transition. This ensures that you don't show a + // stale app status from a previous build. For stop transitions, there is + // still value in showing the latest app status. 
var currentState *codersdk.TaskStateEntry if ws.LatestAppStatus != nil { if ws.LatestBuild.Transition != codersdk.WorkspaceTransitionStart || ws.LatestAppStatus.CreatedAt.After(ws.LatestBuild.CreatedAt) { @@ -261,191 +336,169 @@ func taskFromWorkspace(ws codersdk.Workspace, initialPrompt string) codersdk.Tas } return codersdk.Task{ - ID: ws.ID, - OrganizationID: ws.OrganizationID, - OwnerID: ws.OwnerID, - OwnerName: ws.OwnerName, - Name: ws.Name, + ID: dbTask.ID, + OrganizationID: dbTask.OrganizationID, + OwnerID: dbTask.OwnerID, + OwnerName: dbTask.OwnerUsername, + OwnerAvatarURL: dbTask.OwnerAvatarUrl, + Name: dbTask.Name, TemplateID: ws.TemplateID, + TemplateVersionID: dbTask.TemplateVersionID, TemplateName: ws.TemplateName, TemplateDisplayName: ws.TemplateDisplayName, TemplateIcon: ws.TemplateIcon, - WorkspaceID: uuid.NullUUID{Valid: true, UUID: ws.ID}, - WorkspaceAgentID: taskAgentID, + WorkspaceID: dbTask.WorkspaceID, + WorkspaceName: ws.Name, + WorkspaceBuildNumber: dbTask.WorkspaceBuildNumber.Int32, + WorkspaceStatus: ws.LatestBuild.Status, + WorkspaceAgentID: dbTask.WorkspaceAgentID, WorkspaceAgentLifecycle: taskAgentLifecycle, WorkspaceAgentHealth: taskAgentHealth, - CreatedAt: ws.CreatedAt, - UpdatedAt: ws.UpdatedAt, - InitialPrompt: initialPrompt, - Status: ws.LatestBuild.Status, + WorkspaceAppID: dbTask.WorkspaceAppID, + InitialPrompt: dbTask.Prompt, + Status: codersdk.TaskStatus(dbTask.Status), CurrentState: currentState, + CreatedAt: dbTask.CreatedAt, + UpdatedAt: ws.UpdatedAt, } } -// tasksFromWorkspaces converts a slice of API workspaces into tasks, fetching -// prompts and mapping status/state. This method enforces that only AI task -// workspaces are given. -func (api *API) tasksFromWorkspaces(ctx context.Context, apiWorkspaces []codersdk.Workspace) ([]codersdk.Task, error) { - // Fetch prompts for each workspace build and map by build ID. 
- buildIDs := make([]uuid.UUID, 0, len(apiWorkspaces)) - for _, ws := range apiWorkspaces { - buildIDs = append(buildIDs, ws.LatestBuild.ID) - } - parameters, err := api.Database.GetWorkspaceBuildParametersByBuildIDs(ctx, buildIDs) - if err != nil { - return nil, err - } - promptsByBuildID := make(map[uuid.UUID]string, len(parameters)) - for _, p := range parameters { - if p.Name == codersdk.AITaskPromptParameterName { - promptsByBuildID[p.WorkspaceBuildID] = p.Value - } - } - - tasks := make([]codersdk.Task, 0, len(apiWorkspaces)) - for _, ws := range apiWorkspaces { - tasks = append(tasks, taskFromWorkspace(ws, promptsByBuildID[ws.LatestBuild.ID])) - } - - return tasks, nil -} - -// tasksListResponse wraps a list of experimental tasks. +// @Summary List AI tasks +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. +// @ID list-tasks +// @Security CoderSessionToken +// @Tags Experimental +// @Param q query string false "Search query for filtering tasks. Supports: owner:, organization:, status:" +// @Success 200 {object} codersdk.TasksListResponse +// @Router /api/experimental/tasks [get] // -// Experimental: Response shape is experimental and may change. -type tasksListResponse struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` -} - -// tasksList is an experimental endpoint to list AI tasks by mapping -// workspaces to a task-shaped response. +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. +// tasksList is an experimental endpoint to list tasks. func (api *API) tasksList(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) - // Support standard pagination/filters for workspaces. - page, ok := ParsePagination(rw, r) - if !ok { - return - } + // Parse query parameters for filtering tasks. 
queryStr := r.URL.Query().Get("q") - filter, errs := searchquery.Workspaces(ctx, api.Database, queryStr, page, api.AgentInactiveDisconnectTimeout) + filter, errs := searchquery.Tasks(ctx, api.Database, queryStr, apiKey.UserID) if len(errs) > 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Invalid workspace search query.", + Message: "Invalid task search query.", Validations: errs, }) return } - // Ensure that we only include AI task workspaces in the results. - filter.HasAITask = sql.NullBool{Valid: true, Bool: true} - - if filter.OwnerUsername == "me" { - filter.OwnerID = apiKey.UserID - filter.OwnerUsername = "" - } - - prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, policy.ActionRead, rbac.ResourceWorkspace.Type) + // Fetch all tasks matching the filters from the database. + dbTasks, err := api.Database.ListTasks(ctx, filter) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error preparing sql filter.", + Message: "Internal error fetching tasks.", Detail: err.Error(), }) return } - // Order with requester's favorites first, include summary row. 
- filter.RequesterID = apiKey.UserID - filter.WithSummary = true - - workspaceRows, err := api.Database.GetAuthorizedWorkspaces(ctx, filter, prepared) + tasks, err := api.convertTasks(ctx, apiKey.UserID, dbTasks) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspaces.", + Message: "Internal error converting tasks.", Detail: err.Error(), }) return } - if len(workspaceRows) == 0 { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspaces.", - Detail: "Workspace summary row is missing.", - }) - return + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.TasksListResponse{ + Tasks: tasks, + Count: len(tasks), + }) +} + +// convertTasks converts database tasks to API tasks, enriching them with +// workspace information. +func (api *API) convertTasks(ctx context.Context, requesterID uuid.UUID, dbTasks []database.Task) ([]codersdk.Task, error) { + if len(dbTasks) == 0 { + return []codersdk.Task{}, nil } - if len(workspaceRows) == 1 { - httpapi.Write(ctx, rw, http.StatusOK, tasksListResponse{ - Tasks: []codersdk.Task{}, - Count: 0, - }) - return + + // Prepare to batch fetch workspaces. + workspaceIDs := make([]uuid.UUID, 0, len(dbTasks)) + for _, task := range dbTasks { + if !task.WorkspaceID.Valid { + return nil, xerrors.New("task has no workspace ID") + } + workspaceIDs = append(workspaceIDs, task.WorkspaceID.UUID) } - // Skip summary row. - workspaceRows = workspaceRows[:len(workspaceRows)-1] + // Fetch workspaces for tasks that have workspaces. + workspaceRows, err := api.Database.GetWorkspaces(ctx, database.GetWorkspacesParams{ + WorkspaceIds: workspaceIDs, + }) + if err != nil { + return nil, xerrors.Errorf("fetch workspaces: %w", err) + } workspaces := database.ConvertWorkspaceRows(workspaceRows) // Gather associated data and convert to API workspaces. 
data, err := api.workspaceData(ctx, workspaces) if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspace resources.", - Detail: err.Error(), - }) - return + return nil, xerrors.Errorf("fetch workspace data: %w", err) } - apiWorkspaces, err := convertWorkspaces(apiKey.UserID, workspaces, data) + + apiWorkspaces, err := convertWorkspaces(requesterID, workspaces, data) if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error converting workspaces.", - Detail: err.Error(), - }) - return + return nil, xerrors.Errorf("convert workspaces: %w", err) } - tasks, err := api.tasksFromWorkspaces(ctx, apiWorkspaces) - if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching task prompts and states.", - Detail: err.Error(), - }) - return + workspacesByID := make(map[uuid.UUID]codersdk.Workspace) + for _, ws := range apiWorkspaces { + workspacesByID[ws.ID] = ws } - httpapi.Write(ctx, rw, http.StatusOK, tasksListResponse{ - Tasks: tasks, - Count: len(tasks), - }) + // Convert tasks to SDK format. + result := make([]codersdk.Task, 0, len(dbTasks)) + for _, dbTask := range dbTasks { + task := taskFromDBTaskAndWorkspace(dbTask, workspacesByID[dbTask.WorkspaceID.UUID]) + result = append(result, task) + } + + return result, nil } +// @Summary Get AI task by ID +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. +// @ID get-task +// @Security CoderSessionToken +// @Tags Experimental +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param task path string true "Task ID" format(uuid) +// @Success 200 {object} codersdk.Task +// @Router /api/experimental/tasks/{user}/{task} [get] +// +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. 
// taskGet is an experimental endpoint to fetch a single AI task by ID // (workspace ID). It returns a synthesized task response including // prompt and status. func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) + task := httpmw.TaskParam(r) - idStr := chi.URLParam(r, "id") - taskID, err := uuid.Parse(idStr) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf("Invalid UUID %q for task ID.", idStr), + if !task.WorkspaceID.Valid { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task.", + Detail: "Task workspace ID is invalid.", }) return } - // For now, taskID = workspaceID, once we have a task data model in - // the DB, we can change this lookup. - workspaceID := taskID - workspace, err := api.Database.GetWorkspaceByID(ctx, workspaceID) - if httpapi.Is404Error(err) { - httpapi.ResourceNotFound(rw) - return - } + workspace, err := api.Database.GetWorkspaceByID(ctx, task.WorkspaceID.UUID) if err != nil { + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching workspace.", Detail: err.Error(), @@ -465,34 +518,6 @@ func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { httpapi.ResourceNotFound(rw) return } - if data.builds[0].HasAITask == nil || !*data.builds[0].HasAITask { - // TODO(DanielleMaywood): - // This is a temporary workaround. When a task has just been created, but - // not yet provisioned, the workspace build will not have `HasAITask` set. - // - // When we reach this code flow, it is _either_ because the workspace is - // not a task, or it is a task that has not yet been provisioned. 
This - // endpoint should rarely be called with a non-task workspace so we - // should be fine with this extra database call to check if it has the - // special "AI Task" parameter. - parameters, err := api.Database.GetWorkspaceBuildParameters(ctx, data.builds[0].ID) - if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspace build parameters.", - Detail: err.Error(), - }) - return - } - - _, hasAITask := slice.Find(parameters, func(t database.WorkspaceBuildParameter) bool { - return t.Name == codersdk.AITaskPromptParameterName - }) - - if !hasAITask { - httpapi.ResourceNotFound(rw) - return - } - } appStatus := codersdk.WorkspaceAppStatus{} if len(data.appStatuses) > 0 { @@ -515,107 +540,102 @@ func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { return } - tasks, err := api.tasksFromWorkspaces(ctx, []codersdk.Workspace{ws}) - if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching task prompt and state.", - Detail: err.Error(), - }) - return - } - - httpapi.Write(ctx, rw, http.StatusOK, tasks[0]) + taskResp := taskFromDBTaskAndWorkspace(task, ws) + httpapi.Write(ctx, rw, http.StatusOK, taskResp) } -// taskDelete is an experimental endpoint to delete a task by ID (workspace ID). +// @Summary Delete AI task by ID +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. +// @ID delete-task +// @Security CoderSessionToken +// @Tags Experimental +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param task path string true "Task ID" format(uuid) +// @Success 202 "Task deletion initiated" +// @Router /api/experimental/tasks/{user}/{task} [delete] +// +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. +// taskDelete is an experimental endpoint to delete a task by ID. 
// It creates a delete workspace build and returns 202 Accepted if the build was // created. func (api *API) taskDelete(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) + task := httpmw.TaskParam(r) - idStr := chi.URLParam(r, "id") - taskID, err := uuid.Parse(idStr) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf("Invalid UUID %q for task ID.", idStr), - }) - return - } + now := api.Clock.Now() - // For now, taskID = workspaceID, once we have a task data model in - // the DB, we can change this lookup. - workspaceID := taskID - workspace, err := api.Database.GetWorkspaceByID(ctx, workspaceID) - if httpapi.Is404Error(err) { - httpapi.ResourceNotFound(rw) - return - } - if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspace.", - Detail: err.Error(), - }) - return + if task.WorkspaceID.Valid { + workspace, err := api.Database.GetWorkspaceByID(ctx, task.WorkspaceID.UUID) + if err != nil { + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task workspace before deleting task.", + Detail: err.Error(), + }) + return + } + + // Construct a request to the workspace build creation handler to + // initiate deletion. 
+ buildReq := codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionDelete, + Reason: "Deleted via tasks API", + } + + _, err = api.postWorkspaceBuildsInternal( + ctx, + apiKey, + workspace, + buildReq, + func(action policy.Action, object rbac.Objecter) bool { + return api.Authorize(r, action, object) + }, + audit.WorkspaceBuildBaggageFromRequest(r), + ) + if err != nil { + httperror.WriteWorkspaceBuildError(ctx, rw, err) + return + } } - data, err := api.workspaceData(ctx, []database.Workspace{workspace}) + _, err := api.Database.DeleteTask(ctx, database.DeleteTaskParams{ + ID: task.ID, + DeletedAt: dbtime.Time(now), + }) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspace resources.", + Message: "Failed to delete task", Detail: err.Error(), }) return } - if len(data.builds) == 0 || len(data.templates) == 0 { - httpapi.ResourceNotFound(rw) - return - } - if data.builds[0].HasAITask == nil || !*data.builds[0].HasAITask { - httpapi.ResourceNotFound(rw) - return - } - - // Construct a request to the workspace build creation handler to - // initiate deletion. - buildReq := codersdk.CreateWorkspaceBuildRequest{ - Transition: codersdk.WorkspaceTransitionDelete, - Reason: "Deleted via tasks API", - } - - _, err = api.postWorkspaceBuildsInternal( - ctx, - apiKey, - workspace, - buildReq, - func(action policy.Action, object rbac.Objecter) bool { - return api.Authorize(r, action, object) - }, - audit.WorkspaceBuildBaggageFromRequest(r), - ) - if err != nil { - httperror.WriteWorkspaceBuildError(ctx, rw, err) - return - } - // Delete build created successfully. + // Task deleted and delete build created successfully. rw.WriteHeader(http.StatusAccepted) } -// taskSend submits task input to the tasks sidebar app by dialing the agent +// @Summary Send input to AI task +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. 
+// @ID send-task-input +// @Security CoderSessionToken +// @Tags Experimental +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param task path string true "Task ID" format(uuid) +// @Param request body codersdk.TaskSendRequest true "Task input request" +// @Success 204 "Input sent successfully" +// @Router /api/experimental/tasks/{user}/{task}/send [post] +// +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. +// taskSend submits task input to the task app by dialing the agent // directly over the tailnet. We enforce ApplicationConnect RBAC on the -// workspace and validate the sidebar app health. +// workspace and validate the task app health. func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - - idStr := chi.URLParam(r, "id") - taskID, err := uuid.Parse(idStr) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf("Invalid UUID %q for task ID.", idStr), - }) - return - } + task := httpmw.TaskParam(r) var req codersdk.TaskSendRequest if !httpapi.Read(ctx, rw, r, &req) { @@ -628,62 +648,38 @@ func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) { return } - if err = api.authAndDoWithTaskSidebarAppClient(r, taskID, func(ctx context.Context, client *http.Client, appURL *url.URL) error { - status, err := agentapiDoStatusRequest(ctx, client, appURL) + if err := api.authAndDoWithTaskAppClient(r, task, func(ctx context.Context, client *http.Client, appURL *url.URL) error { + agentAPIClient, err := aiagentapi.NewClient(appURL.String(), aiagentapi.WithHTTPClient(client)) if err != nil { - return err - } - - if status != "stable" { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Task app is not ready to accept input.", - Detail: fmt.Sprintf("Status: %s", status), + Message: "Failed to create agentapi client.", + Detail: err.Error(), }) } - var reqBody struct 
{ - Content string `json:"content"` - Type string `json:"type"` - } - reqBody.Content = req.Input - reqBody.Type = "user" - - req, err := agentapiNewRequest(ctx, http.MethodPost, appURL, "message", reqBody) - if err != nil { - return err - } - - resp, err := client.Do(req) + statusResp, err := agentAPIClient.GetStatus(ctx) if err != nil { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to reach task app endpoint.", + Message: "Failed to get status from task app.", Detail: err.Error(), }) } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(io.LimitReader(resp.Body, 128)) - return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Task app rejected the message.", - Detail: fmt.Sprintf("Upstream status: %d; Body: %s", resp.StatusCode, body), - }) - } - // {"$schema":"http://localhost:3284/schemas/MessageResponseBody.json","ok":true} - // {"$schema":"http://localhost:3284/schemas/ErrorModel.json","title":"Unprocessable Entity","status":422,"detail":"validation failed","errors":[{"location":"body.type","value":"oof"}]} - var respBody map[string]any - if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { + if statusResp.Status != aiagentapi.StatusStable { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to decode task app response body.", - Detail: err.Error(), + Message: "Task app is not ready to accept input.", + Detail: fmt.Sprintf("Status: %s", statusResp.Status), }) } - if v, ok := respBody["ok"].(bool); !ok || !v { + _, err = agentAPIClient.PostMessage(ctx, aiagentapi.PostMessageParams{ + Content: req.Input, + Type: aiagentapi.MessageTypeUser, + }) + if err != nil { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ Message: "Task app rejected the message.", - Detail: fmt.Sprintf("Upstream response: %v", respBody), + Detail: err.Error(), }) } @@ -696,65 +692,48 @@ 
func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) { rw.WriteHeader(http.StatusNoContent) } +// @Summary Get AI task logs +// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. +// @ID get-task-logs +// @Security CoderSessionToken +// @Tags Experimental +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param task path string true "Task ID" format(uuid) +// @Success 200 {object} codersdk.TaskLogsResponse +// @Router /api/experimental/tasks/{user}/{task}/logs [get] +// +// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. +// taskLogs reads task output by dialing the agent directly over the tailnet. +// We enforce ApplicationConnect RBAC on the workspace and validate the task app health. func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - - idStr := chi.URLParam(r, "id") - taskID, err := uuid.Parse(idStr) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf("Invalid UUID %q for task ID.", idStr), - }) - return - } + task := httpmw.TaskParam(r) var out codersdk.TaskLogsResponse - if err := api.authAndDoWithTaskSidebarAppClient(r, taskID, func(ctx context.Context, client *http.Client, appURL *url.URL) error { - req, err := agentapiNewRequest(ctx, http.MethodGet, appURL, "messages", nil) - if err != nil { - return err - } - - resp, err := client.Do(req) + if err := api.authAndDoWithTaskAppClient(r, task, func(ctx context.Context, client *http.Client, appURL *url.URL) error { + agentAPIClient, err := aiagentapi.NewClient(appURL.String(), aiagentapi.WithHTTPClient(client)) if err != nil { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to reach task app endpoint.", + Message: "Failed to create agentapi client.", Detail: err.Error(), }) } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, 
_ := io.ReadAll(io.LimitReader(resp.Body, 128)) - return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Task app rejected the request.", - Detail: fmt.Sprintf("Upstream status: %d; Body: %s", resp.StatusCode, body), - }) - } - // {"$schema":"http://localhost:3284/schemas/MessagesResponseBody.json","messages":[]} - var respBody struct { - Messages []struct { - ID int `json:"id"` - Content string `json:"content"` - Role string `json:"role"` - Time time.Time `json:"time"` - } `json:"messages"` - } - if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { + messagesResp, err := agentAPIClient.GetMessages(ctx) + if err != nil { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to decode task app response body.", + Message: "Failed to get messages from task app.", Detail: err.Error(), }) } - logs := make([]codersdk.TaskLogEntry, 0, len(respBody.Messages)) - for _, m := range respBody.Messages { + logs := make([]codersdk.TaskLogEntry, 0, len(messagesResp.Messages)) + for _, m := range messagesResp.Messages { var typ codersdk.TaskLogType - switch strings.ToLower(m.Role) { - case "user": + switch m.Role { + case aiagentapi.RoleUser: typ = codersdk.TaskLogTypeInput - case "agent": + case aiagentapi.RoleAgent: typ = codersdk.TaskLogTypeOutput default: return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ @@ -763,7 +742,7 @@ func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) { }) } logs = append(logs, codersdk.TaskLogEntry{ - ID: m.ID, + ID: int(m.Id), Content: m.Content, Type: typ, Time: m.Time, @@ -779,24 +758,40 @@ func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, out) } -// authAndDoWithTaskSidebarAppClient centralizes the shared logic to: +// authAndDoWithTaskAppClient centralizes the shared logic to: // // - Fetch the task workspace // - Authorize ApplicationConnect on the workspace -// - 
Validate the AI task and sidebar app health +// - Validate the AI task and task app health // - Dial the agent and construct an HTTP client to the apps loopback URL // // The provided callback receives the context, an HTTP client that dials via the // agent, and the base app URL (as a value URL) to perform any request. -func (api *API) authAndDoWithTaskSidebarAppClient( +func (api *API) authAndDoWithTaskAppClient( r *http.Request, - taskID uuid.UUID, + task database.Task, do func(ctx context.Context, client *http.Client, appURL *url.URL) error, ) error { ctx := r.Context() - workspaceID := taskID - workspace, err := api.Database.GetWorkspaceByID(ctx, workspaceID) + if task.Status != database.TaskStatusActive { + return httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ + Message: "Task status must be active.", + Detail: fmt.Sprintf("Task status is %q, it must be %q to interact with the task.", task.Status, codersdk.TaskStatusActive), + }) + } + if !task.WorkspaceID.Valid { + return httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ + Message: "Task does not have a workspace.", + }) + } + if !task.WorkspaceAppID.Valid { + return httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ + Message: "Task does not have a workspace app.", + }) + } + + workspace, err := api.Database.GetWorkspaceByID(ctx, task.WorkspaceID.UUID) if err != nil { if httpapi.Is404Error(err) { return httperror.ErrResourceNotFound @@ -812,65 +807,30 @@ func (api *API) authAndDoWithTaskSidebarAppClient( return httperror.ErrResourceNotFound } - data, err := api.workspaceData(ctx, []database.Workspace{workspace}) + apps, err := api.Database.GetWorkspaceAppsByAgentID(ctx, task.WorkspaceAgentID.UUID) if err != nil { return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching workspace resources.", Detail: err.Error(), }) } - if len(data.builds) == 0 || len(data.templates) == 0 { - return 
httperror.ErrResourceNotFound - } - build := data.builds[0] - if build.HasAITask == nil || !*build.HasAITask || build.AITaskSidebarAppID == nil || *build.AITaskSidebarAppID == uuid.Nil { - return httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ - Message: "Task is not configured with a sidebar app.", - }) - } - // Find the sidebar app details to get the URL and validate app health. - sidebarAppID := *build.AITaskSidebarAppID - agentID, sidebarApp, ok := func() (uuid.UUID, codersdk.WorkspaceApp, bool) { - for _, res := range build.Resources { - for _, agent := range res.Agents { - for _, app := range agent.Apps { - if app.ID == sidebarAppID { - return agent.ID, app, true - } - } - } + var app *database.WorkspaceApp + for _, a := range apps { + if a.ID == task.WorkspaceAppID.UUID { + app = &a + break } - return uuid.Nil, codersdk.WorkspaceApp{}, false - }() - if !ok { - return httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ - Message: "Task sidebar app not found in latest build.", - }) - } - - // Return an informative error if the app isn't healthy rather than trying - // and failing. - switch sidebarApp.Health { - case codersdk.WorkspaceAppHealthDisabled: - // No health check, pass through. - case codersdk.WorkspaceAppHealthInitializing: - return httperror.NewResponseError(http.StatusServiceUnavailable, codersdk.Response{ - Message: "Task sidebar app is initializing. Try again shortly.", - }) - case codersdk.WorkspaceAppHealthUnhealthy: - return httperror.NewResponseError(http.StatusServiceUnavailable, codersdk.Response{ - Message: "Task sidebar app is unhealthy.", - }) } // Build the direct app URL and dial the agent. 
- if sidebarApp.URL == "" { + appURL := app.Url.String + if appURL == "" { return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ - Message: "Task sidebar app URL is not configured.", + Message: "Task app URL is not configured.", }) } - parsedURL, err := url.Parse(sidebarApp.URL) + parsedURL, err := url.Parse(appURL) if err != nil { return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ Message: "Internal error parsing task app URL.", @@ -885,7 +845,7 @@ func (api *API) authAndDoWithTaskSidebarAppClient( dialCtx, dialCancel := context.WithTimeout(ctx, time.Second*30) defer dialCancel() - agentConn, release, err := api.agentProvider.AgentConn(dialCtx, agentID) + agentConn, release, err := api.agentProvider.AgentConn(dialCtx, task.WorkspaceAgentID.UUID) if err != nil { return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ Message: "Failed to reach task app endpoint.", @@ -903,69 +863,3 @@ func (api *API) authAndDoWithTaskSidebarAppClient( } return do(ctx, client, parsedURL) } - -func agentapiNewRequest(ctx context.Context, method string, appURL *url.URL, appURLPath string, body any) (*http.Request, error) { - u := *appURL - u.Path = path.Join(appURL.Path, appURLPath) - - var bodyReader io.Reader - if body != nil { - b, err := json.Marshal(body) - if err != nil { - return nil, httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ - Message: "Failed to marshal task app request body.", - Detail: err.Error(), - }) - } - bodyReader = bytes.NewReader(b) - } - - req, err := http.NewRequestWithContext(ctx, method, u.String(), bodyReader) - if err != nil { - return nil, httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{ - Message: "Failed to create task app request.", - Detail: err.Error(), - }) - } - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Accept", "application/json") - - return req, nil -} - -func agentapiDoStatusRequest(ctx 
context.Context, client *http.Client, appURL *url.URL) (string, error) { - req, err := agentapiNewRequest(ctx, http.MethodGet, appURL, "status", nil) - if err != nil { - return "", err - } - - resp, err := client.Do(req) - if err != nil { - return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to reach task app endpoint.", - Detail: err.Error(), - }) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Task app status returned an error.", - Detail: fmt.Sprintf("Status code: %d", resp.StatusCode), - }) - } - - // {"$schema":"http://localhost:3284/schemas/StatusResponseBody.json","status":"stable"} - var respBody struct { - Status string `json:"status"` - } - - if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { - return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{ - Message: "Failed to decode task app status response body.", - Detail: err.Error(), - }) - } - - return respBody.Status, nil -} diff --git a/coderd/aitasks_test.go b/coderd/aitasks_test.go index 20b33e9314da4..5fb24d7b1f546 100644 --- a/coderd/aitasks_test.go +++ b/coderd/aitasks_test.go @@ -1,18 +1,22 @@ package coderd_test import ( + "context" "database/sql" - "fmt" + "encoding/json" "io" "net/http" "net/http/httptest" + "strings" "testing" "time" + "unicode/utf8" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + agentapisdk "github.com/coder/agentapi-sdk-go" "github.com/coder/coder/v2/agent" "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" @@ -20,7 +24,6 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" - "github.com/coder/coder/v2/coderd/database/dbtestutil" 
"github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/notifications/notificationstest" @@ -52,10 +55,6 @@ func TestAITasksPrompts(t *testing.T) { t.Run("MultipleBuilds", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test checks RBAC, which is not supported in the in-memory database") - } - adminClient := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) first := coderdtest.CreateFirstUser(t, adminClient) memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, first.OrganizationID) @@ -213,8 +212,8 @@ func TestTasks(t *testing.T) { Apps: []*proto.App{ { Id: taskAppID.String(), - Slug: "task-sidebar", - DisplayName: "Task Sidebar", + Slug: "task-app", + DisplayName: "Task App", Url: opt.appURL, }, }, @@ -224,9 +223,7 @@ func TestTasks(t *testing.T) { }, AiTasks: []*proto.AITask{ { - SidebarApp: &proto.AITaskSidebarApp{ - Id: taskAppID.String(), - }, + AppId: taskAppID.String(), }, }, }, @@ -249,27 +246,36 @@ func TestTasks(t *testing.T) { template := createAITemplate(t, client, user) - // Create a workspace (task) with a specific prompt. + // Create a task with a specific prompt using the new data model. wantPrompt := "build me a web app" - workspace := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: wantPrompt}, - } + exp := codersdk.NewExperimentalClient(client) + task, err := exp.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: wantPrompt, }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + + // Wait for the workspace to be built. 
+ workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + if assert.True(t, workspace.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, workspace.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) // List tasks via experimental API and verify the prompt and status mapping. - exp := codersdk.NewExperimentalClient(client) tasks, err := exp.Tasks(ctx, &codersdk.TasksFilter{Owner: codersdk.Me}) require.NoError(t, err) - got, ok := slice.Find(tasks, func(task codersdk.Task) bool { return task.ID == workspace.ID }) + got, ok := slice.Find(tasks, func(t codersdk.Task) bool { return t.ID == task.ID }) require.True(t, ok, "task should be found in the list") assert.Equal(t, wantPrompt, got.InitialPrompt, "task prompt should match the AI Prompt parameter") - assert.Equal(t, workspace.Name, got.Name, "task name should map from workspace name") - assert.Equal(t, workspace.ID, got.WorkspaceID.UUID, "workspace id should match") - // Status should be populated via app status or workspace status mapping. + assert.Equal(t, task.WorkspaceID.UUID, got.WorkspaceID.UUID, "workspace id should match") + assert.Equal(t, task.WorkspaceName, got.WorkspaceName, "workspace name should match") + // Status should be populated via the tasks_with_status view. assert.NotEmpty(t, got.Status, "task status should not be empty") + assert.NotEmpty(t, got.WorkspaceStatus, "workspace status should not be empty") }) t.Run("Get", func(t *testing.T) { @@ -280,17 +286,25 @@ func TestTasks(t *testing.T) { ctx = testutil.Context(t, testutil.WaitLong) user = coderdtest.CreateFirstUser(t, client) template = createAITemplate(t, client, user) - // Create a workspace (task) with a specific prompt. 
wantPrompt = "review my code" - workspace = coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: wantPrompt}, - } - }) + exp = codersdk.NewExperimentalClient(client) ) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) - ws := coderdtest.MustWorkspace(t, client, workspace.ID) + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: wantPrompt, + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid) + + // Get the workspace and wait for it to be ready. + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + ws = coderdtest.MustWorkspace(t, client, task.WorkspaceID.UUID) // Assert invariant: the workspace has exactly one resource with one agent with one app. require.Len(t, ws.LatestBuild.Resources, 1) require.Len(t, ws.LatestBuild.Resources[0].Agents, 1) @@ -298,9 +312,9 @@ func TestTasks(t *testing.T) { taskAppID := ws.LatestBuild.Resources[0].Agents[0].Apps[0].ID // Insert an app status for the workspace - _, err := db.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ + _, err = db.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ ID: uuid.New(), - WorkspaceID: workspace.ID, + WorkspaceID: task.WorkspaceID.UUID, CreatedAt: dbtime.Now(), AgentID: agentID, AppID: taskAppID, @@ -310,31 +324,34 @@ func TestTasks(t *testing.T) { require.NoError(t, err) // Fetch the task by ID via experimental API and verify fields. 
- exp := codersdk.NewExperimentalClient(client) - task, err := exp.TaskByID(ctx, workspace.ID) + updated, err := exp.TaskByID(ctx, task.ID) require.NoError(t, err) - assert.Equal(t, workspace.ID, task.ID, "task ID should match workspace ID") - assert.Equal(t, workspace.Name, task.Name, "task name should map from workspace name") - assert.Equal(t, wantPrompt, task.InitialPrompt, "task prompt should match the AI Prompt parameter") - assert.Equal(t, workspace.ID, task.WorkspaceID.UUID, "workspace id should match") - assert.NotEmpty(t, task.Status, "task status should not be empty") + assert.Equal(t, task.ID, updated.ID, "task ID should match") + assert.Equal(t, task.Name, updated.Name, "task name should match") + assert.Equal(t, wantPrompt, updated.InitialPrompt, "task prompt should match the AI Prompt parameter") + assert.Equal(t, task.WorkspaceID.UUID, updated.WorkspaceID.UUID, "workspace id should match") + assert.Equal(t, task.WorkspaceName, updated.WorkspaceName, "workspace name should match") + assert.Equal(t, ws.LatestBuild.BuildNumber, updated.WorkspaceBuildNumber, "workspace build number should match") + assert.Equal(t, agentID, updated.WorkspaceAgentID.UUID, "workspace agent id should match") + assert.Equal(t, taskAppID, updated.WorkspaceAppID.UUID, "workspace app id should match") + assert.NotEmpty(t, updated.WorkspaceStatus, "task status should not be empty") // Stop the workspace - coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + coderdtest.MustTransitionWorkspace(t, client, task.WorkspaceID.UUID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) // Verify that the previous status still remains - updated, err := exp.TaskByID(ctx, workspace.ID) + updated, err = exp.TaskByID(ctx, task.ID) require.NoError(t, err) assert.NotNil(t, updated.CurrentState, "current state should not be nil") assert.Equal(t, "all done", updated.CurrentState.Message) assert.Equal(t, 
codersdk.TaskStateComplete, updated.CurrentState.State) // Start the workspace again - coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStop, codersdk.WorkspaceTransitionStart) + coderdtest.MustTransitionWorkspace(t, client, task.WorkspaceID.UUID, codersdk.WorkspaceTransitionStop, codersdk.WorkspaceTransitionStart) // Verify that the status from the previous build is no longer present - updated, err = exp.TaskByID(ctx, workspace.ID) + updated, err = exp.TaskByID(ctx, task.ID) require.NoError(t, err) assert.Nil(t, updated.CurrentState, "current state should be nil") }) @@ -357,26 +374,26 @@ func TestTasks(t *testing.T) { Input: "delete me", }) require.NoError(t, err) - ws, err := client.Workspace(ctx, task.ID) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) err = exp.DeleteTask(ctx, "me", task.ID) require.NoError(t, err, "delete task request should be accepted") // Poll until the workspace is deleted. 
- for { - dws, derr := client.DeletedWorkspace(ctx, task.ID) - if derr == nil && dws.LatestBuild.Status == codersdk.WorkspaceStatusDeleted { - break - } - if ctx.Err() != nil { - require.NoError(t, derr, "expected to fetch deleted workspace before deadline") - require.Equal(t, codersdk.WorkspaceStatusDeleted, dws.LatestBuild.Status, "workspace should be deleted before deadline") - break + testutil.Eventually(ctx, t, func(ctx context.Context) (done bool) { + dws, derr := client.DeletedWorkspace(ctx, task.WorkspaceID.UUID) + if !assert.NoError(t, derr, "expected to fetch deleted workspace before deadline") { + return false } - time.Sleep(testutil.IntervalMedium) - } + t.Logf("workspace latest_build status: %q", dws.LatestBuild.Status) + return dws.LatestBuild.Status == codersdk.WorkspaceStatusDeleted + }, testutil.IntervalMedium, "workspace should be deleted before deadline") }) t.Run("NotFound", func(t *testing.T) { @@ -409,6 +426,9 @@ func TestTasks(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) ws := coderdtest.CreateWorkspace(t, client, template.ID) + if assert.False(t, ws.TaskID.Valid, "task id should not be set on non-task workspace") { + assert.Zero(t, ws.TaskID, "non-task workspace task id should be empty") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) exp := codersdk.NewExperimentalClient(client) @@ -437,7 +457,8 @@ func TestTasks(t *testing.T) { Input: "delete me not", }) require.NoError(t, err) - ws, err := client.Workspace(ctx, task.ID) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) @@ -456,6 +477,72 @@ func TestTasks(t *testing.T) { t.Fatalf("unexpected status code: %d (expected 403 or 404)", authErr.StatusCode()) } }) + + 
t.Run("DeletedWorkspace", func(t *testing.T) { + t.Parallel() + + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + template := createAITemplate(t, client, user) + ctx := testutil.Context(t, testutil.WaitLong) + exp := codersdk.NewExperimentalClient(client) + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "delete me", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + // Mark the workspace as deleted directly in the database, bypassing provisionerd. + require.NoError(t, db.UpdateWorkspaceDeletedByID(dbauthz.AsProvisionerd(ctx), database.UpdateWorkspaceDeletedByIDParams{ + ID: ws.ID, + Deleted: true, + })) + // We should still be able to fetch the task if its workspace was deleted. + // Provisionerdserver will attempt delete the related task when deleting a workspace. + // This test ensures that we can still handle the case where, for some reason, the + // task has not been marked as deleted, but the workspace has. 
+ task, err = exp.TaskByID(ctx, task.ID) + require.NoError(t, err, "fetching a task should still work if its related workspace is deleted") + err = exp.DeleteTask(ctx, task.OwnerID.String(), task.ID) + require.NoError(t, err, "should be possible to delete a task with no workspace") + }) + + t.Run("DeletingTaskWorkspaceDeletesTask", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + template := createAITemplate(t, client, user) + + ctx := testutil.Context(t, testutil.WaitLong) + + exp := codersdk.NewExperimentalClient(client) + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "delete me", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + // When; the task workspace is deleted + coderdtest.MustTransitionWorkspace(t, client, ws.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionDelete) + // Then: the task associated with the workspace is also deleted + _, err = exp.TaskByID(ctx, task.ID) + require.Error(t, err, "expected an error fetching the task") + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr, "expected a codersdk.Error") + require.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) + }) }) t.Run("Send", func(t *testing.T) { @@ -464,36 +551,37 @@ func TestTasks(t *testing.T) { t.Run("IntegrationOK", func(t *testing.T) { t.Parallel() - client, _, api := coderdtest.NewWithAPI(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - owner := coderdtest.CreateFirstUser(t, 
client) - userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - - createStatusResponse := func(status string) string { - return ` - { - "$schema": "http://localhost:3284/schemas/StatusResponseBody.json", - "status": "` + status + `" - } - ` - } - statusResponse := createStatusResponse("stable") + statusResponse := agentapisdk.StatusStable // Start a fake AgentAPI that accepts GET /status and POST /message. srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method == http.MethodGet && r.URL.Path == "/status" { w.Header().Set("Content-Type", "application/json") + resp := agentapisdk.GetStatusResponse{ + Status: statusResponse, + } + respBytes, err := json.Marshal(resp) + assert.NoError(t, err) w.WriteHeader(http.StatusOK) - _, _ = fmt.Fprint(w, statusResponse) + w.Write(respBytes) return } if r.Method == http.MethodPost && r.URL.Path == "/message" { w.Header().Set("Content-Type", "application/json") b, _ := io.ReadAll(r.Body) - assert.Equal(t, `{"content":"Hello, Agent!","type":"user"}`, string(b), "expected message content") + expectedReq := agentapisdk.PostMessageParams{ + Content: "Hello, Agent!", + Type: agentapisdk.MessageTypeUser, + } + expectedBytes, _ := json.Marshal(expectedReq) + assert.Equal(t, string(expectedBytes), string(b), "expected message content") + resp := agentapisdk.PostMessageResponse{Ok: true} + respBytes, err := json.Marshal(resp) + assert.NoError(t, err) w.WriteHeader(http.StatusOK) - io.WriteString(w, `{"ok": true}`) + w.Write(respBytes) return } w.WriteHeader(http.StatusInternalServerError) @@ -501,103 +589,105 @@ func TestTasks(t *testing.T) { defer srv.Close() // Create an AI-capable template whose sidebar app points to our fake AgentAPI. 
- authToken := uuid.NewString() - template := createAITemplate(t, client, owner, withSidebarURL(srv.URL), withAgentToken(authToken)) + var ( + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + ctx = testutil.Context(t, testutil.WaitLong) + owner = coderdtest.CreateFirstUser(t, client) + userClient, _ = coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + agentAuthToken = uuid.NewString() + template = createAITemplate(t, client, owner, withAgentToken(agentAuthToken), withSidebarURL(srv.URL)) + exp = codersdk.NewExperimentalClient(userClient) + ) - // Create a workspace (task) from the AI-capable template. - ws := coderdtest.CreateWorkspace(t, userClient, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: "send a message"}, - } + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "send me food", }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid) + + // Get the workspace and wait for it to be ready. + ws, err := userClient.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, ws.LatestBuild.ID) + + // Fetch the task by ID via experimental API and verify fields. 
+ task, err = exp.TaskByID(ctx, task.ID) + require.NoError(t, err) + require.NotZero(t, task.WorkspaceBuildNumber) + require.True(t, task.WorkspaceAgentID.Valid) + require.True(t, task.WorkspaceAppID.Valid) + + // Insert an app status for the workspace + _, err = db.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ + ID: uuid.New(), + WorkspaceID: task.WorkspaceID.UUID, + CreatedAt: dbtime.Now(), + AgentID: task.WorkspaceAgentID.UUID, + AppID: task.WorkspaceAppID.UUID, + State: database.WorkspaceAppStatusStateComplete, + Message: "all done", + }) + require.NoError(t, err) // Start a fake agent so the workspace agent is connected before sending the message. - agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(authToken)) - _ = agenttest.New(t, client.URL, authToken, func(o *agent.Options) { + agentClient := agentsdk.New(userClient.URL, agentsdk.WithFixedToken(agentAuthToken)) + _ = agenttest.New(t, userClient.URL, agentAuthToken, func(o *agent.Options) { o.Client = agentClient }) - coderdtest.NewWorkspaceAgentWaiter(t, client, ws.ID).WaitFor(coderdtest.AgentsReady) + coderdtest.NewWorkspaceAgentWaiter(t, userClient, ws.ID).WaitFor(coderdtest.AgentsReady) - ctx := testutil.Context(t, testutil.WaitMedium) - - // Lookup the sidebar app ID. - w, err := client.Workspace(ctx, ws.ID) + // Fetch the task by ID via experimental API and verify fields. + task, err = exp.TaskByID(ctx, task.ID) require.NoError(t, err) - var sidebarAppID uuid.UUID - for _, res := range w.LatestBuild.Resources { - for _, ag := range res.Agents { - for _, app := range ag.Apps { - if app.Slug == "task-sidebar" { - sidebarAppID = app.ID - } - } - } - } - require.NotEqual(t, uuid.Nil, sidebarAppID) // Make the sidebar app unhealthy initially. 
- err = api.Database.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ - ID: sidebarAppID, + err = db.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ + ID: task.WorkspaceAppID.UUID, Health: database.WorkspaceAppHealthUnhealthy, }) require.NoError(t, err) - exp := codersdk.NewExperimentalClient(userClient) - err = exp.TaskSend(ctx, "me", ws.ID, codersdk.TaskSendRequest{ + err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "Hello, Agent!", }) require.Error(t, err, "wanted error due to unhealthy sidebar app") // Make the sidebar app healthy. - err = api.Database.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ - ID: sidebarAppID, + err = db.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ + ID: task.WorkspaceAppID.UUID, Health: database.WorkspaceAppHealthHealthy, }) require.NoError(t, err) - statusResponse = createStatusResponse("bad") + statusResponse = agentapisdk.AgentStatus("bad") - err = exp.TaskSend(ctx, "me", ws.ID, codersdk.TaskSendRequest{ + err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "Hello, Agent!", }) require.Error(t, err, "wanted error due to bad status") - statusResponse = createStatusResponse("stable") + statusResponse = agentapisdk.StatusStable - // Send task input to the tasks sidebar app and expect 204.e - err = exp.TaskSend(ctx, "me", ws.ID, codersdk.TaskSendRequest{ - Input: "Hello, Agent!", + //nolint:tparallel // Not intended to run in parallel. 
+ t.Run("SendOK", func(t *testing.T) { + err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + Input: "Hello, Agent!", + }) + require.NoError(t, err, "wanted no error due to healthy sidebar app and stable status") }) - require.NoError(t, err, "wanted no error due to healthy sidebar app and stable status") - }) - - t.Run("MissingContent", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - user := coderdtest.CreateFirstUser(t, client) - ctx := testutil.Context(t, testutil.WaitLong) - - template := createAITemplate(t, client, user) - - // Create a workspace (task). - ws := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: "do work"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + //nolint:tparallel // Not intended to run in parallel. 
+ t.Run("MissingContent", func(t *testing.T) { + err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + Input: "", + }) + require.Error(t, err, "wanted error due to missing content") - exp := codersdk.NewExperimentalClient(client) - err := exp.TaskSend(ctx, "me", ws.ID, codersdk.TaskSendRequest{ - Input: "", + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) }) - - var sdkErr *codersdk.Error - require.Error(t, err) - require.ErrorAs(t, err, &sdkErr) - require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) }) t.Run("TaskNotFound", func(t *testing.T) { @@ -617,106 +707,112 @@ func TestTasks(t *testing.T) { require.ErrorAs(t, err, &sdkErr) require.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) }) - - t.Run("NotATask", func(t *testing.T) { - t.Parallel() - - client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - user := coderdtest.CreateFirstUser(t, client) - ctx := testutil.Context(t, testutil.WaitShort) - - // Create a template without AI tasks. 
- version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) - coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - - ws := coderdtest.CreateWorkspace(t, client, template.ID) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - - exp := codersdk.NewExperimentalClient(client) - err := exp.TaskSend(ctx, "me", ws.ID, codersdk.TaskSendRequest{ - Input: "hello", - }) - - var sdkErr *codersdk.Error - require.Error(t, err) - require.ErrorAs(t, err, &sdkErr) - require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) - }) }) t.Run("Logs", func(t *testing.T) { t.Parallel() - t.Run("OK", func(t *testing.T) { - t.Parallel() + messageResponseData := agentapisdk.GetMessagesResponse{ + Messages: []agentapisdk.Message{ + { + Id: 0, + Content: "Welcome, user!", + Role: agentapisdk.RoleAgent, + Time: time.Date(2025, 9, 25, 10, 42, 48, 0, time.UTC), + }, + { + Id: 1, + Content: "Hello, agent!", + Role: agentapisdk.RoleUser, + Time: time.Date(2025, 9, 25, 10, 46, 42, 0, time.UTC), + }, + { + Id: 2, + Content: "What would you like to work on today?", + Role: agentapisdk.RoleAgent, + Time: time.Date(2025, 9, 25, 10, 46, 50, 0, time.UTC), + }, + }, + } + messageResponseBytes, err := json.Marshal(messageResponseData) + require.NoError(t, err) + messageResponse := string(messageResponseBytes) - client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - owner := coderdtest.CreateFirstUser(t, client) - ctx := testutil.Context(t, testutil.WaitLong) + var shouldReturnError bool - messageResponse := ` - { - "$schema": "http://localhost:3284/schemas/MessagesResponseBody.json", - "messages": [ - { - "id": 0, - "content": "Welcome, user!", - "role": "agent", - "time": "2025-09-25T10:42:48.751774125Z" - }, - { - "id": 1, - "content": "Hello, agent!", - "role": "user", - "time": "2025-09-25T10:46:42.880996296Z" - }, - { - "id": 
2, - "content": "What would you like to work on today?", - "role": "agent", - "time": "2025-09-25T10:46:50.747761102Z" - } - ] - } - ` + // Fake AgentAPI that returns a couple of messages or an error. + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if shouldReturnError { + w.WriteHeader(http.StatusInternalServerError) + _, _ = io.WriteString(w, "boom") + return + } + if r.Method == http.MethodGet && r.URL.Path == "/messages" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + io.WriteString(w, messageResponse) + return + } + w.WriteHeader(http.StatusNotFound) + })) + defer srv.Close() - // Fake AgentAPI that returns a couple of messages. - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method == http.MethodGet && r.URL.Path == "/messages" { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - io.WriteString(w, messageResponse) - return - } - w.WriteHeader(http.StatusNotFound) - })) - t.Cleanup(srv.Close) + // Create an AI-capable template whose sidebar app points to our fake AgentAPI. + var ( + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + ctx = testutil.Context(t, testutil.WaitLong) + owner = coderdtest.CreateFirstUser(t, client) + agentAuthToken = uuid.NewString() + template = createAITemplate(t, client, owner, withAgentToken(agentAuthToken), withSidebarURL(srv.URL)) + exp = codersdk.NewExperimentalClient(client) + ) - // Template pointing sidebar app to our fake AgentAPI. - authToken := uuid.NewString() - template := createAITemplate(t, client, owner, withSidebarURL(srv.URL), withAgentToken(authToken)) + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "show logs", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid) - // Create task workspace. 
- ws := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: "show logs"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + // Get the workspace and wait for it to be ready. + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - // Start a fake agent. - agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(authToken)) - _ = agenttest.New(t, client.URL, authToken, func(o *agent.Options) { - o.Client = agentClient - }) - coderdtest.NewWorkspaceAgentWaiter(t, client, ws.ID).WaitFor(coderdtest.AgentsReady) + // Fetch the task by ID via experimental API and verify fields. + task, err = exp.TaskByID(ctx, task.ID) + require.NoError(t, err) + require.NotZero(t, task.WorkspaceBuildNumber) + require.True(t, task.WorkspaceAgentID.Valid) + require.True(t, task.WorkspaceAppID.Valid) - // Omit sidebar app health as undefined is OK. + // Insert an app status for the workspace + _, err = db.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ + ID: uuid.New(), + WorkspaceID: task.WorkspaceID.UUID, + CreatedAt: dbtime.Now(), + AgentID: task.WorkspaceAgentID.UUID, + AppID: task.WorkspaceAppID.UUID, + State: database.WorkspaceAppStatusStateComplete, + Message: "all done", + }) + require.NoError(t, err) + // Start a fake agent so the workspace agent is connected before fetching logs. + agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(agentAuthToken)) + _ = agenttest.New(t, client.URL, agentAuthToken, func(o *agent.Options) { + o.Client = agentClient + }) + coderdtest.NewWorkspaceAgentWaiter(t, client, ws.ID).WaitFor(coderdtest.AgentsReady) + + // Fetch the task by ID via experimental API and verify fields. 
+ task, err = exp.TaskByID(ctx, task.ID) + require.NoError(t, err) + + //nolint:tparallel // Not intended to run in parallel. + t.Run("OK", func(t *testing.T) { // Fetch logs. - exp := codersdk.NewExperimentalClient(client) - resp, err := exp.TaskLogs(ctx, "me", ws.ID) + resp, err := exp.TaskLogs(ctx, "me", task.ID) require.NoError(t, err) require.Len(t, resp.Logs, 3) assert.Equal(t, 0, resp.Logs[0].ID) @@ -732,38 +828,11 @@ func TestTasks(t *testing.T) { assert.Equal(t, "What would you like to work on today?", resp.Logs[2].Content) }) + //nolint:tparallel // Not intended to run in parallel. t.Run("UpstreamError", func(t *testing.T) { - t.Parallel() - - client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - owner := coderdtest.CreateFirstUser(t, client) - ctx := testutil.Context(t, testutil.WaitShort) - - // Fake AgentAPI that returns 500 for messages. - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - _, _ = io.WriteString(w, "boom") - })) - t.Cleanup(srv.Close) - - authToken := uuid.NewString() - template := createAITemplate(t, client, owner, withSidebarURL(srv.URL), withAgentToken(authToken)) - ws := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { - req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: "show logs"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - - // Start fake agent. 
- agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(authToken)) - _ = agenttest.New(t, client.URL, authToken, func(o *agent.Options) { - o.Client = agentClient - }) - coderdtest.NewWorkspaceAgentWaiter(t, client, ws.ID).WaitFor(coderdtest.AgentsReady) - - exp := codersdk.NewExperimentalClient(client) - _, err := exp.TaskLogs(ctx, "me", ws.ID) + shouldReturnError = true + t.Cleanup(func() { shouldReturnError = false }) + _, err := exp.TaskLogs(ctx, "me", task.ID) var sdkErr *codersdk.Error require.Error(t, err) @@ -794,7 +863,7 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: "AI Prompt", Type: "string"}}, + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -867,7 +936,7 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: "AI Prompt", Type: "string"}}, + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -963,7 +1032,212 @@ func TestTasksCreate(t *testing.T) { var sdkErr *codersdk.Error require.Error(t, err) require.ErrorAsf(t, err, &sdkErr, "error should be of type *codersdk.Error") - assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) + assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + }) + + t.Run("TaskTableCreatedAndLinked", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + taskPrompt = "Create a REST API" + ) + + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create a template with AI task support to test the new task data 
model. + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: taskPrompt, + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid) + + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + // Verify that the task was created in the tasks table with the correct + // fields. This ensures the data model properly separates task records + // from workspace records. + dbCtx := dbauthz.AsSystemRestricted(ctx) + dbTask, err := db.GetTaskByID(dbCtx, task.ID) + require.NoError(t, err) + assert.Equal(t, user.OrganizationID, dbTask.OrganizationID) + assert.Equal(t, user.UserID, dbTask.OwnerID) + assert.Equal(t, task.Name, dbTask.Name) + assert.True(t, dbTask.WorkspaceID.Valid) + assert.Equal(t, ws.ID, dbTask.WorkspaceID.UUID) + assert.Equal(t, version.ID, dbTask.TemplateVersionID) + assert.Equal(t, taskPrompt, dbTask.Prompt) + assert.False(t, dbTask.DeletedAt.Valid) + + // Verify the bidirectional relationship works by looking up the task + // via workspace ID. 
+ dbTaskByWs, err := db.GetTaskByWorkspaceID(dbCtx, ws.ID) + require.NoError(t, err) + assert.Equal(t, dbTask.ID, dbTaskByWs.ID) + }) + + t.Run("TaskWithCustomName", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + taskPrompt = "Build a dashboard" + taskName = "my-custom-task" + ) + + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: taskPrompt, + Name: taskName, + }) + require.NoError(t, err) + require.Equal(t, taskName, task.Name) + + // Verify the custom name is preserved in the database record. 
+ dbCtx := dbauthz.AsSystemRestricted(ctx) + dbTask, err := db.GetTaskByID(dbCtx, task.ID) + require.NoError(t, err) + assert.Equal(t, taskName, dbTask.Name) + }) + + t.Run("MultipleTasksForSameUser", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + task1, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "First task", + Name: "task-1", + }) + require.NoError(t, err) + + task2, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "Second task", + Name: "task-2", + }) + require.NoError(t, err) + + // Verify both tasks are stored independently and can be listed together. 
+ dbCtx := dbauthz.AsSystemRestricted(ctx) + tasks, err := db.ListTasks(dbCtx, database.ListTasksParams{ + OwnerID: user.UserID, + OrganizationID: uuid.Nil, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(tasks), 2) + + taskIDs := make(map[uuid.UUID]bool) + for _, task := range tasks { + taskIDs[task.ID] = true + } + assert.True(t, taskIDs[task1.ID], "task1 should be in the list") + assert.True(t, taskIDs[task2.ID], "task2 should be in the list") + }) + + t.Run("TaskLinkedToCorrectTemplateVersion", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + version1 := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version1.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version1.ID) + + version2 := coderdtest.UpdateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }, template.ID) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version2.ID) + + expClient := codersdk.NewExperimentalClient(client) + + // Create a task using version 2 to verify the template_version_id is + // stored correctly. 
+ task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: version2.ID, + Input: "Use version 2", + }) + require.NoError(t, err) + + // Verify the task references the correct template version, not just the + // active one. + dbCtx := dbauthz.AsSystemRestricted(ctx) + dbTask, err := db.GetTaskByID(dbCtx, task.ID) + require.NoError(t, err) + assert.Equal(t, version2.ID, dbTask.TemplateVersionID, "task should be linked to version 2") }) } @@ -977,6 +1251,7 @@ func TestTasksNotification(t *testing.T) { isAITask bool isNotificationSent bool notificationTemplate uuid.UUID + taskPrompt string }{ // Should not send a notification when the agent app is not an AI task. { @@ -985,6 +1260,7 @@ func TestTasksNotification(t *testing.T) { newAppStatus: codersdk.WorkspaceAppStatusStateWorking, isAITask: false, isNotificationSent: false, + taskPrompt: "NoAITask", }, // Should not send a notification when the new app status is neither 'Working' nor 'Idle'. { @@ -993,6 +1269,7 @@ func TestTasksNotification(t *testing.T) { newAppStatus: codersdk.WorkspaceAppStatusStateComplete, isAITask: true, isNotificationSent: false, + taskPrompt: "NonNotifiedState", }, // Should not send a notification when the new app status equals the latest status (Working). { @@ -1001,15 +1278,27 @@ func TestTasksNotification(t *testing.T) { newAppStatus: codersdk.WorkspaceAppStatusStateWorking, isAITask: true, isNotificationSent: false, + taskPrompt: "NonNotifiedTransition", }, - // Should send TemplateTaskWorking when the AI task transitions to 'Working'. + // Should NOT send TemplateTaskWorking when the AI task's FIRST status is 'Working' (obvious state). 
{ name: "TemplateTaskWorking", latestAppStatuses: nil, newAppStatus: codersdk.WorkspaceAppStatusStateWorking, isAITask: true, - isNotificationSent: true, + isNotificationSent: false, notificationTemplate: notifications.TemplateTaskWorking, + taskPrompt: "TemplateTaskWorking", + }, + // Should send TemplateTaskIdle when the AI task's FIRST status is 'Idle' (task completed immediately). + { + name: "InitialTemplateTaskIdle", + latestAppStatuses: nil, + newAppStatus: codersdk.WorkspaceAppStatusStateIdle, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskIdle, + taskPrompt: "InitialTemplateTaskIdle", }, // Should send TemplateTaskWorking when the AI task transitions to 'Working' from 'Idle'. { @@ -1022,6 +1311,7 @@ func TestTasksNotification(t *testing.T) { isAITask: true, isNotificationSent: true, notificationTemplate: notifications.TemplateTaskWorking, + taskPrompt: "TemplateTaskWorkingFromIdle", }, // Should send TemplateTaskIdle when the AI task transitions to 'Idle'. { @@ -1031,6 +1321,75 @@ func TestTasksNotification(t *testing.T) { isAITask: true, isNotificationSent: true, notificationTemplate: notifications.TemplateTaskIdle, + taskPrompt: "TemplateTaskIdle", + }, + // Long task prompts should be truncated to 160 characters. + { + name: "LongTaskPrompt", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking}, + newAppStatus: codersdk.WorkspaceAppStatusStateIdle, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskIdle, + taskPrompt: "This is a very long task prompt that should be truncated to 160 characters. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", + }, + // Should send TemplateTaskCompleted when the AI task transitions to 'Complete'. 
+ { + name: "TemplateTaskCompleted", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking}, + newAppStatus: codersdk.WorkspaceAppStatusStateComplete, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskCompleted, + taskPrompt: "TemplateTaskCompleted", + }, + // Should send TemplateTaskFailed when the AI task transitions to 'Failure'. + { + name: "TemplateTaskFailed", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking}, + newAppStatus: codersdk.WorkspaceAppStatusStateFailure, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskFailed, + taskPrompt: "TemplateTaskFailed", + }, + // Should send TemplateTaskCompleted when the AI task transitions from 'Idle' to 'Complete'. + { + name: "TemplateTaskCompletedFromIdle", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateIdle}, + newAppStatus: codersdk.WorkspaceAppStatusStateComplete, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskCompleted, + taskPrompt: "TemplateTaskCompletedFromIdle", + }, + // Should send TemplateTaskFailed when the AI task transitions from 'Idle' to 'Failure'. + { + name: "TemplateTaskFailedFromIdle", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateIdle}, + newAppStatus: codersdk.WorkspaceAppStatusStateFailure, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskFailed, + taskPrompt: "TemplateTaskFailedFromIdle", + }, + // Should NOT send notification when transitioning from 'Complete' to 'Complete' (no change). 
+ { + name: "NoNotificationCompleteToComplete", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateComplete}, + newAppStatus: codersdk.WorkspaceAppStatusStateComplete, + isAITask: true, + isNotificationSent: false, + taskPrompt: "NoNotificationCompleteToComplete", + }, + // Should NOT send notification when transitioning from 'Failure' to 'Failure' (no change). + { + name: "NoNotificationFailureToFailure", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateFailure}, + newAppStatus: codersdk.WorkspaceAppStatusStateFailure, + isAITask: true, + isNotificationSent: false, + taskPrompt: "NoNotificationFailureToFailure", }, } { t.Run(tc.name, func(t *testing.T) { @@ -1067,7 +1426,7 @@ func TestTasksNotification(t *testing.T) { }).Seed(workspaceBuildSeed).Params(database.WorkspaceBuildParameter{ WorkspaceBuildID: workspaceBuildID, Name: codersdk.AITaskPromptParameterName, - Value: "task prompt", + Value: tc.taskPrompt, }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { agent[0].Apps = []*proto.App{{ Id: workspaceAgentAppID.String(), @@ -1115,7 +1474,13 @@ func TestTasksNotification(t *testing.T) { require.Len(t, sent, 1) require.Equal(t, memberUser.ID, sent[0].UserID) require.Len(t, sent[0].Labels, 2) - require.Equal(t, "task prompt", sent[0].Labels["task"]) + // NOTE: len(string) is the number of bytes in the string, not the number of runes. 
+ require.LessOrEqual(t, utf8.RuneCountInString(sent[0].Labels["task"]), 160) + if len(tc.taskPrompt) > 160 { + require.Contains(t, tc.taskPrompt, strings.TrimSuffix(sent[0].Labels["task"], "…")) + } else { + require.Equal(t, tc.taskPrompt, sent[0].Labels["task"]) + } require.Equal(t, workspace.Name, sent[0].Labels["workspace"]) } else { // Then: No notification is sent diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index b93c647dad689..76a38452552d4 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -85,7 +85,7 @@ const docTemplate = `{ } } }, - "/api/experimental/aibridge/interceptions": { + "/aibridge/interceptions": { "get": { "security": [ { @@ -115,9 +115,15 @@ const docTemplate = `{ }, { "type": "string", - "description": "Cursor pagination after ID", + "description": "Cursor pagination after ID (cannot be used with offset)", "name": "after_id", "in": "query" + }, + { + "type": "integer", + "description": "Offset pagination (cannot be used with after_id)", + "name": "offset", + "in": "query" } ], "responses": { @@ -130,6 +136,233 @@ const docTemplate = `{ } } }, + "/api/experimental/tasks": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "List AI tasks", + "operationId": "list-tasks", + "parameters": [ + { + "type": "string", + "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", + "name": "q", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TasksListResponse" + } + } + } + } + }, + "/api/experimental/tasks/{user}": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "Create a new AI task", + "operationId": "create-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "Create task request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.CreateTaskRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + } + }, + "/api/experimental/tasks/{user}/{task}": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "Get AI task by ID", + "operationId": "get-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "Delete AI task by ID", + "operationId": "delete-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + 
"format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "202": { + "description": "Task deletion initiated" + } + } + } + }, + "/api/experimental/tasks/{user}/{task}/logs": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "Get AI task logs", + "operationId": "get-task-logs", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TaskLogsResponse" + } + } + } + } + }, + "/api/experimental/tasks/{user}/{task}/send": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Experimental" + ], + "summary": "Send input to AI task", + "operationId": "send-task-input", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Task input request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.TaskSendRequest" + } + } + ], + "responses": { + "204": { + "description": "Input sent successfully" + } + } + } + }, "/appearance": { "get": { "security": [ @@ -704,6 +937,138 @@ const docTemplate = `{ } } }, + "/debug/metrics": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug metrics", + "operationId": "debug-metrics", + "responses": { + "200": { + "description": "OK" + } + }, + 
"x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug pprof index", + "operationId": "debug-pprof-index", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/cmdline": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug pprof cmdline", + "operationId": "debug-pprof-cmdline", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/profile": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug pprof profile", + "operationId": "debug-pprof-profile", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/symbol": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug pprof symbol", + "operationId": "debug-pprof-symbol", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/trace": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Debug" + ], + "summary": "Debug pprof trace", + "operationId": "debug-pprof-trace", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, "/debug/tailnet": { "get": { "security": [ @@ -2694,6 +3059,45 @@ const docTemplate = `{ } } }, + "/oauth2/revoke": { + "post": { + "consumes": [ + "application/x-www-form-urlencoded" + ], + "tags": [ + "Enterprise" + ], + "summary": "Revoke OAuth2 tokens (RFC 7009).", + "operationId": "oauth2-token-revocation", + "parameters": [ + { + "type": "string", + "description": "Client ID for authentication", + "name": "client_id", + "in": "formData", + 
"required": true + }, + { + "type": "string", + "description": "The token to revoke", + "name": "token", + "in": "formData", + "required": true + }, + { + "type": "string", + "description": "Hint about token type (access_token or refresh_token)", + "name": "token_type_hint", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "Token successfully revoked" + } + } + } + }, "/oauth2/tokens": { "post": { "produces": [ @@ -3744,6 +4148,13 @@ const docTemplate = `{ "description": "Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'})", "name": "tags", "in": "query" + }, + { + "type": "string", + "format": "uuid", + "description": "Filter results by initiator", + "name": "initiator", + "in": "query" } ], "responses": { @@ -11257,12 +11668,35 @@ const docTemplate = `{ } } }, + "codersdk.AIBridgeBedrockConfig": { + "type": "object", + "properties": { + "access_key": { + "type": "string" + }, + "access_key_secret": { + "type": "string" + }, + "model": { + "type": "string" + }, + "region": { + "type": "string" + }, + "small_fast_model": { + "type": "string" + } + } + }, "codersdk.AIBridgeConfig": { "type": "object", "properties": { "anthropic": { "$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig" }, + "bedrock": { + "$ref": "#/definitions/codersdk.AIBridgeBedrockConfig" + }, "enabled": { "type": "boolean" }, @@ -11274,14 +11708,17 @@ const docTemplate = `{ "codersdk.AIBridgeInterception": { "type": "object", "properties": { - "id": { + "ended_at": { "type": "string", - "format": "uuid" + "format": "date-time" }, - "initiator_id": { + "id": { "type": "string", "format": "uuid" }, + "initiator": { + "$ref": "#/definitions/codersdk.MinimalUser" + }, "metadata": { "type": "object", "additionalProperties": {} @@ -11319,6 +11756,9 @@ const docTemplate = `{ "codersdk.AIBridgeListInterceptionsResponse": { "type": "object", "properties": { + "count": { + "type": "integer" + }, "results": { "type": "array", "items": { @@ -11442,6 
+11882,17 @@ const docTemplate = `{ } } }, + "codersdk.APIAllowListTarget": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/codersdk.RBACResource" + } + } + }, "codersdk.APIKey": { "type": "object", "required": [ @@ -11456,6 +11907,12 @@ const docTemplate = `{ "user_id" ], "properties": { + "allow_list": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.APIAllowListTarget" + } + }, "created_at": { "type": "string", "format": "date-time" @@ -11523,11 +11980,29 @@ const docTemplate = `{ "enum": [ "all", "application_connect", + "aibridge_interception:*", + "aibridge_interception:create", + "aibridge_interception:read", + "aibridge_interception:update", "api_key:*", "api_key:create", "api_key:delete", "api_key:read", "api_key:update", + "assign_org_role:*", + "assign_org_role:assign", + "assign_org_role:create", + "assign_org_role:delete", + "assign_org_role:read", + "assign_org_role:unassign", + "assign_org_role:update", + "assign_role:*", + "assign_role:assign", + "assign_role:read", + "assign_role:unassign", + "audit_log:*", + "audit_log:create", + "audit_log:read", "coder:all", "coder:apikeys.manage_self", "coder:application_connect", @@ -11537,40 +12012,195 @@ const docTemplate = `{ "coder:workspaces.create", "coder:workspaces.delete", "coder:workspaces.operate", + "connection_log:*", + "connection_log:read", + "connection_log:update", + "crypto_key:*", + "crypto_key:create", + "crypto_key:delete", + "crypto_key:read", + "crypto_key:update", + "debug_info:*", + "debug_info:read", + "deployment_config:*", + "deployment_config:read", + "deployment_config:update", + "deployment_stats:*", + "deployment_stats:read", "file:*", "file:create", "file:read", + "group:*", + "group:create", + "group:delete", + "group:read", + "group:update", + "group_member:*", + "group_member:read", + "idpsync_settings:*", + "idpsync_settings:read", + "idpsync_settings:update", + "inbox_notification:*", + 
"inbox_notification:create", + "inbox_notification:read", + "inbox_notification:update", + "license:*", + "license:create", + "license:delete", + "license:read", + "notification_message:*", + "notification_message:create", + "notification_message:delete", + "notification_message:read", + "notification_message:update", + "notification_preference:*", + "notification_preference:read", + "notification_preference:update", + "notification_template:*", + "notification_template:read", + "notification_template:update", + "oauth2_app:*", + "oauth2_app:create", + "oauth2_app:delete", + "oauth2_app:read", + "oauth2_app:update", + "oauth2_app_code_token:*", + "oauth2_app_code_token:create", + "oauth2_app_code_token:delete", + "oauth2_app_code_token:read", + "oauth2_app_secret:*", + "oauth2_app_secret:create", + "oauth2_app_secret:delete", + "oauth2_app_secret:read", + "oauth2_app_secret:update", + "organization:*", + "organization:create", + "organization:delete", + "organization:read", + "organization:update", + "organization_member:*", + "organization_member:create", + "organization_member:delete", + "organization_member:read", + "organization_member:update", + "prebuilt_workspace:*", + "prebuilt_workspace:delete", + "prebuilt_workspace:update", + "provisioner_daemon:*", + "provisioner_daemon:create", + "provisioner_daemon:delete", + "provisioner_daemon:read", + "provisioner_daemon:update", + "provisioner_jobs:*", + "provisioner_jobs:create", + "provisioner_jobs:read", + "provisioner_jobs:update", + "replicas:*", + "replicas:read", + "system:*", + "system:create", + "system:delete", + "system:read", + "system:update", + "tailnet_coordinator:*", + "tailnet_coordinator:create", + "tailnet_coordinator:delete", + "tailnet_coordinator:read", + "tailnet_coordinator:update", + "task:*", + "task:create", + "task:delete", + "task:read", + "task:update", "template:*", "template:create", "template:delete", "template:read", "template:update", "template:use", + "template:view_insights", + 
"usage_event:*", + "usage_event:create", + "usage_event:read", + "usage_event:update", + "user:*", + "user:create", + "user:delete", + "user:read", "user:read_personal", + "user:update", "user:update_personal", "user_secret:*", "user_secret:create", "user_secret:delete", "user_secret:read", "user_secret:update", + "webpush_subscription:*", + "webpush_subscription:create", + "webpush_subscription:delete", + "webpush_subscription:read", "workspace:*", "workspace:application_connect", "workspace:create", + "workspace:create_agent", "workspace:delete", + "workspace:delete_agent", "workspace:read", + "workspace:share", "workspace:ssh", "workspace:start", "workspace:stop", - "workspace:update" + "workspace:update", + "workspace_agent_devcontainers:*", + "workspace_agent_devcontainers:create", + "workspace_agent_resource_monitor:*", + "workspace_agent_resource_monitor:create", + "workspace_agent_resource_monitor:read", + "workspace_agent_resource_monitor:update", + "workspace_dormant:*", + "workspace_dormant:application_connect", + "workspace_dormant:create", + "workspace_dormant:create_agent", + "workspace_dormant:delete", + "workspace_dormant:delete_agent", + "workspace_dormant:read", + "workspace_dormant:share", + "workspace_dormant:ssh", + "workspace_dormant:start", + "workspace_dormant:stop", + "workspace_dormant:update", + "workspace_proxy:*", + "workspace_proxy:create", + "workspace_proxy:delete", + "workspace_proxy:read", + "workspace_proxy:update" ], "x-enum-varnames": [ "APIKeyScopeAll", "APIKeyScopeApplicationConnect", + "APIKeyScopeAibridgeInterceptionAll", + "APIKeyScopeAibridgeInterceptionCreate", + "APIKeyScopeAibridgeInterceptionRead", + "APIKeyScopeAibridgeInterceptionUpdate", "APIKeyScopeApiKeyAll", "APIKeyScopeApiKeyCreate", "APIKeyScopeApiKeyDelete", "APIKeyScopeApiKeyRead", "APIKeyScopeApiKeyUpdate", + "APIKeyScopeAssignOrgRoleAll", + "APIKeyScopeAssignOrgRoleAssign", + "APIKeyScopeAssignOrgRoleCreate", + "APIKeyScopeAssignOrgRoleDelete", + 
"APIKeyScopeAssignOrgRoleRead", + "APIKeyScopeAssignOrgRoleUnassign", + "APIKeyScopeAssignOrgRoleUpdate", + "APIKeyScopeAssignRoleAll", + "APIKeyScopeAssignRoleAssign", + "APIKeyScopeAssignRoleRead", + "APIKeyScopeAssignRoleUnassign", + "APIKeyScopeAuditLogAll", + "APIKeyScopeAuditLogCreate", + "APIKeyScopeAuditLogRead", "APIKeyScopeCoderAll", "APIKeyScopeCoderApikeysManageSelf", "APIKeyScopeCoderApplicationConnect", @@ -11580,31 +12210,168 @@ const docTemplate = `{ "APIKeyScopeCoderWorkspacesCreate", "APIKeyScopeCoderWorkspacesDelete", "APIKeyScopeCoderWorkspacesOperate", + "APIKeyScopeConnectionLogAll", + "APIKeyScopeConnectionLogRead", + "APIKeyScopeConnectionLogUpdate", + "APIKeyScopeCryptoKeyAll", + "APIKeyScopeCryptoKeyCreate", + "APIKeyScopeCryptoKeyDelete", + "APIKeyScopeCryptoKeyRead", + "APIKeyScopeCryptoKeyUpdate", + "APIKeyScopeDebugInfoAll", + "APIKeyScopeDebugInfoRead", + "APIKeyScopeDeploymentConfigAll", + "APIKeyScopeDeploymentConfigRead", + "APIKeyScopeDeploymentConfigUpdate", + "APIKeyScopeDeploymentStatsAll", + "APIKeyScopeDeploymentStatsRead", "APIKeyScopeFileAll", "APIKeyScopeFileCreate", "APIKeyScopeFileRead", + "APIKeyScopeGroupAll", + "APIKeyScopeGroupCreate", + "APIKeyScopeGroupDelete", + "APIKeyScopeGroupRead", + "APIKeyScopeGroupUpdate", + "APIKeyScopeGroupMemberAll", + "APIKeyScopeGroupMemberRead", + "APIKeyScopeIdpsyncSettingsAll", + "APIKeyScopeIdpsyncSettingsRead", + "APIKeyScopeIdpsyncSettingsUpdate", + "APIKeyScopeInboxNotificationAll", + "APIKeyScopeInboxNotificationCreate", + "APIKeyScopeInboxNotificationRead", + "APIKeyScopeInboxNotificationUpdate", + "APIKeyScopeLicenseAll", + "APIKeyScopeLicenseCreate", + "APIKeyScopeLicenseDelete", + "APIKeyScopeLicenseRead", + "APIKeyScopeNotificationMessageAll", + "APIKeyScopeNotificationMessageCreate", + "APIKeyScopeNotificationMessageDelete", + "APIKeyScopeNotificationMessageRead", + "APIKeyScopeNotificationMessageUpdate", + "APIKeyScopeNotificationPreferenceAll", + 
"APIKeyScopeNotificationPreferenceRead", + "APIKeyScopeNotificationPreferenceUpdate", + "APIKeyScopeNotificationTemplateAll", + "APIKeyScopeNotificationTemplateRead", + "APIKeyScopeNotificationTemplateUpdate", + "APIKeyScopeOauth2AppAll", + "APIKeyScopeOauth2AppCreate", + "APIKeyScopeOauth2AppDelete", + "APIKeyScopeOauth2AppRead", + "APIKeyScopeOauth2AppUpdate", + "APIKeyScopeOauth2AppCodeTokenAll", + "APIKeyScopeOauth2AppCodeTokenCreate", + "APIKeyScopeOauth2AppCodeTokenDelete", + "APIKeyScopeOauth2AppCodeTokenRead", + "APIKeyScopeOauth2AppSecretAll", + "APIKeyScopeOauth2AppSecretCreate", + "APIKeyScopeOauth2AppSecretDelete", + "APIKeyScopeOauth2AppSecretRead", + "APIKeyScopeOauth2AppSecretUpdate", + "APIKeyScopeOrganizationAll", + "APIKeyScopeOrganizationCreate", + "APIKeyScopeOrganizationDelete", + "APIKeyScopeOrganizationRead", + "APIKeyScopeOrganizationUpdate", + "APIKeyScopeOrganizationMemberAll", + "APIKeyScopeOrganizationMemberCreate", + "APIKeyScopeOrganizationMemberDelete", + "APIKeyScopeOrganizationMemberRead", + "APIKeyScopeOrganizationMemberUpdate", + "APIKeyScopePrebuiltWorkspaceAll", + "APIKeyScopePrebuiltWorkspaceDelete", + "APIKeyScopePrebuiltWorkspaceUpdate", + "APIKeyScopeProvisionerDaemonAll", + "APIKeyScopeProvisionerDaemonCreate", + "APIKeyScopeProvisionerDaemonDelete", + "APIKeyScopeProvisionerDaemonRead", + "APIKeyScopeProvisionerDaemonUpdate", + "APIKeyScopeProvisionerJobsAll", + "APIKeyScopeProvisionerJobsCreate", + "APIKeyScopeProvisionerJobsRead", + "APIKeyScopeProvisionerJobsUpdate", + "APIKeyScopeReplicasAll", + "APIKeyScopeReplicasRead", + "APIKeyScopeSystemAll", + "APIKeyScopeSystemCreate", + "APIKeyScopeSystemDelete", + "APIKeyScopeSystemRead", + "APIKeyScopeSystemUpdate", + "APIKeyScopeTailnetCoordinatorAll", + "APIKeyScopeTailnetCoordinatorCreate", + "APIKeyScopeTailnetCoordinatorDelete", + "APIKeyScopeTailnetCoordinatorRead", + "APIKeyScopeTailnetCoordinatorUpdate", + "APIKeyScopeTaskAll", + "APIKeyScopeTaskCreate", + 
"APIKeyScopeTaskDelete", + "APIKeyScopeTaskRead", + "APIKeyScopeTaskUpdate", "APIKeyScopeTemplateAll", "APIKeyScopeTemplateCreate", "APIKeyScopeTemplateDelete", "APIKeyScopeTemplateRead", "APIKeyScopeTemplateUpdate", "APIKeyScopeTemplateUse", + "APIKeyScopeTemplateViewInsights", + "APIKeyScopeUsageEventAll", + "APIKeyScopeUsageEventCreate", + "APIKeyScopeUsageEventRead", + "APIKeyScopeUsageEventUpdate", + "APIKeyScopeUserAll", + "APIKeyScopeUserCreate", + "APIKeyScopeUserDelete", + "APIKeyScopeUserRead", "APIKeyScopeUserReadPersonal", + "APIKeyScopeUserUpdate", "APIKeyScopeUserUpdatePersonal", "APIKeyScopeUserSecretAll", "APIKeyScopeUserSecretCreate", "APIKeyScopeUserSecretDelete", "APIKeyScopeUserSecretRead", "APIKeyScopeUserSecretUpdate", + "APIKeyScopeWebpushSubscriptionAll", + "APIKeyScopeWebpushSubscriptionCreate", + "APIKeyScopeWebpushSubscriptionDelete", + "APIKeyScopeWebpushSubscriptionRead", "APIKeyScopeWorkspaceAll", "APIKeyScopeWorkspaceApplicationConnect", "APIKeyScopeWorkspaceCreate", + "APIKeyScopeWorkspaceCreateAgent", "APIKeyScopeWorkspaceDelete", + "APIKeyScopeWorkspaceDeleteAgent", "APIKeyScopeWorkspaceRead", + "APIKeyScopeWorkspaceShare", "APIKeyScopeWorkspaceSsh", "APIKeyScopeWorkspaceStart", "APIKeyScopeWorkspaceStop", - "APIKeyScopeWorkspaceUpdate" + "APIKeyScopeWorkspaceUpdate", + "APIKeyScopeWorkspaceAgentDevcontainersAll", + "APIKeyScopeWorkspaceAgentDevcontainersCreate", + "APIKeyScopeWorkspaceAgentResourceMonitorAll", + "APIKeyScopeWorkspaceAgentResourceMonitorCreate", + "APIKeyScopeWorkspaceAgentResourceMonitorRead", + "APIKeyScopeWorkspaceAgentResourceMonitorUpdate", + "APIKeyScopeWorkspaceDormantAll", + "APIKeyScopeWorkspaceDormantApplicationConnect", + "APIKeyScopeWorkspaceDormantCreate", + "APIKeyScopeWorkspaceDormantCreateAgent", + "APIKeyScopeWorkspaceDormantDelete", + "APIKeyScopeWorkspaceDormantDeleteAgent", + "APIKeyScopeWorkspaceDormantRead", + "APIKeyScopeWorkspaceDormantShare", + "APIKeyScopeWorkspaceDormantSsh", + 
"APIKeyScopeWorkspaceDormantStart", + "APIKeyScopeWorkspaceDormantStop", + "APIKeyScopeWorkspaceDormantUpdate", + "APIKeyScopeWorkspaceProxyAll", + "APIKeyScopeWorkspaceProxyCreate", + "APIKeyScopeWorkspaceProxyDelete", + "APIKeyScopeWorkspaceProxyRead", + "APIKeyScopeWorkspaceProxyUpdate" ] }, "codersdk.AddLicenseRequest": { @@ -11756,6 +12523,13 @@ const docTemplate = `{ "type": "string", "format": "uuid" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", "type": "array", @@ -12408,11 +13182,30 @@ const docTemplate = `{ } } }, - "codersdk.CreateProvisionerKeyResponse": { + "codersdk.CreateProvisionerKeyResponse": { + "type": "object", + "properties": { + "key": { + "type": "string" + } + } + }, + "codersdk.CreateTaskRequest": { "type": "object", "properties": { - "key": { + "input": { "type": "string" + }, + "name": { + "type": "string" + }, + "template_version_id": { + "type": "string", + "format": "uuid" + }, + "template_version_preset_id": { + "type": "string", + "format": "uuid" } } }, @@ -12670,6 +13463,12 @@ const docTemplate = `{ "codersdk.CreateTokenRequest": { "type": "object", "properties": { + "allow_list": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.APIAllowListTarget" + } + }, "lifetime": { "type": "integer" }, @@ -12954,6 +13753,13 @@ const docTemplate = `{ "name": { "type": "string" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific to the organization the role belongs to.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are 
specific to the organization the role belongs to.", "type": "array", @@ -13219,6 +14025,9 @@ const docTemplate = `{ "docs_url": { "$ref": "#/definitions/serpent.URL" }, + "enable_authz_recording": { + "type": "boolean" + }, "enable_terraform_debug_mode": { "type": "boolean" }, @@ -13507,11 +14316,9 @@ const docTemplate = `{ "web-push", "oauth2", "mcp-server-http", - "workspace-sharing", - "aibridge" + "workspace-sharing" ], "x-enum-comments": { - "ExperimentAIBridge": "Enables AI Bridge functionality.", "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", "ExperimentExample": "This isn't used for anything.", "ExperimentMCPServerHTTP": "Enables the MCP HTTP server functionality.", @@ -13529,8 +14336,7 @@ const docTemplate = `{ "ExperimentWebPush", "ExperimentOAuth2", "ExperimentMCPServerHTTP", - "ExperimentWorkspaceSharing", - "ExperimentAIBridge" + "ExperimentWorkspaceSharing" ] }, "codersdk.ExternalAPIKeyScopes": { @@ -14137,7 +14943,15 @@ const docTemplate = `{ "enum": [ "bug", "chat", - "docs" + "docs", + "star" + ] + }, + "location": { + "type": "string", + "enum": [ + "navbar", + "dropdown" ] }, "name": { @@ -14310,6 +15124,9 @@ const docTemplate = `{ "type": "string", "format": "uuid" }, + "name": { + "type": "string" + }, "username": { "type": "string" } @@ -14582,6 +15399,9 @@ const docTemplate = `{ }, "token": { "type": "string" + }, + "token_revoke": { + "type": "string" } } }, @@ -14681,7 +15501,10 @@ const docTemplate = `{ } }, "registration_access_token": { - "type": "string" + "type": "array", + "items": { + "type": "integer" + } }, "registration_client_uri": { "type": "string" @@ -15974,6 +16797,10 @@ const docTemplate = `{ "type": "string", "format": "uuid" }, + "initiator_id": { + "type": "string", + "format": "uuid" + }, "input": { "$ref": "#/definitions/codersdk.ProvisionerJobInput" }, @@ -16309,6 +17136,7 @@ const docTemplate = `{ "read", "read_personal", "ssh", + "share", 
"unassign", "update", "update_personal", @@ -16327,6 +17155,7 @@ const docTemplate = `{ "ActionRead", "ActionReadPersonal", "ActionSSH", + "ActionShare", "ActionUnassign", "ActionUpdate", "ActionUpdatePersonal", @@ -16370,6 +17199,7 @@ const docTemplate = `{ "replicas", "system", "tailnet_coordinator", + "task", "template", "usage_event", "user", @@ -16413,6 +17243,7 @@ const docTemplate = `{ "ResourceReplicas", "ResourceSystem", "ResourceTailnetCoordinator", + "ResourceTask", "ResourceTemplate", "ResourceUsageEvent", "ResourceUser", @@ -16628,7 +17459,8 @@ const docTemplate = `{ "idp_sync_settings_group", "idp_sync_settings_role", "workspace_agent", - "workspace_app" + "workspace_app", + "task" ], "x-enum-varnames": [ "ResourceTypeTemplate", @@ -16655,7 +17487,8 @@ const docTemplate = `{ "ResourceTypeIdpSyncSettingsGroup", "ResourceTypeIdpSyncSettingsRole", "ResourceTypeWorkspaceAgent", - "ResourceTypeWorkspaceApp" + "ResourceTypeWorkspaceApp", + "ResourceTypeTask" ] }, "codersdk.Response": { @@ -16691,6 +17524,13 @@ const docTemplate = `{ "type": "string", "format": "uuid" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", "type": "array", @@ -16911,6 +17751,247 @@ const docTemplate = `{ } } }, + "codersdk.Task": { + "type": "object", + "properties": { + "created_at": { + "type": "string", + "format": "date-time" + }, + "current_state": { + "$ref": "#/definitions/codersdk.TaskStateEntry" + }, + "id": { + "type": "string", + "format": "uuid" + }, + "initial_prompt": { + "type": "string" + }, + "name": { + "type": "string" + }, + "organization_id": { + "type": "string", + "format": "uuid" + }, + "owner_avatar_url": { + "type": 
"string" + }, + "owner_id": { + "type": "string", + "format": "uuid" + }, + "owner_name": { + "type": "string" + }, + "status": { + "enum": [ + "pending", + "initializing", + "active", + "paused", + "unknown", + "error" + ], + "allOf": [ + { + "$ref": "#/definitions/codersdk.TaskStatus" + } + ] + }, + "template_display_name": { + "type": "string" + }, + "template_icon": { + "type": "string" + }, + "template_id": { + "type": "string", + "format": "uuid" + }, + "template_name": { + "type": "string" + }, + "template_version_id": { + "type": "string", + "format": "uuid" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "workspace_agent_health": { + "$ref": "#/definitions/codersdk.WorkspaceAgentHealth" + }, + "workspace_agent_id": { + "format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_agent_lifecycle": { + "$ref": "#/definitions/codersdk.WorkspaceAgentLifecycle" + }, + "workspace_app_id": { + "format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_build_number": { + "type": "integer" + }, + "workspace_id": { + "format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_name": { + "type": "string" + }, + "workspace_status": { + "enum": [ + "pending", + "starting", + "running", + "stopping", + "stopped", + "failed", + "canceling", + "canceled", + "deleting", + "deleted" + ], + "allOf": [ + { + "$ref": "#/definitions/codersdk.WorkspaceStatus" + } + ] + } + } + }, + "codersdk.TaskLogEntry": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "type": { + "$ref": "#/definitions/codersdk.TaskLogType" + } + } + }, + "codersdk.TaskLogType": { + "type": "string", + "enum": [ + "input", + "output" + ], + "x-enum-varnames": [ + "TaskLogTypeInput", + "TaskLogTypeOutput" + ] + }, + 
"codersdk.TaskLogsResponse": { + "type": "object", + "properties": { + "logs": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.TaskLogEntry" + } + } + } + }, + "codersdk.TaskSendRequest": { + "type": "object", + "properties": { + "input": { + "type": "string" + } + } + }, + "codersdk.TaskState": { + "type": "string", + "enum": [ + "working", + "idle", + "complete", + "failed" + ], + "x-enum-varnames": [ + "TaskStateWorking", + "TaskStateIdle", + "TaskStateComplete", + "TaskStateFailed" + ] + }, + "codersdk.TaskStateEntry": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "state": { + "$ref": "#/definitions/codersdk.TaskState" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "uri": { + "type": "string" + } + } + }, + "codersdk.TaskStatus": { + "type": "string", + "enum": [ + "pending", + "initializing", + "active", + "paused", + "unknown", + "error" + ], + "x-enum-varnames": [ + "TaskStatusPending", + "TaskStatusInitializing", + "TaskStatusActive", + "TaskStatusPaused", + "TaskStatusUnknown", + "TaskStatusError" + ] + }, + "codersdk.TasksListResponse": { + "type": "object", + "properties": { + "count": { + "type": "integer" + }, + "tasks": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, "codersdk.TelemetryConfig": { "type": "object", "properties": { @@ -18631,6 +19712,14 @@ const docTemplate = `{ "description": "OwnerName is the username of the owner of the workspace.", "type": "string" }, + "task_id": { + "description": "TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task.", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, "template_active_version_id": { "type": "string", "format": "uuid" @@ -19438,6 +20527,7 @@ const docTemplate = `{ "type": "object", "properties": { "ai_task_sidebar_app_id": { + "description": "Deprecated: This field has been replaced with ` + "`" + `Task.WorkspaceAppID` + "`" 
+ `", "type": "string", "format": "uuid" }, @@ -19941,6 +21031,9 @@ const docTemplate = `{ "type": "string", "format": "uuid" }, + "name": { + "type": "string" + }, "role": { "enum": [ "admin", diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 731e1720c09bc..4aec870e77587 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -65,7 +65,7 @@ } } }, - "/api/experimental/aibridge/interceptions": { + "/aibridge/interceptions": { "get": { "security": [ { @@ -91,9 +91,15 @@ }, { "type": "string", - "description": "Cursor pagination after ID", + "description": "Cursor pagination after ID (cannot be used with offset)", "name": "after_id", "in": "query" + }, + { + "type": "integer", + "description": "Offset pagination (cannot be used with after_id)", + "name": "offset", + "in": "query" } ], "responses": { @@ -106,6 +112,221 @@ } } }, + "/api/experimental/tasks": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "List AI tasks", + "operationId": "list-tasks", + "parameters": [ + { + "type": "string", + "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", + "name": "q", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TasksListResponse" + } + } + } + } + }, + "/api/experimental/tasks/{user}": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "Create a new AI task", + "operationId": "create-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "Create task request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.CreateTaskRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + } + }, + "/api/experimental/tasks/{user}/{task}": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "Get AI task by ID", + "operationId": "get-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "Delete AI task by ID", + "operationId": "delete-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + 
"description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "202": { + "description": "Task deletion initiated" + } + } + } + }, + "/api/experimental/tasks/{user}/{task}/logs": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "Get AI task logs", + "operationId": "get-task-logs", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TaskLogsResponse" + } + } + } + } + }, + "/api/experimental/tasks/{user}/{task}/send": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Experimental"], + "summary": "Send input to AI task", + "operationId": "send-task-input", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "format": "uuid", + "description": "Task ID", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Task input request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.TaskSendRequest" + } + } + ], + "responses": { + "204": { + "description": "Input sent successfully" + } + } + } + }, "/appearance": { "get": { "security": [ @@ -602,6 +823,126 @@ } } }, + "/debug/metrics": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug metrics", + "operationId": "debug-metrics", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof": { + "get": 
{ + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug pprof index", + "operationId": "debug-pprof-index", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/cmdline": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug pprof cmdline", + "operationId": "debug-pprof-cmdline", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/profile": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug pprof profile", + "operationId": "debug-pprof-profile", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/symbol": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug pprof symbol", + "operationId": "debug-pprof-symbol", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, + "/debug/pprof/trace": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Debug"], + "summary": "Debug pprof trace", + "operationId": "debug-pprof-trace", + "responses": { + "200": { + "description": "OK" + } + }, + "x-apidocgen": { + "skip": true + } + } + }, "/debug/tailnet": { "get": { "security": [ @@ -2356,6 +2697,41 @@ } } }, + "/oauth2/revoke": { + "post": { + "consumes": ["application/x-www-form-urlencoded"], + "tags": ["Enterprise"], + "summary": "Revoke OAuth2 tokens (RFC 7009).", + "operationId": "oauth2-token-revocation", + "parameters": [ + { + "type": "string", + "description": "Client ID for authentication", + "name": "client_id", + "in": "formData", + "required": true + }, + { + "type": "string", + "description": "The token to revoke", + "name": "token", + "in": "formData", + "required": true + }, 
+ { + "type": "string", + "description": "Hint about token type (access_token or refresh_token)", + "name": "token_type_hint", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "Token successfully revoked" + } + } + } + }, "/oauth2/tokens": { "post": { "produces": ["application/json"], @@ -3299,6 +3675,13 @@ "description": "Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'})", "name": "tags", "in": "query" + }, + { + "type": "string", + "format": "uuid", + "description": "Filter results by initiator", + "name": "initiator", + "in": "query" } ], "responses": { @@ -9981,12 +10364,35 @@ } } }, + "codersdk.AIBridgeBedrockConfig": { + "type": "object", + "properties": { + "access_key": { + "type": "string" + }, + "access_key_secret": { + "type": "string" + }, + "model": { + "type": "string" + }, + "region": { + "type": "string" + }, + "small_fast_model": { + "type": "string" + } + } + }, "codersdk.AIBridgeConfig": { "type": "object", "properties": { "anthropic": { "$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig" }, + "bedrock": { + "$ref": "#/definitions/codersdk.AIBridgeBedrockConfig" + }, "enabled": { "type": "boolean" }, @@ -9998,14 +10404,17 @@ "codersdk.AIBridgeInterception": { "type": "object", "properties": { - "id": { + "ended_at": { "type": "string", - "format": "uuid" + "format": "date-time" }, - "initiator_id": { + "id": { "type": "string", "format": "uuid" }, + "initiator": { + "$ref": "#/definitions/codersdk.MinimalUser" + }, "metadata": { "type": "object", "additionalProperties": {} @@ -10043,6 +10452,9 @@ "codersdk.AIBridgeListInterceptionsResponse": { "type": "object", "properties": { + "count": { + "type": "integer" + }, "results": { "type": "array", "items": { @@ -10166,6 +10578,17 @@ } } }, + "codersdk.APIAllowListTarget": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/codersdk.RBACResource" + } + } + }, "codersdk.APIKey": { 
"type": "object", "required": [ @@ -10180,6 +10603,12 @@ "user_id" ], "properties": { + "allow_list": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.APIAllowListTarget" + } + }, "created_at": { "type": "string", "format": "date-time" @@ -10239,11 +10668,29 @@ "enum": [ "all", "application_connect", + "aibridge_interception:*", + "aibridge_interception:create", + "aibridge_interception:read", + "aibridge_interception:update", "api_key:*", "api_key:create", "api_key:delete", "api_key:read", "api_key:update", + "assign_org_role:*", + "assign_org_role:assign", + "assign_org_role:create", + "assign_org_role:delete", + "assign_org_role:read", + "assign_org_role:unassign", + "assign_org_role:update", + "assign_role:*", + "assign_role:assign", + "assign_role:read", + "assign_role:unassign", + "audit_log:*", + "audit_log:create", + "audit_log:read", "coder:all", "coder:apikeys.manage_self", "coder:application_connect", @@ -10253,40 +10700,195 @@ "coder:workspaces.create", "coder:workspaces.delete", "coder:workspaces.operate", + "connection_log:*", + "connection_log:read", + "connection_log:update", + "crypto_key:*", + "crypto_key:create", + "crypto_key:delete", + "crypto_key:read", + "crypto_key:update", + "debug_info:*", + "debug_info:read", + "deployment_config:*", + "deployment_config:read", + "deployment_config:update", + "deployment_stats:*", + "deployment_stats:read", "file:*", "file:create", "file:read", + "group:*", + "group:create", + "group:delete", + "group:read", + "group:update", + "group_member:*", + "group_member:read", + "idpsync_settings:*", + "idpsync_settings:read", + "idpsync_settings:update", + "inbox_notification:*", + "inbox_notification:create", + "inbox_notification:read", + "inbox_notification:update", + "license:*", + "license:create", + "license:delete", + "license:read", + "notification_message:*", + "notification_message:create", + "notification_message:delete", + "notification_message:read", + 
"notification_message:update", + "notification_preference:*", + "notification_preference:read", + "notification_preference:update", + "notification_template:*", + "notification_template:read", + "notification_template:update", + "oauth2_app:*", + "oauth2_app:create", + "oauth2_app:delete", + "oauth2_app:read", + "oauth2_app:update", + "oauth2_app_code_token:*", + "oauth2_app_code_token:create", + "oauth2_app_code_token:delete", + "oauth2_app_code_token:read", + "oauth2_app_secret:*", + "oauth2_app_secret:create", + "oauth2_app_secret:delete", + "oauth2_app_secret:read", + "oauth2_app_secret:update", + "organization:*", + "organization:create", + "organization:delete", + "organization:read", + "organization:update", + "organization_member:*", + "organization_member:create", + "organization_member:delete", + "organization_member:read", + "organization_member:update", + "prebuilt_workspace:*", + "prebuilt_workspace:delete", + "prebuilt_workspace:update", + "provisioner_daemon:*", + "provisioner_daemon:create", + "provisioner_daemon:delete", + "provisioner_daemon:read", + "provisioner_daemon:update", + "provisioner_jobs:*", + "provisioner_jobs:create", + "provisioner_jobs:read", + "provisioner_jobs:update", + "replicas:*", + "replicas:read", + "system:*", + "system:create", + "system:delete", + "system:read", + "system:update", + "tailnet_coordinator:*", + "tailnet_coordinator:create", + "tailnet_coordinator:delete", + "tailnet_coordinator:read", + "tailnet_coordinator:update", + "task:*", + "task:create", + "task:delete", + "task:read", + "task:update", "template:*", "template:create", "template:delete", "template:read", "template:update", "template:use", + "template:view_insights", + "usage_event:*", + "usage_event:create", + "usage_event:read", + "usage_event:update", + "user:*", + "user:create", + "user:delete", + "user:read", "user:read_personal", + "user:update", "user:update_personal", "user_secret:*", "user_secret:create", "user_secret:delete", 
"user_secret:read", "user_secret:update", + "webpush_subscription:*", + "webpush_subscription:create", + "webpush_subscription:delete", + "webpush_subscription:read", "workspace:*", "workspace:application_connect", "workspace:create", + "workspace:create_agent", "workspace:delete", + "workspace:delete_agent", "workspace:read", + "workspace:share", "workspace:ssh", "workspace:start", "workspace:stop", - "workspace:update" + "workspace:update", + "workspace_agent_devcontainers:*", + "workspace_agent_devcontainers:create", + "workspace_agent_resource_monitor:*", + "workspace_agent_resource_monitor:create", + "workspace_agent_resource_monitor:read", + "workspace_agent_resource_monitor:update", + "workspace_dormant:*", + "workspace_dormant:application_connect", + "workspace_dormant:create", + "workspace_dormant:create_agent", + "workspace_dormant:delete", + "workspace_dormant:delete_agent", + "workspace_dormant:read", + "workspace_dormant:share", + "workspace_dormant:ssh", + "workspace_dormant:start", + "workspace_dormant:stop", + "workspace_dormant:update", + "workspace_proxy:*", + "workspace_proxy:create", + "workspace_proxy:delete", + "workspace_proxy:read", + "workspace_proxy:update" ], "x-enum-varnames": [ "APIKeyScopeAll", "APIKeyScopeApplicationConnect", + "APIKeyScopeAibridgeInterceptionAll", + "APIKeyScopeAibridgeInterceptionCreate", + "APIKeyScopeAibridgeInterceptionRead", + "APIKeyScopeAibridgeInterceptionUpdate", "APIKeyScopeApiKeyAll", "APIKeyScopeApiKeyCreate", "APIKeyScopeApiKeyDelete", "APIKeyScopeApiKeyRead", "APIKeyScopeApiKeyUpdate", + "APIKeyScopeAssignOrgRoleAll", + "APIKeyScopeAssignOrgRoleAssign", + "APIKeyScopeAssignOrgRoleCreate", + "APIKeyScopeAssignOrgRoleDelete", + "APIKeyScopeAssignOrgRoleRead", + "APIKeyScopeAssignOrgRoleUnassign", + "APIKeyScopeAssignOrgRoleUpdate", + "APIKeyScopeAssignRoleAll", + "APIKeyScopeAssignRoleAssign", + "APIKeyScopeAssignRoleRead", + "APIKeyScopeAssignRoleUnassign", + "APIKeyScopeAuditLogAll", + 
"APIKeyScopeAuditLogCreate", + "APIKeyScopeAuditLogRead", "APIKeyScopeCoderAll", "APIKeyScopeCoderApikeysManageSelf", "APIKeyScopeCoderApplicationConnect", @@ -10296,31 +10898,168 @@ "APIKeyScopeCoderWorkspacesCreate", "APIKeyScopeCoderWorkspacesDelete", "APIKeyScopeCoderWorkspacesOperate", + "APIKeyScopeConnectionLogAll", + "APIKeyScopeConnectionLogRead", + "APIKeyScopeConnectionLogUpdate", + "APIKeyScopeCryptoKeyAll", + "APIKeyScopeCryptoKeyCreate", + "APIKeyScopeCryptoKeyDelete", + "APIKeyScopeCryptoKeyRead", + "APIKeyScopeCryptoKeyUpdate", + "APIKeyScopeDebugInfoAll", + "APIKeyScopeDebugInfoRead", + "APIKeyScopeDeploymentConfigAll", + "APIKeyScopeDeploymentConfigRead", + "APIKeyScopeDeploymentConfigUpdate", + "APIKeyScopeDeploymentStatsAll", + "APIKeyScopeDeploymentStatsRead", "APIKeyScopeFileAll", "APIKeyScopeFileCreate", "APIKeyScopeFileRead", + "APIKeyScopeGroupAll", + "APIKeyScopeGroupCreate", + "APIKeyScopeGroupDelete", + "APIKeyScopeGroupRead", + "APIKeyScopeGroupUpdate", + "APIKeyScopeGroupMemberAll", + "APIKeyScopeGroupMemberRead", + "APIKeyScopeIdpsyncSettingsAll", + "APIKeyScopeIdpsyncSettingsRead", + "APIKeyScopeIdpsyncSettingsUpdate", + "APIKeyScopeInboxNotificationAll", + "APIKeyScopeInboxNotificationCreate", + "APIKeyScopeInboxNotificationRead", + "APIKeyScopeInboxNotificationUpdate", + "APIKeyScopeLicenseAll", + "APIKeyScopeLicenseCreate", + "APIKeyScopeLicenseDelete", + "APIKeyScopeLicenseRead", + "APIKeyScopeNotificationMessageAll", + "APIKeyScopeNotificationMessageCreate", + "APIKeyScopeNotificationMessageDelete", + "APIKeyScopeNotificationMessageRead", + "APIKeyScopeNotificationMessageUpdate", + "APIKeyScopeNotificationPreferenceAll", + "APIKeyScopeNotificationPreferenceRead", + "APIKeyScopeNotificationPreferenceUpdate", + "APIKeyScopeNotificationTemplateAll", + "APIKeyScopeNotificationTemplateRead", + "APIKeyScopeNotificationTemplateUpdate", + "APIKeyScopeOauth2AppAll", + "APIKeyScopeOauth2AppCreate", + "APIKeyScopeOauth2AppDelete", + 
"APIKeyScopeOauth2AppRead", + "APIKeyScopeOauth2AppUpdate", + "APIKeyScopeOauth2AppCodeTokenAll", + "APIKeyScopeOauth2AppCodeTokenCreate", + "APIKeyScopeOauth2AppCodeTokenDelete", + "APIKeyScopeOauth2AppCodeTokenRead", + "APIKeyScopeOauth2AppSecretAll", + "APIKeyScopeOauth2AppSecretCreate", + "APIKeyScopeOauth2AppSecretDelete", + "APIKeyScopeOauth2AppSecretRead", + "APIKeyScopeOauth2AppSecretUpdate", + "APIKeyScopeOrganizationAll", + "APIKeyScopeOrganizationCreate", + "APIKeyScopeOrganizationDelete", + "APIKeyScopeOrganizationRead", + "APIKeyScopeOrganizationUpdate", + "APIKeyScopeOrganizationMemberAll", + "APIKeyScopeOrganizationMemberCreate", + "APIKeyScopeOrganizationMemberDelete", + "APIKeyScopeOrganizationMemberRead", + "APIKeyScopeOrganizationMemberUpdate", + "APIKeyScopePrebuiltWorkspaceAll", + "APIKeyScopePrebuiltWorkspaceDelete", + "APIKeyScopePrebuiltWorkspaceUpdate", + "APIKeyScopeProvisionerDaemonAll", + "APIKeyScopeProvisionerDaemonCreate", + "APIKeyScopeProvisionerDaemonDelete", + "APIKeyScopeProvisionerDaemonRead", + "APIKeyScopeProvisionerDaemonUpdate", + "APIKeyScopeProvisionerJobsAll", + "APIKeyScopeProvisionerJobsCreate", + "APIKeyScopeProvisionerJobsRead", + "APIKeyScopeProvisionerJobsUpdate", + "APIKeyScopeReplicasAll", + "APIKeyScopeReplicasRead", + "APIKeyScopeSystemAll", + "APIKeyScopeSystemCreate", + "APIKeyScopeSystemDelete", + "APIKeyScopeSystemRead", + "APIKeyScopeSystemUpdate", + "APIKeyScopeTailnetCoordinatorAll", + "APIKeyScopeTailnetCoordinatorCreate", + "APIKeyScopeTailnetCoordinatorDelete", + "APIKeyScopeTailnetCoordinatorRead", + "APIKeyScopeTailnetCoordinatorUpdate", + "APIKeyScopeTaskAll", + "APIKeyScopeTaskCreate", + "APIKeyScopeTaskDelete", + "APIKeyScopeTaskRead", + "APIKeyScopeTaskUpdate", "APIKeyScopeTemplateAll", "APIKeyScopeTemplateCreate", "APIKeyScopeTemplateDelete", "APIKeyScopeTemplateRead", "APIKeyScopeTemplateUpdate", "APIKeyScopeTemplateUse", + "APIKeyScopeTemplateViewInsights", + "APIKeyScopeUsageEventAll", + 
"APIKeyScopeUsageEventCreate", + "APIKeyScopeUsageEventRead", + "APIKeyScopeUsageEventUpdate", + "APIKeyScopeUserAll", + "APIKeyScopeUserCreate", + "APIKeyScopeUserDelete", + "APIKeyScopeUserRead", "APIKeyScopeUserReadPersonal", + "APIKeyScopeUserUpdate", "APIKeyScopeUserUpdatePersonal", "APIKeyScopeUserSecretAll", "APIKeyScopeUserSecretCreate", "APIKeyScopeUserSecretDelete", "APIKeyScopeUserSecretRead", "APIKeyScopeUserSecretUpdate", + "APIKeyScopeWebpushSubscriptionAll", + "APIKeyScopeWebpushSubscriptionCreate", + "APIKeyScopeWebpushSubscriptionDelete", + "APIKeyScopeWebpushSubscriptionRead", "APIKeyScopeWorkspaceAll", "APIKeyScopeWorkspaceApplicationConnect", "APIKeyScopeWorkspaceCreate", + "APIKeyScopeWorkspaceCreateAgent", "APIKeyScopeWorkspaceDelete", + "APIKeyScopeWorkspaceDeleteAgent", "APIKeyScopeWorkspaceRead", + "APIKeyScopeWorkspaceShare", "APIKeyScopeWorkspaceSsh", "APIKeyScopeWorkspaceStart", "APIKeyScopeWorkspaceStop", - "APIKeyScopeWorkspaceUpdate" + "APIKeyScopeWorkspaceUpdate", + "APIKeyScopeWorkspaceAgentDevcontainersAll", + "APIKeyScopeWorkspaceAgentDevcontainersCreate", + "APIKeyScopeWorkspaceAgentResourceMonitorAll", + "APIKeyScopeWorkspaceAgentResourceMonitorCreate", + "APIKeyScopeWorkspaceAgentResourceMonitorRead", + "APIKeyScopeWorkspaceAgentResourceMonitorUpdate", + "APIKeyScopeWorkspaceDormantAll", + "APIKeyScopeWorkspaceDormantApplicationConnect", + "APIKeyScopeWorkspaceDormantCreate", + "APIKeyScopeWorkspaceDormantCreateAgent", + "APIKeyScopeWorkspaceDormantDelete", + "APIKeyScopeWorkspaceDormantDeleteAgent", + "APIKeyScopeWorkspaceDormantRead", + "APIKeyScopeWorkspaceDormantShare", + "APIKeyScopeWorkspaceDormantSsh", + "APIKeyScopeWorkspaceDormantStart", + "APIKeyScopeWorkspaceDormantStop", + "APIKeyScopeWorkspaceDormantUpdate", + "APIKeyScopeWorkspaceProxyAll", + "APIKeyScopeWorkspaceProxyCreate", + "APIKeyScopeWorkspaceProxyDelete", + "APIKeyScopeWorkspaceProxyRead", + "APIKeyScopeWorkspaceProxyUpdate" ] }, 
"codersdk.AddLicenseRequest": { @@ -10466,6 +11205,13 @@ "type": "string", "format": "uuid" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", "type": "array", @@ -11094,6 +11840,25 @@ } } }, + "codersdk.CreateTaskRequest": { + "type": "object", + "properties": { + "input": { + "type": "string" + }, + "name": { + "type": "string" + }, + "template_version_id": { + "type": "string", + "format": "uuid" + }, + "template_version_preset_id": { + "type": "string", + "format": "uuid" + } + } + }, "codersdk.CreateTemplateRequest": { "type": "object", "required": ["name", "template_version_id"], @@ -11327,6 +12092,12 @@ "codersdk.CreateTokenRequest": { "type": "object", "properties": { + "allow_list": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.APIAllowListTarget" + } + }, "lifetime": { "type": "integer" }, @@ -11596,6 +12367,13 @@ "name": { "type": "string" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific to the organization the role belongs to.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are specific to the organization the role belongs to.", "type": "array", @@ -11861,6 +12639,9 @@ "docs_url": { "$ref": "#/definitions/serpent.URL" }, + "enable_authz_recording": { + "type": "boolean" + }, "enable_terraform_debug_mode": { "type": "boolean" }, @@ -12142,11 +12923,9 @@ "web-push", "oauth2", "mcp-server-http", - "workspace-sharing", - "aibridge" + "workspace-sharing" ], "x-enum-comments": { - "ExperimentAIBridge": "Enables AI Bridge functionality.", 
"ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", "ExperimentExample": "This isn't used for anything.", "ExperimentMCPServerHTTP": "Enables the MCP HTTP server functionality.", @@ -12164,8 +12943,7 @@ "ExperimentWebPush", "ExperimentOAuth2", "ExperimentMCPServerHTTP", - "ExperimentWorkspaceSharing", - "ExperimentAIBridge" + "ExperimentWorkspaceSharing" ] }, "codersdk.ExternalAPIKeyScopes": { @@ -12753,7 +13531,11 @@ "properties": { "icon": { "type": "string", - "enum": ["bug", "chat", "docs"] + "enum": ["bug", "chat", "docs", "star"] + }, + "location": { + "type": "string", + "enum": ["navbar", "dropdown"] }, "name": { "type": "string" @@ -12896,6 +13678,9 @@ "type": "string", "format": "uuid" }, + "name": { + "type": "string" + }, "username": { "type": "string" } @@ -13168,6 +13953,9 @@ }, "token": { "type": "string" + }, + "token_revoke": { + "type": "string" } } }, @@ -13267,7 +14055,10 @@ } }, "registration_access_token": { - "type": "string" + "type": "array", + "items": { + "type": "integer" + } }, "registration_client_uri": { "type": "string" @@ -14532,6 +15323,10 @@ "type": "string", "format": "uuid" }, + "initiator_id": { + "type": "string", + "format": "uuid" + }, "input": { "$ref": "#/definitions/codersdk.ProvisionerJobInput" }, @@ -14843,6 +15638,7 @@ "read", "read_personal", "ssh", + "share", "unassign", "update", "update_personal", @@ -14861,6 +15657,7 @@ "ActionRead", "ActionReadPersonal", "ActionSSH", + "ActionShare", "ActionUnassign", "ActionUpdate", "ActionUpdatePersonal", @@ -14904,6 +15701,7 @@ "replicas", "system", "tailnet_coordinator", + "task", "template", "usage_event", "user", @@ -14947,6 +15745,7 @@ "ResourceReplicas", "ResourceSystem", "ResourceTailnetCoordinator", + "ResourceTask", "ResourceTemplate", "ResourceUsageEvent", "ResourceUser", @@ -15152,7 +15951,8 @@ "idp_sync_settings_group", "idp_sync_settings_role", "workspace_agent", - "workspace_app" + "workspace_app", + 
"task" ], "x-enum-varnames": [ "ResourceTypeTemplate", @@ -15179,7 +15979,8 @@ "ResourceTypeIdpSyncSettingsGroup", "ResourceTypeIdpSyncSettingsRole", "ResourceTypeWorkspaceAgent", - "ResourceTypeWorkspaceApp" + "ResourceTypeWorkspaceApp", + "ResourceTypeTask" ] }, "codersdk.Response": { @@ -15215,6 +16016,13 @@ "type": "string", "format": "uuid" }, + "organization_member_permissions": { + "description": "OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, "organization_permissions": { "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", "type": "array", @@ -15431,6 +16239,236 @@ } } }, + "codersdk.Task": { + "type": "object", + "properties": { + "created_at": { + "type": "string", + "format": "date-time" + }, + "current_state": { + "$ref": "#/definitions/codersdk.TaskStateEntry" + }, + "id": { + "type": "string", + "format": "uuid" + }, + "initial_prompt": { + "type": "string" + }, + "name": { + "type": "string" + }, + "organization_id": { + "type": "string", + "format": "uuid" + }, + "owner_avatar_url": { + "type": "string" + }, + "owner_id": { + "type": "string", + "format": "uuid" + }, + "owner_name": { + "type": "string" + }, + "status": { + "enum": [ + "pending", + "initializing", + "active", + "paused", + "unknown", + "error" + ], + "allOf": [ + { + "$ref": "#/definitions/codersdk.TaskStatus" + } + ] + }, + "template_display_name": { + "type": "string" + }, + "template_icon": { + "type": "string" + }, + "template_id": { + "type": "string", + "format": "uuid" + }, + "template_name": { + "type": "string" + }, + "template_version_id": { + "type": "string", + "format": "uuid" + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "workspace_agent_health": { + "$ref": "#/definitions/codersdk.WorkspaceAgentHealth" + }, + "workspace_agent_id": { + 
"format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_agent_lifecycle": { + "$ref": "#/definitions/codersdk.WorkspaceAgentLifecycle" + }, + "workspace_app_id": { + "format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_build_number": { + "type": "integer" + }, + "workspace_id": { + "format": "uuid", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, + "workspace_name": { + "type": "string" + }, + "workspace_status": { + "enum": [ + "pending", + "starting", + "running", + "stopping", + "stopped", + "failed", + "canceling", + "canceled", + "deleting", + "deleted" + ], + "allOf": [ + { + "$ref": "#/definitions/codersdk.WorkspaceStatus" + } + ] + } + } + }, + "codersdk.TaskLogEntry": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "type": { + "$ref": "#/definitions/codersdk.TaskLogType" + } + } + }, + "codersdk.TaskLogType": { + "type": "string", + "enum": ["input", "output"], + "x-enum-varnames": ["TaskLogTypeInput", "TaskLogTypeOutput"] + }, + "codersdk.TaskLogsResponse": { + "type": "object", + "properties": { + "logs": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.TaskLogEntry" + } + } + } + }, + "codersdk.TaskSendRequest": { + "type": "object", + "properties": { + "input": { + "type": "string" + } + } + }, + "codersdk.TaskState": { + "type": "string", + "enum": ["working", "idle", "complete", "failed"], + "x-enum-varnames": [ + "TaskStateWorking", + "TaskStateIdle", + "TaskStateComplete", + "TaskStateFailed" + ] + }, + "codersdk.TaskStateEntry": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "state": { + "$ref": "#/definitions/codersdk.TaskState" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "uri": { + "type": "string" + } + } + }, + 
"codersdk.TaskStatus": { + "type": "string", + "enum": [ + "pending", + "initializing", + "active", + "paused", + "unknown", + "error" + ], + "x-enum-varnames": [ + "TaskStatusPending", + "TaskStatusInitializing", + "TaskStatusActive", + "TaskStatusPaused", + "TaskStatusUnknown", + "TaskStatusError" + ] + }, + "codersdk.TasksListResponse": { + "type": "object", + "properties": { + "count": { + "type": "integer" + }, + "tasks": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, "codersdk.TelemetryConfig": { "type": "object", "properties": { @@ -17060,6 +18098,14 @@ "description": "OwnerName is the username of the owner of the workspace.", "type": "string" }, + "task_id": { + "description": "TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task.", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, "template_active_version_id": { "type": "string", "format": "uuid" @@ -17815,6 +18861,7 @@ "type": "object", "properties": { "ai_task_sidebar_app_id": { + "description": "Deprecated: This field has been replaced with `Task.WorkspaceAppID`", "type": "string", "format": "uuid" }, @@ -18292,6 +19339,9 @@ "type": "string", "format": "uuid" }, + "name": { + "type": "string" + }, "role": { "enum": ["admin", "use"], "allOf": [ diff --git a/coderd/apikey.go b/coderd/apikey.go index c7a7eef3c6a7e..f2aec89e5709e 100644 --- a/coderd/apikey.go +++ b/coderd/apikey.go @@ -116,6 +116,37 @@ func (api *API) postToken(rw http.ResponseWriter, r *http.Request) { TokenName: tokenName, } + if len(createToken.AllowList) > 0 { + rbacAllowListElements := make([]rbac.AllowListElement, 0, len(createToken.AllowList)) + for _, t := range createToken.AllowList { + entry, err := rbac.NewAllowListElement(string(t.Type), t.ID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Failed to create API key.", + Detail: err.Error(), + }) + return + } + rbacAllowListElements = 
append(rbacAllowListElements, entry) + } + + rbacAllowList, err := rbac.NormalizeAllowList(rbacAllowListElements) + if err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Failed to create API key.", + Detail: err.Error(), + }) + return + } + + dbAllowList := make(database.AllowList, 0, len(rbacAllowList)) + for _, e := range rbacAllowList { + dbAllowList = append(dbAllowList, rbac.AllowListElement{Type: e.Type, ID: e.ID}) + } + + params.AllowList = dbAllowList + } + if createToken.Lifetime != 0 { err := api.validateAPIKeyLifetime(ctx, user.ID, createToken.Lifetime) if err != nil { diff --git a/coderd/apikey/apikey.go b/coderd/apikey/apikey.go index ea186223a1374..89bbb7ca536d8 100644 --- a/coderd/apikey/apikey.go +++ b/coderd/apikey/apikey.go @@ -2,6 +2,7 @@ package apikey import ( "crypto/sha256" + "crypto/subtle" "fmt" "net" "time" @@ -12,6 +13,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/cryptorand" ) @@ -34,18 +36,26 @@ type CreateParams struct { Scopes database.APIKeyScopes TokenName string RemoteAddr string + // AllowList is an optional, normalized allow-list + // of resource type and uuid entries. If empty, defaults to wildcard. + AllowList database.AllowList } // Generate generates an API key, returning the key as a string as well as the // database representation. It is the responsibility of the caller to insert it // into the database. func Generate(params CreateParams) (database.InsertAPIKeyParams, string, error) { - keyID, keySecret, err := generateKey() + // Length of an API Key ID. 
+ keyID, err := cryptorand.String(10) if err != nil { - return database.InsertAPIKeyParams{}, "", xerrors.Errorf("generate API key: %w", err) + return database.InsertAPIKeyParams{}, "", xerrors.Errorf("generate API key ID: %w", err) } - hashed := sha256.Sum256([]byte(keySecret)) + // Length of an API Key secret. + keySecret, hashedSecret, err := GenerateSecret(22) + if err != nil { + return database.InsertAPIKeyParams{}, "", xerrors.Errorf("generate API key secret: %w", err) + } // Default expires at to now+lifetime, or use the configured value if not // set. @@ -61,6 +71,10 @@ func Generate(params CreateParams) (database.InsertAPIKeyParams, string, error) params.LifetimeSeconds = int64(time.Until(params.ExpiresAt).Seconds()) } + if len(params.AllowList) == 0 { + params.AllowList = database.AllowList{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}} + } + ip := net.ParseIP(params.RemoteAddr) if ip == nil { ip = net.IPv4(0, 0, 0, 0) @@ -112,25 +126,32 @@ func Generate(params CreateParams) (database.InsertAPIKeyParams, string, error) ExpiresAt: params.ExpiresAt.UTC(), CreatedAt: dbtime.Now(), UpdatedAt: dbtime.Now(), - HashedSecret: hashed[:], + HashedSecret: hashedSecret, LoginType: params.LoginType, Scopes: scopes, - AllowList: database.AllowList{database.AllowListWildcard()}, + AllowList: params.AllowList, TokenName: params.TokenName, }, token, nil } -// generateKey a new ID and secret for an API key. -func generateKey() (id string, secret string, err error) { - // Length of an API Key ID. - id, err = cryptorand.String(10) +func GenerateSecret(length int) (secret string, hashed []byte, err error) { + secret, err = cryptorand.String(length) if err != nil { - return "", "", err + return "", nil, err } - // Length of an API Key secret. 
- secret, err = cryptorand.String(22) - if err != nil { - return "", "", err - } - return id, secret, nil + hash := HashSecret(secret) + return secret, hash, nil +} + +// ValidateHash compares a secret against an expected hashed secret. +func ValidateHash(hashedSecret []byte, secret string) bool { + hash := HashSecret(secret) + return subtle.ConstantTimeCompare(hashedSecret, hash) == 1 +} + +// HashSecret is the single function used to hash API key secrets. +// Use this to ensure a consistent hashing algorithm. +func HashSecret(secret string) []byte { + hash := sha256.Sum256([]byte(secret)) + return hash[:] } diff --git a/coderd/apikey/apikey_test.go b/coderd/apikey/apikey_test.go index 1f5de3aa18a49..aa17a02561eeb 100644 --- a/coderd/apikey/apikey_test.go +++ b/coderd/apikey/apikey_test.go @@ -1,7 +1,6 @@ package apikey_test import ( - "crypto/sha256" "strings" "testing" "time" @@ -126,8 +125,8 @@ func TestGenerate(t *testing.T) { require.Equal(t, key.ID, keytokens[0]) // Assert that the hashed secret is correct. - hashed := sha256.Sum256([]byte(keytokens[1])) - assert.ElementsMatch(t, hashed, key.HashedSecret) + equal := apikey.ValidateHash(key.HashedSecret, keytokens[1]) + require.True(t, equal, "valid secret") assert.Equal(t, tc.params.UserID, key.UserID) assert.WithinDuration(t, dbtime.Now(), key.CreatedAt, time.Second*5) @@ -173,3 +172,17 @@ func TestGenerate(t *testing.T) { }) } } + +// TestInvalid just ensures the false case is asserted by some tests. +// Otherwise, a function that just `returns true` might pass all tests incorrectly. 
+func TestInvalid(t *testing.T) { + t.Parallel() + + require.Falsef(t, apikey.ValidateHash([]byte{}, "secret"), "empty hash") + + secret, hash, err := apikey.GenerateSecret(10) + require.NoError(t, err) + + require.Falsef(t, apikey.ValidateHash(hash, secret+"_"), "different secret") + require.Falsef(t, apikey.ValidateHash(hash[:len(hash)-1], secret), "different hash length") +} diff --git a/coderd/apikey_test.go b/coderd/apikey_test.go index f980706d6ef6e..65feb1c9cb808 100644 --- a/coderd/apikey_test.go +++ b/coderd/apikey_test.go @@ -51,6 +51,8 @@ func TestTokenCRUD(t *testing.T) { require.Greater(t, keys[0].ExpiresAt, time.Now().Add(time.Hour*24*6)) require.Less(t, keys[0].ExpiresAt, time.Now().Add(time.Hour*24*8)) require.Equal(t, codersdk.APIKeyScopeAll, keys[0].Scope) + require.Len(t, keys[0].AllowList, 1) + require.Equal(t, "*:*", keys[0].AllowList[0].String()) // no update @@ -86,6 +88,8 @@ func TestTokenScoped(t *testing.T) { require.EqualValues(t, len(keys), 1) require.Contains(t, res.Key, keys[0].ID) require.Equal(t, keys[0].Scope, codersdk.APIKeyScopeApplicationConnect) + require.Len(t, keys[0].AllowList, 1) + require.Equal(t, "*:*", keys[0].AllowList[0].String()) } // Ensure backward-compat: when a token is created using the legacy singular @@ -132,6 +136,8 @@ func TestTokenLegacySingularScopeCompat(t *testing.T) { require.Len(t, keys, 1) require.Equal(t, tc.scope, keys[0].Scope) require.ElementsMatch(t, keys[0].Scopes, tc.scopes) + require.Len(t, keys[0].AllowList, 1) + require.Equal(t, "*:*", keys[0].AllowList[0].String()) }) } } diff --git a/coderd/audit.go b/coderd/audit.go index e8d7c4dfe9bca..3a3237a9fed50 100644 --- a/coderd/audit.go +++ b/coderd/audit.go @@ -420,6 +420,14 @@ func (api *API) auditLogIsResourceDeleted(ctx context.Context, alog database.Get api.Logger.Error(ctx, "unable to fetch oauth2 app secret", slog.Error(err)) } return false + case database.ResourceTypeTask: + task, err := api.Database.GetTaskByID(ctx, 
alog.AuditLog.ResourceID) + if xerrors.Is(err, sql.ErrNoRows) { + return true + } else if err != nil { + api.Logger.Error(ctx, "unable to fetch task", slog.Error(err)) + } + return task.DeletedAt.Valid && task.DeletedAt.Time.Before(time.Now()) default: return false } @@ -496,6 +504,17 @@ func (api *API) auditLogResourceLink(ctx context.Context, alog database.GetAudit } return fmt.Sprintf("/deployment/oauth2-provider/apps/%s", secret.AppID) + case database.ResourceTypeTask: + task, err := api.Database.GetTaskByID(ctx, alog.AuditLog.ResourceID) + if err != nil { + return "" + } + user, err := api.Database.GetUserByID(ctx, task.OwnerID) + if err != nil { + return "" + } + return fmt.Sprintf("/tasks/%s/%s", user.Username, task.ID) + default: return "" } diff --git a/coderd/audit/diff.go b/coderd/audit/diff.go index b8139bb63b290..c14dbc392f356 100644 --- a/coderd/audit/diff.go +++ b/coderd/audit/diff.go @@ -31,7 +31,8 @@ type Auditable interface { database.NotificationTemplate | idpsync.OrganizationSyncSettings | idpsync.GroupSyncSettings | - idpsync.RoleSyncSettings + idpsync.RoleSyncSettings | + database.TaskTable } // Map is a map of changed fields in an audited resource. 
It maps field names to diff --git a/coderd/audit/request.go b/coderd/audit/request.go index a973bdb915e3c..20aa89f6a870d 100644 --- a/coderd/audit/request.go +++ b/coderd/audit/request.go @@ -131,6 +131,8 @@ func ResourceTarget[T Auditable](tgt T) string { return "Organization Group Sync" case idpsync.RoleSyncSettings: return "Organization Role Sync" + case database.TaskTable: + return typed.Name default: panic(fmt.Sprintf("unknown resource %T for ResourceTarget", tgt)) } @@ -193,6 +195,8 @@ func ResourceID[T Auditable](tgt T) uuid.UUID { return noID // Org field on audit log has org id case idpsync.RoleSyncSettings: return noID // Org field on audit log has org id + case database.TaskTable: + return typed.ID default: panic(fmt.Sprintf("unknown resource %T for ResourceID", tgt)) } @@ -246,6 +250,8 @@ func ResourceType[T Auditable](tgt T) database.ResourceType { return database.ResourceTypeIdpSyncSettingsRole case idpsync.GroupSyncSettings: return database.ResourceTypeIdpSyncSettingsGroup + case database.TaskTable: + return database.ResourceTypeTask default: panic(fmt.Sprintf("unknown resource %T for ResourceType", typed)) } @@ -302,6 +308,8 @@ func ResourceRequiresOrgID[T Auditable]() bool { return true case idpsync.RoleSyncSettings: return true + case database.TaskTable: + return true default: panic(fmt.Sprintf("unknown resource %T for ResourceRequiresOrgID", tgt)) } diff --git a/coderd/authorize_test.go b/coderd/authorize_test.go index b8084211de60c..e3ce4b922f7c4 100644 --- a/coderd/authorize_test.go +++ b/coderd/authorize_test.go @@ -50,24 +50,25 @@ func TestCheckPermissions(t *testing.T) { }, Action: "read", }, - readMyself: { + readOrgWorkspaces: { Object: codersdk.AuthorizationObject{ - ResourceType: codersdk.ResourceUser, - OwnerID: "me", + ResourceType: codersdk.ResourceWorkspace, + OrganizationID: adminUser.OrganizationID.String(), }, Action: "read", }, - readOwnWorkspaces: { + readMyself: { Object: codersdk.AuthorizationObject{ - ResourceType: 
codersdk.ResourceWorkspace, + ResourceType: codersdk.ResourceUser, OwnerID: "me", }, Action: "read", }, - readOrgWorkspaces: { + readOwnWorkspaces: { Object: codersdk.AuthorizationObject{ ResourceType: codersdk.ResourceWorkspace, OrganizationID: adminUser.OrganizationID.String(), + OwnerID: "me", }, Action: "read", }, @@ -92,9 +93,9 @@ func TestCheckPermissions(t *testing.T) { UserID: adminUser.UserID, Check: map[string]bool{ readAllUsers: true, + readOrgWorkspaces: true, readMyself: true, readOwnWorkspaces: true, - readOrgWorkspaces: true, updateSpecificTemplate: true, }, }, @@ -104,9 +105,9 @@ func TestCheckPermissions(t *testing.T) { UserID: orgAdminUser.ID, Check: map[string]bool{ readAllUsers: true, + readOrgWorkspaces: true, readMyself: true, readOwnWorkspaces: true, - readOrgWorkspaces: true, updateSpecificTemplate: true, }, }, @@ -116,9 +117,9 @@ func TestCheckPermissions(t *testing.T) { UserID: memberUser.ID, Check: map[string]bool{ readAllUsers: false, + readOrgWorkspaces: false, readMyself: true, readOwnWorkspaces: true, - readOrgWorkspaces: false, updateSpecificTemplate: false, }, }, diff --git a/coderd/autobuild/lifecycle_executor_test.go b/coderd/autobuild/lifecycle_executor_test.go index 1bd50564b6b9b..466c8c40525e1 100644 --- a/coderd/autobuild/lifecycle_executor_test.go +++ b/coderd/autobuild/lifecycle_executor_test.go @@ -776,10 +776,6 @@ func TestExecutorWorkspaceAutostopNoWaitChangedMyMind(t *testing.T) { } func TestExecutorAutostartMultipleOK(t *testing.T) { - if !dbtestutil.WillUsePostgres() { - t.Skip(`This test only really works when using a "real" database, similar to a HA setup`) - } - t.Parallel() var ( @@ -1259,10 +1255,6 @@ func TestNotifications(t *testing.T) { func TestExecutorPrebuilds(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - // Prebuild workspaces should not be autostopped when the deadline is reached. 
// After being claimed, the workspace should stop at the deadline. t.Run("OnlyStopsAfterClaimed", func(t *testing.T) { @@ -1772,3 +1764,175 @@ func TestExecutorAutostartSkipsWhenNoProvisionersAvailable(t *testing.T) { assert.Len(t, stats.Transitions, 1, "should create builds when provisioners are available") } + +func TestExecutorTaskWorkspace(t *testing.T) { + t.Parallel() + + createTaskTemplate := func(t *testing.T, client *codersdk.Client, orgID uuid.UUID, ctx context.Context, defaultTTL time.Duration) codersdk.Template { + t.Helper() + + taskAppID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, orgID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{HasAiTasks: true}, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Agents: []*proto.Agent{ + { + Id: uuid.NewString(), + Name: "dev", + Auth: &proto.Agent_Token{ + Token: uuid.NewString(), + }, + Apps: []*proto.App{ + { + Id: taskAppID.String(), + Slug: "task-app", + }, + }, + }, + }, + }, + }, + AiTasks: []*proto.AITask{ + { + AppId: taskAppID.String(), + }, + }, + }, + }, + }, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, orgID, version.ID) + + if defaultTTL > 0 { + _, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ + DefaultTTLMillis: defaultTTL.Milliseconds(), + }) + require.NoError(t, err) + } + + return template + } + + createTaskWorkspace := func(t *testing.T, client *codersdk.Client, template codersdk.Template, ctx context.Context, input string) codersdk.Workspace { + t.Helper() + + exp := codersdk.NewExperimentalClient(client) + task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: input, + }) + 
require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace") + + workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + return workspace + } + + t.Run("Autostart", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + AutobuildTicker: tickCh, + IncludeProvisionerDaemon: true, + AutobuildStats: statsCh, + }) + admin = coderdtest.CreateFirstUser(t, client) + ) + + // Given: A task workspace + template := createTaskTemplate(t, client, admin.OrganizationID, ctx, 0) + workspace := createTaskWorkspace(t, client, template, ctx, "test task for autostart") + + // Given: The task workspace has an autostart schedule + err := client.UpdateWorkspaceAutostart(ctx, workspace.ID, codersdk.UpdateWorkspaceAutostartRequest{ + Schedule: ptr.Ref(sched.String()), + }) + require.NoError(t, err) + + // Given: That the workspace is in a stopped state. 
+ workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, map[string]string{}) + require.NoError(t, err) + + // When: the autobuild executor ticks after the scheduled time + go func() { + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime + close(tickCh) + }() + + // Then: We expect to see a start transition + stats := <-statsCh + require.Len(t, stats.Transitions, 1, "lifecycle executor should transition the task workspace") + assert.Contains(t, stats.Transitions, workspace.ID, "task workspace should be in transitions") + assert.Equal(t, database.WorkspaceTransitionStart, stats.Transitions[workspace.ID], "should autostart the workspace") + require.Empty(t, stats.Errors, "should have no errors when managing task workspaces") + }) + + t.Run("Autostop", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + AutobuildTicker: tickCh, + IncludeProvisionerDaemon: true, + AutobuildStats: statsCh, + }) + admin = coderdtest.CreateFirstUser(t, client) + ) + + // Given: A task workspace with an 8 hour deadline + template := createTaskTemplate(t, client, admin.OrganizationID, ctx, 8*time.Hour) + workspace := createTaskWorkspace(t, client, template, ctx, "test task for autostop") + + // Given: The workspace is currently running + workspace = coderdtest.MustWorkspace(t, client, workspace.ID) + require.Equal(t, codersdk.WorkspaceTransitionStart, workspace.LatestBuild.Transition) + require.NotZero(t, workspace.LatestBuild.Deadline, "workspace should have a deadline for autostop") + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), 
workspace.OrganizationID, map[string]string{}) + require.NoError(t, err) + + // When: the autobuild executor ticks after the deadline + go func() { + tickTime := workspace.LatestBuild.Deadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime + close(tickCh) + }() + + // Then: We expect to see a stop transition + stats := <-statsCh + require.Len(t, stats.Transitions, 1, "lifecycle executor should transition the task workspace") + assert.Contains(t, stats.Transitions, workspace.ID, "task workspace should be in transitions") + assert.Equal(t, database.WorkspaceTransitionStop, stats.Transitions[workspace.ID], "should autostop the workspace") + require.Empty(t, stats.Errors, "should have no errors when managing task workspaces") + }) +} diff --git a/coderd/coderd.go b/coderd/coderd.go index 15f2767c22916..a1f94bfa6fee7 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -11,6 +11,7 @@ import ( "fmt" "io" "net/http" + httppprof "net/http/pprof" "net/url" "path/filepath" "regexp" @@ -32,6 +33,7 @@ import ( "github.com/google/uuid" "github.com/klauspost/compress/zstd" "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" httpSwagger "github.com/swaggo/http-swagger/v2" "go.opentelemetry.io/otel/trace" "golang.org/x/xerrors" @@ -491,7 +493,7 @@ func New(options *Options) *API { // We add this middleware early, to make sure that authorization checks made // by other middleware get recorded. 
if buildinfo.IsDev() { - r.Use(httpmw.RecordAuthzChecks) + r.Use(httpmw.RecordAuthzChecks(options.DeploymentValues.EnableAuthzRecording.Value())) } ctx, cancel := context.WithCancel(context.Background()) @@ -983,6 +985,16 @@ func New(options *Options) *API { r.Post("/", api.postOAuth2ProviderAppToken()) }) + // RFC 7009 Token Revocation Endpoint + r.Route("/revoke", func(r chi.Router) { + r.Use( + // RFC 7009 endpoint uses OAuth2 client authentication, not API key + httpmw.AsAuthzSystem(httpmw.ExtractOAuth2ProviderAppWithOAuth2Errors(options.Database)), + ) + // POST /revoke is the standard OAuth2 token revocation endpoint per RFC 7009 + r.Post("/", api.revokeOAuth2Token()) + }) + // RFC 7591 Dynamic Client Registration - Public endpoint r.Post("/register", api.postOAuth2ClientRegistration()) @@ -1020,11 +1032,15 @@ func New(options *Options) *API { r.Route("/{user}", func(r chi.Router) { r.Use(httpmw.ExtractOrganizationMembersParam(options.Database, api.HTTPAuth.Authorize)) - r.Get("/{id}", api.taskGet) - r.Delete("/{id}", api.taskDelete) - r.Post("/{id}/send", api.taskSend) - r.Get("/{id}/logs", api.taskLogs) r.Post("/", api.tasksCreate) + + r.Route("/{task}", func(r chi.Router) { + r.Use(httpmw.ExtractTaskParam(options.Database)) + r.Get("/", api.taskGet) + r.Delete("/", api.taskDelete) + r.Post("/send", api.taskSend) + r.Get("/logs", api.taskLogs) + }) }) }) r.Route("/mcp", func(r chi.Router) { @@ -1512,7 +1528,8 @@ func New(options *Options) *API { r.Route("/debug", func(r chi.Router) { r.Use( apiKeyMiddleware, - // Ensure only owners can access debug endpoints. + // Ensure only users with the debug_info:read (e.g. only owners) + // can view debug endpoints. 
func(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { if !api.Authorize(r, policy.ActionRead, rbac.ResourceDebugInfo) { @@ -1545,6 +1562,41 @@ func New(options *Options) *API { }) } r.Method("GET", "/expvar", expvar.Handler()) // contains DERP metrics as well as cmdline and memstats + + r.Route("/pprof", func(r chi.Router) { + r.Use(func(next http.Handler) http.Handler { + // Some of the pprof handlers strip the `/debug/pprof` + // prefix, so we need to strip our additional prefix as + // well. + return http.StripPrefix("/api/v2", next) + }) + + // Serve the index HTML page. + r.Get("/", func(w http.ResponseWriter, r *http.Request) { + // Redirect to include a trailing slash, otherwise links on + // the generated HTML page will be broken. + if !strings.HasSuffix(r.URL.Path, "/") { + http.Redirect(w, r, "/api/v2/debug/pprof/", http.StatusTemporaryRedirect) + return + } + httppprof.Index(w, r) + }) + + // Handle any out of the box pprof handlers that don't get + // dealt with by the default index handler. See httppprof.init. + r.Get("/cmdline", httppprof.Cmdline) + r.Get("/profile", httppprof.Profile) + r.Get("/symbol", httppprof.Symbol) + r.Get("/trace", httppprof.Trace) + + // Index will handle any standard and custom runtime/pprof + // profiles. + r.Get("/*", httppprof.Index) + }) + + r.Get("/metrics", promhttp.InstrumentMetricHandler( + options.PrometheusRegistry, promhttp.HandlerFor(options.PrometheusRegistry, promhttp.HandlerOpts{}), + ).ServeHTTP) }) // Manage OAuth2 applications that can use Coder as an OAuth2 provider. 
r.Route("/oauth2-provider", func(r chi.Router) { diff --git a/coderd/coderdtest/authorize.go b/coderd/coderdtest/authorize.go index f0d35cd6359a2..f53ef3fa3bea9 100644 --- a/coderd/coderdtest/authorize.go +++ b/coderd/coderdtest/authorize.go @@ -68,7 +68,7 @@ func AssertRBAC(t *testing.T, api *coderd.API, client *codersdk.Client) RBACAsse ID: key.UserID.String(), Roles: rbac.RoleIdentifiers(roleNames), Groups: roles.Groups, - Scope: key.Scopes, + Scope: key.ScopeSet(), }, Recorder: recorder, } diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index ab1c641c0c27d..463ee888f6f22 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -1128,6 +1128,7 @@ type WorkspaceAgentWaiter struct { workspaceID uuid.UUID agentNames []string resourcesMatcher func([]codersdk.WorkspaceResource) bool + ctx context.Context } // NewWorkspaceAgentWaiter returns an object that waits for agents to connect when @@ -1156,6 +1157,14 @@ func (w WorkspaceAgentWaiter) MatchResources(m func([]codersdk.WorkspaceResource return w } +// WithContext instructs the waiter to use the provided context for all operations. +// If not specified, the waiter will create its own context with testutil.WaitLong timeout. +func (w WorkspaceAgentWaiter) WithContext(ctx context.Context) WorkspaceAgentWaiter { + //nolint: revive // returns modified struct + w.ctx = ctx + return w +} + // WaitForAgentFn represents a boolean assertion to be made against each agent // that a given WorkspaceAgentWaited knows about. Each WaitForAgentFn should apply // the check to a single agent, but it should be named for plural, because `func (w WorkspaceAgentWaiter) WaitFor` @@ -1176,6 +1185,8 @@ func AgentsNotReady(agent codersdk.WorkspaceAgent) bool { return !AgentsReady(agent) } +// WaitFor waits for the given criteria and fails the test if they are not met before the +// waiter's context is canceled. 
func (w WorkspaceAgentWaiter) WaitFor(criteria ...WaitForAgentFn) { w.t.Helper() @@ -1184,11 +1195,13 @@ func (w WorkspaceAgentWaiter) WaitFor(criteria ...WaitForAgentFn) { agentNamesMap[name] = struct{}{} } - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := w.ctx + if w.ctx == nil { + ctx = testutil.Context(w.t, testutil.WaitLong) + } w.t.Logf("waiting for workspace agents (workspace %s)", w.workspaceID) - require.Eventually(w.t, func() bool { + testutil.Eventually(ctx, w.t, func(ctx context.Context) bool { var err error workspace, err := w.client.Workspace(ctx, w.workspaceID) if err != nil { @@ -1216,10 +1229,11 @@ func (w WorkspaceAgentWaiter) WaitFor(criteria ...WaitForAgentFn) { } } return true - }, testutil.WaitLong, testutil.IntervalMedium) + }, testutil.IntervalMedium) } -// Wait waits for the agent(s) to connect and fails the test if they do not within testutil.WaitLong +// Wait waits for the agent(s) to connect and fails the test if they do not connect before the +// waiter's context is canceled. 
func (w WorkspaceAgentWaiter) Wait() []codersdk.WorkspaceResource { w.t.Helper() @@ -1228,12 +1242,14 @@ func (w WorkspaceAgentWaiter) Wait() []codersdk.WorkspaceResource { agentNamesMap[name] = struct{}{} } - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := w.ctx + if w.ctx == nil { + ctx = testutil.Context(w.t, testutil.WaitLong) + } w.t.Logf("waiting for workspace agents (workspace %s)", w.workspaceID) var resources []codersdk.WorkspaceResource - require.Eventually(w.t, func() bool { + testutil.Eventually(ctx, w.t, func(ctx context.Context) bool { var err error workspace, err := w.client.Workspace(ctx, w.workspaceID) if err != nil { @@ -1265,7 +1281,7 @@ func (w WorkspaceAgentWaiter) Wait() []codersdk.WorkspaceResource { return true } return w.resourcesMatcher(resources) - }, testutil.WaitLong, testutil.IntervalMedium) + }, testutil.IntervalMedium) w.t.Logf("got workspace agents (workspace %s)", w.workspaceID) return resources } diff --git a/coderd/coderdtest/swaggerparser.go b/coderd/coderdtest/swaggerparser.go index 4a0b6744a9082..cac6fdf7a9278 100644 --- a/coderd/coderdtest/swaggerparser.go +++ b/coderd/coderdtest/swaggerparser.go @@ -160,8 +160,9 @@ func VerifySwaggerDefinitions(t *testing.T, router chi.Router, swaggerComments [ t.Run(method+" "+route, func(t *testing.T) { t.Parallel() - // This route is for compatibility purposes and is not documented. - if route == "/workspaceagents/me/metadata" { + // Wildcard routes break the swaggo parser, so we do not document + // them. 
+ if strings.HasSuffix(route, "/*") { return } diff --git a/coderd/connectionlog/connectionlog.go b/coderd/connectionlog/connectionlog.go index 1b56ffc288fd3..b3d9e9115f5c0 100644 --- a/coderd/connectionlog/connectionlog.go +++ b/coderd/connectionlog/connectionlog.go @@ -62,10 +62,6 @@ func (m *FakeConnectionLogger) Contains(t testing.TB, expected database.UpsertCo t.Logf("connection log %d: expected ID %s, got %s", idx+1, expected.ID, cl.ID) continue } - if !expected.Time.IsZero() && expected.Time != cl.Time { - t.Logf("connection log %d: expected Time %s, got %s", idx+1, expected.Time, cl.Time) - continue - } if expected.OrganizationID != uuid.Nil && cl.OrganizationID != expected.OrganizationID { t.Logf("connection log %d: expected OrganizationID %s, got %s", idx+1, expected.OrganizationID, cl.OrganizationID) continue @@ -114,6 +110,18 @@ func (m *FakeConnectionLogger) Contains(t testing.TB, expected database.UpsertCo t.Logf("connection log %d: expected ConnectionID %s, got %s", idx+1, expected.ConnectionID.UUID, cl.ConnectionID.UUID) continue } + if expected.DisconnectReason.Valid && cl.DisconnectReason.String != expected.DisconnectReason.String { + t.Logf("connection log %d: expected DisconnectReason %s, got %s", idx+1, expected.DisconnectReason.String, cl.DisconnectReason.String) + continue + } + if !expected.Time.IsZero() && expected.Time != cl.Time { + t.Logf("connection log %d: expected Time %s, got %s", idx+1, expected.Time, cl.Time) + continue + } + if expected.ConnectionStatus != "" && expected.ConnectionStatus != cl.ConnectionStatus { + t.Logf("connection log %d: expected ConnectionStatus %s, got %s", idx+1, expected.ConnectionStatus, cl.ConnectionStatus) + continue + } return true } diff --git a/coderd/database/check_constraint.go b/coderd/database/check_constraint.go index ac204f85f5603..8b1917b7697db 100644 --- a/coderd/database/check_constraint.go +++ b/coderd/database/check_constraint.go @@ -6,13 +6,15 @@ type CheckConstraint string // 
CheckConstraint enums. const ( + CheckAPIKeysAllowListNotEmpty CheckConstraint = "api_keys_allow_list_not_empty" // api_keys CheckOneTimePasscodeSet CheckConstraint = "one_time_passcode_set" // users CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs - CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters - CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents CheckWorkspaceBuildsAiTaskSidebarAppIDRequired CheckConstraint = "workspace_builds_ai_task_sidebar_app_id_required" // workspace_builds CheckWorkspaceBuildsDeadlineBelowMaxDeadline CheckConstraint = "workspace_builds_deadline_below_max_deadline" // workspace_builds + CheckTelemetryLockEventTypeConstraint CheckConstraint = "telemetry_lock_event_type_constraint" // telemetry_locks + CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters + CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events ) diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 7d9622b316c2e..2e1770d47dc87 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -51,6 +51,13 @@ func ListLazy[F any, T any](convert func(F) T) func(list []F) []T { } } +func APIAllowListTarget(entry rbac.AllowListElement) codersdk.APIAllowListTarget { + return codersdk.APIAllowListTarget{ + Type: codersdk.RBACResource(entry.Type), + ID: entry.ID, + } +} + type ExternalAuthMeta struct { Authenticated bool ValidateError string @@ -189,6 +196,16 @@ func MinimalUser(user database.User) codersdk.MinimalUser { return codersdk.MinimalUser{ ID: user.ID, 
Username: user.Username, + Name: user.Name, + AvatarURL: user.AvatarURL, + } +} + +func MinimalUserFromVisibleUser(user database.VisibleUser) codersdk.MinimalUser { + return codersdk.MinimalUser{ + ID: user.ID, + Username: user.Username, + Name: user.Name, AvatarURL: user.AvatarURL, } } @@ -197,7 +214,6 @@ func ReducedUser(user database.User) codersdk.ReducedUser { return codersdk.ReducedUser{ MinimalUser: MinimalUser(user), Email: user.Email, - Name: user.Name, CreatedAt: user.CreatedAt, UpdatedAt: user.UpdatedAt, LastSeenAt: user.LastSeenAt, @@ -374,6 +390,9 @@ func OAuth2ProviderApp(accessURL *url.URL, dbApp database.OAuth2ProviderApp) cod }).String(), // We do not currently support DeviceAuth. DeviceAuth: "", + TokenRevoke: accessURL.ResolveReference(&url.URL{ + Path: "/oauth2/revoke", + }).String(), }, } } @@ -693,14 +712,15 @@ func SlimRoleFromName(name string) codersdk.SlimRole { func RBACRole(role rbac.Role) codersdk.Role { slim := SlimRole(role) - orgPerms := role.Org[slim.OrganizationID] + orgPerms := role.ByOrgID[slim.OrganizationID] return codersdk.Role{ - Name: slim.Name, - OrganizationID: slim.OrganizationID, - DisplayName: slim.DisplayName, - SitePermissions: List(role.Site, RBACPermission), - OrganizationPermissions: List(orgPerms, RBACPermission), - UserPermissions: List(role.User, RBACPermission), + Name: slim.Name, + OrganizationID: slim.OrganizationID, + DisplayName: slim.DisplayName, + SitePermissions: List(role.Site, RBACPermission), + UserPermissions: List(role.User, RBACPermission), + OrganizationPermissions: List(orgPerms.Org, RBACPermission), + OrganizationMemberPermissions: List(orgPerms.Member, RBACPermission), } } @@ -715,8 +735,8 @@ func Role(role database.CustomRole) codersdk.Role { OrganizationID: orgID, DisplayName: role.DisplayName, SitePermissions: List(role.SitePermissions, Permission), - OrganizationPermissions: List(role.OrgPermissions, Permission), UserPermissions: List(role.UserPermissions, Permission), + 
OrganizationPermissions: List(role.OrgPermissions, Permission), } } @@ -927,7 +947,7 @@ func PreviewParameterValidation(v *previewtypes.ParameterValidation) codersdk.Pr } } -func AIBridgeInterception(interception database.AIBridgeInterception, tokenUsages []database.AIBridgeTokenUsage, userPrompts []database.AIBridgeUserPrompt, toolUsages []database.AIBridgeToolUsage) codersdk.AIBridgeInterception { +func AIBridgeInterception(interception database.AIBridgeInterception, initiator database.VisibleUser, tokenUsages []database.AIBridgeTokenUsage, userPrompts []database.AIBridgeUserPrompt, toolUsages []database.AIBridgeToolUsage) codersdk.AIBridgeInterception { sdkTokenUsages := List(tokenUsages, AIBridgeTokenUsage) sort.Slice(sdkTokenUsages, func(i, j int) bool { // created_at ASC @@ -943,9 +963,9 @@ func AIBridgeInterception(interception database.AIBridgeInterception, tokenUsage // created_at ASC return sdkToolUsages[i].CreatedAt.Before(sdkToolUsages[j].CreatedAt) }) - return codersdk.AIBridgeInterception{ + intc := codersdk.AIBridgeInterception{ ID: interception.ID, - InitiatorID: interception.InitiatorID, + Initiator: MinimalUserFromVisibleUser(initiator), Provider: interception.Provider, Model: interception.Model, Metadata: jsonOrEmptyMap(interception.Metadata), @@ -954,6 +974,10 @@ func AIBridgeInterception(interception database.AIBridgeInterception, tokenUsage UserPrompts: sdkUserPrompts, ToolUsages: sdkToolUsages, } + if interception.EndedAt.Valid { + intc.EndedAt = &interception.EndedAt.Time + } + return intc } func AIBridgeTokenUsage(usage database.AIBridgeTokenUsage) codersdk.AIBridgeTokenUsage { diff --git a/coderd/database/db_test.go b/coderd/database/db_test.go index f9442942e53e1..68b60a788fd3d 100644 --- a/coderd/database/db_test.go +++ b/coderd/database/db_test.go @@ -85,10 +85,6 @@ func TestNestedInTx(t *testing.T) { func testSQLDB(t testing.TB) *sql.DB { t.Helper() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - 
connection, err := dbtestutil.Open(t) require.NoError(t, err) diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index dae0755cb5236..8066ebd0479a1 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -219,7 +219,9 @@ var ( rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionCreateAgent}, - rbac.ResourceApiKey.Type: {policy.WildcardSymbol}, + // Provisionerd needs to read, update, and delete tasks associated with workspaces. + rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceApiKey.Type: {policy.WildcardSymbol}, // When org scoped provisioner credentials are implemented, // this can be reduced to read a specific org. 
rbac.ResourceOrganization.Type: {policy.ActionRead}, @@ -232,8 +234,8 @@ var ( // Provisionerd creates usage events rbac.ResourceUsageEvent.Type: {policy.ActionCreate}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -252,13 +254,14 @@ var ( rbac.ResourceFile.Type: {policy.ActionRead}, // Required to read terraform files rbac.ResourceNotificationMessage.Type: {policy.ActionCreate, policy.ActionRead}, rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate}, rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, rbac.ResourceUser.Type: {policy.ActionRead}, rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -274,13 +277,14 @@ var ( Identifier: rbac.RoleIdentifier{Name: "jobreaper"}, DisplayName: "Job Reaper Daemon", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceSystem.Type: {policy.WildcardSymbol}, - rbac.ResourceTemplate.Type: {policy.ActionRead}, - rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, - rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate}, 
}), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -298,8 +302,8 @@ var ( Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -317,8 +321,8 @@ var ( Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -335,8 +339,8 @@ var ( Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceConnectionLog.Type: {policy.ActionUpdate, policy.ActionRead}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -356,8 +360,8 @@ var ( rbac.ResourceWebpushSubscription.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceDeploymentConfig.Type: {policy.ActionRead, policy.ActionUpdate}, // To read and upsert VAPID keys }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -375,8 +379,8 @@ var ( // The workspace monitor needs to be able to update monitors rbac.ResourceWorkspaceAgentResourceMonitor.Type: {policy.ActionUpdate}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -392,12 +396,14 @@ var ( Identifier: rbac.RoleIdentifier{Name: "subagentapi"}, DisplayName: "Sub Agent API", 
Site: []rbac.Permission{}, - Org: map[string][]rbac.Permission{ - orgID.String(): {}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{ + orgID.String(): { + Member: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreateAgent, policy.ActionDeleteAgent}, + }), + }, }, - User: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreateAgent, policy.ActionDeleteAgent}, - }), }, }), Scope: rbac.ScopeAll, @@ -436,8 +442,36 @@ var ( rbac.ResourceOauth2App.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceOauth2AppSecret.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() + + subjectSystemOAuth2 = rbac.Subject{ + Type: rbac.SubjectTypeSystemOAuth, + FriendlyName: "System OAuth2", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "system-oauth2"}, + DisplayName: "System OAuth2", + Site: rbac.Permissions(map[string][]policy.Action{ + // OAuth2 resources - full CRUD permissions + rbac.ResourceOauth2App.Type: rbac.ResourceOauth2App.AvailableActions(), + rbac.ResourceOauth2AppSecret.Type: rbac.ResourceOauth2AppSecret.AvailableActions(), + rbac.ResourceOauth2AppCodeToken.Type: rbac.ResourceOauth2AppCodeToken.AvailableActions(), + + // API key permissions needed for OAuth2 token revocation + rbac.ResourceApiKey.Type: {policy.ActionRead, policy.ActionDelete}, + + // Minimal read permissions that might be needed for OAuth2 operations + rbac.ResourceUser.Type: {policy.ActionRead}, + rbac.ResourceOrganization.Type: {policy.ActionRead}, + }), + User: 
[]rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -454,8 +488,8 @@ var ( Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceProvisionerDaemon.Type: {policy.ActionRead}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -531,8 +565,8 @@ var ( Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceFile.Type: {policy.ActionRead}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -552,8 +586,8 @@ var ( // reads/processes them. rbac.ResourceUsageEvent.Type: {policy.ActionRead, policy.ActionUpdate}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -576,8 +610,8 @@ var ( rbac.ResourceApiKey.Type: {policy.ActionRead}, // Validate API keys. rbac.ResourceAibridgeInterception.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -640,6 +674,12 @@ func AsSystemRestricted(ctx context.Context) context.Context { return As(ctx, subjectSystemRestricted) } +// AsSystemOAuth2 returns a context with an actor that has permissions +// required for OAuth2 provider operations (token revocation, device codes, registration). +func AsSystemOAuth2(ctx context.Context) context.Context { + return As(ctx, subjectSystemOAuth2) +} + // AsSystemReadProvisionerDaemons returns a context with an actor that has permissions // to read provisioner daemons. 
func AsSystemReadProvisionerDaemons(ctx context.Context) context.Context { @@ -1253,14 +1293,17 @@ func (q *querier) customRoleCheck(ctx context.Context, role database.CustomRole) return xerrors.Errorf("invalid role: %w", err) } - if len(rbacRole.Org) > 0 && len(rbacRole.Site) > 0 { - // This is a choice to keep roles simple. If we allow mixing site and org scoped perms, then knowing who can - // do what gets more complicated. - return xerrors.Errorf("invalid custom role, cannot assign both org and site permissions at the same time") + if len(rbacRole.ByOrgID) > 0 && (len(rbacRole.Site) > 0 || len(rbacRole.User) > 0) { + // This is a choice to keep roles simple. If we allow mixing site and org + // scoped perms, then knowing who can do what gets more complicated. Roles + // should either be entirely org-scoped or entirely unrelated to + // organizations. + return xerrors.Errorf("invalid custom role, cannot assign both org-scoped and site/user permissions at the same time") } - if len(rbacRole.Org) > 1 { - // Again to avoid more complexity in our roles + if len(rbacRole.ByOrgID) > 1 { + // Again to avoid more complexity in our roles. Roles are limited to one + // organization. 
return xerrors.Errorf("invalid custom role, cannot assign permissions to more than 1 org at a time") } @@ -1272,11 +1315,22 @@ func (q *querier) customRoleCheck(ctx context.Context, role database.CustomRole) } } - for orgID, perms := range rbacRole.Org { - for _, orgPerm := range perms { + for orgID, perms := range rbacRole.ByOrgID { + for _, orgPerm := range perms.Org { err := q.customRoleEscalationCheck(ctx, act, orgPerm, rbac.Object{OrgID: orgID, Type: orgPerm.ResourceType}) if err != nil { - return xerrors.Errorf("org=%q: %w", orgID, err) + return xerrors.Errorf("org=%q: org: %w", orgID, err) + } + } + for _, memberPerm := range perms.Member { + // The person giving the permission should still be required to have + // the permissions throughout the org in order to give individuals the + // same permission among their own resources, since the role can be given + // to anyone. The `Owner` is intentionally omitted from the `Object` to + // enforce this. + err := q.customRoleEscalationCheck(ctx, act, memberPerm, rbac.Object{OrgID: orgID, Type: memberPerm.ResourceType}) + if err != nil { + return xerrors.Errorf("org=%q: member: %w", orgID, err) } } } @@ -1294,8 +1348,8 @@ func (q *querier) customRoleCheck(ctx context.Context, role database.CustomRole) func (q *querier) authorizeProvisionerJob(ctx context.Context, job database.ProvisionerJob) error { switch job.Type { case database.ProvisionerJobTypeWorkspaceBuild: - // Authorized call to get workspace build. If we can read the build, we - // can read the job. + // Authorized call to get workspace build. If we can read the build, we can + // read the job. 
_, err := q.GetWorkspaceBuildByJobID(ctx, job.ID) if err != nil { return xerrors.Errorf("fetch related workspace build: %w", err) @@ -1338,8 +1392,8 @@ func (q *querier) ActivityBumpWorkspace(ctx context.Context, arg database.Activi } func (q *querier) AllUserIDs(ctx context.Context, includeSystem bool) ([]uuid.UUID, error) { - // Although this technically only reads users, only system-related functions should be - // allowed to call this. + // Although this technically only reads users, only system-related functions + // should be allowed to call this. if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } @@ -1358,8 +1412,8 @@ func (q *querier) ArchiveUnusedTemplateVersions(ctx context.Context, arg databas } func (q *querier) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { - // Could be any workspace and checking auth to each workspace is overkill for the purpose - // of this function. + // Could be any workspace and checking auth to each workspace is overkill for + // the purpose of this function. 
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceWorkspace.All()); err != nil { return err } @@ -1387,6 +1441,13 @@ func (q *querier) BulkMarkNotificationMessagesSent(ctx context.Context, arg data return q.db.BulkMarkNotificationMessagesSent(ctx, arg) } +func (q *querier) CalculateAIBridgeInterceptionsTelemetrySummary(ctx context.Context, arg database.CalculateAIBridgeInterceptionsTelemetrySummaryParams) (database.CalculateAIBridgeInterceptionsTelemetrySummaryRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAibridgeInterception); err != nil { + return database.CalculateAIBridgeInterceptionsTelemetrySummaryRow{}, err + } + return q.db.CalculateAIBridgeInterceptionsTelemetrySummary(ctx, arg) +} + func (q *querier) ClaimPrebuiltWorkspace(ctx context.Context, arg database.ClaimPrebuiltWorkspaceParams) (database.ClaimPrebuiltWorkspaceRow, error) { empty := database.ClaimPrebuiltWorkspaceRow{} @@ -1433,6 +1494,14 @@ func (q *querier) CleanTailnetTunnels(ctx context.Context) error { return q.db.CleanTailnetTunnels(ctx) } +func (q *querier) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) { + prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceAibridgeInterception.Type) + if err != nil { + return 0, xerrors.Errorf("(dev error) prepare sql filter: %w", err) + } + return q.db.CountAuthorizedAIBridgeInterceptions(ctx, arg, prep) +} + func (q *querier) CountAuditLogs(ctx context.Context, arg database.CountAuditLogsParams) (int64, error) { // Shortcut if the user is an owner. The SQL filter is noticeable, // and this is an easy win for owners. Which is the common case. 
@@ -1467,6 +1536,13 @@ func (q *querier) CountInProgressPrebuilds(ctx context.Context) ([]database.Coun return q.db.CountInProgressPrebuilds(ctx) } +func (q *querier) CountPendingNonActivePrebuilds(ctx context.Context) ([]database.CountPendingNonActivePrebuildsRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceWorkspace.All()); err != nil { + return nil, err + } + return q.db.CountPendingNonActivePrebuilds(ctx) +} + func (q *querier) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceInboxNotification.WithOwner(userID.String())); err != nil { return 0, err @@ -1671,6 +1747,13 @@ func (q *querier) DeleteOldProvisionerDaemons(ctx context.Context) error { return q.db.DeleteOldProvisionerDaemons(ctx) } +func (q *querier) DeleteOldTelemetryLocks(ctx context.Context, beforeTime time.Time) error { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { + return err + } + return q.db.DeleteOldTelemetryLocks(ctx, beforeTime) +} + func (q *querier) DeleteOldWorkspaceAgentLogs(ctx context.Context, threshold time.Time) error { if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err @@ -1753,6 +1836,19 @@ func (q *querier) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTa return q.db.DeleteTailnetTunnel(ctx, arg) } +func (q *querier) DeleteTask(ctx context.Context, arg database.DeleteTaskParams) (database.TaskTable, error) { + task, err := q.db.GetTaskByID(ctx, arg.ID) + if err != nil { + return database.TaskTable{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionDelete, task.RBACObject()); err != nil { + return database.TaskTable{}, err + } + + return q.db.DeleteTask(ctx, arg) +} + func (q *querier) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { // First get the secret to check ownership secret, err := q.GetUserSecret(ctx, 
id) @@ -1789,7 +1885,7 @@ func (q *querier) DeleteWorkspaceACLByID(ctx context.Context, id uuid.UUID) erro return w.WorkspaceTable(), nil } - return fetchAndExec(q.log, q.auth, policy.ActionUpdate, fetch, q.db.DeleteWorkspaceACLByID)(ctx, id) + return fetchAndExec(q.log, q.auth, policy.ActionShare, fetch, q.db.DeleteWorkspaceACLByID)(ctx, id) } func (q *querier) DeleteWorkspaceAgentPortShare(ctx context.Context, arg database.DeleteWorkspaceAgentPortShareParams) error { @@ -2417,7 +2513,7 @@ func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (d return q.db.GetOAuth2ProviderAppByID(ctx, id) } -func (q *querier) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken sql.NullString) (database.OAuth2ProviderApp, error) { +func (q *querier) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken []byte) (database.OAuth2ProviderApp, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2App); err != nil { return database.OAuth2ProviderApp{}, err } @@ -2553,6 +2649,13 @@ func (q *querier) GetOrganizationsByUserID(ctx context.Context, userID database. 
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationsByUserID)(ctx, userID) } +func (q *querier) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOrganization.All()); err != nil { + return nil, err + } + return q.db.GetOrganizationsWithPrebuildStatus(ctx, arg) +} + func (q *querier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { version, err := q.db.GetTemplateVersionByJobID(ctx, jobID) if err != nil { @@ -2882,6 +2985,14 @@ func (q *querier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) return q.db.GetTailnetTunnelPeerIDs(ctx, srcID) } +func (q *querier) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) { + return fetch(q.log, q.auth, q.db.GetTaskByID)(ctx, id) +} + +func (q *querier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) { + return fetch(q.log, q.auth, q.db.GetTaskByWorkspaceID)(ctx, workspaceID) +} + func (q *querier) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.TelemetryItem{}, err @@ -3377,7 +3488,7 @@ func (q *querier) GetWorkspaceACLByID(ctx context.Context, id uuid.UUID) (databa if err != nil { return database.GetWorkspaceACLByIDRow{}, err } - if err := q.authorizeContext(ctx, policy.ActionCreate, workspace); err != nil { + if err := q.authorizeContext(ctx, policy.ActionShare, workspace); err != nil { return database.GetWorkspaceACLByIDRow{}, err } return q.db.GetWorkspaceACLByID(ctx, id) @@ -3541,6 +3652,13 @@ func (q *querier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt return q.db.GetWorkspaceAgentsCreatedAfter(ctx, createdAt) } +func (q *querier) 
GetWorkspaceAgentsForMetrics(ctx context.Context) ([]database.GetWorkspaceAgentsForMetricsRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceWorkspace); err != nil { + return nil, err + } + return q.db.GetWorkspaceAgentsForMetrics(ctx) +} + func (q *querier) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { workspace, err := q.GetWorkspaceByID(ctx, workspaceID) if err != nil { @@ -3846,6 +3964,13 @@ func (q *querier) GetWorkspacesEligibleForTransition(ctx context.Context, now ti return q.db.GetWorkspacesEligibleForTransition(ctx, now) } +func (q *querier) GetWorkspacesForWorkspaceMetrics(ctx context.Context) ([]database.GetWorkspacesForWorkspaceMetricsRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceWorkspace); err != nil { + return nil, err + } + return q.db.GetWorkspacesForWorkspaceMetrics(ctx) +} + func (q *querier) InsertAIBridgeInterception(ctx context.Context, arg database.InsertAIBridgeInterceptionParams) (database.AIBridgeInterception, error) { return insert(q.log, q.auth, rbac.ResourceAibridgeInterception.WithOwner(arg.InitiatorID.String()), q.db.InsertAIBridgeInterception)(ctx, arg) } @@ -4107,6 +4232,17 @@ func (q *querier) InsertReplica(ctx context.Context, arg database.InsertReplicaP return q.db.InsertReplica(ctx, arg) } +func (q *querier) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) { + // Ensure the actor can access the specified template version (and thus its template). 
+ if _, err := q.GetTemplateVersionByID(ctx, arg.TemplateVersionID); err != nil { + return database.TaskTable{}, err + } + + obj := rbac.ResourceTask.WithOwner(arg.OwnerID.String()).InOrg(arg.OrganizationID) + + return insert(q.log, q.auth, obj, q.db.InsertTask)(ctx, arg) +} + func (q *querier) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error { if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err @@ -4114,6 +4250,13 @@ func (q *querier) InsertTelemetryItemIfNotExists(ctx context.Context, arg databa return q.db.InsertTelemetryItemIfNotExists(ctx, arg) } +func (q *querier) InsertTelemetryLock(ctx context.Context, arg database.InsertTelemetryLockParams) error { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { + return err + } + return q.db.InsertTelemetryLock(ctx, arg) +} + func (q *querier) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { obj := rbac.ResourceTemplate.InOrg(arg.OrganizationID) if err := q.authorizeContext(ctx, policy.ActionCreate, obj); err != nil { @@ -4417,7 +4560,7 @@ func (q *querier) InsertWorkspaceResourceMetadata(ctx context.Context, arg datab return q.db.InsertWorkspaceResourceMetadata(ctx, arg) } -func (q *querier) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.AIBridgeInterception, error) { +func (q *querier) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.ListAIBridgeInterceptionsRow, error) { prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceAibridgeInterception.Type) if err != nil { return nil, xerrors.Errorf("(dev error) prepare sql filter: %w", err) @@ -4425,6 +4568,13 @@ func (q *querier) ListAIBridgeInterceptions(ctx context.Context, arg database.Li return q.db.ListAuthorizedAIBridgeInterceptions(ctx, arg, prep) } +func 
(q *querier) ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg database.ListAIBridgeInterceptionsTelemetrySummariesParams) ([]database.ListAIBridgeInterceptionsTelemetrySummariesRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAibridgeInterception); err != nil { + return nil, err + } + return q.db.ListAIBridgeInterceptionsTelemetrySummaries(ctx, arg) +} + func (q *querier) ListAIBridgeTokenUsagesByInterceptionIDs(ctx context.Context, interceptionIDs []uuid.UUID) ([]database.AIBridgeTokenUsage, error) { // This function is a system function until we implement a join for aibridge interceptions. // Matches the behavior of the workspaces listing endpoint. @@ -4463,6 +4613,11 @@ func (q *querier) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.C return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.ListProvisionerKeysByOrganizationExcludeReserved)(ctx, organizationID) } +func (q *querier) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) { + // TODO(Cian): replace this with a sql filter for improved performance. 
https://github.com/coder/internal/issues/1061 + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.ListTasks)(ctx, arg) +} + func (q *querier) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { obj := rbac.ResourceUserSecret.WithOwner(userID.String()) if err := q.authorizeContext(ctx, policy.ActionRead, obj); err != nil { @@ -4608,6 +4763,13 @@ func (q *querier) UnfavoriteWorkspace(ctx context.Context, id uuid.UUID) error { return update(q.log, q.auth, fetch, q.db.UnfavoriteWorkspace)(ctx, id) } +func (q *querier) UpdateAIBridgeInterceptionEnded(ctx context.Context, params database.UpdateAIBridgeInterceptionEndedParams) (database.AIBridgeInterception, error) { + if err := q.authorizeAIBridgeInterceptionAction(ctx, policy.ActionUpdate, params.ID); err != nil { + return database.AIBridgeInterception{}, err + } + return q.db.UpdateAIBridgeInterceptionEnded(ctx, params) +} + func (q *querier) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { fetch := func(ctx context.Context, arg database.UpdateAPIKeyByIDParams) (database.APIKey, error) { return q.db.GetAPIKeyByID(ctx, arg.ID) @@ -4779,6 +4941,14 @@ func (q *querier) UpdateOrganizationDeletedByID(ctx context.Context, arg databas return deleteQ(q.log, q.auth, q.db.GetOrganizationByID, deleteF)(ctx, arg.ID) } +func (q *querier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { + // Prebuild operation for canceling pending prebuild jobs from non-active template versions + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourcePrebuiltWorkspace); err != nil { + return []database.UpdatePrebuildProvisionerJobWithCancelRow{}, err + } + return q.db.UpdatePrebuildProvisionerJobWithCancel(ctx, arg) +} + func (q *querier) UpdatePresetPrebuildStatus(ctx context.Context, arg 
database.UpdatePresetPrebuildStatusParams) error { preset, err := q.db.GetPresetByID(ctx, arg.PresetID) if err != nil { @@ -4926,6 +5096,30 @@ func (q *querier) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg return q.db.UpdateTailnetPeerStatusByCoordinator(ctx, arg) } +func (q *querier) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { + // An actor is allowed to update the workspace ID of a task if they are the + // owner of the task and workspace or have the appropriate permissions. + task, err := q.db.GetTaskByID(ctx, arg.ID) + if err != nil { + return database.TaskTable{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionUpdate, task.RBACObject()); err != nil { + return database.TaskTable{}, err + } + + ws, err := q.db.GetWorkspaceByID(ctx, arg.WorkspaceID.UUID) + if err != nil { + return database.TaskTable{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionUpdate, ws.RBACObject()); err != nil { + return database.TaskTable{}, err + } + + return q.db.UpdateTaskWorkspaceID(ctx, arg) +} + func (q *querier) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { fetch := func(ctx context.Context, arg database.UpdateTemplateACLByIDParams) (database.Template, error) { return q.db.GetTemplateByID(ctx, arg.ID) @@ -5271,7 +5465,7 @@ func (q *querier) UpdateWorkspaceACLByID(ctx context.Context, arg database.Updat return w.WorkspaceTable(), nil } - return fetchAndExec(q.log, q.auth, policy.ActionCreate, fetch, q.db.UpdateWorkspaceACLByID)(ctx, arg) + return fetchAndExec(q.log, q.auth, policy.ActionShare, fetch, q.db.UpdateWorkspaceACLByID)(ctx, arg) } func (q *querier) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { @@ -5665,6 +5859,18 @@ func (q *querier) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTa return q.db.UpsertTailnetTunnel(ctx, arg) } +func 
(q *querier) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) { + // Fetch the task to derive the RBAC object and authorize update on it. + task, err := q.db.GetTaskByID(ctx, arg.TaskID) + if err != nil { + return database.TaskWorkspaceApp{}, err + } + if err := q.authorizeContext(ctx, policy.ActionUpdate, task); err != nil { + return database.TaskWorkspaceApp{}, err + } + return q.db.UpsertTaskWorkspaceApp(ctx, arg) +} + func (q *querier) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err @@ -5809,9 +6015,16 @@ func (q *querier) CountAuthorizedConnectionLogs(ctx context.Context, arg databas return q.CountConnectionLogs(ctx, arg) } -func (q *querier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams, _ rbac.PreparedAuthorized) ([]database.AIBridgeInterception, error) { +func (q *querier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams, _ rbac.PreparedAuthorized) ([]database.ListAIBridgeInterceptionsRow, error) { // TODO: Delete this function, all ListAIBridgeInterceptions should be authorized. For now just call ListAIBridgeInterceptions on the authz querier. // This cannot be deleted for now because it's included in the // database.Store interface, so dbauthz needs to implement it. return q.ListAIBridgeInterceptions(ctx, arg) } + +func (q *querier) CountAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams, _ rbac.PreparedAuthorized) (int64, error) { + // TODO: Delete this function, all CountAIBridgeInterceptions should be authorized. For now just call CountAIBridgeInterceptions on the authz querier. 
+ // This cannot be deleted for now because it's included in the + // database.Store interface, so dbauthz needs to implement it. + return q.CountAIBridgeInterceptions(ctx, arg) +} diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 730d5f3198478..32c951fb5c20b 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -641,6 +641,19 @@ func (s *MethodTestSuite) TestProvisionerJob() { dbm.EXPECT().UpdateProvisionerJobWithCancelByID(gomock.Any(), arg).Return(nil).AnyTimes() check.Args(arg).Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) + s.Run("UpdatePrebuildProvisionerJobWithCancel", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + arg := database.UpdatePrebuildProvisionerJobWithCancelParams{ + PresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}, + Now: dbtime.Now(), + } + canceledJobs := []database.UpdatePrebuildProvisionerJobWithCancelRow{ + {ID: uuid.New(), WorkspaceID: uuid.New(), TemplateID: uuid.New(), TemplateVersionPresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}}, + {ID: uuid.New(), WorkspaceID: uuid.New(), TemplateID: uuid.New(), TemplateVersionPresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}}, + } + + dbm.EXPECT().UpdatePrebuildProvisionerJobWithCancel(gomock.Any(), arg).Return(canceledJobs, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourcePrebuiltWorkspace, policy.ActionUpdate).Returns(canceledJobs) + })) s.Run("GetProvisionerJobsByIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { org := testutil.Fake(s.T(), faker, database.Organization{}) org2 := testutil.Fake(s.T(), faker, database.Organization{}) @@ -1639,10 +1652,43 @@ func (s *MethodTestSuite) TestUser() { } func (s *MethodTestSuite) TestWorkspace() { + // The Workspace object differs it's type based on whether it's dormant or + // not, which is why we have two tests for 
it. To ensure we are actually + // testing the correct RBAC objects, we also explicitly create the expected + // object here rather than passing in the model. s.Run("GetWorkspaceByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { ws := testutil.Fake(s.T(), faker, database.Workspace{}) + ws.DormantAt = sql.NullTime{ + Time: time.Time{}, + Valid: false, + } + // Ensure the RBAC is not the dormant type. + require.Equal(s.T(), rbac.ResourceWorkspace.Type, ws.RBACObject().Type) + dbm.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes() + // Explicitly create the expected object. + expected := rbac.ResourceWorkspace.WithID(ws.ID). + InOrg(ws.OrganizationID). + WithOwner(ws.OwnerID.String()). + WithGroupACL(ws.GroupACL.RBACACL()). + WithACLUserList(ws.UserACL.RBACACL()) + check.Args(ws.ID).Asserts(expected, policy.ActionRead).Returns(ws) + })) + s.Run("DormantWorkspace/GetWorkspaceByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + ws := testutil.Fake(s.T(), faker, database.Workspace{ + DormantAt: sql.NullTime{ + Time: time.Now().Add(-time.Hour), + Valid: true, + }, + }) + // Ensure the RBAC changed automatically. + require.Equal(s.T(), rbac.ResourceWorkspaceDormant.Type, ws.RBACObject().Type) dbm.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes() - check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns(ws) + // Explicitly create the expected object. + expected := rbac.ResourceWorkspaceDormant. + WithID(ws.ID). + InOrg(ws.OrganizationID). + WithOwner(ws.OwnerID.String()) + check.Args(ws.ID).Asserts(expected, policy.ActionRead).Returns(ws) })) s.Run("GetWorkspaceByResourceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { ws := testutil.Fake(s.T(), faker, database.Workspace{}) @@ -1656,6 +1702,15 @@ func (s *MethodTestSuite) TestWorkspace() { // No asserts here because SQLFilter. 
check.Args(arg).Asserts() })) + s.Run("GetWorkspaceAgentsForMetrics", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + row := testutil.Fake(s.T(), faker, database.GetWorkspaceAgentsForMetricsRow{}) + dbm.EXPECT().GetWorkspaceAgentsForMetrics(gomock.Any()).Return([]database.GetWorkspaceAgentsForMetricsRow{row}, nil).AnyTimes() + check.Args().Asserts(rbac.ResourceWorkspace, policy.ActionRead).Returns([]database.GetWorkspaceAgentsForMetricsRow{row}) + })) + s.Run("GetWorkspacesForWorkspaceMetrics", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetWorkspacesForWorkspaceMetrics(gomock.Any()).Return([]database.GetWorkspacesForWorkspaceMetricsRow{}, nil).AnyTimes() + check.Args().Asserts(rbac.ResourceWorkspace, policy.ActionRead) + })) s.Run("GetAuthorizedWorkspaces", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { arg := database.GetWorkspacesParams{} dbm.EXPECT().GetAuthorizedWorkspaces(gomock.Any(), arg, gomock.Any()).Return([]database.GetWorkspacesRow{}, nil).AnyTimes() @@ -1690,20 +1745,20 @@ func (s *MethodTestSuite) TestWorkspace() { ws := testutil.Fake(s.T(), faker, database.Workspace{}) dbM.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes() dbM.EXPECT().GetWorkspaceACLByID(gomock.Any(), ws.ID).Return(database.GetWorkspaceACLByIDRow{}, nil).AnyTimes() - check.Args(ws.ID).Asserts(ws, policy.ActionCreate) + check.Args(ws.ID).Asserts(ws, policy.ActionShare) })) s.Run("UpdateWorkspaceACLByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { w := testutil.Fake(s.T(), faker, database.Workspace{}) arg := database.UpdateWorkspaceACLByIDParams{ID: w.ID} dbm.EXPECT().GetWorkspaceByID(gomock.Any(), w.ID).Return(w, nil).AnyTimes() dbm.EXPECT().UpdateWorkspaceACLByID(gomock.Any(), arg).Return(nil).AnyTimes() - check.Args(arg).Asserts(w, policy.ActionCreate) + check.Args(arg).Asserts(w, policy.ActionShare) })) 
s.Run("DeleteWorkspaceACLByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { w := testutil.Fake(s.T(), faker, database.Workspace{}) dbm.EXPECT().GetWorkspaceByID(gomock.Any(), w.ID).Return(w, nil).AnyTimes() dbm.EXPECT().DeleteWorkspaceACLByID(gomock.Any(), w.ID).Return(nil).AnyTimes() - check.Args(w.ID).Asserts(w, policy.ActionUpdate) + check.Args(w.ID).Asserts(w, policy.ActionShare) })) s.Run("GetLatestWorkspaceBuildByWorkspaceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { w := testutil.Fake(s.T(), faker, database.Workspace{}) @@ -2314,6 +2369,89 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() { })) } +func (s *MethodTestSuite) TestTasks() { + s.Run("GetTaskByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() + check.Args(task.ID).Asserts(task, policy.ActionRead).Returns(task) + })) + s.Run("DeleteTask", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + arg := database.DeleteTaskParams{ + ID: task.ID, + DeletedAt: dbtime.Now(), + } + dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() + dbm.EXPECT().DeleteTask(gomock.Any(), arg).Return(database.TaskTable{}, nil).AnyTimes() + check.Args(arg).Asserts(task, policy.ActionDelete).Returns(database.TaskTable{}) + })) + s.Run("InsertTask", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + tpl := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, + OrganizationID: tpl.OrganizationID, + }) + + arg := testutil.Fake(s.T(), faker, database.InsertTaskParams{ + OrganizationID: tpl.OrganizationID, + TemplateVersionID: tv.ID, + }) + + 
dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), tpl.ID).Return(tpl, nil).AnyTimes() + dbm.EXPECT().InsertTask(gomock.Any(), arg).Return(database.TaskTable{}, nil).AnyTimes() + + check.Args(arg).Asserts( + tpl, policy.ActionRead, + rbac.ResourceTask.InOrg(arg.OrganizationID).WithOwner(arg.OwnerID.String()), policy.ActionCreate, + ).Returns(database.TaskTable{}) + })) + s.Run("UpsertTaskWorkspaceApp", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + arg := database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: 1, + } + + dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() + dbm.EXPECT().UpsertTaskWorkspaceApp(gomock.Any(), arg).Return(database.TaskWorkspaceApp{}, nil).AnyTimes() + + check.Args(arg).Asserts(task, policy.ActionUpdate).Returns(database.TaskWorkspaceApp{}) + })) + s.Run("UpdateTaskWorkspaceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + ws := testutil.Fake(s.T(), faker, database.Workspace{}) + arg := database.UpdateTaskWorkspaceIDParams{ + ID: task.ID, + WorkspaceID: uuid.NullUUID{UUID: ws.ID, Valid: true}, + } + + dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() + dbm.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes() + dbm.EXPECT().UpdateTaskWorkspaceID(gomock.Any(), arg).Return(database.TaskTable{}, nil).AnyTimes() + + check.Args(arg).Asserts(task, policy.ActionUpdate, ws, policy.ActionUpdate).Returns(database.TaskTable{}) + })) + s.Run("GetTaskByWorkspaceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + task.WorkspaceID = uuid.NullUUID{UUID: uuid.New(), Valid: true} + 
dbm.EXPECT().GetTaskByWorkspaceID(gomock.Any(), task.WorkspaceID.UUID).Return(task, nil).AnyTimes() + check.Args(task.WorkspaceID.UUID).Asserts(task, policy.ActionRead).Returns(task) + })) + s.Run("ListTasks", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u1 := testutil.Fake(s.T(), faker, database.User{}) + u2 := testutil.Fake(s.T(), faker, database.User{}) + org1 := testutil.Fake(s.T(), faker, database.Organization{}) + org2 := testutil.Fake(s.T(), faker, database.Organization{}) + _ = testutil.Fake(s.T(), faker, database.OrganizationMember{UserID: u1.ID, OrganizationID: org1.ID}) + _ = testutil.Fake(s.T(), faker, database.OrganizationMember{UserID: u2.ID, OrganizationID: org2.ID}) + t1 := testutil.Fake(s.T(), faker, database.Task{OwnerID: u1.ID}) + t2 := testutil.Fake(s.T(), faker, database.Task{OwnerID: u2.ID}) + dbm.EXPECT().ListTasks(gomock.Any(), gomock.Any()).Return([]database.Task{t1, t2}, nil).AnyTimes() + check.Args(database.ListTasksParams{}).Asserts(t1, policy.ActionRead, t2, policy.ActionRead).Returns([]database.Task{t1, t2}) + })) +} + func (s *MethodTestSuite) TestProvisionerKeys() { s.Run("InsertProvisionerKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { org := testutil.Fake(s.T(), faker, database.Organization{}) @@ -2484,10 +2622,12 @@ func (s *MethodTestSuite) TestExtraMethods() { ds, err := db.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(context.Background(), database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams{ OrganizationID: org.ID, + InitiatorID: uuid.Nil, }) s.NoError(err, "get provisioner jobs by org") check.Args(database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams{ OrganizationID: org.ID, + InitiatorID: uuid.Nil, }).Asserts(j1, policy.ActionRead, j2, policy.ActionRead).Returns(ds) })) } @@ -2843,7 +2983,6 @@ func (s *MethodTestSuite) TestSystemFunctions() { 
dbm.EXPECT().GetParameterSchemasByJobID(gomock.Any(), jobID).Return([]database.ParameterSchema{}, nil).AnyTimes() check.Args(jobID). Asserts(tpl, policy.ActionRead). - ErrorsWithInMemDB(sql.ErrNoRows). Returns([]database.ParameterSchema{}) })) s.Run("GetWorkspaceAppsByAgentIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { @@ -3086,7 +3225,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { })) s.Run("GetAppSecurityKey", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { dbm.EXPECT().GetAppSecurityKey(gomock.Any()).Return("", sql.ErrNoRows).AnyTimes() - check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).ErrorsWithPG(sql.ErrNoRows) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("UpsertAppSecurityKey", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { dbm.EXPECT().UpsertAppSecurityKey(gomock.Any(), "foo").Return(nil).AnyTimes() @@ -3620,6 +3759,14 @@ func (s *MethodTestSuite) TestPrebuilds() { dbm.EXPECT().GetPrebuildMetrics(gomock.Any()).Return([]database.GetPrebuildMetricsRow{}, nil).AnyTimes() check.Args().Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead) })) + s.Run("GetOrganizationsWithPrebuildStatus", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + arg := database.GetOrganizationsWithPrebuildStatusParams{ + UserID: uuid.New(), + GroupName: "test", + } + dbm.EXPECT().GetOrganizationsWithPrebuildStatus(gomock.Any(), arg).Return([]database.GetOrganizationsWithPrebuildStatusRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceOrganization.All(), policy.ActionRead) + })) s.Run("GetPrebuildsSettings", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { dbm.EXPECT().GetPrebuildsSettings(gomock.Any()).Return("{}", nil).AnyTimes() check.Args().Asserts() @@ -3632,6 +3779,10 @@ func (s *MethodTestSuite) TestPrebuilds() { 
dbm.EXPECT().CountInProgressPrebuilds(gomock.Any()).Return([]database.CountInProgressPrebuildsRow{}, nil).AnyTimes() check.Args().Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead) })) + s.Run("CountPendingNonActivePrebuilds", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().CountPendingNonActivePrebuilds(gomock.Any()).Return([]database.CountPendingNonActivePrebuildsRow{}, nil).AnyTimes() + check.Args().Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead) + })) s.Run("GetPresetsAtFailureLimit", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { dbm.EXPECT().GetPresetsAtFailureLimit(gomock.Any(), int64(0)).Return([]database.GetPresetsAtFailureLimitRow{}, nil).AnyTimes() check.Args(int64(0)).Asserts(rbac.ResourceTemplate.All(), policy.ActionViewInsights) @@ -3799,9 +3950,9 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { })) s.Run("GetOAuth2ProviderAppByRegistrationToken", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{ - RegistrationAccessToken: sql.NullString{String: "test-token", Valid: true}, + RegistrationAccessToken: []byte("test-token"), }) - check.Args(sql.NullString{String: "test-token", Valid: true}).Asserts(rbac.ResourceOauth2App, policy.ActionRead).Returns(app) + check.Args([]byte("test-token")).Asserts(rbac.ResourceOauth2App, policy.ActionRead).Returns(app) })) } @@ -4434,14 +4585,28 @@ func (s *MethodTestSuite) TestAIBridge() { s.Run("ListAIBridgeInterceptions", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { params := database.ListAIBridgeInterceptionsParams{} - db.EXPECT().ListAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return([]database.AIBridgeInterception{}, nil).AnyTimes() + db.EXPECT().ListAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return([]database.ListAIBridgeInterceptionsRow{}, nil).AnyTimes() // No asserts here 
because SQLFilter. check.Args(params).Asserts() })) s.Run("ListAuthorizedAIBridgeInterceptions", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { params := database.ListAIBridgeInterceptionsParams{} - db.EXPECT().ListAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return([]database.AIBridgeInterception{}, nil).AnyTimes() + db.EXPECT().ListAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return([]database.ListAIBridgeInterceptionsRow{}, nil).AnyTimes() + // No asserts here because SQLFilter. + check.Args(params, emptyPreparedAuthorized{}).Asserts() + })) + + s.Run("CountAIBridgeInterceptions", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + params := database.CountAIBridgeInterceptionsParams{} + db.EXPECT().CountAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return(int64(0), nil).AnyTimes() + // No asserts here because SQLFilter. + check.Args(params).Asserts() + })) + + s.Run("CountAuthorizedAIBridgeInterceptions", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + params := database.CountAIBridgeInterceptionsParams{} + db.EXPECT().CountAuthorizedAIBridgeInterceptions(gomock.Any(), params, gomock.Any()).Return(int64(0), nil).AnyTimes() // No asserts here because SQLFilter. 
check.Args(params, emptyPreparedAuthorized{}).Asserts() })) @@ -4463,4 +4628,35 @@ func (s *MethodTestSuite) TestAIBridge() { db.EXPECT().ListAIBridgeToolUsagesByInterceptionIDs(gomock.Any(), ids).Return([]database.AIBridgeToolUsage{}, nil).AnyTimes() check.Args(ids).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns([]database.AIBridgeToolUsage{}) })) + + s.Run("UpdateAIBridgeInterceptionEnded", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + intcID := uuid.UUID{1} + params := database.UpdateAIBridgeInterceptionEndedParams{ID: intcID} + intc := testutil.Fake(s.T(), faker, database.AIBridgeInterception{ID: intcID}) + db.EXPECT().GetAIBridgeInterceptionByID(gomock.Any(), intcID).Return(intc, nil).AnyTimes() // Validation. + db.EXPECT().UpdateAIBridgeInterceptionEnded(gomock.Any(), params).Return(intc, nil).AnyTimes() + check.Args(params).Asserts(intc, policy.ActionUpdate).Returns(intc) + })) +} + +func (s *MethodTestSuite) TestTelemetry() { + s.Run("InsertTelemetryLock", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + db.EXPECT().InsertTelemetryLock(gomock.Any(), gomock.Any()).Return(nil).AnyTimes() + check.Args(database.InsertTelemetryLockParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) + })) + + s.Run("DeleteOldTelemetryLocks", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + db.EXPECT().DeleteOldTelemetryLocks(gomock.Any(), gomock.Any()).Return(nil).AnyTimes() + check.Args(time.Time{}).Asserts(rbac.ResourceSystem, policy.ActionDelete) + })) + + s.Run("ListAIBridgeInterceptionsTelemetrySummaries", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + db.EXPECT().ListAIBridgeInterceptionsTelemetrySummaries(gomock.Any(), gomock.Any()).Return([]database.ListAIBridgeInterceptionsTelemetrySummariesRow{}, nil).AnyTimes() + check.Args(database.ListAIBridgeInterceptionsTelemetrySummariesParams{}).Asserts(rbac.ResourceAibridgeInterception, 
policy.ActionRead) + })) + + s.Run("CalculateAIBridgeInterceptionsTelemetrySummary", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + db.EXPECT().CalculateAIBridgeInterceptionsTelemetrySummary(gomock.Any(), gomock.Any()).Return(database.CalculateAIBridgeInterceptionsTelemetrySummaryRow{}, nil).AnyTimes() + check.Args(database.CalculateAIBridgeInterceptionsTelemetrySummaryParams{}).Asserts(rbac.ResourceAibridgeInterception, policy.ActionRead) + })) } diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go index c9a1b2063d691..91fb68e1a1f3f 100644 --- a/coderd/database/dbauthz/setup_test.go +++ b/coderd/database/dbauthz/setup_test.go @@ -225,6 +225,10 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat if testCase.outputs != nil { // Assert the required outputs s.Equal(len(testCase.outputs), len(outputs), "method %q returned unexpected number of outputs", methodName) + cmpOptions := []cmp.Option{ + // Equate nil and empty slices. + cmpopts.EquateEmpty(), + } for i := range outputs { a, b := testCase.outputs[i].Interface(), outputs[i].Interface() @@ -232,10 +236,9 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat // first check if the values are equal with regard to order. // If not, re-check disregarding order and show a nice diff // output of the two values. - if !cmp.Equal(a, b, cmpopts.EquateEmpty()) { - if diff := cmp.Diff(a, b, - // Equate nil and empty slices. - cmpopts.EquateEmpty(), + if !cmp.Equal(a, b, cmpOptions...) { + diffOpts := append( + append([]cmp.Option{}, cmpOptions...), // Allow slice order to be ignored. 
cmpopts.SortSlices(func(a, b any) bool { var ab, bb strings.Builder @@ -247,7 +250,8 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat // https://github.com/google/go-cmp/issues/67 return ab.String() < bb.String() }), - ); diff != "" { + ) + if diff := cmp.Diff(a, b, diffOpts...); diff != "" { s.Failf("compare outputs failed", "method %q returned unexpected output %d (-want +got):\n%s", methodName, i, diff) } } @@ -426,24 +430,6 @@ func (m *expects) Errors(err error) *expects { return m } -// ErrorsWithPG is optional. If it is never called, it will not be asserted. -// It will only be asserted if the test is running with a Postgres database. -func (m *expects) ErrorsWithPG(err error) *expects { - if dbtestutil.WillUsePostgres() { - return m.Errors(err) - } - return m -} - -// ErrorsWithInMemDB is optional. If it is never called, it will not be asserted. -// It will only be asserted if the test is running with an in-memory database. -func (m *expects) ErrorsWithInMemDB(err error) *expects { - if !dbtestutil.WillUsePostgres() { - return m.Errors(err) - } - return m -} - func (m *expects) FailSystemObjectChecks() *expects { return m.WithSuccessAuthorizer(func(ctx context.Context, subject rbac.Subject, action policy.Action, obj rbac.Object) error { if obj.Type == rbac.ResourceSystem.Type { diff --git a/coderd/database/dbfake/dbfake.go b/coderd/database/dbfake/dbfake.go index 6d99005fb3334..b812be6e16a82 100644 --- a/coderd/database/dbfake/dbfake.go +++ b/coderd/database/dbfake/dbfake.go @@ -24,6 +24,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/wspubsub" + "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisionersdk" sdkproto "github.com/coder/coder/v2/provisionersdk/proto" ) @@ -40,6 +41,7 @@ type WorkspaceResponse struct { Build database.WorkspaceBuild AgentToken string TemplateVersionResponse + Task database.Task } // 
WorkspaceBuildBuilder generates workspace builds and associated @@ -54,11 +56,9 @@ type WorkspaceBuildBuilder struct { resources []*sdkproto.Resource params []database.WorkspaceBuildParameter agentToken string - dispo workspaceBuildDisposition -} - -type workspaceBuildDisposition struct { - starting bool + jobStatus database.ProvisionerJobStatus + taskAppID uuid.UUID + taskSeed database.TaskTable } // WorkspaceBuild generates a workspace build for the provided workspace. @@ -117,9 +117,46 @@ func (b WorkspaceBuildBuilder) WithAgent(mutations ...func([]*sdkproto.Agent) [] return b } -func (b WorkspaceBuildBuilder) Starting() WorkspaceBuildBuilder { +func (b WorkspaceBuildBuilder) WithTask(taskSeed database.TaskTable, appSeed *sdkproto.App) WorkspaceBuildBuilder { + //nolint:revive // returns modified struct + b.taskSeed = taskSeed + + if appSeed == nil { + appSeed = &sdkproto.App{} + } + + var err error //nolint: revive // returns modified struct - b.dispo.starting = true + b.taskAppID, err = uuid.Parse(takeFirst(appSeed.Id, uuid.NewString())) + require.NoError(b.t, err) + + return b.Params(database.WorkspaceBuildParameter{ + Name: codersdk.AITaskPromptParameterName, + Value: b.taskSeed.Prompt, + }).WithAgent(func(a []*sdkproto.Agent) []*sdkproto.Agent { + a[0].Apps = []*sdkproto.App{ + { + Id: b.taskAppID.String(), + Slug: takeFirst(appSeed.Slug, "task-app"), + Url: takeFirst(appSeed.Url, ""), + }, + } + return a + }) +} + +func (b WorkspaceBuildBuilder) Starting() WorkspaceBuildBuilder { + b.jobStatus = database.ProvisionerJobStatusRunning + return b +} + +func (b WorkspaceBuildBuilder) Pending() WorkspaceBuildBuilder { + b.jobStatus = database.ProvisionerJobStatusPending + return b +} + +func (b WorkspaceBuildBuilder) Canceled() WorkspaceBuildBuilder { + b.jobStatus = database.ProvisionerJobStatusCanceled return b } @@ -129,11 +166,32 @@ func (b WorkspaceBuildBuilder) Starting() WorkspaceBuildBuilder { // Workspace will be optionally populated if no ID is set on 
the provided // workspace. func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { + var resp WorkspaceResponse + // Use transaction, like real wsbuilder. + err := b.db.InTx(func(tx database.Store) error { + //nolint:revive // calls do on modified struct + b.db = tx + resp = b.doInTX() + return nil + }, nil) + require.NoError(b.t, err) + return resp +} + +func (b WorkspaceBuildBuilder) doInTX() WorkspaceResponse { b.t.Helper() jobID := uuid.New() b.seed.ID = uuid.New() b.seed.JobID = jobID + if b.taskAppID != uuid.Nil { + b.seed.HasAITask = sql.NullBool{ + Bool: true, + Valid: true, + } + b.seed.AITaskSidebarAppID = uuid.NullUUID{UUID: b.taskAppID, Valid: true} + } + resp := WorkspaceResponse{ AgentToken: b.agentToken, } @@ -164,14 +222,45 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { if b.ws.ID == uuid.Nil { // nolint: revive b.ws = dbgen.Workspace(b.t, b.db, b.ws) - resp.Workspace = b.ws b.logger.Debug(context.Background(), "created workspace", - slog.F("name", resp.Workspace.Name), - slog.F("workspace_id", resp.Workspace.ID)) + slog.F("name", b.ws.Name), + slog.F("workspace_id", b.ws.ID)) } + resp.Workspace = b.ws b.seed.WorkspaceID = b.ws.ID b.seed.InitiatorID = takeFirst(b.seed.InitiatorID, b.ws.OwnerID) + // If a task was requested, ensure it exists and is associated with this + // workspace. + if b.taskAppID != uuid.Nil { + b.logger.Debug(context.Background(), "creating or updating task", "task_id", b.taskSeed.ID) + b.taskSeed.OrganizationID = takeFirst(b.taskSeed.OrganizationID, b.ws.OrganizationID) + b.taskSeed.OwnerID = takeFirst(b.taskSeed.OwnerID, b.ws.OwnerID) + b.taskSeed.Name = takeFirst(b.taskSeed.Name, b.ws.Name) + b.taskSeed.WorkspaceID = uuid.NullUUID{UUID: takeFirst(b.taskSeed.WorkspaceID.UUID, b.ws.ID), Valid: true} + b.taskSeed.TemplateVersionID = takeFirst(b.taskSeed.TemplateVersionID, b.seed.TemplateVersionID) + + // Try to fetch existing task and update its workspace ID. 
+ if task, err := b.db.GetTaskByID(ownerCtx, b.taskSeed.ID); err == nil { + if !task.WorkspaceID.Valid { + b.logger.Info(context.Background(), "updating task workspace id", "task_id", b.taskSeed.ID, "workspace_id", b.ws.ID) + _, err = b.db.UpdateTaskWorkspaceID(ownerCtx, database.UpdateTaskWorkspaceIDParams{ + ID: b.taskSeed.ID, + WorkspaceID: uuid.NullUUID{UUID: b.ws.ID, Valid: true}, + }) + require.NoError(b.t, err, "update task workspace id") + } else if task.WorkspaceID.UUID != b.ws.ID { + require.Fail(b.t, "task already has a workspace id, mismatch", task.WorkspaceID.UUID, b.ws.ID) + } + } else if errors.Is(err, sql.ErrNoRows) { + task := dbgen.Task(b.t, b.db, b.taskSeed) + b.taskSeed.ID = task.ID + b.logger.Info(context.Background(), "created new task", "task_id", b.taskSeed.ID) + } else { + require.NoError(b.t, err, "get task by id") + } + } + // Create a provisioner job for the build! payload, err := json.Marshal(provisionerdserver.WorkspaceProvisionJob{ WorkspaceBuildID: b.seed.ID, @@ -196,7 +285,11 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { require.NoError(b.t, err, "insert job") b.logger.Debug(context.Background(), "inserted provisioner job", slog.F("job_id", job.ID)) - if b.dispo.starting { + switch b.jobStatus { + case database.ProvisionerJobStatusPending: + // Provisioner jobs are created in 'pending' status + b.logger.Debug(context.Background(), "pending the provisioner job") + case database.ProvisionerJobStatusRunning: // might need to do this multiple times if we got a template version // import job as well b.logger.Debug(context.Background(), "looping to acquire provisioner job") @@ -220,7 +313,23 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { break } } - } else { + case database.ProvisionerJobStatusCanceled: + // Set provisioner job status to 'canceled' + b.logger.Debug(context.Background(), "canceling the provisioner job") + err = b.db.UpdateProvisionerJobWithCancelByID(ownerCtx, 
database.UpdateProvisionerJobWithCancelByIDParams{ + ID: jobID, + CanceledAt: sql.NullTime{ + Time: dbtime.Now(), + Valid: true, + }, + CompletedAt: sql.NullTime{ + Time: dbtime.Now(), + Valid: true, + }, + }) + require.NoError(b.t, err, "cancel job") + default: + // By default, consider jobs in 'succeeded' status b.logger.Debug(context.Background(), "completing the provisioner job") err = b.db.UpdateProvisionerJobWithCompleteByID(ownerCtx, database.UpdateProvisionerJobWithCompleteByIDParams{ ID: job.ID, @@ -242,6 +351,35 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { slog.F("workspace_id", resp.Workspace.ID), slog.F("build_number", resp.Build.BuildNumber)) + // If this is a task workspace, link it to the workspace build. + task, err := b.db.GetTaskByWorkspaceID(ownerCtx, resp.Workspace.ID) + if err != nil { + if b.taskAppID != uuid.Nil { + require.Fail(b.t, "task app configured but failed to get task by workspace id", err) + } + } else { + if b.taskAppID == uuid.Nil { + require.Fail(b.t, "task app not configured but workspace is a task workspace") + } + + app := mustWorkspaceAppByWorkspaceAndBuildAndAppID(ownerCtx, b.t, b.db, resp.Workspace.ID, resp.Build.BuildNumber, b.taskAppID) + _, err = b.db.UpsertTaskWorkspaceApp(ownerCtx, database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: resp.Build.BuildNumber, + WorkspaceAgentID: uuid.NullUUID{UUID: app.AgentID, Valid: true}, + WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, + }) + require.NoError(b.t, err, "upsert task workspace app") + b.logger.Debug(context.Background(), "linked task to workspace build", + slog.F("task_id", task.ID), + slog.F("build_number", resp.Build.BuildNumber)) + + // Update task after linking. 
+ task, err = b.db.GetTaskByID(ownerCtx, task.ID) + require.NoError(b.t, err, "get task by id") + resp.Task = task + } + for i := range b.params { b.params[i].WorkspaceBuildID = resp.Build.ID } @@ -512,6 +650,12 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { t.params[i] = dbgen.TemplateVersionParameter(t.t, t.db, param) } + // Update response with template and version + if resp.Template.ID == uuid.Nil && version.TemplateID.Valid { + template, err := t.db.GetTemplateByID(ownerCtx, version.TemplateID.UUID) + require.NoError(t.t, err) + resp.Template = template + } resp.TemplateVersion = version return resp } @@ -592,3 +736,30 @@ func takeFirst[Value comparable](values ...Value) Value { return v != empty }) } + +// mustWorkspaceAppByWorkspaceAndBuildAndAppID finds a workspace app by +// workspace ID, build number, and app ID. It returns the workspace app +// if found, otherwise fails the test. +func mustWorkspaceAppByWorkspaceAndBuildAndAppID(ctx context.Context, t testing.TB, db database.Store, workspaceID uuid.UUID, buildNumber int32, appID uuid.UUID) database.WorkspaceApp { + t.Helper() + + agents, err := db.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{ + WorkspaceID: workspaceID, + BuildNumber: buildNumber, + }) + require.NoError(t, err, "get workspace agents") + require.NotEmpty(t, agents, "no agents found for workspace") + + for _, agent := range agents { + apps, err := db.GetWorkspaceAppsByAgentID(ctx, agent.ID) + require.NoError(t, err, "get workspace apps") + for _, app := range apps { + if app.ID == appID { + return app + } + } + } + + require.FailNow(t, "could not find workspace app", "workspaceID=%s buildNumber=%d appID=%s", workspaceID, buildNumber, appID) + return database.WorkspaceApp{} // Unreachable. 
+} diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index c2042e687f607..532460700a1e2 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -3,7 +3,6 @@ package dbgen import ( "context" "crypto/rand" - "crypto/sha256" "database/sql" "encoding/hex" "encoding/json" @@ -20,6 +19,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/database/dbauthz" @@ -27,6 +27,8 @@ import ( "github.com/coder/coder/v2/coderd/database/provisionerjobs" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/taskname" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" "github.com/coder/coder/v2/provisionerd/proto" @@ -159,8 +161,8 @@ func Template(t testing.TB, db database.Store, seed database.Template) database. 
func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func(*database.InsertAPIKeyParams)) (key database.APIKey, token string) { id, _ := cryptorand.String(10) - secret, _ := cryptorand.String(22) - hashed := sha256.Sum256([]byte(secret)) + secret, hashed, err := apikey.GenerateSecret(22) + require.NoError(t, err) ip := seed.IPAddress if !ip.Valid { @@ -177,7 +179,7 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func ID: takeFirst(seed.ID, id), // 0 defaults to 86400 at the db layer LifetimeSeconds: takeFirst(seed.LifetimeSeconds, 0), - HashedSecret: takeFirstSlice(seed.HashedSecret, hashed[:]), + HashedSecret: takeFirstSlice(seed.HashedSecret, hashed), IPAddress: ip, UserID: takeFirst(seed.UserID, uuid.New()), LastUsed: takeFirst(seed.LastUsed, dbtime.Now()), @@ -186,13 +188,13 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func UpdatedAt: takeFirst(seed.UpdatedAt, dbtime.Now()), LoginType: takeFirst(seed.LoginType, database.LoginTypePassword), Scopes: takeFirstSlice([]database.APIKeyScope(seed.Scopes), []database.APIKeyScope{database.ApiKeyScopeCoderAll}), - AllowList: takeFirstSlice(seed.AllowList, database.AllowList{database.AllowListWildcard()}), + AllowList: takeFirstSlice(seed.AllowList, database.AllowList{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}), TokenName: takeFirst(seed.TokenName), } for _, fn := range munge { fn(¶ms) } - key, err := db.InsertAPIKey(genCtx, params) + key, err = db.InsertAPIKey(genCtx, params) require.NoError(t, err, "insert api key") return key, fmt.Sprintf("%s-%s", key.ID, secret) } @@ -420,6 +422,14 @@ func Workspace(t testing.TB, db database.Store, orig database.WorkspaceTable) da require.NoError(t, err, "set workspace as deleted") workspace.Deleted = true } + if orig.DormantAt.Valid { + _, err = db.UpdateWorkspaceDormantDeletingAt(genCtx, database.UpdateWorkspaceDormantDeletingAtParams{ + ID: workspace.ID, + DormantAt: 
orig.DormantAt, + }) + require.NoError(t, err, "set workspace as dormant") + workspace.DormantAt = orig.DormantAt + } return workspace } @@ -970,16 +980,15 @@ func WorkspaceResourceMetadatums(t testing.TB, db database.Store, seed database. } func WorkspaceProxy(t testing.TB, db database.Store, orig database.WorkspaceProxy) (database.WorkspaceProxy, string) { - secret, err := cryptorand.HexString(64) + secret, hashedSecret, err := apikey.GenerateSecret(64) require.NoError(t, err, "generate secret") - hashedSecret := sha256.Sum256([]byte(secret)) proxy, err := db.InsertWorkspaceProxy(genCtx, database.InsertWorkspaceProxyParams{ ID: takeFirst(orig.ID, uuid.New()), Name: takeFirst(orig.Name, testutil.GetRandomName(t)), DisplayName: takeFirst(orig.DisplayName, testutil.GetRandomName(t)), Icon: takeFirst(orig.Icon, testutil.GetRandomName(t)), - TokenHashedSecret: hashedSecret[:], + TokenHashedSecret: hashedSecret, CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()), UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()), DerpEnabled: takeFirst(orig.DerpEnabled, false), @@ -1249,7 +1258,7 @@ func OAuth2ProviderApp(t testing.TB, db database.Store, seed database.OAuth2Prov Jwks: seed.Jwks, // pqtype.NullRawMessage{} is not comparable, use existing value SoftwareID: takeFirst(seed.SoftwareID, sql.NullString{}), SoftwareVersion: takeFirst(seed.SoftwareVersion, sql.NullString{}), - RegistrationAccessToken: takeFirst(seed.RegistrationAccessToken, sql.NullString{}), + RegistrationAccessToken: seed.RegistrationAccessToken, RegistrationClientUri: takeFirst(seed.RegistrationClientUri, sql.NullString{}), }) require.NoError(t, err, "insert oauth2 app") @@ -1486,7 +1495,7 @@ func ClaimPrebuild( return claimedWorkspace } -func AIBridgeInterception(t testing.TB, db database.Store, seed database.InsertAIBridgeInterceptionParams) database.AIBridgeInterception { +func AIBridgeInterception(t testing.TB, db database.Store, seed database.InsertAIBridgeInterceptionParams, endedAt *time.Time) 
database.AIBridgeInterception { interception, err := db.InsertAIBridgeInterception(genCtx, database.InsertAIBridgeInterceptionParams{ ID: takeFirst(seed.ID, uuid.New()), InitiatorID: takeFirst(seed.InitiatorID, uuid.New()), @@ -1495,6 +1504,13 @@ func AIBridgeInterception(t testing.TB, db database.Store, seed database.InsertA Metadata: takeFirstSlice(seed.Metadata, json.RawMessage("{}")), StartedAt: takeFirst(seed.StartedAt, dbtime.Now()), }) + if endedAt != nil { + interception, err = db.UpdateAIBridgeInterceptionEnded(genCtx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: interception.ID, + EndedAt: *endedAt, + }) + require.NoError(t, err, "insert aibridge interception") + } require.NoError(t, err, "insert aibridge interception") return interception } @@ -1551,6 +1567,49 @@ func AIBridgeToolUsage(t testing.TB, db database.Store, seed database.InsertAIBr return toolUsage } +func Task(t testing.TB, db database.Store, orig database.TaskTable) database.Task { + t.Helper() + + parameters := orig.TemplateParameters + if parameters == nil { + parameters = json.RawMessage([]byte("{}")) + } + + task, err := db.InsertTask(genCtx, database.InsertTaskParams{ + ID: takeFirst(orig.ID, uuid.New()), + OrganizationID: orig.OrganizationID, + OwnerID: orig.OwnerID, + Name: takeFirst(orig.Name, taskname.GenerateFallback()), + WorkspaceID: orig.WorkspaceID, + TemplateVersionID: orig.TemplateVersionID, + TemplateParameters: parameters, + Prompt: orig.Prompt, + CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()), + }) + require.NoError(t, err, "failed to insert task") + + // Return the Task from the view instead of the TaskTable + fetched, err := db.GetTaskByID(genCtx, task.ID) + require.NoError(t, err, "failed to fetch task") + require.Equal(t, task.ID, fetched.ID) + + return fetched +} + +func TaskWorkspaceApp(t testing.TB, db database.Store, orig database.TaskWorkspaceApp) database.TaskWorkspaceApp { + t.Helper() + + app, err := db.UpsertTaskWorkspaceApp(genCtx, 
database.UpsertTaskWorkspaceAppParams{ + TaskID: orig.TaskID, + WorkspaceBuildNumber: orig.WorkspaceBuildNumber, + WorkspaceAgentID: orig.WorkspaceAgentID, + WorkspaceAppID: orig.WorkspaceAppID, + }) + require.NoError(t, err, "failed to upsert task workspace app") + + return app +} + func provisionerJobTiming(t testing.TB, db database.Store, seed database.ProvisionerJobTiming) database.ProvisionerJobTiming { timing, err := db.InsertProvisionerJobTimings(genCtx, database.InsertProvisionerJobTimingsParams{ JobID: takeFirst(seed.JobID, uuid.New()), diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index d2f504964d946..252f6f9b5ad09 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -5,7 +5,6 @@ package dbmetrics import ( "context" - "database/sql" "slices" "time" @@ -159,6 +158,13 @@ func (m queryMetricsStore) BulkMarkNotificationMessagesSent(ctx context.Context, return r0, r1 } +func (m queryMetricsStore) CalculateAIBridgeInterceptionsTelemetrySummary(ctx context.Context, arg database.CalculateAIBridgeInterceptionsTelemetrySummaryParams) (database.CalculateAIBridgeInterceptionsTelemetrySummaryRow, error) { + start := time.Now() + r0, r1 := m.s.CalculateAIBridgeInterceptionsTelemetrySummary(ctx, arg) + m.queryLatencies.WithLabelValues("CalculateAIBridgeInterceptionsTelemetrySummary").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) ClaimPrebuiltWorkspace(ctx context.Context, arg database.ClaimPrebuiltWorkspaceParams) (database.ClaimPrebuiltWorkspaceRow, error) { start := time.Now() r0, r1 := m.s.ClaimPrebuiltWorkspace(ctx, arg) @@ -187,6 +193,13 @@ func (m queryMetricsStore) CleanTailnetTunnels(ctx context.Context) error { return r0 } +func (m queryMetricsStore) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) { + start := time.Now() + r0, r1 := 
m.s.CountAIBridgeInterceptions(ctx, arg) + m.queryLatencies.WithLabelValues("CountAIBridgeInterceptions").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) CountAuditLogs(ctx context.Context, arg database.CountAuditLogsParams) (int64, error) { start := time.Now() r0, r1 := m.s.CountAuditLogs(ctx, arg) @@ -208,6 +221,13 @@ func (m queryMetricsStore) CountInProgressPrebuilds(ctx context.Context) ([]data return r0, r1 } +func (m queryMetricsStore) CountPendingNonActivePrebuilds(ctx context.Context) ([]database.CountPendingNonActivePrebuildsRow, error) { + start := time.Now() + r0, r1 := m.s.CountPendingNonActivePrebuilds(ctx) + m.queryLatencies.WithLabelValues("CountPendingNonActivePrebuilds").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) { start := time.Now() r0, r1 := m.s.CountUnreadInboxNotificationsByUserID(ctx, userID) @@ -390,6 +410,13 @@ func (m queryMetricsStore) DeleteOldProvisionerDaemons(ctx context.Context) erro return r0 } +func (m queryMetricsStore) DeleteOldTelemetryLocks(ctx context.Context, periodEndingAtBefore time.Time) error { + start := time.Now() + r0 := m.s.DeleteOldTelemetryLocks(ctx, periodEndingAtBefore) + m.queryLatencies.WithLabelValues("DeleteOldTelemetryLocks").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, arg time.Time) error { start := time.Now() r0 := m.s.DeleteOldWorkspaceAgentLogs(ctx, arg) @@ -467,6 +494,13 @@ func (m queryMetricsStore) DeleteTailnetTunnel(ctx context.Context, arg database return r0, r1 } +func (m queryMetricsStore) DeleteTask(ctx context.Context, arg database.DeleteTaskParams) (database.TaskTable, error) { + start := time.Now() + r0, r1 := m.s.DeleteTask(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTask").Observe(time.Since(start).Seconds()) + return r0, r1 +} + 
func (m queryMetricsStore) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { start := time.Now() r0 := m.s.DeleteUserSecret(ctx, id) @@ -1090,7 +1124,7 @@ func (m queryMetricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid return r0, r1 } -func (m queryMetricsStore) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken sql.NullString) (database.OAuth2ProviderApp, error) { +func (m queryMetricsStore) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken []byte) (database.OAuth2ProviderApp, error) { start := time.Now() r0, r1 := m.s.GetOAuth2ProviderAppByRegistrationToken(ctx, registrationAccessToken) m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppByRegistrationToken").Observe(time.Since(start).Seconds()) @@ -1209,6 +1243,13 @@ func (m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID return organizations, err } +func (m queryMetricsStore) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + start := time.Now() + r0, r1 := m.s.GetOrganizationsWithPrebuildStatus(ctx, arg) + m.queryLatencies.WithLabelValues("GetOrganizationsWithPrebuildStatus").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { start := time.Now() schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) @@ -1482,6 +1523,20 @@ func (m queryMetricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uu return r0, r1 } +func (m queryMetricsStore) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) { + start := time.Now() + r0, r1 := m.s.GetTaskByID(ctx, id) + m.queryLatencies.WithLabelValues("GetTaskByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTaskByWorkspaceID(ctx 
context.Context, workspaceID uuid.UUID) (database.Task, error) { + start := time.Now() + r0, r1 := m.s.GetTaskByWorkspaceID(ctx, workspaceID) + m.queryLatencies.WithLabelValues("GetTaskByWorkspaceID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) { start := time.Now() r0, r1 := m.s.GetTelemetryItem(ctx, key) @@ -1958,6 +2013,13 @@ func (m queryMetricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, c return agents, err } +func (m queryMetricsStore) GetWorkspaceAgentsForMetrics(ctx context.Context) ([]database.GetWorkspaceAgentsForMetricsRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentsForMetrics(ctx) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentsForMetrics").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { start := time.Now() agents, err := m.s.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspaceID) @@ -2210,6 +2272,13 @@ func (m queryMetricsStore) GetWorkspacesEligibleForTransition(ctx context.Contex return workspaces, err } +func (m queryMetricsStore) GetWorkspacesForWorkspaceMetrics(ctx context.Context) ([]database.GetWorkspacesForWorkspaceMetricsRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspacesForWorkspaceMetrics(ctx) + m.queryLatencies.WithLabelValues("GetWorkspacesForWorkspaceMetrics").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) InsertAIBridgeInterception(ctx context.Context, arg database.InsertAIBridgeInterceptionParams) (database.AIBridgeInterception, error) { start := time.Now() r0, r1 := m.s.InsertAIBridgeInterception(ctx, arg) @@ -2455,6 +2524,13 @@ func (m queryMetricsStore) InsertReplica(ctx context.Context, arg database.Inser return replica, err } +func (m 
queryMetricsStore) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) { + start := time.Now() + r0, r1 := m.s.InsertTask(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTask").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error { start := time.Now() r0 := m.s.InsertTelemetryItemIfNotExists(ctx, arg) @@ -2462,6 +2538,13 @@ func (m queryMetricsStore) InsertTelemetryItemIfNotExists(ctx context.Context, a return r0 } +func (m queryMetricsStore) InsertTelemetryLock(ctx context.Context, arg database.InsertTelemetryLockParams) error { + start := time.Now() + r0 := m.s.InsertTelemetryLock(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTelemetryLock").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { start := time.Now() err := m.s.InsertTemplate(ctx, arg) @@ -2672,13 +2755,20 @@ func (m queryMetricsStore) InsertWorkspaceResourceMetadata(ctx context.Context, return metadata, err } -func (m queryMetricsStore) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.AIBridgeInterception, error) { +func (m queryMetricsStore) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.ListAIBridgeInterceptionsRow, error) { start := time.Now() r0, r1 := m.s.ListAIBridgeInterceptions(ctx, arg) m.queryLatencies.WithLabelValues("ListAIBridgeInterceptions").Observe(time.Since(start).Seconds()) return r0, r1 } +func (m queryMetricsStore) ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg database.ListAIBridgeInterceptionsTelemetrySummariesParams) ([]database.ListAIBridgeInterceptionsTelemetrySummariesRow, error) { + start := time.Now() + r0, r1 := 
m.s.ListAIBridgeInterceptionsTelemetrySummaries(ctx, arg) + m.queryLatencies.WithLabelValues("ListAIBridgeInterceptionsTelemetrySummaries").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) ListAIBridgeTokenUsagesByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]database.AIBridgeTokenUsage, error) { start := time.Now() r0, r1 := m.s.ListAIBridgeTokenUsagesByInterceptionIDs(ctx, interceptionIds) @@ -2714,6 +2804,13 @@ func (m queryMetricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx return r0, r1 } +func (m queryMetricsStore) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) { + start := time.Now() + r0, r1 := m.s.ListTasks(ctx, arg) + m.queryLatencies.WithLabelValues("ListTasks").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { start := time.Now() r0, r1 := m.s.ListUserSecrets(ctx, userID) @@ -2826,6 +2923,13 @@ func (m queryMetricsStore) UnfavoriteWorkspace(ctx context.Context, arg uuid.UUI return r0 } +func (m queryMetricsStore) UpdateAIBridgeInterceptionEnded(ctx context.Context, id database.UpdateAIBridgeInterceptionEndedParams) (database.AIBridgeInterception, error) { + start := time.Now() + r0, r1 := m.s.UpdateAIBridgeInterceptionEnded(ctx, id) + m.queryLatencies.WithLabelValues("UpdateAIBridgeInterceptionEnded").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { start := time.Now() err := m.s.UpdateAPIKeyByID(ctx, arg) @@ -2945,6 +3049,13 @@ func (m queryMetricsStore) UpdateOrganizationDeletedByID(ctx context.Context, ar return r0 } +func (m queryMetricsStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) 
([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { + start := time.Now() + r0, r1 := m.s.UpdatePrebuildProvisionerJobWithCancel(ctx, arg) + m.queryLatencies.WithLabelValues("UpdatePrebuildProvisionerJobWithCancel").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdatePresetPrebuildStatus(ctx context.Context, arg database.UpdatePresetPrebuildStatusParams) error { start := time.Now() r0 := m.s.UpdatePresetPrebuildStatus(ctx, arg) @@ -3015,6 +3126,13 @@ func (m queryMetricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Cont return r0 } +func (m queryMetricsStore) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { + start := time.Now() + r0, r1 := m.s.UpdateTaskWorkspaceID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTaskWorkspaceID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { start := time.Now() err := m.s.UpdateTemplateACLByID(ctx, arg) @@ -3533,6 +3651,13 @@ func (m queryMetricsStore) UpsertTailnetTunnel(ctx context.Context, arg database return r0, r1 } +func (m queryMetricsStore) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) { + start := time.Now() + r0, r1 := m.s.UpsertTaskWorkspaceApp(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTaskWorkspaceApp").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error { start := time.Now() r0 := m.s.UpsertTelemetryItem(ctx, arg) @@ -3666,9 +3791,16 @@ func (m queryMetricsStore) CountAuthorizedConnectionLogs(ctx context.Context, ar return r0, r1 } -func (m queryMetricsStore) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg 
database.ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]database.AIBridgeInterception, error) { +func (m queryMetricsStore) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]database.ListAIBridgeInterceptionsRow, error) { start := time.Now() r0, r1 := m.s.ListAuthorizedAIBridgeInterceptions(ctx, arg, prepared) m.queryLatencies.WithLabelValues("ListAuthorizedAIBridgeInterceptions").Observe(time.Since(start).Seconds()) return r0, r1 } + +func (m queryMetricsStore) CountAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) (int64, error) { + start := time.Now() + r0, r1 := m.s.CountAuthorizedAIBridgeInterceptions(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("CountAuthorizedAIBridgeInterceptions").Observe(time.Since(start).Seconds()) + return r0, r1 +} diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 09edffc9deadc..af89a987a3203 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -11,7 +11,6 @@ package dbmock import ( context "context" - sql "database/sql" reflect "reflect" time "time" @@ -191,6 +190,21 @@ func (mr *MockStoreMockRecorder) BulkMarkNotificationMessagesSent(ctx, arg any) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkMarkNotificationMessagesSent", reflect.TypeOf((*MockStore)(nil).BulkMarkNotificationMessagesSent), ctx, arg) } +// CalculateAIBridgeInterceptionsTelemetrySummary mocks base method. 
+func (m *MockStore) CalculateAIBridgeInterceptionsTelemetrySummary(ctx context.Context, arg database.CalculateAIBridgeInterceptionsTelemetrySummaryParams) (database.CalculateAIBridgeInterceptionsTelemetrySummaryRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CalculateAIBridgeInterceptionsTelemetrySummary", ctx, arg) + ret0, _ := ret[0].(database.CalculateAIBridgeInterceptionsTelemetrySummaryRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CalculateAIBridgeInterceptionsTelemetrySummary indicates an expected call of CalculateAIBridgeInterceptionsTelemetrySummary. +func (mr *MockStoreMockRecorder) CalculateAIBridgeInterceptionsTelemetrySummary(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CalculateAIBridgeInterceptionsTelemetrySummary", reflect.TypeOf((*MockStore)(nil).CalculateAIBridgeInterceptionsTelemetrySummary), ctx, arg) +} + // ClaimPrebuiltWorkspace mocks base method. func (m *MockStore) ClaimPrebuiltWorkspace(ctx context.Context, arg database.ClaimPrebuiltWorkspaceParams) (database.ClaimPrebuiltWorkspaceRow, error) { m.ctrl.T.Helper() @@ -248,6 +262,21 @@ func (mr *MockStoreMockRecorder) CleanTailnetTunnels(ctx any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanTailnetTunnels", reflect.TypeOf((*MockStore)(nil).CleanTailnetTunnels), ctx) } +// CountAIBridgeInterceptions mocks base method. +func (m *MockStore) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAIBridgeInterceptions", ctx, arg) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAIBridgeInterceptions indicates an expected call of CountAIBridgeInterceptions. 
+func (mr *MockStoreMockRecorder) CountAIBridgeInterceptions(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAIBridgeInterceptions", reflect.TypeOf((*MockStore)(nil).CountAIBridgeInterceptions), ctx, arg) +} + // CountAuditLogs mocks base method. func (m *MockStore) CountAuditLogs(ctx context.Context, arg database.CountAuditLogsParams) (int64, error) { m.ctrl.T.Helper() @@ -263,6 +292,21 @@ func (mr *MockStoreMockRecorder) CountAuditLogs(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAuditLogs", reflect.TypeOf((*MockStore)(nil).CountAuditLogs), ctx, arg) } +// CountAuthorizedAIBridgeInterceptions mocks base method. +func (m *MockStore) CountAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAuthorizedAIBridgeInterceptions", ctx, arg, prepared) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAuthorizedAIBridgeInterceptions indicates an expected call of CountAuthorizedAIBridgeInterceptions. +func (mr *MockStoreMockRecorder) CountAuthorizedAIBridgeInterceptions(ctx, arg, prepared any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAuthorizedAIBridgeInterceptions", reflect.TypeOf((*MockStore)(nil).CountAuthorizedAIBridgeInterceptions), ctx, arg, prepared) +} + // CountAuthorizedAuditLogs mocks base method. 
func (m *MockStore) CountAuthorizedAuditLogs(ctx context.Context, arg database.CountAuditLogsParams, prepared rbac.PreparedAuthorized) (int64, error) { m.ctrl.T.Helper() @@ -323,6 +367,21 @@ func (mr *MockStoreMockRecorder) CountInProgressPrebuilds(ctx any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountInProgressPrebuilds", reflect.TypeOf((*MockStore)(nil).CountInProgressPrebuilds), ctx) } +// CountPendingNonActivePrebuilds mocks base method. +func (m *MockStore) CountPendingNonActivePrebuilds(ctx context.Context) ([]database.CountPendingNonActivePrebuildsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountPendingNonActivePrebuilds", ctx) + ret0, _ := ret[0].([]database.CountPendingNonActivePrebuildsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountPendingNonActivePrebuilds indicates an expected call of CountPendingNonActivePrebuilds. +func (mr *MockStoreMockRecorder) CountPendingNonActivePrebuilds(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountPendingNonActivePrebuilds", reflect.TypeOf((*MockStore)(nil).CountPendingNonActivePrebuilds), ctx) +} + // CountUnreadInboxNotificationsByUserID mocks base method. func (m *MockStore) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) { m.ctrl.T.Helper() @@ -692,6 +751,20 @@ func (mr *MockStoreMockRecorder) DeleteOldProvisionerDaemons(ctx any) *gomock.Ca return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOldProvisionerDaemons", reflect.TypeOf((*MockStore)(nil).DeleteOldProvisionerDaemons), ctx) } +// DeleteOldTelemetryLocks mocks base method. 
+func (m *MockStore) DeleteOldTelemetryLocks(ctx context.Context, periodEndingAtBefore time.Time) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteOldTelemetryLocks", ctx, periodEndingAtBefore) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteOldTelemetryLocks indicates an expected call of DeleteOldTelemetryLocks. +func (mr *MockStoreMockRecorder) DeleteOldTelemetryLocks(ctx, periodEndingAtBefore any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOldTelemetryLocks", reflect.TypeOf((*MockStore)(nil).DeleteOldTelemetryLocks), ctx, periodEndingAtBefore) +} + // DeleteOldWorkspaceAgentLogs mocks base method. func (m *MockStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, threshold time.Time) error { m.ctrl.T.Helper() @@ -850,6 +923,21 @@ func (mr *MockStoreMockRecorder) DeleteTailnetTunnel(ctx, arg any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTailnetTunnel", reflect.TypeOf((*MockStore)(nil).DeleteTailnetTunnel), ctx, arg) } +// DeleteTask mocks base method. +func (m *MockStore) DeleteTask(ctx context.Context, arg database.DeleteTaskParams) (database.TaskTable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTask", ctx, arg) + ret0, _ := ret[0].(database.TaskTable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteTask indicates an expected call of DeleteTask. +func (mr *MockStoreMockRecorder) DeleteTask(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTask", reflect.TypeOf((*MockStore)(nil).DeleteTask), ctx, arg) +} + // DeleteUserSecret mocks base method. func (m *MockStore) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { m.ctrl.T.Helper() @@ -2280,7 +2368,7 @@ func (mr *MockStoreMockRecorder) GetOAuth2ProviderAppByID(ctx, id any) *gomock.C } // GetOAuth2ProviderAppByRegistrationToken mocks base method. 
-func (m *MockStore) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken sql.NullString) (database.OAuth2ProviderApp, error) { +func (m *MockStore) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken []byte) (database.OAuth2ProviderApp, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetOAuth2ProviderAppByRegistrationToken", ctx, registrationAccessToken) ret0, _ := ret[0].(database.OAuth2ProviderApp) @@ -2534,6 +2622,21 @@ func (mr *MockStoreMockRecorder) GetOrganizationsByUserID(ctx, arg any) *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsByUserID", reflect.TypeOf((*MockStore)(nil).GetOrganizationsByUserID), ctx, arg) } +// GetOrganizationsWithPrebuildStatus mocks base method. +func (m *MockStore) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetOrganizationsWithPrebuildStatus", ctx, arg) + ret0, _ := ret[0].([]database.GetOrganizationsWithPrebuildStatusRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetOrganizationsWithPrebuildStatus indicates an expected call of GetOrganizationsWithPrebuildStatus. +func (mr *MockStoreMockRecorder) GetOrganizationsWithPrebuildStatus(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsWithPrebuildStatus", reflect.TypeOf((*MockStore)(nil).GetOrganizationsWithPrebuildStatus), ctx, arg) +} + // GetParameterSchemasByJobID mocks base method. 
func (m *MockStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { m.ctrl.T.Helper() @@ -3119,6 +3222,36 @@ func (mr *MockStoreMockRecorder) GetTailnetTunnelPeerIDs(ctx, srcID any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTailnetTunnelPeerIDs", reflect.TypeOf((*MockStore)(nil).GetTailnetTunnelPeerIDs), ctx, srcID) } +// GetTaskByID mocks base method. +func (m *MockStore) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTaskByID", ctx, id) + ret0, _ := ret[0].(database.Task) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTaskByID indicates an expected call of GetTaskByID. +func (mr *MockStoreMockRecorder) GetTaskByID(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByID", reflect.TypeOf((*MockStore)(nil).GetTaskByID), ctx, id) +} + +// GetTaskByWorkspaceID mocks base method. +func (m *MockStore) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTaskByWorkspaceID", ctx, workspaceID) + ret0, _ := ret[0].(database.Task) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTaskByWorkspaceID indicates an expected call of GetTaskByWorkspaceID. +func (mr *MockStoreMockRecorder) GetTaskByWorkspaceID(ctx, workspaceID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByWorkspaceID", reflect.TypeOf((*MockStore)(nil).GetTaskByWorkspaceID), ctx, workspaceID) +} + // GetTelemetryItem mocks base method. 
func (m *MockStore) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) { m.ctrl.T.Helper() @@ -4169,6 +4302,21 @@ func (mr *MockStoreMockRecorder) GetWorkspaceAgentsCreatedAfter(ctx, createdAt a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceAgentsCreatedAfter", reflect.TypeOf((*MockStore)(nil).GetWorkspaceAgentsCreatedAfter), ctx, createdAt) } +// GetWorkspaceAgentsForMetrics mocks base method. +func (m *MockStore) GetWorkspaceAgentsForMetrics(ctx context.Context) ([]database.GetWorkspaceAgentsForMetricsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetWorkspaceAgentsForMetrics", ctx) + ret0, _ := ret[0].([]database.GetWorkspaceAgentsForMetricsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetWorkspaceAgentsForMetrics indicates an expected call of GetWorkspaceAgentsForMetrics. +func (mr *MockStoreMockRecorder) GetWorkspaceAgentsForMetrics(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceAgentsForMetrics", reflect.TypeOf((*MockStore)(nil).GetWorkspaceAgentsForMetrics), ctx) +} + // GetWorkspaceAgentsInLatestBuildByWorkspaceID mocks base method. func (m *MockStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { m.ctrl.T.Helper() @@ -4709,6 +4857,21 @@ func (mr *MockStoreMockRecorder) GetWorkspacesEligibleForTransition(ctx, now any return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspacesEligibleForTransition", reflect.TypeOf((*MockStore)(nil).GetWorkspacesEligibleForTransition), ctx, now) } +// GetWorkspacesForWorkspaceMetrics mocks base method. 
+func (m *MockStore) GetWorkspacesForWorkspaceMetrics(ctx context.Context) ([]database.GetWorkspacesForWorkspaceMetricsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetWorkspacesForWorkspaceMetrics", ctx) + ret0, _ := ret[0].([]database.GetWorkspacesForWorkspaceMetricsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetWorkspacesForWorkspaceMetrics indicates an expected call of GetWorkspacesForWorkspaceMetrics. +func (mr *MockStoreMockRecorder) GetWorkspacesForWorkspaceMetrics(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspacesForWorkspaceMetrics", reflect.TypeOf((*MockStore)(nil).GetWorkspacesForWorkspaceMetrics), ctx) +} + // InTx mocks base method. func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *database.TxOptions) error { m.ctrl.T.Helper() @@ -5244,6 +5407,21 @@ func (mr *MockStoreMockRecorder) InsertReplica(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertReplica", reflect.TypeOf((*MockStore)(nil).InsertReplica), ctx, arg) } +// InsertTask mocks base method. +func (m *MockStore) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InsertTask", ctx, arg) + ret0, _ := ret[0].(database.TaskTable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// InsertTask indicates an expected call of InsertTask. +func (mr *MockStoreMockRecorder) InsertTask(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTask", reflect.TypeOf((*MockStore)(nil).InsertTask), ctx, arg) +} + // InsertTelemetryItemIfNotExists mocks base method. 
func (m *MockStore) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error { m.ctrl.T.Helper() @@ -5258,6 +5436,20 @@ func (mr *MockStoreMockRecorder) InsertTelemetryItemIfNotExists(ctx, arg any) *g return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTelemetryItemIfNotExists", reflect.TypeOf((*MockStore)(nil).InsertTelemetryItemIfNotExists), ctx, arg) } +// InsertTelemetryLock mocks base method. +func (m *MockStore) InsertTelemetryLock(ctx context.Context, arg database.InsertTelemetryLockParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InsertTelemetryLock", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// InsertTelemetryLock indicates an expected call of InsertTelemetryLock. +func (mr *MockStoreMockRecorder) InsertTelemetryLock(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTelemetryLock", reflect.TypeOf((*MockStore)(nil).InsertTelemetryLock), ctx, arg) +} + // InsertTemplate mocks base method. func (m *MockStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { m.ctrl.T.Helper() @@ -5699,10 +5891,10 @@ func (mr *MockStoreMockRecorder) InsertWorkspaceResourceMetadata(ctx, arg any) * } // ListAIBridgeInterceptions mocks base method. 
-func (m *MockStore) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.AIBridgeInterception, error) { +func (m *MockStore) ListAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams) ([]database.ListAIBridgeInterceptionsRow, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ListAIBridgeInterceptions", ctx, arg) - ret0, _ := ret[0].([]database.AIBridgeInterception) + ret0, _ := ret[0].([]database.ListAIBridgeInterceptionsRow) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -5713,6 +5905,21 @@ func (mr *MockStoreMockRecorder) ListAIBridgeInterceptions(ctx, arg any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAIBridgeInterceptions", reflect.TypeOf((*MockStore)(nil).ListAIBridgeInterceptions), ctx, arg) } +// ListAIBridgeInterceptionsTelemetrySummaries mocks base method. +func (m *MockStore) ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg database.ListAIBridgeInterceptionsTelemetrySummariesParams) ([]database.ListAIBridgeInterceptionsTelemetrySummariesRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAIBridgeInterceptionsTelemetrySummaries", ctx, arg) + ret0, _ := ret[0].([]database.ListAIBridgeInterceptionsTelemetrySummariesRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAIBridgeInterceptionsTelemetrySummaries indicates an expected call of ListAIBridgeInterceptionsTelemetrySummaries. +func (mr *MockStoreMockRecorder) ListAIBridgeInterceptionsTelemetrySummaries(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAIBridgeInterceptionsTelemetrySummaries", reflect.TypeOf((*MockStore)(nil).ListAIBridgeInterceptionsTelemetrySummaries), ctx, arg) +} + // ListAIBridgeTokenUsagesByInterceptionIDs mocks base method. 
func (m *MockStore) ListAIBridgeTokenUsagesByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]database.AIBridgeTokenUsage, error) { m.ctrl.T.Helper() @@ -5759,10 +5966,10 @@ func (mr *MockStoreMockRecorder) ListAIBridgeUserPromptsByInterceptionIDs(ctx, i } // ListAuthorizedAIBridgeInterceptions mocks base method. -func (m *MockStore) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]database.AIBridgeInterception, error) { +func (m *MockStore) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg database.ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]database.ListAIBridgeInterceptionsRow, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ListAuthorizedAIBridgeInterceptions", ctx, arg, prepared) - ret0, _ := ret[0].([]database.AIBridgeInterception) + ret0, _ := ret[0].([]database.ListAIBridgeInterceptionsRow) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -5803,6 +6010,21 @@ func (mr *MockStoreMockRecorder) ListProvisionerKeysByOrganizationExcludeReserve return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListProvisionerKeysByOrganizationExcludeReserved", reflect.TypeOf((*MockStore)(nil).ListProvisionerKeysByOrganizationExcludeReserved), ctx, organizationID) } +// ListTasks mocks base method. +func (m *MockStore) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTasks", ctx, arg) + ret0, _ := ret[0].([]database.Task) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTasks indicates an expected call of ListTasks. +func (mr *MockStoreMockRecorder) ListTasks(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTasks", reflect.TypeOf((*MockStore)(nil).ListTasks), ctx, arg) +} + // ListUserSecrets mocks base method. 
func (m *MockStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { m.ctrl.T.Helper() @@ -6067,6 +6289,21 @@ func (mr *MockStoreMockRecorder) UnfavoriteWorkspace(ctx, id any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UnfavoriteWorkspace", reflect.TypeOf((*MockStore)(nil).UnfavoriteWorkspace), ctx, id) } +// UpdateAIBridgeInterceptionEnded mocks base method. +func (m *MockStore) UpdateAIBridgeInterceptionEnded(ctx context.Context, arg database.UpdateAIBridgeInterceptionEndedParams) (database.AIBridgeInterception, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateAIBridgeInterceptionEnded", ctx, arg) + ret0, _ := ret[0].(database.AIBridgeInterception) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateAIBridgeInterceptionEnded indicates an expected call of UpdateAIBridgeInterceptionEnded. +func (mr *MockStoreMockRecorder) UpdateAIBridgeInterceptionEnded(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateAIBridgeInterceptionEnded", reflect.TypeOf((*MockStore)(nil).UpdateAIBridgeInterceptionEnded), ctx, arg) +} + // UpdateAPIKeyByID mocks base method. func (m *MockStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { m.ctrl.T.Helper() @@ -6317,6 +6554,21 @@ func (mr *MockStoreMockRecorder) UpdateOrganizationDeletedByID(ctx, arg any) *go return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrganizationDeletedByID", reflect.TypeOf((*MockStore)(nil).UpdateOrganizationDeletedByID), ctx, arg) } +// UpdatePrebuildProvisionerJobWithCancel mocks base method. 
+func (m *MockStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdatePrebuildProvisionerJobWithCancel", ctx, arg) + ret0, _ := ret[0].([]database.UpdatePrebuildProvisionerJobWithCancelRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdatePrebuildProvisionerJobWithCancel indicates an expected call of UpdatePrebuildProvisionerJobWithCancel. +func (mr *MockStoreMockRecorder) UpdatePrebuildProvisionerJobWithCancel(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdatePrebuildProvisionerJobWithCancel", reflect.TypeOf((*MockStore)(nil).UpdatePrebuildProvisionerJobWithCancel), ctx, arg) +} + // UpdatePresetPrebuildStatus mocks base method. func (m *MockStore) UpdatePresetPrebuildStatus(ctx context.Context, arg database.UpdatePresetPrebuildStatusParams) error { m.ctrl.T.Helper() @@ -6458,6 +6710,21 @@ func (mr *MockStoreMockRecorder) UpdateTailnetPeerStatusByCoordinator(ctx, arg a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTailnetPeerStatusByCoordinator", reflect.TypeOf((*MockStore)(nil).UpdateTailnetPeerStatusByCoordinator), ctx, arg) } +// UpdateTaskWorkspaceID mocks base method. +func (m *MockStore) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateTaskWorkspaceID", ctx, arg) + ret0, _ := ret[0].(database.TaskTable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateTaskWorkspaceID indicates an expected call of UpdateTaskWorkspaceID. 
+func (mr *MockStoreMockRecorder) UpdateTaskWorkspaceID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTaskWorkspaceID", reflect.TypeOf((*MockStore)(nil).UpdateTaskWorkspaceID), ctx, arg) +} + // UpdateTemplateACLByID mocks base method. func (m *MockStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { m.ctrl.T.Helper() @@ -7517,6 +7784,21 @@ func (mr *MockStoreMockRecorder) UpsertTailnetTunnel(ctx, arg any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertTailnetTunnel", reflect.TypeOf((*MockStore)(nil).UpsertTailnetTunnel), ctx, arg) } +// UpsertTaskWorkspaceApp mocks base method. +func (m *MockStore) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertTaskWorkspaceApp", ctx, arg) + ret0, _ := ret[0].(database.TaskWorkspaceApp) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpsertTaskWorkspaceApp indicates an expected call of UpsertTaskWorkspaceApp. +func (mr *MockStoreMockRecorder) UpsertTaskWorkspaceApp(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertTaskWorkspaceApp", reflect.TypeOf((*MockStore)(nil).UpsertTaskWorkspaceApp), ctx, arg) +} + // UpsertTelemetryItem mocks base method. func (m *MockStore) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error { m.ctrl.T.Helper() diff --git a/coderd/database/dbpurge/dbpurge.go b/coderd/database/dbpurge/dbpurge.go index b9e0023f5a6f8..067fe1f0499e3 100644 --- a/coderd/database/dbpurge/dbpurge.go +++ b/coderd/database/dbpurge/dbpurge.go @@ -24,6 +24,12 @@ const ( // but we won't touch the `connection_logs` table. 
maxAuditLogConnectionEventAge = 90 * 24 * time.Hour // 90 days auditLogConnectionEventBatchSize = 1000 + // Telemetry heartbeats are used to deduplicate events across replicas. We + // don't need to persist heartbeat rows for longer than 24 hours, as they + // are only used for deduplication across replicas. The time needs to be + // long enough to cover the maximum interval of a heartbeat event (currently + // 1 hour) plus some buffer. + maxTelemetryHeartbeatAge = 24 * time.Hour ) // New creates a new periodically purging database instance. @@ -71,6 +77,10 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. if err := tx.ExpirePrebuildsAPIKeys(ctx, dbtime.Time(start)); err != nil { return xerrors.Errorf("failed to expire prebuilds user api keys: %w", err) } + deleteOldTelemetryLocksBefore := start.Add(-maxTelemetryHeartbeatAge) + if err := tx.DeleteOldTelemetryLocks(ctx, deleteOldTelemetryLocksBefore); err != nil { + return xerrors.Errorf("failed to delete old telemetry locks: %w", err) + } deleteOldAuditLogConnectionEventsBefore := start.Add(-maxAuditLogConnectionEventAge) if err := tx.DeleteOldAuditLogConnectionEvents(ctx, database.DeleteOldAuditLogConnectionEventsParams{ diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go index 02efd4a81fd14..74bf36639fbb5 100644 --- a/coderd/database/dbpurge/dbpurge_test.go +++ b/coderd/database/dbpurge/dbpurge_test.go @@ -704,3 +704,56 @@ func TestExpireOldAPIKeys(t *testing.T) { // Out of an abundance of caution, we do not expire explicitly named prebuilds API keys. 
	assertKeyActive(namedPrebuildsAPIKey.ID)
}
+
+func TestDeleteOldTelemetryHeartbeats(t *testing.T) {
+	t.Parallel()
+
+	ctx := testutil.Context(t, testutil.WaitLong)
+
+	db, _, sqlDB := dbtestutil.NewDBWithSQLDB(t, dbtestutil.WithDumpOnFailure())
+	logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true})
+	clk := quartz.NewMock(t)
+	now := clk.Now().UTC()
+
+	// Insert telemetry heartbeats.
+	err := db.InsertTelemetryLock(ctx, database.InsertTelemetryLockParams{
+		EventType:      "aibridge_interceptions_summary",
+		PeriodEndingAt: now.Add(-25 * time.Hour), // should be purged
+	})
+	require.NoError(t, err)
+	err = db.InsertTelemetryLock(ctx, database.InsertTelemetryLockParams{
+		EventType:      "aibridge_interceptions_summary",
+		PeriodEndingAt: now.Add(-23 * time.Hour), // should be kept
+	})
+	require.NoError(t, err)
+	err = db.InsertTelemetryLock(ctx, database.InsertTelemetryLockParams{
+		EventType:      "aibridge_interceptions_summary",
+		PeriodEndingAt: now, // should be kept
+	})
+	require.NoError(t, err)
+
+	done := awaitDoTick(ctx, t, clk)
+	closer := dbpurge.New(ctx, logger, db, clk)
+	defer closer.Close()
+	<-done // doTick() has now run.
+
+	require.Eventuallyf(t, func() bool {
+		// We use SQL queries directly here because we don't expose queries
+		// for deleting heartbeats in the application code.
+		var totalCount int
+		err := sqlDB.QueryRowContext(ctx, `
+			SELECT COUNT(*) FROM telemetry_locks;
+		`).Scan(&totalCount)
+		assert.NoError(t, err)
+
+		var oldCount int
+		err = sqlDB.QueryRowContext(ctx, `
+			SELECT COUNT(*) FROM telemetry_locks WHERE period_ending_at < $1;
+		`, now.Add(-24*time.Hour)).Scan(&oldCount)
+		assert.NoError(t, err)
+
+		// Expect 2 heartbeats remaining and none older than 24 hours.
+ t.Logf("eventually: total count: %d, old count: %d", totalCount, oldCount) + return totalCount == 2 && oldCount == 0 + }, testutil.WaitShort, testutil.IntervalFast, "it should delete old telemetry heartbeats") +} diff --git a/coderd/database/dbrollup/dbrollup_test.go b/coderd/database/dbrollup/dbrollup_test.go index 2c727a6ca101a..c0417cd63134c 100644 --- a/coderd/database/dbrollup/dbrollup_test.go +++ b/coderd/database/dbrollup/dbrollup_test.go @@ -52,10 +52,6 @@ func (w *wrapUpsertDB) UpsertTemplateUsageStats(ctx context.Context) error { func TestRollup_TwoInstancesUseLocking(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("Skipping test; only works with PostgreSQL.") - } - db, ps := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) logger := testutil.Logger(t) diff --git a/coderd/database/dbtestutil/broker.go b/coderd/database/dbtestutil/broker.go index 4e814b93d0477..158a44cbc997c 100644 --- a/coderd/database/dbtestutil/broker.go +++ b/coderd/database/dbtestutil/broker.go @@ -6,6 +6,8 @@ import ( _ "embed" "fmt" "os" + "runtime" + "strings" "sync" "time" @@ -45,6 +47,8 @@ func (b *Broker) Create(t TBSubset, opts ...OpenOption) (ConnectionParams, error host = defaultConnectionParams.Host port = defaultConnectionParams.Port ) + packageName := getTestPackageName(t) + testName := t.Name() // Use a time-based prefix to make it easier to find the database // when debugging. 
@@ -55,9 +59,9 @@ func (b *Broker) Create(t TBSubset, opts ...OpenOption) (ConnectionParams, error } dbName := now + "_" + dbSuffix - // TODO: add package and test name _, err = b.coderTestingDB.Exec( - "INSERT INTO test_databases (name, process_uuid) VALUES ($1, $2)", dbName, b.uuid) + "INSERT INTO test_databases (name, process_uuid, test_package, test_name) VALUES ($1, $2, $3, $4)", + dbName, b.uuid, packageName, testName) if err != nil { return ConnectionParams{}, xerrors.Errorf("insert test_database row: %w", err) } @@ -104,10 +108,10 @@ func (b *Broker) clean(t TBSubset, dbName string) func() { func (b *Broker) init(t TBSubset) error { b.Lock() defer b.Unlock() - b.refCount++ - t.Cleanup(b.decRef) if b.coderTestingDB != nil { // already initialized + b.refCount++ + t.Cleanup(b.decRef) return nil } @@ -124,8 +128,8 @@ func (b *Broker) init(t TBSubset) error { return xerrors.Errorf("open postgres connection: %w", err) } - // creating the db can succeed even if the database doesn't exist. Ping it to find out. - err = coderTestingDB.Ping() + // coderTestingSQLInit is idempotent, so we can run it every time. + _, err = coderTestingDB.Exec(coderTestingSQLInit) var pqErr *pq.Error if xerrors.As(err, &pqErr) && pqErr.Code == "3D000" { // database does not exist. @@ -145,12 +149,14 @@ func (b *Broker) init(t TBSubset) error { return xerrors.Errorf("ping '%s' database: %w", CoderTestingDBName, err) } b.coderTestingDB = coderTestingDB + b.refCount++ + t.Cleanup(b.decRef) if b.uuid == uuid.Nil { b.uuid = uuid.New() ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second) defer cancel() - b.cleanerFD, err = startCleaner(ctx, b.uuid, coderTestingParams.DSN()) + b.cleanerFD, err = startCleaner(ctx, t, b.uuid, coderTestingParams.DSN()) if err != nil { return xerrors.Errorf("start test db cleaner: %w", err) } @@ -186,3 +192,42 @@ func (b *Broker) decRef() { b.coderTestingDB = nil } } + +// getTestPackageName returns the package name of the test that called it. 
+func getTestPackageName(t TBSubset) string { + packageName := "unknown" + // Ask runtime.Callers for up to 100 program counters, including runtime.Callers itself. + pc := make([]uintptr, 100) + n := runtime.Callers(0, pc) + if n == 0 { + // No PCs available. This can happen if the first argument to + // runtime.Callers is large. + // + // Return now to avoid processing the zero Frame that would + // otherwise be returned by frames.Next below. + t.Logf("could not determine test package name: no PCs available") + return packageName + } + + pc = pc[:n] // pass only valid pcs to runtime.CallersFrames + frames := runtime.CallersFrames(pc) + + // Loop to get frames. + // A fixed number of PCs can expand to an indefinite number of Frames. + for { + frame, more := frames.Next() + + if strings.HasPrefix(frame.Function, "github.com/coder/coder/v2/") { + packageName = strings.SplitN(strings.TrimPrefix(frame.Function, "github.com/coder/coder/v2/"), ".", 2)[0] + } + if strings.HasPrefix(frame.Function, "testing") { + break + } + + // Check whether there are more frames to process after this one. 
+ if !more { + break + } + } + return packageName +} diff --git a/coderd/database/dbtestutil/broker_internal_test.go b/coderd/database/dbtestutil/broker_internal_test.go new file mode 100644 index 0000000000000..944ae2a4770d6 --- /dev/null +++ b/coderd/database/dbtestutil/broker_internal_test.go @@ -0,0 +1,13 @@ +package dbtestutil + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGetTestPackageName(t *testing.T) { + t.Parallel() + packageName := getTestPackageName(t) + require.Equal(t, "coderd/database/dbtestutil", packageName) +} diff --git a/coderd/database/dbtestutil/cleaner.go b/coderd/database/dbtestutil/cleaner.go index 8387cc18fe2c7..851f4488f8688 100644 --- a/coderd/database/dbtestutil/cleaner.go +++ b/coderd/database/dbtestutil/cleaner.go @@ -22,36 +22,43 @@ const ( cleanerRespOK = "OK" envCleanerParentUUID = "DB_CLEANER_PARENT_UUID" envCleanerDSN = "DB_CLEANER_DSN" -) - -var ( - originalWorkingDir string - errGettingWorkingDir error + envCleanerMagic = "DB_CLEANER_MAGIC" + envCleanerMagicValue = "XEHdJqWehWek8AaWwopy" // 20 random characters to make this collision resistant ) func init() { - // We expect our tests to run from somewhere in the project tree where `go run` below in `startCleaner` will - // be able to resolve the command package. However, some of the tests modify the working directory during the run. - // So, we grab the working directory during package init, before tests are run, and then set that work dir on the - // subcommand process before it starts. - originalWorkingDir, errGettingWorkingDir = os.Getwd() + // We are hijacking the init() function here to do something very non-standard. + // + // We want to be able to run the cleaner as a subprocess of the test process so that it can outlive the test binary + // and still clean up, even if the test process times out or is killed. 
So, what we do is in startCleaner() below, + // which is called in the parent process, we exec our own binary and set a collision-resistant environment variable. + // Then here in the init(), which will run before main() and therefore before executing tests, we check for the + // environment variable, and if present we know this is the child process and we exec the cleaner. Instead of + // returning normally from init() we call os.Exit(). This prevents tests from being re-run in the child process (and + // recursion). + // + // If the magic value is not present, we know we are the parent process and init() returns normally. + magicValue := os.Getenv(envCleanerMagic) + if magicValue == envCleanerMagicValue { + RunCleaner() + os.Exit(0) + } } // startCleaner starts the cleaner in a subprocess. holdThis is an opaque reference that needs to be kept from being // garbage collected until we are done with all test databases (e.g. the end of the process). -func startCleaner(ctx context.Context, parentUUID uuid.UUID, dsn string) (holdThis any, err error) { - cmd := exec.Command("go", "run", "github.com/coder/coder/v2/coderd/database/dbtestutil/cleanercmd") +func startCleaner(ctx context.Context, _ TBSubset, parentUUID uuid.UUID, dsn string) (holdThis any, err error) { + bin, err := os.Executable() + if err != nil { + return nil, xerrors.Errorf("could not get executable path: %w", err) + } + cmd := exec.Command(bin) cmd.Env = append(os.Environ(), fmt.Sprintf("%s=%s", envCleanerParentUUID, parentUUID.String()), fmt.Sprintf("%s=%s", envCleanerDSN, dsn), + fmt.Sprintf("%s=%s", envCleanerMagic, envCleanerMagicValue), ) - // c.f. comment on `func init()` in this file. - if errGettingWorkingDir != nil { - return nil, xerrors.Errorf("failed to get working directory during init: %w", errGettingWorkingDir) - } - cmd.Dir = originalWorkingDir - // Here we don't actually use the reference to the stdin pipe, because we never write anything to it. 
When this // process exits, the pipe is closed by the OS and this triggers the cleaner to do its cleaning work. But, we do // need to hang on to a reference to it so that it doesn't get garbage collected and trigger cleanup early. @@ -178,8 +185,7 @@ func (c *cleaner) waitAndClean() { } // RunCleaner runs the test database cleaning process. It takes no arguments but uses stdio and environment variables -// for its operation. It is designed to be launched as the only task of a `main()` process, but is included in this -// package to share constants with the parent code that launches it above. +// for its operation. // // The cleaner is designed to run in a separate process from the main test suite, connected over stdio. If the main test // process ends (panics, times out, or is killed) without explicitly discarding the databases it clones, the cleaner diff --git a/coderd/database/dbtestutil/cleanercmd/main.go b/coderd/database/dbtestutil/cleanercmd/main.go deleted file mode 100644 index 038637320e0a1..0000000000000 --- a/coderd/database/dbtestutil/cleanercmd/main.go +++ /dev/null @@ -1,7 +0,0 @@ -package main - -import "github.com/coder/coder/v2/coderd/database/dbtestutil" - -func main() { - dbtestutil.RunCleaner() -} diff --git a/coderd/database/dbtestutil/coder_testing.sql b/coderd/database/dbtestutil/coder_testing.sql index edaab486c847a..453b38d2d4510 100644 --- a/coderd/database/dbtestutil/coder_testing.sql +++ b/coderd/database/dbtestutil/coder_testing.sql @@ -1,3 +1,6 @@ +BEGIN TRANSACTION; +SELECT pg_advisory_xact_lock(7283699); + CREATE TABLE IF NOT EXISTS test_databases ( name text PRIMARY KEY, created_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -6,3 +9,10 @@ CREATE TABLE IF NOT EXISTS test_databases ( ); CREATE INDEX IF NOT EXISTS test_databases_process_uuid ON test_databases (process_uuid, dropped_at); + +ALTER TABLE test_databases ADD COLUMN IF NOT EXISTS test_name text; +COMMENT ON COLUMN test_databases.test_name IS 'Name of the 
test that created the database'; +ALTER TABLE test_databases ADD COLUMN IF NOT EXISTS test_package text; +COMMENT ON COLUMN test_databases.test_package IS 'Package of the test that created the database'; + +COMMIT; diff --git a/coderd/database/dbtestutil/db.go b/coderd/database/dbtestutil/db.go index a960822e2a732..3d636e6833131 100644 --- a/coderd/database/dbtestutil/db.go +++ b/coderd/database/dbtestutil/db.go @@ -23,13 +23,6 @@ import ( "github.com/coder/coder/v2/testutil" ) -// WillUsePostgres returns true if a call to NewDB() will return a real, postgres-backed Store and Pubsub. -// TODO(hugodutka): since we removed the in-memory database, this is always true, -// and we need to remove this function. https://github.com/coder/internal/issues/758 -func WillUsePostgres() bool { - return true -} - type options struct { fixedTimezone string dumpOnFailure bool @@ -75,10 +68,6 @@ func withReturnSQLDB(f func(*sql.DB)) Option { func NewDBWithSQLDB(t testing.TB, opts ...Option) (database.Store, pubsub.Pubsub, *sql.DB) { t.Helper() - if !WillUsePostgres() { - t.Fatal("cannot use NewDBWithSQLDB without PostgreSQL, consider adding `if !dbtestutil.WillUsePostgres() { t.Skip() }` to this test") - } - var sqlDB *sql.DB opts = append(opts, withReturnSQLDB(func(db *sql.DB) { sqlDB = db @@ -242,10 +231,11 @@ func PGDump(dbURL string) ([]byte, error) { "PGCLIENTENCODING=UTF8", "PGDATABASE=", // we should always specify the database name in the connection string } - var stdout bytes.Buffer + var stdout, stderr bytes.Buffer cmd.Stdout = &stdout + cmd.Stderr = &stderr if err := cmd.Run(); err != nil { - return nil, xerrors.Errorf("exec pg_dump: %w", err) + return nil, xerrors.Errorf("exec pg_dump: %w\n%s", err, stderr.String()) } return stdout.Bytes(), nil } diff --git a/coderd/database/dbtestutil/postgres.go b/coderd/database/dbtestutil/postgres.go index 567fae0dafc48..a55a99f972ca2 100644 --- a/coderd/database/dbtestutil/postgres.go +++ b/coderd/database/dbtestutil/postgres.go @@ 
-166,6 +166,7 @@ type TBSubset interface { Cleanup(func()) Helper() Logf(format string, args ...any) + TempDir() string } // Open creates a new PostgreSQL database instance. diff --git a/coderd/database/dbtestutil/postgres_test.go b/coderd/database/dbtestutil/postgres_test.go index e653895f8e961..ecf18c9cfdecb 100644 --- a/coderd/database/dbtestutil/postgres_test.go +++ b/coderd/database/dbtestutil/postgres_test.go @@ -20,9 +20,6 @@ func TestMain(m *testing.M) { func TestOpen(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } connect, err := dbtestutil.Open(t) require.NoError(t, err) @@ -37,9 +34,6 @@ func TestOpen(t *testing.T) { func TestOpen_InvalidDBFrom(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } _, err := dbtestutil.Open(t, dbtestutil.WithDBFrom("__invalid__")) require.Error(t, err) @@ -49,9 +43,6 @@ func TestOpen_InvalidDBFrom(t *testing.T) { func TestOpen_ValidDBFrom(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } // first check if we can create a new template db dsn, err := dbtestutil.Open(t, dbtestutil.WithDBFrom("")) @@ -115,9 +106,6 @@ func TestOpen_ValidDBFrom(t *testing.T) { func TestOpen_Panic(t *testing.T) { t.Skip("unskip this to manually test that we don't leak a database into postgres") t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } _, err := dbtestutil.Open(t) require.NoError(t, err) @@ -127,9 +115,6 @@ func TestOpen_Panic(t *testing.T) { func TestOpen_Timeout(t *testing.T) { t.Skip("unskip this and set a short timeout to manually test that we don't leak a database into postgres") t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } _, err := dbtestutil.Open(t) require.NoError(t, err) diff --git a/coderd/database/dbtestutil/randtz/randtz.go 
b/coderd/database/dbtestutil/randtz/randtz.go deleted file mode 100644 index 1a53bfaf725fd..0000000000000 --- a/coderd/database/dbtestutil/randtz/randtz.go +++ /dev/null @@ -1,1034 +0,0 @@ -package randtz - -import ( - "math/rand" - "sync" - "testing" - "time" -) - -var ( - randTZName string - randTZNameOnce sync.Once -) - -// Name returns a random timezone name from the list of all -// timezones known to PostgreSQL. -func Name(t testing.TB) string { - t.Helper() - - randTZNameOnce.Do(func() { - // nolint: gosec // not used for cryptography - rnd := rand.New(rand.NewSource(time.Now().Unix())) - idx := rnd.Intn(len(tznames)) - randTZName = tznames[idx] - t.Logf("Random db timezone is %q\nIf you need a specific timezone, use dbtestutil.WithTimezone()", randTZName) - }) - - return randTZName -} - -// tznames is a list of all timezone names known to postgresql. -// The below list was generated with the query -// select name from pg_timezone_names order by name asc; -var tznames = []string{ - "Africa/Abidjan", - "Africa/Accra", - "Africa/Addis_Ababa", - "Africa/Algiers", - "Africa/Asmara", - "Africa/Asmera", - "Africa/Bamako", - "Africa/Bangui", - "Africa/Banjul", - "Africa/Bissau", - "Africa/Blantyre", - "Africa/Brazzaville", - "Africa/Bujumbura", - "Africa/Cairo", - "Africa/Casablanca", - "Africa/Ceuta", - "Africa/Conakry", - "Africa/Dakar", - "Africa/Dar_es_Salaam", - "Africa/Djibouti", - "Africa/Douala", - "Africa/El_Aaiun", - "Africa/Freetown", - "Africa/Gaborone", - "Africa/Harare", - "Africa/Johannesburg", - "Africa/Juba", - "Africa/Kampala", - "Africa/Khartoum", - "Africa/Kigali", - "Africa/Kinshasa", - "Africa/Lagos", - "Africa/Libreville", - "Africa/Lome", - "Africa/Luanda", - "Africa/Lubumbashi", - "Africa/Lusaka", - "Africa/Malabo", - "Africa/Maputo", - "Africa/Maseru", - "Africa/Mbabane", - "Africa/Mogadishu", - "Africa/Monrovia", - "Africa/Nairobi", - "Africa/Ndjamena", - "Africa/Niamey", - "Africa/Nouakchott", - "Africa/Ouagadougou", - 
"Africa/Porto-Novo", - "Africa/Sao_Tome", - "Africa/Timbuktu", - "Africa/Tripoli", - "Africa/Tunis", - "Africa/Windhoek", - "America/Adak", - "America/Anchorage", - "America/Anguilla", - "America/Antigua", - "America/Araguaina", - "America/Argentina/Buenos_Aires", - "America/Argentina/Catamarca", - "America/Argentina/ComodRivadavia", - "America/Argentina/Cordoba", - "America/Argentina/Jujuy", - "America/Argentina/La_Rioja", - "America/Argentina/Mendoza", - "America/Argentina/Rio_Gallegos", - "America/Argentina/Salta", - "America/Argentina/San_Juan", - "America/Argentina/San_Luis", - "America/Argentina/Tucuman", - "America/Argentina/Ushuaia", - "America/Aruba", - "America/Asuncion", - "America/Atikokan", - "America/Atka", - "America/Bahia", - "America/Bahia_Banderas", - "America/Barbados", - "America/Belem", - "America/Belize", - "America/Blanc-Sablon", - "America/Boa_Vista", - "America/Bogota", - "America/Boise", - "America/Buenos_Aires", - "America/Cambridge_Bay", - "America/Campo_Grande", - "America/Cancun", - "America/Caracas", - "America/Catamarca", - "America/Cayenne", - "America/Cayman", - "America/Chicago", - "America/Chihuahua", - "America/Ciudad_Juarez", - "America/Coral_Harbor", - "America/Cordoba", - "America/Costa_Rica", - "America/Creston", - "America/Cuiaba", - "America/Curacao", - "America/Danmarkshavn", - "America/Dawson", - "America/Dawson_Creek", - "America/Denver", - "America/Detroit", - "America/Dominica", - "America/Edmonton", - "America/Eirunepe", - "America/El_Salvador", - "America/Ensenada", - "America/Fortaleza", - "America/Fort_Nelson", - "America/Fort_Wayne", - "America/Glace_Bay", - "America/Godthab", - "America/Goose_Bay", - "America/Grand_Turk", - "America/Grenada", - "America/Guadeloupe", - "America/Guatemala", - "America/Guayaquil", - "America/Guyana", - "America/Halifax", - "America/Havana", - "America/Hermosillo", - "America/Indiana/Indianapolis", - "America/Indiana/Knox", - "America/Indiana/Marengo", - 
"America/Indiana/Petersburg", - "America/Indianapolis", - "America/Indiana/Tell_City", - "America/Indiana/Vevay", - "America/Indiana/Vincennes", - "America/Indiana/Winamac", - "America/Inuvik", - "America/Iqaluit", - "America/Jamaica", - "America/Jujuy", - "America/Juneau", - "America/Kentucky/Louisville", - "America/Kentucky/Monticello", - "America/Knox_IN", - "America/Kralendijk", - "America/La_Paz", - "America/Lima", - "America/Los_Angeles", - "America/Louisville", - "America/Lower_Princes", - "America/Maceio", - "America/Managua", - "America/Manaus", - "America/Marigot", - "America/Martinique", - "America/Matamoros", - "America/Mazatlan", - "America/Mendoza", - "America/Menominee", - "America/Merida", - "America/Metlakatla", - "America/Mexico_City", - "America/Miquelon", - "America/Moncton", - "America/Monterrey", - "America/Montevideo", - "America/Montreal", - "America/Montserrat", - "America/Nassau", - "America/New_York", - "America/Nipigon", - "America/Nome", - "America/Noronha", - "America/North_Dakota/Beulah", - "America/North_Dakota/Center", - "America/North_Dakota/New_Salem", - "America/Nuuk", - "America/Ojinaga", - "America/Panama", - "America/Pangnirtung", - "America/Paramaribo", - "America/Phoenix", - "America/Port-au-Prince", - "America/Porto_Acre", - "America/Port_of_Spain", - "America/Porto_Velho", - "America/Puerto_Rico", - "America/Punta_Arenas", - "America/Rainy_River", - "America/Rankin_Inlet", - "America/Recife", - "America/Regina", - "America/Resolute", - "America/Rio_Branco", - "America/Rosario", - "America/Santa_Isabel", - "America/Santarem", - "America/Santiago", - "America/Santo_Domingo", - "America/Sao_Paulo", - "America/Scoresbysund", - "America/Shiprock", - "America/Sitka", - "America/St_Barthelemy", - "America/St_Johns", - "America/St_Kitts", - "America/St_Lucia", - "America/St_Thomas", - "America/St_Vincent", - "America/Swift_Current", - "America/Tegucigalpa", - "America/Thule", - "America/Thunder_Bay", - "America/Tijuana", - 
"America/Toronto", - "America/Tortola", - "America/Vancouver", - "America/Virgin", - "America/Whitehorse", - "America/Winnipeg", - "America/Yakutat", - "America/Yellowknife", - "Antarctica/Casey", - "Antarctica/Davis", - "Antarctica/DumontDUrville", - "Antarctica/Macquarie", - "Antarctica/Mawson", - "Antarctica/McMurdo", - "Antarctica/Palmer", - "Antarctica/Rothera", - "Antarctica/South_Pole", - "Antarctica/Syowa", - "Antarctica/Troll", - "Antarctica/Vostok", - "Arctic/Longyearbyen", - "Asia/Aden", - "Asia/Almaty", - "Asia/Amman", - "Asia/Anadyr", - "Asia/Aqtau", - "Asia/Aqtobe", - "Asia/Ashgabat", - "Asia/Ashkhabad", - "Asia/Atyrau", - "Asia/Baghdad", - "Asia/Bahrain", - "Asia/Baku", - "Asia/Bangkok", - "Asia/Barnaul", - "Asia/Beirut", - "Asia/Bishkek", - "Asia/Brunei", - "Asia/Calcutta", - "Asia/Chita", - "Asia/Choibalsan", - "Asia/Chongqing", - "Asia/Chungking", - "Asia/Colombo", - "Asia/Dacca", - "Asia/Damascus", - "Asia/Dhaka", - "Asia/Dili", - "Asia/Dubai", - "Asia/Dushanbe", - "Asia/Famagusta", - "Asia/Gaza", - "Asia/Harbin", - "Asia/Hebron", - "Asia/Ho_Chi_Minh", - "Asia/Hong_Kong", - "Asia/Hovd", - "Asia/Irkutsk", - "Asia/Istanbul", - "Asia/Jakarta", - "Asia/Jayapura", - "Asia/Jerusalem", - "Asia/Kabul", - "Asia/Kamchatka", - "Asia/Karachi", - "Asia/Kashgar", - "Asia/Kathmandu", - "Asia/Katmandu", - "Asia/Khandyga", - "Asia/Kolkata", - "Asia/Krasnoyarsk", - "Asia/Kuala_Lumpur", - "Asia/Kuching", - "Asia/Kuwait", - "Asia/Macao", - "Asia/Macau", - "Asia/Magadan", - "Asia/Makassar", - "Asia/Manila", - "Asia/Muscat", - "Asia/Nicosia", - "Asia/Novokuznetsk", - "Asia/Novosibirsk", - "Asia/Omsk", - "Asia/Oral", - "Asia/Phnom_Penh", - "Asia/Pontianak", - "Asia/Pyongyang", - "Asia/Qatar", - "Asia/Qostanay", - "Asia/Qyzylorda", - "Asia/Rangoon", - "Asia/Riyadh", - "Asia/Saigon", - "Asia/Sakhalin", - "Asia/Samarkand", - "Asia/Seoul", - "Asia/Shanghai", - "Asia/Singapore", - "Asia/Srednekolymsk", - "Asia/Taipei", - "Asia/Tashkent", - "Asia/Tbilisi", - "Asia/Tehran", - 
"Asia/Tel_Aviv", - "Asia/Thimbu", - "Asia/Thimphu", - "Asia/Tokyo", - "Asia/Tomsk", - "Asia/Ujung_Pandang", - "Asia/Ulaanbaatar", - "Asia/Ulan_Bator", - "Asia/Urumqi", - "Asia/Ust-Nera", - "Asia/Vientiane", - "Asia/Vladivostok", - "Asia/Yakutsk", - "Asia/Yangon", - "Asia/Yekaterinburg", - "Asia/Yerevan", - "Atlantic/Azores", - "Atlantic/Bermuda", - "Atlantic/Canary", - "Atlantic/Cape_Verde", - "Atlantic/Faeroe", - "Atlantic/Faroe", - "Atlantic/Jan_Mayen", - "Atlantic/Madeira", - "Atlantic/Reykjavik", - "Atlantic/South_Georgia", - "Atlantic/Stanley", - "Atlantic/St_Helena", - "Australia/ACT", - "Australia/Adelaide", - "Australia/Brisbane", - "Australia/Broken_Hill", - "Australia/Canberra", - "Australia/Currie", - "Australia/Darwin", - "Australia/Eucla", - "Australia/Hobart", - "Australia/LHI", - "Australia/Lindeman", - "Australia/Lord_Howe", - "Australia/Melbourne", - "Australia/North", - "Australia/NSW", - "Australia/Perth", - "Australia/Queensland", - "Australia/South", - "Australia/Sydney", - "Australia/Tasmania", - "Australia/Victoria", - "Australia/West", - "Australia/Yancowinna", - "Brazil/Acre", - "Brazil/DeNoronha", - "Brazil/East", - "Brazil/West", - "Canada/Atlantic", - "Canada/Central", - "Canada/Eastern", - "Canada/Mountain", - "Canada/Newfoundland", - "Canada/Pacific", - "Canada/Saskatchewan", - "Canada/Yukon", - "CET", - "Chile/Continental", - "Chile/EasterIsland", - "CST6CDT", - "Cuba", - "EET", - "Egypt", - "Eire", - "EST", - "EST5EDT", - "Etc/GMT", - "Etc/GMT+0", - "Etc/GMT-0", - "Etc/GMT0", - "Etc/GMT+1", - "Etc/GMT-1", - "Etc/GMT+10", - "Etc/GMT-10", - "Etc/GMT+11", - "Etc/GMT-11", - "Etc/GMT+12", - "Etc/GMT-12", - "Etc/GMT-13", - "Etc/GMT-14", - "Etc/GMT+2", - "Etc/GMT-2", - "Etc/GMT+3", - "Etc/GMT-3", - "Etc/GMT+4", - "Etc/GMT-4", - "Etc/GMT+5", - "Etc/GMT-5", - "Etc/GMT+6", - "Etc/GMT-6", - "Etc/GMT+7", - "Etc/GMT-7", - "Etc/GMT+8", - "Etc/GMT-8", - "Etc/GMT+9", - "Etc/GMT-9", - "Etc/Greenwich", - "Etc/UCT", - "Etc/Universal", - "Etc/UTC", - 
"Etc/Zulu", - "Europe/Amsterdam", - "Europe/Andorra", - "Europe/Astrakhan", - "Europe/Athens", - "Europe/Belfast", - "Europe/Belgrade", - "Europe/Berlin", - "Europe/Bratislava", - "Europe/Brussels", - "Europe/Bucharest", - "Europe/Budapest", - "Europe/Busingen", - "Europe/Chisinau", - "Europe/Copenhagen", - "Europe/Dublin", - "Europe/Gibraltar", - "Europe/Guernsey", - "Europe/Helsinki", - "Europe/Isle_of_Man", - "Europe/Istanbul", - "Europe/Jersey", - "Europe/Kaliningrad", - "Europe/Kiev", - "Europe/Kirov", - "Europe/Lisbon", - "Europe/Ljubljana", - "Europe/London", - "Europe/Luxembourg", - "Europe/Madrid", - "Europe/Malta", - "Europe/Mariehamn", - "Europe/Minsk", - "Europe/Monaco", - "Europe/Moscow", - "Europe/Nicosia", - "Europe/Oslo", - "Europe/Paris", - "Europe/Podgorica", - "Europe/Prague", - "Europe/Riga", - "Europe/Rome", - "Europe/Samara", - "Europe/San_Marino", - "Europe/Sarajevo", - "Europe/Saratov", - "Europe/Simferopol", - "Europe/Skopje", - "Europe/Sofia", - "Europe/Stockholm", - "Europe/Tallinn", - "Europe/Tirane", - "Europe/Tiraspol", - "Europe/Ulyanovsk", - "Europe/Uzhgorod", - "Europe/Vaduz", - "Europe/Vatican", - "Europe/Vienna", - "Europe/Vilnius", - "Europe/Volgograd", - "Europe/Warsaw", - "Europe/Zagreb", - "Europe/Zaporozhye", - "Europe/Zurich", - "Factory", - "GB", - "GB-Eire", - "GMT", - "GMT+0", - "GMT-0", - "GMT0", - "Greenwich", - "Hongkong", - "HST", - "Iceland", - "Indian/Antananarivo", - "Indian/Chagos", - "Indian/Christmas", - "Indian/Cocos", - "Indian/Comoro", - "Indian/Kerguelen", - "Indian/Mahe", - "Indian/Maldives", - "Indian/Mauritius", - "Indian/Mayotte", - "Indian/Reunion", - "Iran", - "Israel", - "Jamaica", - "Japan", - "Kwajalein", - "Libya", - "localtime", - "MET", - "Mexico/BajaNorte", - "Mexico/BajaSur", - "Mexico/General", - "MST", - "MST7MDT", - "Navajo", - "NZ", - "NZ-CHAT", - "Pacific/Apia", - "Pacific/Auckland", - "Pacific/Bougainville", - "Pacific/Chatham", - "Pacific/Chuuk", - "Pacific/Easter", - "Pacific/Efate", - 
"Pacific/Enderbury", - "Pacific/Fakaofo", - "Pacific/Fiji", - "Pacific/Funafuti", - "Pacific/Galapagos", - "Pacific/Gambier", - "Pacific/Guadalcanal", - "Pacific/Guam", - "Pacific/Honolulu", - "Pacific/Johnston", - "Pacific/Kiritimati", - "Pacific/Kosrae", - "Pacific/Kwajalein", - "Pacific/Majuro", - "Pacific/Marquesas", - "Pacific/Midway", - "Pacific/Nauru", - "Pacific/Niue", - "Pacific/Norfolk", - "Pacific/Noumea", - "Pacific/Pago_Pago", - "Pacific/Palau", - "Pacific/Pitcairn", - "Pacific/Pohnpei", - "Pacific/Ponape", - "Pacific/Port_Moresby", - "Pacific/Rarotonga", - "Pacific/Saipan", - "Pacific/Samoa", - "Pacific/Tahiti", - "Pacific/Tarawa", - "Pacific/Tongatapu", - "Pacific/Truk", - "Pacific/Wake", - "Pacific/Wallis", - "Pacific/Yap", - "Poland", - "Portugal", - "posix/Africa/Abidjan", - "posix/Africa/Accra", - "posix/Africa/Addis_Ababa", - "posix/Africa/Algiers", - "posix/Africa/Asmara", - "posix/Africa/Asmera", - "posix/Africa/Bamako", - "posix/Africa/Bangui", - "posix/Africa/Banjul", - "posix/Africa/Bissau", - "posix/Africa/Blantyre", - "posix/Africa/Brazzaville", - "posix/Africa/Bujumbura", - "posix/Africa/Cairo", - "posix/Africa/Casablanca", - "posix/Africa/Ceuta", - "posix/Africa/Conakry", - "posix/Africa/Dakar", - "posix/Africa/Dar_es_Salaam", - "posix/Africa/Djibouti", - "posix/Africa/Douala", - "posix/Africa/El_Aaiun", - "posix/Africa/Freetown", - "posix/Africa/Gaborone", - "posix/Africa/Harare", - "posix/Africa/Johannesburg", - "posix/Africa/Juba", - "posix/Africa/Kampala", - "posix/Africa/Khartoum", - "posix/Africa/Kigali", - "posix/Africa/Kinshasa", - "posix/Africa/Lagos", - "posix/Africa/Libreville", - "posix/Africa/Lome", - "posix/Africa/Luanda", - "posix/Africa/Lubumbashi", - "posix/Africa/Lusaka", - "posix/Africa/Malabo", - "posix/Africa/Maputo", - "posix/Africa/Maseru", - "posix/Africa/Mbabane", - "posix/Africa/Mogadishu", - "posix/Africa/Monrovia", - "posix/Africa/Nairobi", - "posix/Africa/Ndjamena", - "posix/Africa/Niamey", - 
"posix/Africa/Nouakchott", - "posix/Africa/Ouagadougou", - "posix/Africa/Porto-Novo", - "posix/Africa/Sao_Tome", - "posix/Africa/Timbuktu", - "posix/Africa/Tripoli", - "posix/Africa/Tunis", - "posix/Africa/Windhoek", - "posix/America/Adak", - "posix/America/Anchorage", - "posix/America/Anguilla", - "posix/America/Antigua", - "posix/America/Araguaina", - "posix/America/Argentina/Buenos_Aires", - "posix/America/Argentina/Catamarca", - "posix/America/Argentina/ComodRivadavia", - "posix/America/Argentina/Cordoba", - "posix/America/Argentina/Jujuy", - "posix/America/Argentina/La_Rioja", - "posix/America/Argentina/Mendoza", - "posix/America/Argentina/Rio_Gallegos", - "posix/America/Argentina/Salta", - "posix/America/Argentina/San_Juan", - "posix/America/Argentina/San_Luis", - "posix/America/Argentina/Tucuman", - "posix/America/Argentina/Ushuaia", - "posix/America/Aruba", - "posix/America/Asuncion", - "posix/America/Atikokan", - "posix/America/Atka", - "posix/America/Bahia", - "posix/America/Bahia_Banderas", - "posix/America/Barbados", - "posix/America/Belem", - "posix/America/Belize", - "posix/America/Blanc-Sablon", - "posix/America/Boa_Vista", - "posix/America/Bogota", - "posix/America/Boise", - "posix/America/Buenos_Aires", - "posix/America/Cambridge_Bay", - "posix/America/Campo_Grande", - "posix/America/Cancun", - "posix/America/Caracas", - "posix/America/Catamarca", - "posix/America/Cayenne", - "posix/America/Cayman", - "posix/America/Chicago", - "posix/America/Chihuahua", - "posix/America/Ciudad_Juarez", - "posix/America/Coral_Harbor", - "posix/America/Cordoba", - "posix/America/Costa_Rica", - "posix/America/Creston", - "posix/America/Cuiaba", - "posix/America/Curacao", - "posix/America/Danmarkshavn", - "posix/America/Dawson", - "posix/America/Dawson_Creek", - "posix/America/Denver", - "posix/America/Detroit", - "posix/America/Dominica", - "posix/America/Edmonton", - "posix/America/Eirunepe", - "posix/America/El_Salvador", - "posix/America/Ensenada", - 
"posix/America/Fortaleza", - "posix/America/Fort_Nelson", - "posix/America/Fort_Wayne", - "posix/America/Glace_Bay", - "posix/America/Godthab", - "posix/America/Goose_Bay", - "posix/America/Grand_Turk", - "posix/America/Grenada", - "posix/America/Guadeloupe", - "posix/America/Guatemala", - "posix/America/Guayaquil", - "posix/America/Guyana", - "posix/America/Halifax", - "posix/America/Havana", - "posix/America/Hermosillo", - "posix/America/Indiana/Indianapolis", - "posix/America/Indiana/Knox", - "posix/America/Indiana/Marengo", - "posix/America/Indiana/Petersburg", - "posix/America/Indianapolis", - "posix/America/Indiana/Tell_City", - "posix/America/Indiana/Vevay", - "posix/America/Indiana/Vincennes", - "posix/America/Indiana/Winamac", - "posix/America/Inuvik", - "posix/America/Iqaluit", - "posix/America/Jamaica", - "posix/America/Jujuy", - "posix/America/Juneau", - "posix/America/Kentucky/Louisville", - "posix/America/Kentucky/Monticello", - "posix/America/Knox_IN", - "posix/America/Kralendijk", - "posix/America/La_Paz", - "posix/America/Lima", - "posix/America/Los_Angeles", - "posix/America/Louisville", - "posix/America/Lower_Princes", - "posix/America/Maceio", - "posix/America/Managua", - "posix/America/Manaus", - "posix/America/Marigot", - "posix/America/Martinique", - "posix/America/Matamoros", - "posix/America/Mazatlan", - "posix/America/Mendoza", - "posix/America/Menominee", - "posix/America/Merida", - "posix/America/Metlakatla", - "posix/America/Mexico_City", - "posix/America/Miquelon", - "posix/America/Moncton", - "posix/America/Monterrey", - "posix/America/Montevideo", - "posix/America/Montreal", - "posix/America/Montserrat", - "posix/America/Nassau", - "posix/America/New_York", - "posix/America/Nipigon", - "posix/America/Nome", - "posix/America/Noronha", - "posix/America/North_Dakota/Beulah", - "posix/America/North_Dakota/Center", - "posix/America/North_Dakota/New_Salem", - "posix/America/Nuuk", - "posix/America/Ojinaga", - "posix/America/Panama", - 
"posix/America/Pangnirtung", - "posix/America/Paramaribo", - "posix/America/Phoenix", - "posix/America/Port-au-Prince", - "posix/America/Porto_Acre", - "posix/America/Port_of_Spain", - "posix/America/Porto_Velho", - "posix/America/Puerto_Rico", - "posix/America/Punta_Arenas", - "posix/America/Rainy_River", - "posix/America/Rankin_Inlet", - "posix/America/Recife", - "posix/America/Regina", - "posix/America/Resolute", - "posix/America/Rio_Branco", - "posix/America/Rosario", - "posix/America/Santa_Isabel", - "posix/America/Santarem", - "posix/America/Santiago", - "posix/America/Santo_Domingo", - "posix/America/Sao_Paulo", - "posix/America/Scoresbysund", - "posix/America/Shiprock", - "posix/America/Sitka", - "posix/America/St_Barthelemy", - "posix/America/St_Johns", - "posix/America/St_Kitts", - "posix/America/St_Lucia", - "posix/America/St_Thomas", - "posix/America/St_Vincent", - "posix/America/Swift_Current", - "posix/America/Tegucigalpa", - "posix/America/Thule", - "posix/America/Thunder_Bay", - "posix/America/Tijuana", - "posix/America/Toronto", - "posix/America/Tortola", - "posix/America/Vancouver", - "posix/America/Virgin", - "posix/America/Whitehorse", - "posix/America/Winnipeg", - "posix/America/Yakutat", - "posix/America/Yellowknife", - "posix/Antarctica/Casey", - "posix/Antarctica/Davis", - "posix/Antarctica/DumontDUrville", - "posix/Antarctica/Macquarie", - "posix/Antarctica/Mawson", - "posix/Antarctica/McMurdo", - "posix/Antarctica/Palmer", - "posix/Antarctica/Rothera", - "posix/Antarctica/South_Pole", - "posix/Antarctica/Syowa", - "posix/Antarctica/Troll", - "posix/Antarctica/Vostok", - "posix/Arctic/Longyearbyen", - "posix/Asia/Aden", - "posix/Asia/Almaty", - "posix/Asia/Amman", - "posix/Asia/Anadyr", - "posix/Asia/Aqtau", - "posix/Asia/Aqtobe", - "posix/Asia/Ashgabat", - "posix/Asia/Ashkhabad", - "posix/Asia/Atyrau", - "posix/Asia/Baghdad", - "posix/Asia/Bahrain", - "posix/Asia/Baku", - "posix/Asia/Bangkok", - "posix/Asia/Barnaul", - "posix/Asia/Beirut", 
- "posix/Asia/Bishkek", - "posix/Asia/Brunei", - "posix/Asia/Calcutta", - "posix/Asia/Chita", - "posix/Asia/Choibalsan", - "posix/Asia/Chongqing", - "posix/Asia/Chungking", - "posix/Asia/Colombo", - "posix/Asia/Dacca", - "posix/Asia/Damascus", - "posix/Asia/Dhaka", - "posix/Asia/Dili", - "posix/Asia/Dubai", - "posix/Asia/Dushanbe", - "posix/Asia/Famagusta", - "posix/Asia/Gaza", - "posix/Asia/Harbin", - "posix/Asia/Hebron", - "posix/Asia/Ho_Chi_Minh", - "posix/Asia/Hong_Kong", - "posix/Asia/Hovd", - "posix/Asia/Irkutsk", - "posix/Asia/Istanbul", - "posix/Asia/Jakarta", - "posix/Asia/Jayapura", - "posix/Asia/Jerusalem", - "posix/Asia/Kabul", - "posix/Asia/Kamchatka", - "posix/Asia/Karachi", - "posix/Asia/Kashgar", - "posix/Asia/Kathmandu", - "posix/Asia/Katmandu", - "posix/Asia/Khandyga", - "posix/Asia/Kolkata", - "posix/Asia/Krasnoyarsk", - "posix/Asia/Kuala_Lumpur", - "posix/Asia/Kuching", - "posix/Asia/Kuwait", - "posix/Asia/Macao", - "posix/Asia/Macau", - "posix/Asia/Magadan", - "posix/Asia/Makassar", - "posix/Asia/Manila", - "posix/Asia/Muscat", - "posix/Asia/Nicosia", - "posix/Asia/Novokuznetsk", - "posix/Asia/Novosibirsk", - "posix/Asia/Omsk", - "posix/Asia/Oral", - "posix/Asia/Phnom_Penh", - "posix/Asia/Pontianak", - "posix/Asia/Pyongyang", - "posix/Asia/Qatar", - "posix/Asia/Qostanay", - "posix/Asia/Qyzylorda", - "posix/Asia/Rangoon", - "posix/Asia/Riyadh", - "posix/Asia/Saigon", - "posix/Asia/Sakhalin", - "posix/Asia/Samarkand", - "posix/Asia/Seoul", - "posix/Asia/Shanghai", - "posix/Asia/Singapore", - "posix/Asia/Srednekolymsk", - "posix/Asia/Taipei", - "posix/Asia/Tashkent", - "posix/Asia/Tbilisi", - "posix/Asia/Tehran", - "posix/Asia/Tel_Aviv", - "posix/Asia/Thimbu", - "posix/Asia/Thimphu", - "posix/Asia/Tokyo", - "posix/Asia/Tomsk", - "posix/Asia/Ujung_Pandang", - "posix/Asia/Ulaanbaatar", - "posix/Asia/Ulan_Bator", - "posix/Asia/Urumqi", - "posix/Asia/Ust-Nera", - "posix/Asia/Vientiane", - "posix/Asia/Vladivostok", - "posix/Asia/Yakutsk", - 
"posix/Asia/Yangon", - "posix/Asia/Yekaterinburg", - "posix/Asia/Yerevan", - "posix/Atlantic/Azores", - "posix/Atlantic/Bermuda", - "posix/Atlantic/Canary", - "posix/Atlantic/Cape_Verde", - "posix/Atlantic/Faeroe", - "posix/Atlantic/Faroe", - "posix/Atlantic/Jan_Mayen", - "posix/Atlantic/Madeira", - "posix/Atlantic/Reykjavik", - "posix/Atlantic/South_Georgia", - "posix/Atlantic/Stanley", - "posix/Atlantic/St_Helena", - "posix/Australia/ACT", - "posix/Australia/Adelaide", - "posix/Australia/Brisbane", - "posix/Australia/Broken_Hill", - "posix/Australia/Canberra", - "posix/Australia/Currie", - "posix/Australia/Darwin", - "posix/Australia/Eucla", - "posix/Australia/Hobart", - "posix/Australia/LHI", - "posix/Australia/Lindeman", - "posix/Australia/Lord_Howe", - "posix/Australia/Melbourne", - "posix/Australia/North", - "posix/Australia/NSW", - "posix/Australia/Perth", - "posix/Australia/Queensland", - "posix/Australia/South", - "posix/Australia/Sydney", - "posix/Australia/Tasmania", - "posix/Australia/Victoria", - "posix/Australia/West", - "posix/Australia/Yancowinna", - "posix/Brazil/Acre", - "posix/Brazil/DeNoronha", - "posix/Brazil/East", - "posix/Brazil/West", - "posix/Canada/Atlantic", - "posix/Canada/Central", - "posix/Canada/Eastern", - "posix/Canada/Mountain", - "posix/Canada/Newfoundland", - "posix/Canada/Pacific", - "posix/Canada/Saskatchewan", - "posix/Canada/Yukon", - "posix/CET", - "posix/Chile/Continental", - "posix/Chile/EasterIsland", - "posix/CST6CDT", - "posix/Cuba", - "posix/EET", - "posix/Egypt", - "posix/Eire", - "posix/EST", - "posix/EST5EDT", - "posix/Etc/GMT", - "posix/Etc/GMT+0", - "posix/Etc/GMT-0", - "posix/Etc/GMT0", - "posix/Etc/GMT+1", - "posix/Etc/GMT-1", - "posix/Etc/GMT+10", - "posix/Etc/GMT-10", - "posix/Etc/GMT+11", - "posix/Etc/GMT-11", - "posix/Etc/GMT+12", - "posix/Etc/GMT-12", - "posix/Etc/GMT-13", - "posix/Etc/GMT-14", - "posix/Etc/GMT+2", - "posix/Etc/GMT-2", - "posix/Etc/GMT+3", - "posix/Etc/GMT-3", - "posix/Etc/GMT+4", - 
"posix/Etc/GMT-4", - "posix/Etc/GMT+5", - "posix/Etc/GMT-5", - "posix/Etc/GMT+6", - "posix/Etc/GMT-6", - "posix/Etc/GMT+7", - "posix/Etc/GMT-7", - "posix/Etc/GMT+8", - "posix/Etc/GMT-8", - "posix/Etc/GMT+9", - "posix/Etc/GMT-9", - "posix/Etc/Greenwich", - "posix/Etc/UCT", - "posix/Etc/Universal", - "posix/Etc/UTC", - "posix/Etc/Zulu", - "posix/Europe/Amsterdam", -} diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 7a4be98b95045..8790bd27df693 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -157,7 +157,54 @@ CREATE TYPE api_key_scope AS ENUM ( 'coder:workspaces.access', 'coder:templates.build', 'coder:templates.author', - 'coder:apikeys.manage_self' + 'coder:apikeys.manage_self', + 'aibridge_interception:*', + 'api_key:*', + 'assign_org_role:*', + 'assign_role:*', + 'audit_log:*', + 'connection_log:*', + 'crypto_key:*', + 'debug_info:*', + 'deployment_config:*', + 'deployment_stats:*', + 'file:*', + 'group:*', + 'group_member:*', + 'idpsync_settings:*', + 'inbox_notification:*', + 'license:*', + 'notification_message:*', + 'notification_preference:*', + 'notification_template:*', + 'oauth2_app:*', + 'oauth2_app_code_token:*', + 'oauth2_app_secret:*', + 'organization:*', + 'organization_member:*', + 'prebuilt_workspace:*', + 'provisioner_daemon:*', + 'provisioner_jobs:*', + 'replicas:*', + 'system:*', + 'tailnet_coordinator:*', + 'template:*', + 'usage_event:*', + 'user:*', + 'user_secret:*', + 'webpush_subscription:*', + 'workspace:*', + 'workspace_agent_devcontainers:*', + 'workspace_agent_resource_monitor:*', + 'workspace_dormant:*', + 'workspace_proxy:*', + 'task:create', + 'task:read', + 'task:update', + 'task:delete', + 'task:*', + 'workspace:share', + 'workspace_dormant:share' ); CREATE TYPE app_sharing_level AS ENUM ( @@ -415,7 +462,8 @@ CREATE TYPE resource_type AS ENUM ( 'idp_sync_settings_role', 'workspace_agent', 'workspace_app', - 'prebuilds_settings' + 'prebuilds_settings', + 'task' ); CREATE TYPE 
startup_script_behavior AS ENUM ( @@ -432,6 +480,15 @@ CREATE TYPE tailnet_status AS ENUM ( 'lost' ); +CREATE TYPE task_status AS ENUM ( + 'pending', + 'initializing', + 'active', + 'paused', + 'unknown', + 'error' +); + CREATE TYPE user_status AS ENUM ( 'active', 'suspended', @@ -998,7 +1055,8 @@ CREATE TABLE aibridge_interceptions ( provider text NOT NULL, model text NOT NULL, started_at timestamp with time zone NOT NULL, - metadata jsonb + metadata jsonb, + ended_at timestamp with time zone ); COMMENT ON TABLE aibridge_interceptions IS 'Audit log of requests intercepted by AI Bridge'; @@ -1068,7 +1126,8 @@ CREATE TABLE api_keys ( ip_address inet DEFAULT '0.0.0.0'::inet NOT NULL, token_name text DEFAULT ''::text NOT NULL, scopes api_key_scope[] NOT NULL, - allow_list text[] NOT NULL + allow_list text[] NOT NULL, + CONSTRAINT api_keys_allow_list_not_empty CHECK ((array_length(allow_list, 1) > 0)) ); COMMENT ON COLUMN api_keys.hashed_secret IS 'hashed_secret contains a SHA256 hash of the key secret. 
This is considered a secret and MUST NOT be returned from the API as it is used for API key encryption in app proxying code.'; @@ -1479,7 +1538,7 @@ CREATE TABLE oauth2_provider_apps ( jwks jsonb, software_id text, software_version text, - registration_access_token text, + registration_access_token bytea, registration_client_uri text ); @@ -1751,9 +1810,9 @@ CREATE TABLE tailnet_tunnels ( CREATE TABLE task_workspace_apps ( task_id uuid NOT NULL, - workspace_build_id uuid NOT NULL, - workspace_agent_id uuid NOT NULL, - workspace_app_id uuid NOT NULL + workspace_agent_id uuid, + workspace_app_id uuid, + workspace_build_number integer NOT NULL ); CREATE TABLE tasks ( @@ -1769,6 +1828,200 @@ CREATE TABLE tasks ( deleted_at timestamp with time zone ); +CREATE VIEW visible_users AS + SELECT users.id, + users.username, + users.name, + users.avatar_url + FROM users; + +COMMENT ON VIEW visible_users IS 'Visible fields of users are allowed to be joined with other tables for including context of other resources.'; + +CREATE TABLE workspace_agents ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + name character varying(64) NOT NULL, + first_connected_at timestamp with time zone, + last_connected_at timestamp with time zone, + disconnected_at timestamp with time zone, + resource_id uuid NOT NULL, + auth_token uuid NOT NULL, + auth_instance_id character varying, + architecture character varying(64) NOT NULL, + environment_variables jsonb, + operating_system character varying(64) NOT NULL, + instance_metadata jsonb, + resource_metadata jsonb, + directory character varying(4096) DEFAULT ''::character varying NOT NULL, + version text DEFAULT ''::text NOT NULL, + last_connected_replica_id uuid, + connection_timeout_seconds integer DEFAULT 0 NOT NULL, + troubleshooting_url text DEFAULT ''::text NOT NULL, + motd_file text DEFAULT ''::text NOT NULL, + lifecycle_state workspace_agent_lifecycle_state DEFAULT 
'created'::workspace_agent_lifecycle_state NOT NULL, + expanded_directory character varying(4096) DEFAULT ''::character varying NOT NULL, + logs_length integer DEFAULT 0 NOT NULL, + logs_overflowed boolean DEFAULT false NOT NULL, + started_at timestamp with time zone, + ready_at timestamp with time zone, + subsystems workspace_agent_subsystem[] DEFAULT '{}'::workspace_agent_subsystem[], + display_apps display_app[] DEFAULT '{vscode,vscode_insiders,web_terminal,ssh_helper,port_forwarding_helper}'::display_app[], + api_version text DEFAULT ''::text NOT NULL, + display_order integer DEFAULT 0 NOT NULL, + parent_id uuid, + api_key_scope agent_key_scope_enum DEFAULT 'all'::agent_key_scope_enum NOT NULL, + deleted boolean DEFAULT false NOT NULL, + CONSTRAINT max_logs_length CHECK ((logs_length <= 1048576)), + CONSTRAINT subsystems_not_none CHECK ((NOT ('none'::workspace_agent_subsystem = ANY (subsystems)))) +); + +COMMENT ON COLUMN workspace_agents.version IS 'Version tracks the version of the currently running workspace agent. Workspace agents register their version upon start.'; + +COMMENT ON COLUMN workspace_agents.connection_timeout_seconds IS 'Connection timeout in seconds, 0 means disabled.'; + +COMMENT ON COLUMN workspace_agents.troubleshooting_url IS 'URL for troubleshooting the agent.'; + +COMMENT ON COLUMN workspace_agents.motd_file IS 'Path to file inside workspace containing the message of the day (MOTD) to show to the user when logging in via SSH.'; + +COMMENT ON COLUMN workspace_agents.lifecycle_state IS 'The current lifecycle state reported by the workspace agent.'; + +COMMENT ON COLUMN workspace_agents.expanded_directory IS 'The resolved path of a user-specified directory. e.g. 
~/coder -> /home/coder/coder'; + +COMMENT ON COLUMN workspace_agents.logs_length IS 'Total length of startup logs'; + +COMMENT ON COLUMN workspace_agents.logs_overflowed IS 'Whether the startup logs overflowed in length'; + +COMMENT ON COLUMN workspace_agents.started_at IS 'The time the agent entered the starting lifecycle state'; + +COMMENT ON COLUMN workspace_agents.ready_at IS 'The time the agent entered the ready or start_error lifecycle state'; + +COMMENT ON COLUMN workspace_agents.display_order IS 'Specifies the order in which to display agents in user interfaces.'; + +COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.'; + +COMMENT ON COLUMN workspace_agents.deleted IS 'Indicates whether or not the agent has been deleted. This is currently only applicable to sub agents.'; + +CREATE TABLE workspace_apps ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + agent_id uuid NOT NULL, + display_name character varying(64) NOT NULL, + icon character varying(256) NOT NULL, + command character varying(65534), + url character varying(65534), + healthcheck_url text DEFAULT ''::text NOT NULL, + healthcheck_interval integer DEFAULT 0 NOT NULL, + healthcheck_threshold integer DEFAULT 0 NOT NULL, + health workspace_app_health DEFAULT 'disabled'::workspace_app_health NOT NULL, + subdomain boolean DEFAULT false NOT NULL, + sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL, + slug text NOT NULL, + external boolean DEFAULT false NOT NULL, + display_order integer DEFAULT 0 NOT NULL, + hidden boolean DEFAULT false NOT NULL, + open_in workspace_app_open_in DEFAULT 'slim-window'::workspace_app_open_in NOT NULL, + display_group text, + tooltip character varying(2048) DEFAULT ''::character varying NOT NULL +); + +COMMENT ON COLUMN workspace_apps.display_order IS 'Specifies the order in which 
to display agent app in user interfaces.'; + +COMMENT ON COLUMN workspace_apps.hidden IS 'Determines if the app is not shown in user interfaces.'; + +COMMENT ON COLUMN workspace_apps.tooltip IS 'Markdown text that is displayed when hovering over workspace apps.'; + +CREATE TABLE workspace_builds ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + workspace_id uuid NOT NULL, + template_version_id uuid NOT NULL, + build_number integer NOT NULL, + transition workspace_transition NOT NULL, + initiator_id uuid NOT NULL, + provisioner_state bytea, + job_id uuid NOT NULL, + deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL, + reason build_reason DEFAULT 'initiator'::build_reason NOT NULL, + daily_cost integer DEFAULT 0 NOT NULL, + max_deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL, + template_version_preset_id uuid, + has_ai_task boolean, + ai_task_sidebar_app_id uuid, + has_external_agent boolean, + CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))), + CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone))) +); + +CREATE VIEW tasks_with_status AS + SELECT tasks.id, + tasks.organization_id, + tasks.owner_id, + tasks.name, + tasks.workspace_id, + tasks.template_version_id, + tasks.template_parameters, + tasks.prompt, + tasks.created_at, + tasks.deleted_at, + CASE + WHEN ((tasks.workspace_id IS NULL) OR (latest_build.job_status IS NULL)) THEN 'pending'::task_status + WHEN (latest_build.job_status = 'failed'::provisioner_job_status) THEN 
'error'::task_status + WHEN ((latest_build.transition = ANY (ARRAY['stop'::workspace_transition, 'delete'::workspace_transition])) AND (latest_build.job_status = 'succeeded'::provisioner_job_status)) THEN 'paused'::task_status + WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = 'pending'::provisioner_job_status)) THEN 'initializing'::task_status + WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = ANY (ARRAY['running'::provisioner_job_status, 'succeeded'::provisioner_job_status]))) THEN + CASE + WHEN agent_status."none" THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END AS status, + task_app.workspace_build_number, + task_app.workspace_agent_id, + task_app.workspace_app_id, + task_owner.owner_username, + task_owner.owner_name, + task_owner.owner_avatar_url + FROM (((((tasks + CROSS JOIN LATERAL ( SELECT vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM visible_users vu + WHERE (vu.id = tasks.owner_id)) task_owner) + LEFT JOIN LATERAL ( SELECT task_app_1.workspace_build_number, + task_app_1.workspace_agent_id, + task_app_1.workspace_app_id + FROM task_workspace_apps task_app_1 + WHERE (task_app_1.task_id = tasks.id) + ORDER BY task_app_1.workspace_build_number DESC + LIMIT 1) task_app ON (true)) + LEFT JOIN LATERAL ( SELECT workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM (workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON ((provisioner_job.id = workspace_build.job_id))) + WHERE 
((workspace_build.workspace_id = tasks.workspace_id) AND (workspace_build.build_number = task_app.workspace_build_number))) latest_build ON (true)) + CROSS JOIN LATERAL ( SELECT (count(*) = 0) AS "none", + bool_or((workspace_agent.lifecycle_state = ANY (ARRAY['created'::workspace_agent_lifecycle_state, 'starting'::workspace_agent_lifecycle_state]))) AS connecting, + bool_and((workspace_agent.lifecycle_state = 'ready'::workspace_agent_lifecycle_state)) AS connected + FROM workspace_agents workspace_agent + WHERE (workspace_agent.id = task_app.workspace_agent_id)) agent_status) + CROSS JOIN LATERAL ( SELECT bool_or((workspace_app.health = 'unhealthy'::workspace_app_health)) AS any_unhealthy, + bool_or((workspace_app.health = 'initializing'::workspace_app_health)) AS any_initializing, + bool_and((workspace_app.health = ANY (ARRAY['healthy'::workspace_app_health, 'disabled'::workspace_app_health]))) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE (workspace_app.id = task_app.workspace_app_id)) app_status) + WHERE (tasks.deleted_at IS NULL); + CREATE TABLE telemetry_items ( key text NOT NULL, value text NOT NULL, @@ -1776,6 +2029,18 @@ CREATE TABLE telemetry_items ( updated_at timestamp with time zone DEFAULT now() NOT NULL ); +CREATE TABLE telemetry_locks ( + event_type text NOT NULL, + period_ending_at timestamp with time zone NOT NULL, + CONSTRAINT telemetry_lock_event_type_constraint CHECK ((event_type = 'aibridge_interceptions_summary'::text)) +); + +COMMENT ON TABLE telemetry_locks IS 'Telemetry lock tracking table for deduplication of heartbeat events across replicas.'; + +COMMENT ON COLUMN telemetry_locks.event_type IS 'The type of event that was sent.'; + +COMMENT ON COLUMN telemetry_locks.period_ending_at IS 'The heartbeat period end timestamp.'; + CREATE TABLE template_usage_stats ( start_time timestamp with time zone NOT NULL, end_time timestamp with time zone NOT NULL, @@ -1962,15 +2227,6 @@ COMMENT ON COLUMN 
template_versions.external_auth_providers IS 'IDs of External COMMENT ON COLUMN template_versions.message IS 'Message describing the changes in this version of the template, similar to a Git commit message. Like a commit message, this should be a short, high-level description of the changes in this version of the template. This message is immutable and should not be updated after the fact.'; -CREATE VIEW visible_users AS - SELECT users.id, - users.username, - users.name, - users.avatar_url - FROM users; - -COMMENT ON VIEW visible_users IS 'Visible fields of users are allowed to be joined with other tables for including context of other resources.'; - CREATE VIEW template_version_with_user AS SELECT template_versions.id, template_versions.template_id, @@ -2332,71 +2588,6 @@ CREATE TABLE workspace_agent_volume_resource_monitors ( debounced_until timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL ); -CREATE TABLE workspace_agents ( - id uuid NOT NULL, - created_at timestamp with time zone NOT NULL, - updated_at timestamp with time zone NOT NULL, - name character varying(64) NOT NULL, - first_connected_at timestamp with time zone, - last_connected_at timestamp with time zone, - disconnected_at timestamp with time zone, - resource_id uuid NOT NULL, - auth_token uuid NOT NULL, - auth_instance_id character varying, - architecture character varying(64) NOT NULL, - environment_variables jsonb, - operating_system character varying(64) NOT NULL, - instance_metadata jsonb, - resource_metadata jsonb, - directory character varying(4096) DEFAULT ''::character varying NOT NULL, - version text DEFAULT ''::text NOT NULL, - last_connected_replica_id uuid, - connection_timeout_seconds integer DEFAULT 0 NOT NULL, - troubleshooting_url text DEFAULT ''::text NOT NULL, - motd_file text DEFAULT ''::text NOT NULL, - lifecycle_state workspace_agent_lifecycle_state DEFAULT 'created'::workspace_agent_lifecycle_state NOT NULL, - expanded_directory 
character varying(4096) DEFAULT ''::character varying NOT NULL, - logs_length integer DEFAULT 0 NOT NULL, - logs_overflowed boolean DEFAULT false NOT NULL, - started_at timestamp with time zone, - ready_at timestamp with time zone, - subsystems workspace_agent_subsystem[] DEFAULT '{}'::workspace_agent_subsystem[], - display_apps display_app[] DEFAULT '{vscode,vscode_insiders,web_terminal,ssh_helper,port_forwarding_helper}'::display_app[], - api_version text DEFAULT ''::text NOT NULL, - display_order integer DEFAULT 0 NOT NULL, - parent_id uuid, - api_key_scope agent_key_scope_enum DEFAULT 'all'::agent_key_scope_enum NOT NULL, - deleted boolean DEFAULT false NOT NULL, - CONSTRAINT max_logs_length CHECK ((logs_length <= 1048576)), - CONSTRAINT subsystems_not_none CHECK ((NOT ('none'::workspace_agent_subsystem = ANY (subsystems)))) -); - -COMMENT ON COLUMN workspace_agents.version IS 'Version tracks the version of the currently running workspace agent. Workspace agents register their version upon start.'; - -COMMENT ON COLUMN workspace_agents.connection_timeout_seconds IS 'Connection timeout in seconds, 0 means disabled.'; - -COMMENT ON COLUMN workspace_agents.troubleshooting_url IS 'URL for troubleshooting the agent.'; - -COMMENT ON COLUMN workspace_agents.motd_file IS 'Path to file inside workspace containing the message of the day (MOTD) to show to the user when logging in via SSH.'; - -COMMENT ON COLUMN workspace_agents.lifecycle_state IS 'The current lifecycle state reported by the workspace agent.'; - -COMMENT ON COLUMN workspace_agents.expanded_directory IS 'The resolved path of a user-specified directory. e.g. 
~/coder -> /home/coder/coder'; - -COMMENT ON COLUMN workspace_agents.logs_length IS 'Total length of startup logs'; - -COMMENT ON COLUMN workspace_agents.logs_overflowed IS 'Whether the startup logs overflowed in length'; - -COMMENT ON COLUMN workspace_agents.started_at IS 'The time the agent entered the starting lifecycle state'; - -COMMENT ON COLUMN workspace_agents.ready_at IS 'The time the agent entered the ready or start_error lifecycle state'; - -COMMENT ON COLUMN workspace_agents.display_order IS 'Specifies the order in which to display agents in user interfaces.'; - -COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.'; - -COMMENT ON COLUMN workspace_agents.deleted IS 'Indicates whether or not the agent has been deleted. This is currently only applicable to sub agents.'; - CREATE UNLOGGED TABLE workspace_app_audit_sessions ( agent_id uuid NOT NULL, app_id uuid NOT NULL, @@ -2485,35 +2676,6 @@ CREATE TABLE workspace_app_statuses ( uri text ); -CREATE TABLE workspace_apps ( - id uuid NOT NULL, - created_at timestamp with time zone NOT NULL, - agent_id uuid NOT NULL, - display_name character varying(64) NOT NULL, - icon character varying(256) NOT NULL, - command character varying(65534), - url character varying(65534), - healthcheck_url text DEFAULT ''::text NOT NULL, - healthcheck_interval integer DEFAULT 0 NOT NULL, - healthcheck_threshold integer DEFAULT 0 NOT NULL, - health workspace_app_health DEFAULT 'disabled'::workspace_app_health NOT NULL, - subdomain boolean DEFAULT false NOT NULL, - sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL, - slug text NOT NULL, - external boolean DEFAULT false NOT NULL, - display_order integer DEFAULT 0 NOT NULL, - hidden boolean DEFAULT false NOT NULL, - open_in workspace_app_open_in DEFAULT 'slim-window'::workspace_app_open_in NOT NULL, - 
display_group text, - tooltip character varying(2048) DEFAULT ''::character varying NOT NULL -); - -COMMENT ON COLUMN workspace_apps.display_order IS 'Specifies the order in which to display agent app in user interfaces.'; - -COMMENT ON COLUMN workspace_apps.hidden IS 'Determines if the app is not shown in user interfaces.'; - -COMMENT ON COLUMN workspace_apps.tooltip IS 'Markdown text that is displayed when hovering over workspace apps.'; - CREATE TABLE workspace_build_parameters ( workspace_build_id uuid NOT NULL, name text NOT NULL, @@ -2524,29 +2686,6 @@ COMMENT ON COLUMN workspace_build_parameters.name IS 'Parameter name'; COMMENT ON COLUMN workspace_build_parameters.value IS 'Parameter value'; -CREATE TABLE workspace_builds ( - id uuid NOT NULL, - created_at timestamp with time zone NOT NULL, - updated_at timestamp with time zone NOT NULL, - workspace_id uuid NOT NULL, - template_version_id uuid NOT NULL, - build_number integer NOT NULL, - transition workspace_transition NOT NULL, - initiator_id uuid NOT NULL, - provisioner_state bytea, - job_id uuid NOT NULL, - deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL, - reason build_reason DEFAULT 'initiator'::build_reason NOT NULL, - daily_cost integer DEFAULT 0 NOT NULL, - max_deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL, - template_version_preset_id uuid, - has_ai_task boolean, - ai_task_sidebar_app_id uuid, - has_external_agent boolean, - CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))), - CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone))) -); - 
CREATE VIEW workspace_build_with_user AS SELECT workspace_builds.id, workspace_builds.created_at, @@ -2783,11 +2922,13 @@ CREATE VIEW workspaces_expanded AS templates.name AS template_name, templates.display_name AS template_display_name, templates.icon AS template_icon, - templates.description AS template_description - FROM (((workspaces + templates.description AS template_description, + tasks.id AS task_id + FROM ((((workspaces JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) JOIN organizations ON ((workspaces.organization_id = organizations.id))) - JOIN templates ON ((workspaces.template_id = templates.id))); + JOIN templates ON ((workspaces.template_id = templates.id))) + LEFT JOIN tasks ON ((workspaces.id = tasks.workspace_id))); COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; @@ -2962,12 +3103,18 @@ ALTER TABLE ONLY tailnet_peers ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_pkey PRIMARY KEY (coordinator_id, src_id, dst_id); +ALTER TABLE ONLY task_workspace_apps + ADD CONSTRAINT task_workspace_apps_pkey PRIMARY KEY (task_id, workspace_build_number); + ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_pkey PRIMARY KEY (id); ALTER TABLE ONLY telemetry_items ADD CONSTRAINT telemetry_items_pkey PRIMARY KEY (key); +ALTER TABLE ONLY telemetry_locks + ADD CONSTRAINT telemetry_locks_pkey PRIMARY KEY (event_type, period_ending_at); + ALTER TABLE ONLY template_usage_stats ADD CONSTRAINT template_usage_stats_pkey PRIMARY KEY (start_time, template_id, user_id); @@ -3193,6 +3340,8 @@ CREATE INDEX idx_tailnet_tunnels_dst_id ON tailnet_tunnels USING hash (dst_id); CREATE INDEX idx_tailnet_tunnels_src_id ON tailnet_tunnels USING hash (src_id); +CREATE INDEX idx_telemetry_locks_period_ending_at ON telemetry_locks USING btree (period_ending_at); + CREATE UNIQUE INDEX idx_template_version_presets_default ON template_version_presets USING btree (template_version_id) 
WHERE (is_default = true); CREATE INDEX idx_template_versions_has_ai_task ON template_versions USING btree (has_ai_task); @@ -3227,6 +3376,16 @@ COMMENT ON INDEX provisioner_jobs_worker_id_organization_id_completed_at_idx IS CREATE UNIQUE INDEX provisioner_keys_organization_id_name_idx ON provisioner_keys USING btree (organization_id, lower((name)::text)); +CREATE INDEX tasks_organization_id_idx ON tasks USING btree (organization_id); + +CREATE INDEX tasks_owner_id_idx ON tasks USING btree (owner_id); + +CREATE UNIQUE INDEX tasks_owner_id_name_unique_idx ON tasks USING btree (owner_id, lower(name)) WHERE (deleted_at IS NULL); + +COMMENT ON INDEX tasks_owner_id_name_unique_idx IS 'Index to ensure uniqueness for task owner/name'; + +CREATE INDEX tasks_workspace_id_idx ON tasks USING btree (workspace_id); + CREATE INDEX template_usage_stats_start_time_idx ON template_usage_stats USING btree (start_time DESC); COMMENT ON INDEX template_usage_stats_start_time_idx IS 'Index for querying MAX(start_time).'; @@ -3381,6 +3540,9 @@ COMMENT ON TRIGGER workspace_agent_name_unique_trigger ON workspace_agents IS 'U the uniqueness requirement. 
A trigger allows us to enforce uniqueness going forward without requiring a migration to clean up historical data.'; +ALTER TABLE ONLY aibridge_interceptions + ADD CONSTRAINT aibridge_interceptions_initiator_id_fkey FOREIGN KEY (initiator_id) REFERENCES users(id); + ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; @@ -3507,9 +3669,6 @@ ALTER TABLE ONLY task_workspace_apps ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_app_id_fkey FOREIGN KEY (workspace_app_id) REFERENCES workspace_apps(id) ON DELETE CASCADE; -ALTER TABLE ONLY task_workspace_apps - ADD CONSTRAINT task_workspace_apps_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; - ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go index 8497ac2bcd4b4..6737275dd340e 100644 --- a/coderd/database/foreign_key_constraint.go +++ b/coderd/database/foreign_key_constraint.go @@ -6,6 +6,7 @@ type ForeignKeyConstraint string // ForeignKeyConstraint enums. 
const ( + ForeignKeyAibridgeInterceptionsInitiatorID ForeignKeyConstraint = "aibridge_interceptions_initiator_id_fkey" // ALTER TABLE ONLY aibridge_interceptions ADD CONSTRAINT aibridge_interceptions_initiator_id_fkey FOREIGN KEY (initiator_id) REFERENCES users(id); ForeignKeyAPIKeysUserIDUUID ForeignKeyConstraint = "api_keys_user_id_uuid_fkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; ForeignKeyConnectionLogsOrganizationID ForeignKeyConstraint = "connection_logs_organization_id_fkey" // ALTER TABLE ONLY connection_logs ADD CONSTRAINT connection_logs_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; ForeignKeyConnectionLogsWorkspaceID ForeignKeyConstraint = "connection_logs_workspace_id_fkey" // ALTER TABLE ONLY connection_logs ADD CONSTRAINT connection_logs_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; @@ -48,7 +49,6 @@ const ( ForeignKeyTaskWorkspaceAppsTaskID ForeignKeyConstraint = "task_workspace_apps_task_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_task_id_fkey FOREIGN KEY (task_id) REFERENCES tasks(id) ON DELETE CASCADE; ForeignKeyTaskWorkspaceAppsWorkspaceAgentID ForeignKeyConstraint = "task_workspace_apps_workspace_agent_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; ForeignKeyTaskWorkspaceAppsWorkspaceAppID ForeignKeyConstraint = "task_workspace_apps_workspace_app_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_app_id_fkey FOREIGN KEY (workspace_app_id) REFERENCES workspace_apps(id) ON DELETE CASCADE; - ForeignKeyTaskWorkspaceAppsWorkspaceBuildID ForeignKeyConstraint = "task_workspace_apps_workspace_build_id_fkey" // ALTER TABLE ONLY 
task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; ForeignKeyTasksOrganizationID ForeignKeyConstraint = "tasks_organization_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; ForeignKeyTasksOwnerID ForeignKeyConstraint = "tasks_owner_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE CASCADE; ForeignKeyTasksTemplateVersionID ForeignKeyConstraint = "tasks_template_version_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; diff --git a/coderd/database/gen/dump/main.go b/coderd/database/gen/dump/main.go index 1d84339eecce9..25bcbcd3960f4 100644 --- a/coderd/database/gen/dump/main.go +++ b/coderd/database/gen/dump/main.go @@ -35,6 +35,10 @@ func (*mockTB) Logf(format string, args ...any) { _, _ = fmt.Printf(format, args...) } +func (*mockTB) TempDir() string { + panic("not implemented") +} + func main() { t := &mockTB{} defer func() { diff --git a/coderd/database/migrations/000377_add_api_key_scope_wildcards.down.sql b/coderd/database/migrations/000377_add_api_key_scope_wildcards.down.sql new file mode 100644 index 0000000000000..a414b39a912ee --- /dev/null +++ b/coderd/database/migrations/000377_add_api_key_scope_wildcards.down.sql @@ -0,0 +1,2 @@ +-- No-op: enum values remain to avoid churn. Removing enum values requires +-- doing a create/cast/drop cycle which is intentionally omitted here. 
diff --git a/coderd/database/migrations/000377_add_api_key_scope_wildcards.up.sql b/coderd/database/migrations/000377_add_api_key_scope_wildcards.up.sql new file mode 100644 index 0000000000000..aed5a18a3e31d --- /dev/null +++ b/coderd/database/migrations/000377_add_api_key_scope_wildcards.up.sql @@ -0,0 +1,42 @@ +-- Add wildcard api_key_scope entries so every RBAC resource has a matching resource:* value. +-- Generated via: CGO_ENABLED=0 go run ./scripts/generate_api_key_scope_enum +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'aibridge_interception:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'api_key:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'assign_org_role:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'assign_role:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'audit_log:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'connection_log:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'crypto_key:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'debug_info:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'deployment_config:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'deployment_stats:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'file:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'group:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'group_member:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'idpsync_settings:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'inbox_notification:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'license:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_message:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_preference:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_template:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'oauth2_app:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'oauth2_app_code_token:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT 
EXISTS 'oauth2_app_secret:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'organization:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'organization_member:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'prebuilt_workspace:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'provisioner_daemon:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'provisioner_jobs:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'replicas:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'system:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'tailnet_coordinator:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'template:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'usage_event:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'user:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'user_secret:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'webpush_subscription:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_agent_devcontainers:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_agent_resource_monitor:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_dormant:*'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_proxy:*'; diff --git a/coderd/database/migrations/000378_add_tasks_rbac.down.sql b/coderd/database/migrations/000378_add_tasks_rbac.down.sql new file mode 100644 index 0000000000000..761a8d943f198 --- /dev/null +++ b/coderd/database/migrations/000378_add_tasks_rbac.down.sql @@ -0,0 +1,3 @@ +-- Revert Tasks RBAC. +-- No-op: enum values remain to avoid churn. Removing enum values requires +-- doing a create/cast/drop cycle which is intentionally omitted here. 
diff --git a/coderd/database/migrations/000378_add_tasks_rbac.up.sql b/coderd/database/migrations/000378_add_tasks_rbac.up.sql new file mode 100644 index 0000000000000..18d81ac4436c1 --- /dev/null +++ b/coderd/database/migrations/000378_add_tasks_rbac.up.sql @@ -0,0 +1,6 @@ +-- Tasks RBAC. +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:create'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:read'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:update'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:delete'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:*'; diff --git a/coderd/database/migrations/000379_create_tasks_with_status_view.down.sql b/coderd/database/migrations/000379_create_tasks_with_status_view.down.sql new file mode 100644 index 0000000000000..45754139a7940 --- /dev/null +++ b/coderd/database/migrations/000379_create_tasks_with_status_view.down.sql @@ -0,0 +1,33 @@ +DROP VIEW IF EXISTS tasks_with_status; +DROP TYPE IF EXISTS task_status; + +DROP INDEX IF EXISTS tasks_organization_id_idx; +DROP INDEX IF EXISTS tasks_owner_id_idx; +DROP INDEX IF EXISTS tasks_workspace_id_idx; + +ALTER TABLE task_workspace_apps + DROP CONSTRAINT IF EXISTS task_workspace_apps_pkey; + +-- Add back workspace_build_id column. +ALTER TABLE task_workspace_apps + ADD COLUMN workspace_build_id UUID; + +-- Try to populate workspace_build_id from workspace_builds. +UPDATE task_workspace_apps +SET workspace_build_id = workspace_builds.id +FROM workspace_builds +WHERE workspace_builds.build_number = task_workspace_apps.workspace_build_number + AND workspace_builds.workspace_id IN ( + SELECT workspace_id FROM tasks WHERE tasks.id = task_workspace_apps.task_id + ); + +-- Remove rows that couldn't be restored. +DELETE FROM task_workspace_apps +WHERE workspace_build_id IS NULL; + +-- Restore original schema. 
+ALTER TABLE task_workspace_apps + DROP COLUMN workspace_build_number, + ALTER COLUMN workspace_build_id SET NOT NULL, + ALTER COLUMN workspace_agent_id SET NOT NULL, + ALTER COLUMN workspace_app_id SET NOT NULL; diff --git a/coderd/database/migrations/000379_create_tasks_with_status_view.up.sql b/coderd/database/migrations/000379_create_tasks_with_status_view.up.sql new file mode 100644 index 0000000000000..7af0e71482b42 --- /dev/null +++ b/coderd/database/migrations/000379_create_tasks_with_status_view.up.sql @@ -0,0 +1,104 @@ +-- Replace workspace_build_id with workspace_build_number. +ALTER TABLE task_workspace_apps + ADD COLUMN workspace_build_number INTEGER; + +-- Try to populate workspace_build_number from workspace_builds. +UPDATE task_workspace_apps +SET workspace_build_number = workspace_builds.build_number +FROM workspace_builds +WHERE workspace_builds.id = task_workspace_apps.workspace_build_id; + +-- Remove rows that couldn't be migrated. +DELETE FROM task_workspace_apps +WHERE workspace_build_number IS NULL; + +ALTER TABLE task_workspace_apps + DROP COLUMN workspace_build_id, + ALTER COLUMN workspace_build_number SET NOT NULL, + ALTER COLUMN workspace_agent_id DROP NOT NULL, + ALTER COLUMN workspace_app_id DROP NOT NULL, + ADD CONSTRAINT task_workspace_apps_pkey PRIMARY KEY (task_id, workspace_build_number); + +-- Add indexes for common joins or filters. 
+CREATE INDEX IF NOT EXISTS tasks_workspace_id_idx ON tasks (workspace_id); +CREATE INDEX IF NOT EXISTS tasks_owner_id_idx ON tasks (owner_id); +CREATE INDEX IF NOT EXISTS tasks_organization_id_idx ON tasks (organization_id); + +CREATE TYPE task_status AS ENUM ( + 'pending', + 'initializing', + 'active', + 'paused', + 'unknown', + 'error' +); + +CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status + FROM + tasks + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = 
task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000380_task_name_unique.down.sql b/coderd/database/migrations/000380_task_name_unique.down.sql new file mode 100644 index 0000000000000..b15f33255508d --- /dev/null +++ b/coderd/database/migrations/000380_task_name_unique.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS tasks_owner_id_name_unique_idx; diff --git a/coderd/database/migrations/000380_task_name_unique.up.sql b/coderd/database/migrations/000380_task_name_unique.up.sql new file mode 100644 index 0000000000000..13ccf0b2d3fa0 --- /dev/null +++ b/coderd/database/migrations/000380_task_name_unique.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX IF NOT EXISTS tasks_owner_id_name_unique_idx ON tasks (owner_id, LOWER(name)) WHERE deleted_at IS NULL; +COMMENT ON INDEX tasks_owner_id_name_unique_idx IS 'Index to ensure uniqueness for task owner/name'; diff --git a/coderd/database/migrations/000381_add_task_audit.down.sql b/coderd/database/migrations/000381_add_task_audit.down.sql new file mode 100644 index 0000000000000..362f597df0911 --- /dev/null +++ b/coderd/database/migrations/000381_add_task_audit.down.sql @@ -0,0 +1 @@ +-- Nothing to do diff --git a/coderd/database/migrations/000381_add_task_audit.up.sql 
b/coderd/database/migrations/000381_add_task_audit.up.sql new file mode 100644 index 0000000000000..006391ac1fbaf --- /dev/null +++ b/coderd/database/migrations/000381_add_task_audit.up.sql @@ -0,0 +1 @@ +ALTER TYPE resource_type ADD VALUE IF NOT EXISTS 'task'; diff --git a/coderd/database/migrations/000382_add_columns_to_tasks_with_status.down.sql b/coderd/database/migrations/000382_add_columns_to_tasks_with_status.down.sql new file mode 100644 index 0000000000000..c9cd9c866510d --- /dev/null +++ b/coderd/database/migrations/000382_add_columns_to_tasks_with_status.down.sql @@ -0,0 +1,72 @@ +DROP VIEW IF EXISTS tasks_with_status; + +-- Restore the view as created by 000379_create_tasks_with_status_view.up.sql. +CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status + FROM + tasks + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + )
task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000382_add_columns_to_tasks_with_status.up.sql b/coderd/database/migrations/000382_add_columns_to_tasks_with_status.up.sql new file mode 100644 index 0000000000000..4d949384c0d08 --- /dev/null +++ b/coderd/database/migrations/000382_add_columns_to_tasks_with_status.up.sql @@ -0,0 +1,74 @@ +-- Drop view from 00037_add_columns_to_tasks_with_status.up.sql. +DROP VIEW IF EXISTS tasks_with_status; + +-- Add task_app columns. 
+CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status, + task_app.* + FROM + tasks + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = 
task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.down.sql b/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.down.sql new file mode 100644 index 0000000000000..9a87362653f31 --- /dev/null +++ b/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.down.sql @@ -0,0 +1,5 @@ +-- Remove Task 'completed' transition template notification +DELETE FROM notification_templates WHERE id = '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c'; + +-- Remove Task 'failed' transition template notification +DELETE FROM notification_templates WHERE id = '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e'; diff --git a/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.up.sql b/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.up.sql new file mode 100644 index 0000000000000..a9d6b01103088 --- /dev/null +++ b/coderd/database/migrations/000383_add_task_completed_failed_notification_templates.up.sql @@ -0,0 +1,63 @@ +-- Task transition to 'complete' status +INSERT INTO notification_templates ( + id, + name, + title_template, + body_template, + actions, + "group", + method, + kind, + enabled_by_default +) VALUES ( + '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c', + 'Task Completed', + E'Task ''{{.Labels.workspace}}'' completed', + E'The task ''{{.Labels.task}}'' has completed successfully.', + '[ + { + "label": "View task", + "url": "{{base_url}}/tasks/{{.UserUsername}}/{{.Labels.workspace}}" + }, + { + "label": "View 
workspace", + "url": "{{base_url}}/@{{.UserUsername}}/{{.Labels.workspace}}" + } + ]'::jsonb, + 'Task Events', + NULL, + 'system'::notification_template_kind, + true + ); + +-- Task transition to 'failed' status +INSERT INTO notification_templates ( + id, + name, + title_template, + body_template, + actions, + "group", + method, + kind, + enabled_by_default +) VALUES ( + '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e', + 'Task Failed', + E'Task ''{{.Labels.workspace}}'' failed', + E'The task ''{{.Labels.task}}'' has failed. Check the logs for more details.', + '[ + { + "label": "View task", + "url": "{{base_url}}/tasks/{{.UserUsername}}/{{.Labels.workspace}}" + }, + { + "label": "View workspace", + "url": "{{base_url}}/@{{.UserUsername}}/{{.Labels.workspace}}" + } + ]'::jsonb, + 'Task Events', + NULL, + 'system'::notification_template_kind, + true + ); diff --git a/coderd/database/migrations/000384_add_workspace_share_scope.down.sql b/coderd/database/migrations/000384_add_workspace_share_scope.down.sql new file mode 100644 index 0000000000000..46aa4042e02cf --- /dev/null +++ b/coderd/database/migrations/000384_add_workspace_share_scope.down.sql @@ -0,0 +1,3 @@ +-- No-op: keep enum values to avoid dependency churn. +-- If strict removal is required, create a new enum type without these values, +-- cast columns, drop the old type, and rename. 
diff --git a/coderd/database/migrations/000384_add_workspace_share_scope.up.sql b/coderd/database/migrations/000384_add_workspace_share_scope.up.sql new file mode 100644 index 0000000000000..e27f2e9ab18fa --- /dev/null +++ b/coderd/database/migrations/000384_add_workspace_share_scope.up.sql @@ -0,0 +1,2 @@ +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace:share'; +ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_dormant:share'; diff --git a/coderd/database/migrations/000385_aibridge_fks.down.sql b/coderd/database/migrations/000385_aibridge_fks.down.sql new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/coderd/database/migrations/000385_aibridge_fks.up.sql b/coderd/database/migrations/000385_aibridge_fks.up.sql new file mode 100644 index 0000000000000..cc9cfbfdd93e4 --- /dev/null +++ b/coderd/database/migrations/000385_aibridge_fks.up.sql @@ -0,0 +1,49 @@ +-- We didn't add an FK as a premature optimization when the aibridge tables were +-- added, but for the initiator_id it's pretty annoying not having a strong +-- reference. +-- +-- Since the aibridge feature is still in early access, we're going to add the +-- FK and drop any rows that violate it (which should be none). This isn't a +-- very efficient migration, but since the feature is behind an experimental +-- flag, it shouldn't have any impact on deployments that aren't using the +-- feature. + +-- Step 1: Add FK without validating it +ALTER TABLE aibridge_interceptions + ADD CONSTRAINT aibridge_interceptions_initiator_id_fkey + FOREIGN KEY (initiator_id) + REFERENCES users(id) + -- We can't: + -- - Cascade delete because this is an auditing feature, and it also + -- wouldn't delete related aibridge rows since we don't FK them. + -- - Set null because you can't correlate to the original user ID if the + -- user somehow gets deleted. + -- + -- So we just use the default and don't do anything. 
This will result in a + -- deferred constraint violation error when the user is deleted. + -- + -- In Coder, we don't delete user rows ever, so this should never happen + -- unless an admin manually deletes a user with SQL. + ON DELETE NO ACTION + -- Delay validation of existing data until after we've dropped rows that + -- violate the FK. + NOT VALID; + +-- Step 2: Drop existing interceptions that violate the FK. +DELETE FROM aibridge_interceptions +WHERE initiator_id NOT IN (SELECT id FROM users); + +-- Step 3: Drop existing rows from other tables that no longer have a valid +-- interception in the database. +DELETE FROM aibridge_token_usages +WHERE interception_id NOT IN (SELECT id FROM aibridge_interceptions); + +DELETE FROM aibridge_user_prompts +WHERE interception_id NOT IN (SELECT id FROM aibridge_interceptions); + +DELETE FROM aibridge_tool_usages +WHERE interception_id NOT IN (SELECT id FROM aibridge_interceptions); + +-- Step 4: Validate the FK +ALTER TABLE aibridge_interceptions + VALIDATE CONSTRAINT aibridge_interceptions_initiator_id_fkey; diff --git a/coderd/database/migrations/000386_aibridge_interceptions_ended_at.down.sql b/coderd/database/migrations/000386_aibridge_interceptions_ended_at.down.sql new file mode 100644 index 0000000000000..f578deb23c4c0 --- /dev/null +++ b/coderd/database/migrations/000386_aibridge_interceptions_ended_at.down.sql @@ -0,0 +1 @@ +ALTER TABLE aibridge_interceptions DROP COLUMN ended_at; diff --git a/coderd/database/migrations/000386_aibridge_interceptions_ended_at.up.sql b/coderd/database/migrations/000386_aibridge_interceptions_ended_at.up.sql new file mode 100644 index 0000000000000..e4cca7e5a5c56 --- /dev/null +++ b/coderd/database/migrations/000386_aibridge_interceptions_ended_at.up.sql @@ -0,0 +1 @@ +ALTER TABLE aibridge_interceptions ADD COLUMN ended_at TIMESTAMP WITH TIME ZONE DEFAULT NULL; diff --git a/coderd/database/migrations/000387_migrate_task_workspaces.down.sql 
b/coderd/database/migrations/000387_migrate_task_workspaces.down.sql new file mode 100644 index 0000000000000..b26683717106f --- /dev/null +++ b/coderd/database/migrations/000387_migrate_task_workspaces.down.sql @@ -0,0 +1,3 @@ +-- No-op: This migration is not reversible as it transforms existing data into +-- a new schema. Rolling back would require deleting tasks and potentially +-- losing data. diff --git a/coderd/database/migrations/000387_migrate_task_workspaces.up.sql b/coderd/database/migrations/000387_migrate_task_workspaces.up.sql new file mode 100644 index 0000000000000..8c09cfe44dc37 --- /dev/null +++ b/coderd/database/migrations/000387_migrate_task_workspaces.up.sql @@ -0,0 +1,113 @@ +-- Migrate existing task workspaces to the new tasks data model. This migration +-- identifies workspaces that were created as tasks (has_ai_task = true) and +-- populates the tasks and task_workspace_apps tables with their data. + +-- Step 1: Create tasks from workspaces with has_ai_task TRUE in their latest build. +INSERT INTO tasks ( + id, + organization_id, + owner_id, + name, + workspace_id, + template_version_id, + template_parameters, + prompt, + created_at, + deleted_at +) +SELECT + gen_random_uuid() AS id, + w.organization_id, + w.owner_id, + w.name, + w.id AS workspace_id, + latest_task_build.template_version_id, + COALESCE(params.template_parameters, '{}'::jsonb) AS template_parameters, + COALESCE(ai_prompt.value, '') AS prompt, + w.created_at, + CASE WHEN w.deleted = true THEN w.deleting_at ELSE NULL END AS deleted_at +FROM workspaces w +INNER JOIN LATERAL ( + -- Find the latest build for this workspace that has has_ai_task = true. + SELECT + wb.template_version_id + FROM workspace_builds wb + WHERE wb.workspace_id = w.id + AND wb.has_ai_task = true + ORDER BY wb.build_number DESC + LIMIT 1 +) latest_task_build ON true +LEFT JOIN LATERAL ( + -- Find the latest build that has a non-empty AI Prompt parameter. 
+ SELECT + wb.id + FROM workspace_builds wb + WHERE wb.workspace_id = w.id + AND EXISTS ( + SELECT 1 + FROM workspace_build_parameters wbp + WHERE wbp.workspace_build_id = wb.id + AND wbp.name = 'AI Prompt' + AND wbp.value != '' + ) + ORDER BY wb.build_number DESC + LIMIT 1 +) latest_prompt_build ON true +LEFT JOIN LATERAL ( + -- Extract the AI Prompt parameter value from the prompt build. + SELECT wbp.value + FROM workspace_build_parameters wbp + WHERE wbp.workspace_build_id = latest_prompt_build.id + AND wbp.name = 'AI Prompt' + LIMIT 1 +) ai_prompt ON true +LEFT JOIN LATERAL ( + -- Aggregate all other parameters (excluding AI Prompt) from the prompt build. + SELECT jsonb_object_agg(wbp.name, wbp.value) AS template_parameters + FROM workspace_build_parameters wbp + WHERE wbp.workspace_build_id = latest_prompt_build.id + AND wbp.name != 'AI Prompt' +) params ON true +WHERE + -- Skip deleted workspaces because of duplicate name. + w.deleted = false + -- Safe-guard, do not create tasks for workspaces that are already tasks. + AND NOT EXISTS ( + SELECT 1 + FROM tasks t + WHERE t.workspace_id = w.id + ); + +-- Step 2: Populate task_workspace_apps table with build/agent/app information. +INSERT INTO task_workspace_apps ( + task_id, + workspace_build_number, + workspace_agent_id, + workspace_app_id +) +SELECT + t.id AS task_id, + latest_build.build_number AS workspace_build_number, + sidebar_app.agent_id AS workspace_agent_id, + sidebar_app.id AS workspace_app_id +FROM tasks t +INNER JOIN LATERAL ( + -- Find the latest build for this tasks workspace. + SELECT + wb.build_number, + wb.ai_task_sidebar_app_id + FROM workspace_builds wb + WHERE wb.workspace_id = t.workspace_id + ORDER BY wb.build_number DESC + LIMIT 1 +) latest_build ON true +-- Get the sidebar app (optional, can be NULL). +LEFT JOIN workspace_apps sidebar_app + ON sidebar_app.id = latest_build.ai_task_sidebar_app_id +WHERE + -- Safe-guard, do not create for existing tasks. 
+ NOT EXISTS ( + SELECT 1 + FROM task_workspace_apps twa + WHERE twa.task_id = t.id + ); diff --git a/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.down.sql b/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.down.sql new file mode 100644 index 0000000000000..3e56dbf873511 --- /dev/null +++ b/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE oauth2_provider_apps + ALTER COLUMN registration_access_token + SET DATA TYPE text + USING encode(registration_access_token, 'escape'); diff --git a/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.up.sql b/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.up.sql new file mode 100644 index 0000000000000..b278fed80e4ff --- /dev/null +++ b/coderd/database/migrations/000388_oauth_app_byte_reg_access_token.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE oauth2_provider_apps + ALTER COLUMN registration_access_token + SET DATA TYPE bytea + USING decode(registration_access_token, 'escape'); diff --git a/coderd/database/migrations/000389_api_key_allow_list_constraint.down.sql b/coderd/database/migrations/000389_api_key_allow_list_constraint.down.sql new file mode 100644 index 0000000000000..aa6aa87f10522 --- /dev/null +++ b/coderd/database/migrations/000389_api_key_allow_list_constraint.down.sql @@ -0,0 +1,3 @@ +-- Drop all CHECK constraints added in the up migration +ALTER TABLE api_keys +DROP CONSTRAINT api_keys_allow_list_not_empty; diff --git a/coderd/database/migrations/000389_api_key_allow_list_constraint.up.sql b/coderd/database/migrations/000389_api_key_allow_list_constraint.up.sql new file mode 100644 index 0000000000000..6dc46b522be92 --- /dev/null +++ b/coderd/database/migrations/000389_api_key_allow_list_constraint.up.sql @@ -0,0 +1,10 @@ +-- Defensively update any API keys with empty allow_list to have default '*:*' +-- This ensures all existing keys have at least one entry before adding the constraint +UPDATE 
api_keys +SET allow_list = ARRAY['*:*'] +WHERE allow_list = ARRAY[]::text[] OR array_length(allow_list, 1) IS NULL; + +-- Add CHECK constraint to ensure allow_list array is never empty. +-- Use cardinality() rather than array_length(): array_length() returns NULL +-- for empty arrays, and a NULL CHECK result is treated as satisfied, so the +-- constraint would not actually reject empty arrays. +ALTER TABLE api_keys +ADD CONSTRAINT api_keys_allow_list_not_empty +CHECK (cardinality(allow_list) > 0); diff --git a/coderd/database/migrations/000390_telemetry_locks.down.sql b/coderd/database/migrations/000390_telemetry_locks.down.sql new file mode 100644 index 0000000000000..b9ba97839f3d4 --- /dev/null +++ b/coderd/database/migrations/000390_telemetry_locks.down.sql @@ -0,0 +1 @@ +DROP TABLE telemetry_locks; diff --git a/coderd/database/migrations/000390_telemetry_locks.up.sql b/coderd/database/migrations/000390_telemetry_locks.up.sql new file mode 100644 index 0000000000000..f791c83ba7d17 --- /dev/null +++ b/coderd/database/migrations/000390_telemetry_locks.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE telemetry_locks ( + event_type TEXT NOT NULL CONSTRAINT telemetry_lock_event_type_constraint CHECK (event_type IN ('aibridge_interceptions_summary')), + period_ending_at TIMESTAMP WITH TIME ZONE NOT NULL, + + PRIMARY KEY (event_type, period_ending_at) +); + +COMMENT ON TABLE telemetry_locks IS 'Telemetry lock tracking table for deduplication of heartbeat events across replicas.'; +COMMENT ON COLUMN telemetry_locks.event_type IS 'The type of event that was sent.'; +COMMENT ON COLUMN telemetry_locks.period_ending_at IS 'The heartbeat period end timestamp.'; + +CREATE INDEX idx_telemetry_locks_period_ending_at ON telemetry_locks (period_ending_at); diff --git a/coderd/database/migrations/000391_tasks_with_status_user_fields.down.sql b/coderd/database/migrations/000391_tasks_with_status_user_fields.down.sql new file mode 100644 index 0000000000000..ff103d47e0da2 --- /dev/null +++ b/coderd/database/migrations/000391_tasks_with_status_user_fields.down.sql @@ -0,0 +1,74 @@ +-- Drop view from 000391_tasks_with_status_user_fields.up.sql.
+DROP VIEW IF EXISTS tasks_with_status; + +-- Restore from 000382_add_columns_to_tasks_with_status.up.sql. +CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status, + task_app.* + FROM + tasks + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + 
bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000391_tasks_with_status_user_fields.up.sql b/coderd/database/migrations/000391_tasks_with_status_user_fields.up.sql new file mode 100644 index 0000000000000..243772c241bf7 --- /dev/null +++ b/coderd/database/migrations/000391_tasks_with_status_user_fields.up.sql @@ -0,0 +1,84 @@ +-- Drop view from 000382_add_columns_to_tasks_with_status.up.sql. +DROP VIEW IF EXISTS tasks_with_status; + +-- Add owner_name, owner_avatar_url columns.
+CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status, + task_app.*, + task_owner.* + FROM + tasks + CROSS JOIN LATERAL ( + SELECT + vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM visible_users vu + WHERE vu.id = tasks.owner_id + ) task_owner + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + 
bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; + diff --git a/coderd/database/migrations/000392_disable_tasks_notifications_by_default.down.sql b/coderd/database/migrations/000392_disable_tasks_notifications_by_default.down.sql new file mode 100644 index 0000000000000..82fed7bf1d682 --- /dev/null +++ b/coderd/database/migrations/000392_disable_tasks_notifications_by_default.down.sql @@ -0,0 +1,8 @@ +UPDATE notification_templates +SET enabled_by_default = true +WHERE id IN ( + '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c', + '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e', + 'bd4b7168-d05e-4e19-ad0f-3593b77aa90f', + 'd4a6271c-cced-4ed0-84ad-afd02a9c7799' +); diff --git a/coderd/database/migrations/000392_disable_tasks_notifications_by_default.up.sql b/coderd/database/migrations/000392_disable_tasks_notifications_by_default.up.sql new file mode 100644 index 0000000000000..e51c9a57940a7 --- /dev/null +++ b/coderd/database/migrations/000392_disable_tasks_notifications_by_default.up.sql @@ -0,0 +1,8 @@ +UPDATE notification_templates +SET enabled_by_default = false +WHERE id IN ( + '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c', + '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e', + 'bd4b7168-d05e-4e19-ad0f-3593b77aa90f', + 'd4a6271c-cced-4ed0-84ad-afd02a9c7799' +); diff --git a/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql 
b/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql new file mode 100644 index 0000000000000..ed30e6a0f64f3 --- /dev/null +++ b/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql @@ -0,0 +1,39 @@ +DROP VIEW workspaces_expanded; + +-- Recreate the view from 000354_workspace_acl.up.sql +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, + visible_users.avatar_url AS owner_avatar_url, + visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description + FROM (((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql b/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql new file mode 100644 index 0000000000000..f01354e65bd50 --- /dev/null +++ b/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql @@ -0,0 +1,42 @@ +DROP VIEW 
workspaces_expanded; + +-- Add nullable task_id to workspaces_expanded view +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, + visible_users.avatar_url AS owner_avatar_url, + visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description, + tasks.id AS task_id + FROM ((((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))) + LEFT JOIN tasks ON ((workspaces.id = tasks.workspace_id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; + diff --git a/coderd/database/migrations/migrate_test.go b/coderd/database/migrations/migrate_test.go index f31a3adb0eb3b..7bab30c0d45e7 100644 --- a/coderd/database/migrations/migrate_test.go +++ b/coderd/database/migrations/migrate_test.go @@ -469,3 +469,416 @@ func TestMigration000362AggregateUsageEvents(t *testing.T) { require.JSONEq(t, string(expectedDailyRows[i].usageData), string(row.UsageData)) } } + +func TestMigration000387MigrateTaskWorkspaces(t *testing.T) { + t.Parallel() 
+ + // This test verifies the migration of task workspaces to the new tasks data model. + // Test cases: + // + // Task 1 (ws1) - Basic case: + // - Single build with has_ai_task=true, prompt, and parameters + // - Verifies: all task fields are populated correctly + // + // Task 2 (ws2) - No AI Prompt parameter: + // - Single build with has_ai_task=true but NO AI Prompt parameter + // - Verifies: prompt defaults to empty string (tests LEFT JOIN for optional prompt) + // + // Task 3 (ws3) - Latest build is stop: + // - Build 1: start with agents/apps and prompt + // - Build 2: stop build (references same app via ai_task_sidebar_app_id) + // - Verifies: twa uses latest build number with agents/apps from that build's ai_task_sidebar_app_id + // + // Antagonists - Should NOT be migrated: + // - Regular workspace without has_ai_task flag + // - Deleted workspace (w.deleted = true) + + const migrationVersion = 387 + + ctx := testutil.Context(t, testutil.WaitLong) + sqlDB := testSQLDB(t) + + // Migrate up to the migration before the task workspace migration. + next, err := migrations.Stepper(sqlDB) + require.NoError(t, err) + for { + version, more, err := next() + require.NoError(t, err) + if !more { + t.Fatalf("migration %d not found", migrationVersion) + } + if version == migrationVersion-1 { + break + } + } + + now := time.Now().UTC().Truncate(time.Microsecond) + deletingAt := now.Add(24 * time.Hour).Truncate(time.Microsecond) + + // Define all IDs upfront. + orgID := uuid.New() + userID := uuid.New() + templateID := uuid.New() + templateVersionID := uuid.New() + templateJobID := uuid.New() + + // Task workspace 1: basic case with prompt and parameters. + ws1ID := uuid.New() + ws1Build1JobID := uuid.New() + ws1Build1ID := uuid.New() + ws1Resource1ID := uuid.New() + ws1Agent1ID := uuid.New() + ws1App1ID := uuid.New() + + // Task workspace 2: no AI Prompt parameter. 
+ ws2ID := uuid.New() + ws2Build1JobID := uuid.New() + ws2Build1ID := uuid.New() + ws2Resource1ID := uuid.New() + ws2Agent1ID := uuid.New() + ws2App1ID := uuid.New() + + // Task workspace 3: has both start and stop builds. + ws3ID := uuid.New() + ws3Build1JobID := uuid.New() + ws3Build1ID := uuid.New() + ws3Resource1ID := uuid.New() + ws3Agent1ID := uuid.New() + ws3App1ID := uuid.New() + ws3Build2JobID := uuid.New() + ws3Build2ID := uuid.New() + ws3Resource2ID := uuid.New() + + // Antagonist 1: deleted workspace. + wsAntDeletedID := uuid.New() + wsAntDeletedBuild1JobID := uuid.New() + wsAntDeletedBuild1ID := uuid.New() + wsAntDeletedResource1ID := uuid.New() + wsAntDeletedAgent1ID := uuid.New() + wsAntDeletedApp1ID := uuid.New() + + // Antagonist 2: regular workspace without has_ai_task. + wsAntID := uuid.New() + wsAntBuild1JobID := uuid.New() + wsAntBuild1ID := uuid.New() + + // Create all fixtures in a single transaction. + tx, err := sqlDB.BeginTx(ctx, nil) + require.NoError(t, err) + defer tx.Rollback() + + // Execute fixture setup as individual statements. + fixtures := []struct { + query string + args []any + }{ + // Setup organization, user, and template. 
+ { + `INSERT INTO organizations (id, name, display_name, description, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)`, + []any{orgID, "test-org", "Test Org", "Test Org", now, now}, + }, + { + `INSERT INTO users (id, username, email, hashed_password, created_at, updated_at, status, rbac_roles, login_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, + []any{userID, "testuser", "test@example.com", []byte{}, now, now, "active", []byte("{}"), "password"}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{templateJobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "template_version_import", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO template_versions (id, organization_id, name, readme, created_at, updated_at, job_id, created_by) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, + []any{templateVersionID, orgID, "v1.0", "Test template", now, now, templateJobID, userID}, + }, + { + `INSERT INTO templates (id, organization_id, name, created_at, updated_at, provisioner, active_version_id, created_by) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, + []any{templateID, orgID, "test-template", now, now, "terraform", templateVersionID, userID}, + }, + { + `UPDATE template_versions SET template_id = $1 WHERE id = $2`, + []any{templateID, templateVersionID}, + }, + + // Task workspace 1 is a normal start build. 
+ { + `INSERT INTO workspaces (id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, last_used_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, + []any{ws1ID, now, now, userID, orgID, templateID, false, "task-ws-1", now}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{ws1Build1JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, daily_cost, instance_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws1Resource1ID, now, ws1Build1JobID, "start", "docker_container", "main", false, "", 0, ""}, + }, + { + `INSERT INTO workspace_agents (id, created_at, updated_at, name, resource_id, auth_token, architecture, operating_system, directory, connection_timeout_seconds, lifecycle_state, logs_length, logs_overflowed) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`, + []any{ws1Agent1ID, now, now, "agent1", ws1Resource1ID, uuid.New(), "amd64", "linux", "/home/coder", 120, "ready", 0, false}, + }, + { + `INSERT INTO workspace_apps (id, created_at, agent_id, slug, display_name, icon, command, url, subdomain, external) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws1App1ID, now, ws1Agent1ID, "code-server", "Code Server", "", "", "http://localhost:8080", false, false}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, has_ai_task, ai_task_sidebar_app_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)`, + 
[]any{ws1Build1ID, now, now, ws1ID, templateVersionID, 1, "start", userID, []byte{}, ws1Build1JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour), true, ws1App1ID}, + }, + { + `INSERT INTO workspace_build_parameters (workspace_build_id, name, value) VALUES ($1, $2, $3)`, + []any{ws1Build1ID, "AI Prompt", "Build a web server"}, + }, + { + `INSERT INTO workspace_build_parameters (workspace_build_id, name, value) VALUES ($1, $2, $3)`, + []any{ws1Build1ID, "region", "us-east-1"}, + }, + { + `INSERT INTO workspace_build_parameters (workspace_build_id, name, value) VALUES ($1, $2, $3)`, + []any{ws1Build1ID, "instance_type", "t2.micro"}, + }, + + // Task workspace 2: no AI Prompt parameter (tests LEFT JOIN). + { + `INSERT INTO workspaces (id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, last_used_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, + []any{ws2ID, now, now, userID, orgID, templateID, false, "task-ws-2-no-prompt", now}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{ws2Build1JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, daily_cost, instance_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws2Resource1ID, now, ws2Build1JobID, "start", "docker_container", "main", false, "", 0, ""}, + }, + { + `INSERT INTO workspace_agents (id, created_at, updated_at, name, resource_id, auth_token, architecture, operating_system, directory, connection_timeout_seconds, lifecycle_state, logs_length, logs_overflowed) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`, + []any{ws2Agent1ID, now, now, 
"agent2", ws2Resource1ID, uuid.New(), "amd64", "linux", "/home/coder", 120, "ready", 0, false}, + }, + { + `INSERT INTO workspace_apps (id, created_at, agent_id, slug, display_name, icon, command, url, subdomain, external) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws2App1ID, now, ws2Agent1ID, "terminal", "Terminal", "", "", "http://localhost:3000", false, false}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, has_ai_task, ai_task_sidebar_app_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)`, + []any{ws2Build1ID, now, now, ws2ID, templateVersionID, 1, "start", userID, []byte{}, ws2Build1JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour), true, ws2App1ID}, + }, + // Note: No AI Prompt parameter for ws2 - this tests the LEFT JOIN for optional prompt. + + // Task workspace 3: has both start and stop builds. 
+ { + `INSERT INTO workspaces (id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, last_used_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, + []any{ws3ID, now, now, userID, orgID, templateID, false, "task-ws-3-stop", now}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{ws3Build1JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, daily_cost, instance_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws3Resource1ID, now, ws3Build1JobID, "start", "docker_container", "main", false, "", 0, ""}, + }, + { + `INSERT INTO workspace_agents (id, created_at, updated_at, name, resource_id, auth_token, architecture, operating_system, directory, connection_timeout_seconds, lifecycle_state, logs_length, logs_overflowed) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`, + []any{ws3Agent1ID, now, now, "agent3", ws3Resource1ID, uuid.New(), "amd64", "linux", "/home/coder", 120, "ready", 0, false}, + }, + { + `INSERT INTO workspace_apps (id, created_at, agent_id, slug, display_name, icon, command, url, subdomain, external) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws3App1ID, now, ws3Agent1ID, "app3", "App3", "", "", "http://localhost:5000", false, false}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, has_ai_task, ai_task_sidebar_app_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)`, + []any{ws3Build1ID, now, 
now, ws3ID, templateVersionID, 1, "start", userID, []byte{}, ws3Build1JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour), true, ws3App1ID}, + }, + { + `INSERT INTO workspace_build_parameters (workspace_build_id, name, value) VALUES ($1, $2, $3)`, + []any{ws3Build1ID, "AI Prompt", "Task with stop build"}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{ws3Build2JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, daily_cost, instance_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{ws3Resource2ID, now, ws3Build2JobID, "stop", "docker_container", "main", false, "", 0, ""}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, has_ai_task, ai_task_sidebar_app_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)`, + []any{ws3Build2ID, now, now, ws3ID, templateVersionID, 2, "stop", userID, []byte{}, ws3Build2JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour), true, ws3App1ID}, + }, + + // Antagonist 1: deleted workspace. 
+ { + `INSERT INTO workspaces (id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, last_used_at, deleting_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{wsAntDeletedID, now, now, userID, orgID, templateID, true, "deleted-task-workspace", now, deletingAt}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{wsAntDeletedBuild1JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_resources (id, created_at, job_id, transition, type, name, hide, icon, daily_cost, instance_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{wsAntDeletedResource1ID, now, wsAntDeletedBuild1JobID, "start", "docker_container", "main", false, "", 0, ""}, + }, + { + `INSERT INTO workspace_agents (id, created_at, updated_at, name, resource_id, auth_token, architecture, operating_system, directory, connection_timeout_seconds, lifecycle_state, logs_length, logs_overflowed) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`, + []any{wsAntDeletedAgent1ID, now, now, "agent-deleted", wsAntDeletedResource1ID, uuid.New(), "amd64", "linux", "/home/coder", 120, "ready", 0, false}, + }, + { + `INSERT INTO workspace_apps (id, created_at, agent_id, slug, display_name, icon, command, url, subdomain, external) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, + []any{wsAntDeletedApp1ID, now, wsAntDeletedAgent1ID, "app-deleted", "AppDeleted", "", "", "http://localhost:6000", false, false}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, has_ai_task, 
ai_task_sidebar_app_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)`, + []any{wsAntDeletedBuild1ID, now, now, wsAntDeletedID, templateVersionID, 1, "start", userID, []byte{}, wsAntDeletedBuild1JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour), true, wsAntDeletedApp1ID}, + }, + { + `INSERT INTO workspace_build_parameters (workspace_build_id, name, value) VALUES ($1, $2, $3)`, + []any{wsAntDeletedBuild1ID, "AI Prompt", "Should not migrate deleted"}, + }, + + // Antagonist 2: regular workspace without has_ai_task. + { + `INSERT INTO workspaces (id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, last_used_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, + []any{wsAntID, now, now, userID, orgID, templateID, false, "regular-workspace", now}, + }, + { + `INSERT INTO provisioner_jobs (id, created_at, updated_at, started_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, file_id, type, input, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{wsAntBuild1JobID, now, now, now, now, "", orgID, userID, "terraform", "file", uuid.New(), "workspace_build", []byte("{}"), []byte("{}")}, + }, + { + `INSERT INTO workspace_builds (id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`, + []any{wsAntBuild1ID, now, now, wsAntID, templateVersionID, 1, "start", userID, []byte{}, wsAntBuild1JobID, now.Add(8 * time.Hour), "initiator", 0, now.Add(8 * time.Hour)}, + }, + } + + for _, fixture := range fixtures { + _, err = tx.ExecContext(ctx, fixture.query, fixture.args...) + require.NoError(t, err) + } + + err = tx.Commit() + require.NoError(t, err) + + // Run the migration. 
+ version, _, err := next() + require.NoError(t, err) + require.EqualValues(t, migrationVersion, version) + + // Should have exactly 3 tasks (not antagonists). + var taskCount int + err = sqlDB.QueryRowContext(ctx, "SELECT COUNT(*) FROM tasks").Scan(&taskCount) + require.NoError(t, err) + require.Equal(t, 3, taskCount, "should have created 3 tasks from workspaces") + + // Verify task 1, normal start build. + var task1 struct { + id uuid.UUID + name string + workspaceID uuid.UUID + templateVersionID uuid.UUID + prompt string + templateParameters []byte + createdAt time.Time + deletedAt *time.Time + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT id, name, workspace_id, template_version_id, prompt, template_parameters, created_at, deleted_at + FROM tasks WHERE workspace_id = $1 + `, ws1ID).Scan(&task1.id, &task1.name, &task1.workspaceID, &task1.templateVersionID, &task1.prompt, &task1.templateParameters, &task1.createdAt, &task1.deletedAt) + require.NoError(t, err) + require.Equal(t, "task-ws-1", task1.name) + require.Equal(t, "Build a web server", task1.prompt) + require.JSONEq(t, `{"region":"us-east-1","instance_type":"t2.micro"}`, string(task1.templateParameters)) + require.Nil(t, task1.deletedAt) + + // Verify task_workspace_apps for task 1. + var twa1 struct { + buildNumber int32 + agentID uuid.UUID + appID uuid.UUID + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps WHERE task_id = $1 + `, task1.id).Scan(&twa1.buildNumber, &twa1.agentID, &twa1.appID) + require.NoError(t, err) + require.Equal(t, int32(1), twa1.buildNumber) + require.Equal(t, ws1Agent1ID, twa1.agentID) + require.Equal(t, ws1App1ID, twa1.appID) + + // Verify task 2, no AI Prompt parameter. 
+ var task2 struct { + id uuid.UUID + name string + prompt string + templateParameters []byte + deletedAt *time.Time + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT id, name, prompt, template_parameters, deleted_at + FROM tasks WHERE workspace_id = $1 + `, ws2ID).Scan(&task2.id, &task2.name, &task2.prompt, &task2.templateParameters, &task2.deletedAt) + require.NoError(t, err) + require.Equal(t, "task-ws-2-no-prompt", task2.name) + require.Equal(t, "", task2.prompt, "prompt should be empty string when no AI Prompt parameter") + require.JSONEq(t, `{}`, string(task2.templateParameters), "no parameters") + require.Nil(t, task2.deletedAt) + + // Verify task_workspace_apps for task 2. + var twa2 struct { + buildNumber int32 + agentID uuid.UUID + appID uuid.UUID + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps WHERE task_id = $1 + `, task2.id).Scan(&twa2.buildNumber, &twa2.agentID, &twa2.appID) + require.NoError(t, err) + require.Equal(t, int32(1), twa2.buildNumber) + require.Equal(t, ws2Agent1ID, twa2.agentID) + require.Equal(t, ws2App1ID, twa2.appID) + + // Verify task 3, has both start and stop builds. 
+ var task3 struct { + id uuid.UUID + name string + prompt string + templateParameters []byte + templateVersionID uuid.UUID + deletedAt *time.Time + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT id, name, prompt, template_parameters, template_version_id, deleted_at + FROM tasks WHERE workspace_id = $1 + `, ws3ID).Scan(&task3.id, &task3.name, &task3.prompt, &task3.templateParameters, &task3.templateVersionID, &task3.deletedAt) + require.NoError(t, err) + require.Equal(t, "task-ws-3-stop", task3.name) + require.Equal(t, "Task with stop build", task3.prompt) + require.JSONEq(t, `{}`, string(task3.templateParameters), "no other parameters") + require.Equal(t, templateVersionID, task3.templateVersionID) + require.Nil(t, task3.deletedAt) + + // Verify task_workspace_apps for task 3 uses latest build and its ai_task_sidebar_app_id. + var twa3 struct { + buildNumber int32 + agentID uuid.UUID + appID uuid.UUID + } + err = sqlDB.QueryRowContext(ctx, ` + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps WHERE task_id = $1 + `, task3.id).Scan(&twa3.buildNumber, &twa3.agentID, &twa3.appID) + require.NoError(t, err) + require.Equal(t, int32(2), twa3.buildNumber, "should use latest build number") + require.Equal(t, ws3Agent1ID, twa3.agentID, "should use agent from latest build's ai_task_sidebar_app_id") + require.Equal(t, ws3App1ID, twa3.appID, "should use app from latest build's ai_task_sidebar_app_id") + + // Verify antagonists should NOT be migrated. 
+ var antCount int + err = sqlDB.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM tasks + WHERE workspace_id IN ($1, $2) + `, wsAntDeletedID, wsAntID).Scan(&antCount) + require.NoError(t, err) + require.Equal(t, 0, antCount, "antagonist workspaces (deleted and regular) should not be migrated") +} diff --git a/coderd/database/migrations/testdata/fixtures/000370_aibridge.up.sql b/coderd/database/migrations/testdata/fixtures/000370_aibridge.up.sql index 0a66555eea0f1..ef49405ae6f73 100644 --- a/coderd/database/migrations/testdata/fixtures/000370_aibridge.up.sql +++ b/coderd/database/migrations/testdata/fixtures/000370_aibridge.up.sql @@ -8,7 +8,7 @@ INSERT INTO ) VALUES ( 'be003e1e-b38f-43bf-847d-928074dd0aa8', - '30095c71-380b-457a-8995-97b8ee6e5307', + '30095c71-380b-457a-8995-97b8ee6e5307', -- admin@coder.com, from 000022_initial_v0.6.6.up.sql 'openai', 'gpt-5', '2025-09-15 12:45:13.921148+00' @@ -77,3 +77,82 @@ VALUES ( '{}', '2025-09-15 12:45:21.674335+00' ); + +-- For a later migration, we'll add an invalid interception without a valid +-- initiator_id. 
+INSERT INTO + aibridge_interceptions ( + id, + initiator_id, + provider, + model, + started_at + ) +VALUES ( + 'c6d29c6e-26a3-4137-bb2e-9dfeef3c1c26', + 'cab8d56a-8922-4999-81a9-046b43ac1312', -- user does not exist + 'openai', + 'gpt-5', + '2025-09-15 12:45:13.921148+00' + ); +INSERT INTO + aibridge_token_usages ( + id, + interception_id, + provider_response_id, + input_tokens, + output_tokens, + metadata, + created_at + ) +VALUES ( + '5650db6c-0b7c-49e3-bb26-9b2ba0107e11', + 'c6d29c6e-26a3-4137-bb2e-9dfeef3c1c26', + 'chatcmpl-CG2s28QlpKIoooUtXuLTmGbdtyS1k', + 10950, + 118, + '{}', + '2025-09-15 12:45:21.674413+00' + ); +INSERT INTO + aibridge_user_prompts ( + id, + interception_id, + provider_response_id, + prompt, + metadata, + created_at + ) +VALUES ( + '1e76cb5b-7c34-4160-b604-a4256f856169', + 'c6d29c6e-26a3-4137-bb2e-9dfeef3c1c26', + 'chatcmpl-CG2s28QlpKIoooUtXuLTmGbdtyS1k', + 'how many workspaces do i have', + '{}', + '2025-09-15 12:45:21.674335+00' + ); +INSERT INTO + aibridge_tool_usages ( + id, + interception_id, + provider_response_id, + tool, + server_url, + input, + injected, + invocation_error, + metadata, + created_at + ) +VALUES ( + '351b440f-d605-4f37-8ceb-011f0377b695', + 'c6d29c6e-26a3-4137-bb2e-9dfeef3c1c26', + 'chatcmpl-CG2s28QlpKIoooUtXuLTmGbdtyS1k', + 'coder_list_workspaces', + 'http://localhost:3000/api/experimental/mcp/http', + '{}', + true, + NULL, + '{}', + '2025-09-15 12:45:21.674413+00' + ); diff --git a/coderd/database/migrations/testdata/fixtures/000379_create_tasks_with_status_view.up.sql b/coderd/database/migrations/testdata/fixtures/000379_create_tasks_with_status_view.up.sql new file mode 100644 index 0000000000000..c2d1bf11475b8 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000379_create_tasks_with_status_view.up.sql @@ -0,0 +1,6 @@ +INSERT INTO public.task_workspace_apps VALUES ( + 'f5a1c3e4-8b2d-4f6a-9d7e-2a8b5c9e1f3d', -- task_id + NULL, -- workspace_agent_id + NULL, -- workspace_app_id + 99 -- 
workspace_build_number +) ON CONFLICT DO NOTHING; diff --git a/coderd/database/migrations/testdata/fixtures/000390_telemetry_locks.up.sql b/coderd/database/migrations/testdata/fixtures/000390_telemetry_locks.up.sql new file mode 100644 index 0000000000000..f41f45a7325d6 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000390_telemetry_locks.up.sql @@ -0,0 +1,8 @@ +INSERT INTO telemetry_locks ( + event_type, + period_ending_at +) +VALUES ( + 'aibridge_interceptions_summary', + '2025-01-01 00:00:00+00'::timestamptz +); diff --git a/coderd/database/modelmethods.go b/coderd/database/modelmethods.go index 230b9274eadcb..b3202342e3ffa 100644 --- a/coderd/database/modelmethods.go +++ b/coderd/database/modelmethods.go @@ -132,6 +132,20 @@ func (w ConnectionLog) RBACObject() rbac.Object { return obj } +func (t Task) RBACObject() rbac.Object { + return rbac.ResourceTask. + WithID(t.ID). + WithOwner(t.OwnerID.String()). + InOrg(t.OrganizationID) +} + +func (t TaskTable) RBACObject() rbac.Object { + return rbac.ResourceTask. + WithID(t.ID). + WithOwner(t.OwnerID.String()). + InOrg(t.OrganizationID) +} + func (s APIKeyScope) ToRBAC() rbac.ScopeName { switch s { case ApiKeyScopeCoderAll: @@ -145,24 +159,30 @@ func (s APIKeyScope) ToRBAC() rbac.ScopeName { } } -// APIKeyScopes allows expanding multiple API key scopes into a single -// RBAC scope for authorization. This implements rbac.ExpandableScope so -// callers can pass the list directly without deriving a single scope. +// APIKeyScopes represents a collection of individual API key scope names as +// stored in the database. Helper methods on this type are used to derive the +// RBAC scope that should be authorized for the key. type APIKeyScopes []APIKeyScope -var _ rbac.ExpandableScope = APIKeyScopes{} +// WithAllowList wraps the scopes with a database allow list, producing an +// ExpandableScope that always enforces the allow list overlay when expanded. 
+func (s APIKeyScopes) WithAllowList(list AllowList) APIKeyScopeSet { + return APIKeyScopeSet{Scopes: s, AllowList: list} +} // Has returns true if the slice contains the provided scope. func (s APIKeyScopes) Has(target APIKeyScope) bool { return slices.Contains(s, target) } -// Expand merges the permissions of all scopes in the list into a single scope. -// If the list is empty, it defaults to rbac.ScopeAll. -func (s APIKeyScopes) Expand() (rbac.Scope, error) { +// expandRBACScope merges the permissions of all scopes in the list into a +// single RBAC scope. If the list is empty, it defaults to rbac.ScopeAll for +// backward compatibility. This method is internal; use ScopeSet() to combine +// scopes with the API key's allow list for authorization. +func (s APIKeyScopes) expandRBACScope() (rbac.Scope, error) { // Default to ScopeAll for backward compatibility when no scopes provided. if len(s) == 0 { - return rbac.ScopeAll.Expand() + return rbac.Scope{}, xerrors.New("no scopes provided") } var merged rbac.Scope @@ -170,13 +190,12 @@ func (s APIKeyScopes) Expand() (rbac.Scope, error) { // Identifier is informational; not used in policy evaluation. Identifier: rbac.RoleIdentifier{Name: "Scope_Multiple"}, Site: nil, - Org: map[string][]rbac.Permission{}, User: nil, + ByOrgID: map[string]rbac.OrgPermissions{}, } - // Track allow list union, collapsing to wildcard if any child is wildcard. - allowAll := false - allowSet := make(map[string]rbac.AllowListElement) + // Collect allow lists for a union after expanding all scopes. + allowLists := make([][]rbac.AllowListElement, 0, len(s)) for _, s := range s { expanded, err := s.ToRBAC().Expand() @@ -186,38 +205,31 @@ func (s APIKeyScopes) Expand() (rbac.Scope, error) { // Merge role permissions: union by simple concatenation. merged.Site = append(merged.Site, expanded.Site...) - for orgID, perms := range expanded.Org { - merged.Org[orgID] = append(merged.Org[orgID], perms...) 
+ for orgID, perms := range expanded.ByOrgID { + orgPerms := merged.ByOrgID[orgID] + orgPerms.Org = append(orgPerms.Org, perms.Org...) + orgPerms.Member = append(orgPerms.Member, perms.Member...) + merged.ByOrgID[orgID] = orgPerms } merged.User = append(merged.User, expanded.User...) - // Merge allow lists. - for _, e := range expanded.AllowIDList { - if e.ID == policy.WildcardSymbol && e.Type == policy.WildcardSymbol { - allowAll = true - // No need to track other entries once wildcard is present. - continue - } - key := e.String() - allowSet[key] = e - } + allowLists = append(allowLists, expanded.AllowIDList) } // De-duplicate permissions across Site/Org/User merged.Site = rbac.DeduplicatePermissions(merged.Site) - for orgID, perms := range merged.Org { - merged.Org[orgID] = rbac.DeduplicatePermissions(perms) - } merged.User = rbac.DeduplicatePermissions(merged.User) + for orgID, perms := range merged.ByOrgID { + perms.Org = rbac.DeduplicatePermissions(perms.Org) + perms.Member = rbac.DeduplicatePermissions(perms.Member) + merged.ByOrgID[orgID] = perms + } - if allowAll || len(allowSet) == 0 { - merged.AllowIDList = []rbac.AllowListElement{rbac.AllowListAll()} - } else { - merged.AllowIDList = make([]rbac.AllowListElement, 0, len(allowSet)) - for _, v := range allowSet { - merged.AllowIDList = append(merged.AllowIDList, v) - } + union, err := rbac.UnionAllowLists(allowLists...) + if err != nil { + return rbac.Scope{}, err } + merged.AllowIDList = union return merged, nil } @@ -235,6 +247,37 @@ func (s APIKeyScopes) Name() rbac.RoleIdentifier { return rbac.RoleIdentifier{Name: "scopes[" + strings.Join(names, "+") + "]"} } +// APIKeyScopeSet merges expanded scopes with the API key's DB allow_list. If +// the DB allow_list is a wildcard or empty, the merged scope's allow list is +// unchanged. Otherwise, the DB allow_list overrides the merged AllowIDList to +// enforce the token's resource scoping consistently across all permissions. 
+type APIKeyScopeSet struct { + Scopes APIKeyScopes + AllowList AllowList +} + +var _ rbac.ExpandableScope = APIKeyScopeSet{} + +func (s APIKeyScopeSet) Name() rbac.RoleIdentifier { return s.Scopes.Name() } + +func (s APIKeyScopeSet) Expand() (rbac.Scope, error) { + merged, err := s.Scopes.expandRBACScope() + if err != nil { + return rbac.Scope{}, err + } + merged.AllowIDList = rbac.IntersectAllowLists(merged.AllowIDList, s.AllowList) + return merged, nil +} + +// ScopeSet returns the scopes combined with the database allow list. It is the +// canonical way to expose an API key's effective scope for authorization. +func (k APIKey) ScopeSet() APIKeyScopeSet { + return APIKeyScopeSet{ + Scopes: k.Scopes, + AllowList: k.AllowList, + } +} + func (k APIKey) RBACObject() rbac.Object { return rbac.ResourceApiKey.WithIDString(k.ID). WithOwner(k.UserID.String()) diff --git a/coderd/database/modelmethods_internal_test.go b/coderd/database/modelmethods_internal_test.go index 16d80d69c15f0..574d1892061ad 100644 --- a/coderd/database/modelmethods_internal_test.go +++ b/coderd/database/modelmethods_internal_test.go @@ -3,6 +3,7 @@ package database import ( "testing" + "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/rbac" @@ -38,7 +39,7 @@ func TestAPIKeyScopesExpand(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s, err := tc.scopes.Expand() + s, err := tc.scopes.expandRBACScope() require.NoError(t, err) tc.want(t, s) }) @@ -59,7 +60,7 @@ func TestAPIKeyScopesExpand(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s, err := tc.scopes.Expand() + s, err := tc.scopes.expandRBACScope() require.NoError(t, err) requirePermission(t, s, tc.res, tc.act) requireAllowAll(t, s) @@ -70,7 +71,7 @@ func TestAPIKeyScopesExpand(t *testing.T) { t.Run("merge", func(t *testing.T) { t.Parallel() scopes := APIKeyScopes{ApiKeyScopeCoderApplicationConnect, 
ApiKeyScopeCoderAll, ApiKeyScopeWorkspaceRead} - s, err := scopes.Expand() + s, err := scopes.expandRBACScope() require.NoError(t, err) requirePermission(t, s, rbac.ResourceWildcard.Type, policy.Action(policy.WildcardSymbol)) requirePermission(t, s, rbac.ResourceWorkspace.Type, policy.ActionApplicationConnect) @@ -78,13 +79,68 @@ func TestAPIKeyScopesExpand(t *testing.T) { requireAllowAll(t, s) }) - t.Run("empty_defaults_to_all", func(t *testing.T) { + t.Run("effective_scope_keep_types", func(t *testing.T) { t.Parallel() - s, err := (APIKeyScopes{}).Expand() + workspaceID := uuid.New() + + effective := APIKeyScopeSet{ + Scopes: APIKeyScopes{ApiKeyScopeWorkspaceRead}, + AllowList: AllowList{ + {Type: rbac.ResourceWorkspace.Type, ID: workspaceID.String()}, + }, + } + + expanded, err := effective.Expand() require.NoError(t, err) - requirePermission(t, s, rbac.ResourceWildcard.Type, policy.Action(policy.WildcardSymbol)) + require.Len(t, expanded.AllowIDList, 1) + require.Equal(t, "workspace", expanded.AllowIDList[0].Type) + require.Equal(t, workspaceID.String(), expanded.AllowIDList[0].ID) + }) + + t.Run("empty_rejected", func(t *testing.T) { + t.Parallel() + _, err := (APIKeyScopes{}).expandRBACScope() + require.Error(t, err) + require.ErrorContains(t, err, "no scopes provided") + }) + + t.Run("allow_list_overrides", func(t *testing.T) { + t.Parallel() + allowID := uuid.NewString() + set := APIKeyScopes{ApiKeyScopeWorkspaceRead}.WithAllowList(AllowList{ + {Type: rbac.ResourceWorkspace.Type, ID: allowID}, + }) + s, err := set.Expand() + require.NoError(t, err) + require.Len(t, s.AllowIDList, 1) + require.Equal(t, rbac.AllowListElement{Type: rbac.ResourceWorkspace.Type, ID: allowID}, s.AllowIDList[0]) + }) + + t.Run("allow_list_wildcard_keeps_merged", func(t *testing.T) { + t.Parallel() + set := APIKeyScopes{ApiKeyScopeWorkspaceRead}.WithAllowList(AllowList{ + {Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}, + }) + s, err := set.Expand() + require.NoError(t, 
err) + requirePermission(t, s, rbac.ResourceWorkspace.Type, policy.ActionRead) requireAllowAll(t, s) }) + + t.Run("scope_set_helper", func(t *testing.T) { + t.Parallel() + allowID := uuid.NewString() + key := APIKey{ + Scopes: APIKeyScopes{ApiKeyScopeWorkspaceRead}, + AllowList: AllowList{ + {Type: rbac.ResourceWorkspace.Type, ID: allowID}, + }, + } + s, err := key.ScopeSet().Expand() + require.NoError(t, err) + require.Len(t, s.AllowIDList, 1) + require.Equal(t, rbac.AllowListElement{Type: rbac.ResourceWorkspace.Type, ID: allowID}, s.AllowIDList[0]) + }) } // Helpers diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go index 8e2d74fb8ffec..f9b058a40986e 100644 --- a/coderd/database/modelqueries.go +++ b/coderd/database/modelqueries.go @@ -321,6 +321,7 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, &i.TemplateVersionID, &i.TemplateVersionName, &i.LatestBuildCompletedAt, @@ -763,10 +764,11 @@ func (q *sqlQuerier) CountAuthorizedConnectionLogs(ctx context.Context, arg Coun } type aibridgeQuerier interface { - ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]AIBridgeInterception, error) + ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]ListAIBridgeInterceptionsRow, error) + CountAuthorizedAIBridgeInterceptions(ctx context.Context, arg CountAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) (int64, error) } -func (q *sqlQuerier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) ([]AIBridgeInterception, error) { +func (q *sqlQuerier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) 
([]ListAIBridgeInterceptionsRow, error) { authorizedFilter, err := prepared.CompileToSQL(ctx, regosql.ConvertConfig{ VariableConverter: regosql.AIBridgeInterceptionConverter(), }) @@ -786,22 +788,28 @@ func (q *sqlQuerier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, ar arg.Provider, arg.Model, arg.AfterID, + arg.Offset, arg.Limit, ) if err != nil { return nil, err } defer rows.Close() - var items []AIBridgeInterception + var items []ListAIBridgeInterceptionsRow for rows.Next() { - var i AIBridgeInterception + var i ListAIBridgeInterceptionsRow if err := rows.Scan( - &i.ID, - &i.InitiatorID, - &i.Provider, - &i.Model, - &i.StartedAt, - &i.Metadata, + &i.AIBridgeInterception.ID, + &i.AIBridgeInterception.InitiatorID, + &i.AIBridgeInterception.Provider, + &i.AIBridgeInterception.Model, + &i.AIBridgeInterception.StartedAt, + &i.AIBridgeInterception.Metadata, + &i.AIBridgeInterception.EndedAt, + &i.VisibleUser.ID, + &i.VisibleUser.Username, + &i.VisibleUser.Name, + &i.VisibleUser.AvatarURL, ); err != nil { return nil, err } @@ -816,6 +824,45 @@ func (q *sqlQuerier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, ar return items, nil } +func (q *sqlQuerier) CountAuthorizedAIBridgeInterceptions(ctx context.Context, arg CountAIBridgeInterceptionsParams, prepared rbac.PreparedAuthorized) (int64, error) { + authorizedFilter, err := prepared.CompileToSQL(ctx, regosql.ConvertConfig{ + VariableConverter: regosql.AIBridgeInterceptionConverter(), + }) + if err != nil { + return 0, xerrors.Errorf("compile authorized filter: %w", err) + } + filtered, err := insertAuthorizedFilter(countAIBridgeInterceptions, fmt.Sprintf(" AND %s", authorizedFilter)) + if err != nil { + return 0, xerrors.Errorf("insert authorized filter: %w", err) + } + + query := fmt.Sprintf("-- name: CountAuthorizedAIBridgeInterceptions :one\n%s", filtered) + rows, err := q.db.QueryContext(ctx, query, + arg.StartedAfter, + arg.StartedBefore, + arg.InitiatorID, + arg.Provider, + arg.Model, + ) 
+ if err != nil { + return 0, err + } + defer rows.Close() + var count int64 + for rows.Next() { + if err := rows.Scan(&count); err != nil { + return 0, err + } + } + if err := rows.Close(); err != nil { + return 0, err + } + if err := rows.Err(); err != nil { + return 0, err + } + return count, nil +} + func insertAuthorizedFilter(query string, replaceWith string) (string, error) { if !strings.Contains(query, authorizedQueryPlaceholder) { return "", xerrors.Errorf("query does not contain authorized replace string, this is not an authorized query") diff --git a/coderd/database/models.go b/coderd/database/models.go index eecb657edb15d..ade3348ba3c69 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -166,6 +166,53 @@ const ( ApiKeyScopeCoderTemplatesbuild APIKeyScope = "coder:templates.build" ApiKeyScopeCoderTemplatesauthor APIKeyScope = "coder:templates.author" ApiKeyScopeCoderApikeysmanageSelf APIKeyScope = "coder:apikeys.manage_self" + ApiKeyScopeAibridgeInterception APIKeyScope = "aibridge_interception:*" + ApiKeyScopeApiKey APIKeyScope = "api_key:*" + ApiKeyScopeAssignOrgRole APIKeyScope = "assign_org_role:*" + ApiKeyScopeAssignRole APIKeyScope = "assign_role:*" + ApiKeyScopeAuditLog APIKeyScope = "audit_log:*" + ApiKeyScopeConnectionLog APIKeyScope = "connection_log:*" + ApiKeyScopeCryptoKey APIKeyScope = "crypto_key:*" + ApiKeyScopeDebugInfo APIKeyScope = "debug_info:*" + ApiKeyScopeDeploymentConfig APIKeyScope = "deployment_config:*" + ApiKeyScopeDeploymentStats APIKeyScope = "deployment_stats:*" + ApiKeyScopeFile APIKeyScope = "file:*" + ApiKeyScopeGroup APIKeyScope = "group:*" + ApiKeyScopeGroupMember APIKeyScope = "group_member:*" + ApiKeyScopeIdpsyncSettings APIKeyScope = "idpsync_settings:*" + ApiKeyScopeInboxNotification APIKeyScope = "inbox_notification:*" + ApiKeyScopeLicense APIKeyScope = "license:*" + ApiKeyScopeNotificationMessage APIKeyScope = "notification_message:*" + ApiKeyScopeNotificationPreference APIKeyScope = 
"notification_preference:*" + ApiKeyScopeNotificationTemplate APIKeyScope = "notification_template:*" + ApiKeyScopeOauth2App APIKeyScope = "oauth2_app:*" + ApiKeyScopeOauth2AppCodeToken APIKeyScope = "oauth2_app_code_token:*" + ApiKeyScopeOauth2AppSecret APIKeyScope = "oauth2_app_secret:*" + ApiKeyScopeOrganization APIKeyScope = "organization:*" + ApiKeyScopeOrganizationMember APIKeyScope = "organization_member:*" + ApiKeyScopePrebuiltWorkspace APIKeyScope = "prebuilt_workspace:*" + ApiKeyScopeProvisionerDaemon APIKeyScope = "provisioner_daemon:*" + ApiKeyScopeProvisionerJobs APIKeyScope = "provisioner_jobs:*" + ApiKeyScopeReplicas APIKeyScope = "replicas:*" + ApiKeyScopeSystem APIKeyScope = "system:*" + ApiKeyScopeTailnetCoordinator APIKeyScope = "tailnet_coordinator:*" + ApiKeyScopeTemplate APIKeyScope = "template:*" + ApiKeyScopeUsageEvent APIKeyScope = "usage_event:*" + ApiKeyScopeUser APIKeyScope = "user:*" + ApiKeyScopeUserSecret APIKeyScope = "user_secret:*" + ApiKeyScopeWebpushSubscription APIKeyScope = "webpush_subscription:*" + ApiKeyScopeWorkspace APIKeyScope = "workspace:*" + ApiKeyScopeWorkspaceAgentDevcontainers APIKeyScope = "workspace_agent_devcontainers:*" + ApiKeyScopeWorkspaceAgentResourceMonitor APIKeyScope = "workspace_agent_resource_monitor:*" + ApiKeyScopeWorkspaceDormant APIKeyScope = "workspace_dormant:*" + ApiKeyScopeWorkspaceProxy APIKeyScope = "workspace_proxy:*" + ApiKeyScopeTaskCreate APIKeyScope = "task:create" + ApiKeyScopeTaskRead APIKeyScope = "task:read" + ApiKeyScopeTaskUpdate APIKeyScope = "task:update" + ApiKeyScopeTaskDelete APIKeyScope = "task:delete" + ApiKeyScopeTask APIKeyScope = "task:*" + ApiKeyScopeWorkspaceShare APIKeyScope = "workspace:share" + ApiKeyScopeWorkspaceDormantShare APIKeyScope = "workspace_dormant:share" ) func (e *APIKeyScope) Scan(src interface{}) error { @@ -351,7 +398,54 @@ func (e APIKeyScope) Valid() bool { ApiKeyScopeCoderWorkspacesaccess, ApiKeyScopeCoderTemplatesbuild, 
ApiKeyScopeCoderTemplatesauthor, - ApiKeyScopeCoderApikeysmanageSelf: + ApiKeyScopeCoderApikeysmanageSelf, + ApiKeyScopeAibridgeInterception, + ApiKeyScopeApiKey, + ApiKeyScopeAssignOrgRole, + ApiKeyScopeAssignRole, + ApiKeyScopeAuditLog, + ApiKeyScopeConnectionLog, + ApiKeyScopeCryptoKey, + ApiKeyScopeDebugInfo, + ApiKeyScopeDeploymentConfig, + ApiKeyScopeDeploymentStats, + ApiKeyScopeFile, + ApiKeyScopeGroup, + ApiKeyScopeGroupMember, + ApiKeyScopeIdpsyncSettings, + ApiKeyScopeInboxNotification, + ApiKeyScopeLicense, + ApiKeyScopeNotificationMessage, + ApiKeyScopeNotificationPreference, + ApiKeyScopeNotificationTemplate, + ApiKeyScopeOauth2App, + ApiKeyScopeOauth2AppCodeToken, + ApiKeyScopeOauth2AppSecret, + ApiKeyScopeOrganization, + ApiKeyScopeOrganizationMember, + ApiKeyScopePrebuiltWorkspace, + ApiKeyScopeProvisionerDaemon, + ApiKeyScopeProvisionerJobs, + ApiKeyScopeReplicas, + ApiKeyScopeSystem, + ApiKeyScopeTailnetCoordinator, + ApiKeyScopeTemplate, + ApiKeyScopeUsageEvent, + ApiKeyScopeUser, + ApiKeyScopeUserSecret, + ApiKeyScopeWebpushSubscription, + ApiKeyScopeWorkspace, + ApiKeyScopeWorkspaceAgentDevcontainers, + ApiKeyScopeWorkspaceAgentResourceMonitor, + ApiKeyScopeWorkspaceDormant, + ApiKeyScopeWorkspaceProxy, + ApiKeyScopeTaskCreate, + ApiKeyScopeTaskRead, + ApiKeyScopeTaskUpdate, + ApiKeyScopeTaskDelete, + ApiKeyScopeTask, + ApiKeyScopeWorkspaceShare, + ApiKeyScopeWorkspaceDormantShare: return true } return false @@ -506,6 +600,53 @@ func AllAPIKeyScopeValues() []APIKeyScope { ApiKeyScopeCoderTemplatesbuild, ApiKeyScopeCoderTemplatesauthor, ApiKeyScopeCoderApikeysmanageSelf, + ApiKeyScopeAibridgeInterception, + ApiKeyScopeApiKey, + ApiKeyScopeAssignOrgRole, + ApiKeyScopeAssignRole, + ApiKeyScopeAuditLog, + ApiKeyScopeConnectionLog, + ApiKeyScopeCryptoKey, + ApiKeyScopeDebugInfo, + ApiKeyScopeDeploymentConfig, + ApiKeyScopeDeploymentStats, + ApiKeyScopeFile, + ApiKeyScopeGroup, + ApiKeyScopeGroupMember, + ApiKeyScopeIdpsyncSettings, + 
ApiKeyScopeInboxNotification, + ApiKeyScopeLicense, + ApiKeyScopeNotificationMessage, + ApiKeyScopeNotificationPreference, + ApiKeyScopeNotificationTemplate, + ApiKeyScopeOauth2App, + ApiKeyScopeOauth2AppCodeToken, + ApiKeyScopeOauth2AppSecret, + ApiKeyScopeOrganization, + ApiKeyScopeOrganizationMember, + ApiKeyScopePrebuiltWorkspace, + ApiKeyScopeProvisionerDaemon, + ApiKeyScopeProvisionerJobs, + ApiKeyScopeReplicas, + ApiKeyScopeSystem, + ApiKeyScopeTailnetCoordinator, + ApiKeyScopeTemplate, + ApiKeyScopeUsageEvent, + ApiKeyScopeUser, + ApiKeyScopeUserSecret, + ApiKeyScopeWebpushSubscription, + ApiKeyScopeWorkspace, + ApiKeyScopeWorkspaceAgentDevcontainers, + ApiKeyScopeWorkspaceAgentResourceMonitor, + ApiKeyScopeWorkspaceDormant, + ApiKeyScopeWorkspaceProxy, + ApiKeyScopeTaskCreate, + ApiKeyScopeTaskRead, + ApiKeyScopeTaskUpdate, + ApiKeyScopeTaskDelete, + ApiKeyScopeTask, + ApiKeyScopeWorkspaceShare, + ApiKeyScopeWorkspaceDormantShare, } } @@ -2535,6 +2676,7 @@ const ( ResourceTypeWorkspaceAgent ResourceType = "workspace_agent" ResourceTypeWorkspaceApp ResourceType = "workspace_app" ResourceTypePrebuildsSettings ResourceType = "prebuilds_settings" + ResourceTypeTask ResourceType = "task" ) func (e *ResourceType) Scan(src interface{}) error { @@ -2598,7 +2740,8 @@ func (e ResourceType) Valid() bool { ResourceTypeIdpSyncSettingsRole, ResourceTypeWorkspaceAgent, ResourceTypeWorkspaceApp, - ResourceTypePrebuildsSettings: + ResourceTypePrebuildsSettings, + ResourceTypeTask: return true } return false @@ -2631,6 +2774,7 @@ func AllResourceTypeValues() []ResourceType { ResourceTypeWorkspaceAgent, ResourceTypeWorkspaceApp, ResourceTypePrebuildsSettings, + ResourceTypeTask, } } @@ -2750,6 +2894,76 @@ func AllTailnetStatusValues() []TailnetStatus { } } +type TaskStatus string + +const ( + TaskStatusPending TaskStatus = "pending" + TaskStatusInitializing TaskStatus = "initializing" + TaskStatusActive TaskStatus = "active" + TaskStatusPaused TaskStatus = "paused" + 
TaskStatusUnknown TaskStatus = "unknown" + TaskStatusError TaskStatus = "error" +) + +func (e *TaskStatus) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = TaskStatus(s) + case string: + *e = TaskStatus(s) + default: + return fmt.Errorf("unsupported scan type for TaskStatus: %T", src) + } + return nil +} + +type NullTaskStatus struct { + TaskStatus TaskStatus `json:"task_status"` + Valid bool `json:"valid"` // Valid is true if TaskStatus is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullTaskStatus) Scan(value interface{}) error { + if value == nil { + ns.TaskStatus, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.TaskStatus.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullTaskStatus) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.TaskStatus), nil +} + +func (e TaskStatus) Valid() bool { + switch e { + case TaskStatusPending, + TaskStatusInitializing, + TaskStatusActive, + TaskStatusPaused, + TaskStatusUnknown, + TaskStatusError: + return true + } + return false +} + +func AllTaskStatusValues() []TaskStatus { + return []TaskStatus{ + TaskStatusPending, + TaskStatusInitializing, + TaskStatusActive, + TaskStatusPaused, + TaskStatusUnknown, + TaskStatusError, + } +} + // Defines the users status: active, dormant, or suspended. 
type UserStatus string @@ -3399,6 +3613,7 @@ type AIBridgeInterception struct { Model string `db:"model" json:"model"` StartedAt time.Time `db:"started_at" json:"started_at"` Metadata pqtype.NullRawMessage `db:"metadata" json:"metadata"` + EndedAt sql.NullTime `db:"ended_at" json:"ended_at"` } // Audit log of tokens used by intercepted requests in AI Bridge @@ -3741,7 +3956,7 @@ type OAuth2ProviderApp struct { // RFC 7591: Version of the client software SoftwareVersion sql.NullString `db:"software_version" json:"software_version"` // RFC 7592: Hashed registration access token for client management - RegistrationAccessToken sql.NullString `db:"registration_access_token" json:"registration_access_token"` + RegistrationAccessToken []byte `db:"registration_access_token" json:"registration_access_token"` // RFC 7592: URI for client configuration endpoint RegistrationClientUri sql.NullString `db:"registration_client_uri" json:"registration_client_uri"` } @@ -3992,6 +4207,26 @@ type TailnetTunnel struct { } type Task struct { + ID uuid.UUID `db:"id" json:"id"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + Name string `db:"name" json:"name"` + WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"` + Prompt string `db:"prompt" json:"prompt"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + DeletedAt sql.NullTime `db:"deleted_at" json:"deleted_at"` + Status TaskStatus `db:"status" json:"status"` + WorkspaceBuildNumber sql.NullInt32 `db:"workspace_build_number" json:"workspace_build_number"` + WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"` + WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"` + OwnerUsername string `db:"owner_username" 
json:"owner_username"` + OwnerName string `db:"owner_name" json:"owner_name"` + OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` +} + +type TaskTable struct { ID uuid.UUID `db:"id" json:"id"` OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` @@ -4005,10 +4240,10 @@ type Task struct { } type TaskWorkspaceApp struct { - TaskID uuid.UUID `db:"task_id" json:"task_id"` - WorkspaceBuildID uuid.UUID `db:"workspace_build_id" json:"workspace_build_id"` - WorkspaceAgentID uuid.UUID `db:"workspace_agent_id" json:"workspace_agent_id"` - WorkspaceAppID uuid.UUID `db:"workspace_app_id" json:"workspace_app_id"` + TaskID uuid.UUID `db:"task_id" json:"task_id"` + WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"` + WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"` + WorkspaceBuildNumber int32 `db:"workspace_build_number" json:"workspace_build_number"` } type TelemetryItem struct { @@ -4018,6 +4253,14 @@ type TelemetryItem struct { UpdatedAt time.Time `db:"updated_at" json:"updated_at"` } +// Telemetry lock tracking table for deduplication of heartbeat events across replicas. +type TelemetryLock struct { + // The type of event that was sent. + EventType string `db:"event_type" json:"event_type"` + // The heartbeat period end timestamp. + PeriodEndingAt time.Time `db:"period_ending_at" json:"period_ending_at"` +} + // Joins in the display name information such as username, avatar, and organization name. 
type Template struct { ID uuid.UUID `db:"id" json:"id"` @@ -4420,6 +4663,7 @@ type Workspace struct { TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` TemplateIcon string `db:"template_icon" json:"template_icon"` TemplateDescription string `db:"template_description" json:"template_description"` + TaskID uuid.NullUUID `db:"task_id" json:"task_id"` } type WorkspaceAgent struct { diff --git a/coderd/database/pubsub/pubsub_test.go b/coderd/database/pubsub/pubsub_test.go index 4f4a387276355..79ce80ea5448e 100644 --- a/coderd/database/pubsub/pubsub_test.go +++ b/coderd/database/pubsub/pubsub_test.go @@ -19,9 +19,6 @@ import ( func TestPGPubsub_Metrics(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := testutil.Logger(t) connectionURL, err := dbtestutil.Open(t) @@ -122,9 +119,6 @@ func TestPGPubsub_Metrics(t *testing.T) { func TestPGPubsubDriver(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } ctx := testutil.Context(t, testutil.WaitLong) logger := slogtest.Make(t, &slogtest.Options{ diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 27e828e45a950..b1a450939834d 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -6,7 +6,6 @@ package database import ( "context" - "database/sql" "time" "github.com/google/uuid" @@ -61,15 +60,21 @@ type sqlcQuerier interface { BatchUpdateWorkspaceNextStartAt(ctx context.Context, arg BatchUpdateWorkspaceNextStartAtParams) error BulkMarkNotificationMessagesFailed(ctx context.Context, arg BulkMarkNotificationMessagesFailedParams) (int64, error) BulkMarkNotificationMessagesSent(ctx context.Context, arg BulkMarkNotificationMessagesSentParams) (int64, error) + // Calculates the telemetry summary for a given provider, model, and client + // combination for telemetry reporting. 
+ CalculateAIBridgeInterceptionsTelemetrySummary(ctx context.Context, arg CalculateAIBridgeInterceptionsTelemetrySummaryParams) (CalculateAIBridgeInterceptionsTelemetrySummaryRow, error) ClaimPrebuiltWorkspace(ctx context.Context, arg ClaimPrebuiltWorkspaceParams) (ClaimPrebuiltWorkspaceRow, error) CleanTailnetCoordinators(ctx context.Context) error CleanTailnetLostPeers(ctx context.Context) error CleanTailnetTunnels(ctx context.Context) error + CountAIBridgeInterceptions(ctx context.Context, arg CountAIBridgeInterceptionsParams) (int64, error) CountAuditLogs(ctx context.Context, arg CountAuditLogsParams) (int64, error) CountConnectionLogs(ctx context.Context, arg CountConnectionLogsParams) (int64, error) // CountInProgressPrebuilds returns the number of in-progress prebuilds, grouped by preset ID and transition. - // Prebuild considered in-progress if it's in the "starting", "stopping", or "deleting" state. + // Prebuild considered in-progress if it's in the "pending", "starting", "stopping", or "deleting" state. CountInProgressPrebuilds(ctx context.Context) ([]CountInProgressPrebuildsRow, error) + // CountPendingNonActivePrebuilds returns the number of pending prebuilds for non-active template versions + CountPendingNonActivePrebuilds(ctx context.Context) ([]CountPendingNonActivePrebuildsRow, error) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) CreateUserSecret(ctx context.Context, arg CreateUserSecretParams) (UserSecret, error) CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error) @@ -105,6 +110,8 @@ type sqlcQuerier interface { // A provisioner daemon with "zeroed" last_seen_at column indicates possible // connectivity issues (no provisioner daemon activity since registration). DeleteOldProvisionerDaemons(ctx context.Context) error + // Deletes old telemetry locks from the telemetry_locks table. 
+ DeleteOldTelemetryLocks(ctx context.Context, periodEndingAtBefore time.Time) error // If an agent hasn't connected in the last 7 days, we purge it's logs. // Exception: if the logs are related to the latest build, we keep those around. // Logs can take up a lot of space, so it's important we clean up frequently. @@ -119,6 +126,7 @@ type sqlcQuerier interface { DeleteTailnetClientSubscription(ctx context.Context, arg DeleteTailnetClientSubscriptionParams) error DeleteTailnetPeer(ctx context.Context, arg DeleteTailnetPeerParams) (DeleteTailnetPeerRow, error) DeleteTailnetTunnel(ctx context.Context, arg DeleteTailnetTunnelParams) (DeleteTailnetTunnelRow, error) + DeleteTask(ctx context.Context, arg DeleteTaskParams) (TaskTable, error) DeleteUserSecret(ctx context.Context, id uuid.UUID) error DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg DeleteWebpushSubscriptionByUserIDAndEndpointParams) error DeleteWebpushSubscriptions(ctx context.Context, ids []uuid.UUID) error @@ -244,7 +252,7 @@ type sqlcQuerier interface { // RFC 7591/7592 Dynamic Client Registration queries GetOAuth2ProviderAppByClientID(ctx context.Context, id uuid.UUID) (OAuth2ProviderApp, error) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderApp, error) - GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken sql.NullString) (OAuth2ProviderApp, error) + GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken []byte) (OAuth2ProviderApp, error) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderAppCode, error) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (OAuth2ProviderAppCode, error) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderAppSecret, error) @@ -261,6 +269,9 @@ type sqlcQuerier interface { GetOrganizationResourceCountByID(ctx context.Context, organizationID uuid.UUID) 
(GetOrganizationResourceCountByIDRow, error) GetOrganizations(ctx context.Context, arg GetOrganizationsParams) ([]Organization, error) GetOrganizationsByUserID(ctx context.Context, arg GetOrganizationsByUserIDParams) ([]Organization, error) + // GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their + // membership status for the prebuilds system user (org membership, group existence, group membership). + GetOrganizationsWithPrebuildStatus(ctx context.Context, arg GetOrganizationsWithPrebuildStatusParams) ([]GetOrganizationsWithPrebuildStatusRow, error) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]ParameterSchema, error) GetPrebuildMetrics(ctx context.Context) ([]GetPrebuildMetricsRow, error) GetPrebuildsSettings(ctx context.Context) (string, error) @@ -331,6 +342,8 @@ type sqlcQuerier interface { GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]TailnetPeer, error) GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerBindingsRow, error) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerIDsRow, error) + GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error) + GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error) GetTelemetryItem(ctx context.Context, key string) (TelemetryItem, error) GetTelemetryItems(ctx context.Context) ([]TelemetryItem, error) // GetTemplateAppInsights returns the aggregate usage of each app in a given @@ -462,6 +475,7 @@ type sqlcQuerier interface { GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAgent, error) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]WorkspaceAgent, error) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceAgent, error) + GetWorkspaceAgentsForMetrics(ctx context.Context) ([]GetWorkspaceAgentsForMetricsRow, error) 
GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgent, error) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg GetWorkspaceAppByAgentIDAndSlugParams) (WorkspaceApp, error) GetWorkspaceAppStatusesByAppIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAppStatus, error) @@ -508,6 +522,7 @@ type sqlcQuerier interface { GetWorkspacesAndAgentsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]GetWorkspacesAndAgentsByOwnerIDRow, error) GetWorkspacesByTemplateID(ctx context.Context, templateID uuid.UUID) ([]WorkspaceTable, error) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]GetWorkspacesEligibleForTransitionRow, error) + GetWorkspacesForWorkspaceMetrics(ctx context.Context) ([]GetWorkspacesForWorkspaceMetricsRow, error) InsertAIBridgeInterception(ctx context.Context, arg InsertAIBridgeInterceptionParams) (AIBridgeInterception, error) InsertAIBridgeTokenUsage(ctx context.Context, arg InsertAIBridgeTokenUsageParams) (AIBridgeTokenUsage, error) InsertAIBridgeToolUsage(ctx context.Context, arg InsertAIBridgeToolUsageParams) (AIBridgeToolUsage, error) @@ -550,7 +565,14 @@ type sqlcQuerier interface { InsertProvisionerJobTimings(ctx context.Context, arg InsertProvisionerJobTimingsParams) ([]ProvisionerJobTiming, error) InsertProvisionerKey(ctx context.Context, arg InsertProvisionerKeyParams) (ProvisionerKey, error) InsertReplica(ctx context.Context, arg InsertReplicaParams) (Replica, error) + InsertTask(ctx context.Context, arg InsertTaskParams) (TaskTable, error) InsertTelemetryItemIfNotExists(ctx context.Context, arg InsertTelemetryItemIfNotExistsParams) error + // Inserts a new lock row into the telemetry_locks table. Replicas should call + // this function prior to attempting to generate or publish a heartbeat event to + // the telemetry service. 
+ // If the query returns a duplicate primary key error, the replica should not + // attempt to generate or publish the event to the telemetry service. + InsertTelemetryLock(ctx context.Context, arg InsertTelemetryLockParams) error InsertTemplate(ctx context.Context, arg InsertTemplateParams) error InsertTemplateVersion(ctx context.Context, arg InsertTemplateVersionParams) error InsertTemplateVersionParameter(ctx context.Context, arg InsertTemplateVersionParameterParams) (TemplateVersionParameter, error) @@ -586,12 +608,16 @@ type sqlcQuerier interface { InsertWorkspaceProxy(ctx context.Context, arg InsertWorkspaceProxyParams) (WorkspaceProxy, error) InsertWorkspaceResource(ctx context.Context, arg InsertWorkspaceResourceParams) (WorkspaceResource, error) InsertWorkspaceResourceMetadata(ctx context.Context, arg InsertWorkspaceResourceMetadataParams) ([]WorkspaceResourceMetadatum, error) - ListAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams) ([]AIBridgeInterception, error) + ListAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams) ([]ListAIBridgeInterceptionsRow, error) + // Finds all unique AIBridge interception telemetry summaries combinations + // (provider, model, client) in the given timeframe for telemetry reporting. 
+ ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg ListAIBridgeInterceptionsTelemetrySummariesParams) ([]ListAIBridgeInterceptionsTelemetrySummariesRow, error) ListAIBridgeTokenUsagesByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]AIBridgeTokenUsage, error) ListAIBridgeToolUsagesByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]AIBridgeToolUsage, error) ListAIBridgeUserPromptsByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]AIBridgeUserPrompt, error) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error) + ListTasks(ctx context.Context, arg ListTasksParams) ([]Task, error) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]UserSecret, error) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgentPortShare, error) MarkAllInboxNotificationsAsRead(ctx context.Context, arg MarkAllInboxNotificationsAsReadParams) error @@ -623,6 +649,7 @@ type sqlcQuerier interface { // This will always work regardless of the current state of the template version. 
UnarchiveTemplateVersion(ctx context.Context, arg UnarchiveTemplateVersionParams) error UnfavoriteWorkspace(ctx context.Context, id uuid.UUID) error + UpdateAIBridgeInterceptionEnded(ctx context.Context, arg UpdateAIBridgeInterceptionEndedParams) (AIBridgeInterception, error) UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error UpdateCryptoKeyDeletesAt(ctx context.Context, arg UpdateCryptoKeyDeletesAtParams) (CryptoKey, error) UpdateCustomRole(ctx context.Context, arg UpdateCustomRoleParams) (CustomRole, error) @@ -640,6 +667,10 @@ type sqlcQuerier interface { UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg UpdateOAuth2ProviderAppSecretByIDParams) (OAuth2ProviderAppSecret, error) UpdateOrganization(ctx context.Context, arg UpdateOrganizationParams) (Organization, error) UpdateOrganizationDeletedByID(ctx context.Context, arg UpdateOrganizationDeletedByIDParams) error + // Cancels all pending provisioner jobs for prebuilt workspaces on a specific preset from an + // inactive template version. + // This is an optimization to clean up stale pending jobs. 
+ UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]UpdatePrebuildProvisionerJobWithCancelRow, error) UpdatePresetPrebuildStatus(ctx context.Context, arg UpdatePresetPrebuildStatusParams) error UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg UpdateProvisionerDaemonLastSeenAtParams) error UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error @@ -650,6 +681,7 @@ type sqlcQuerier interface { UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error UpdateReplica(ctx context.Context, arg UpdateReplicaParams) (Replica, error) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg UpdateTailnetPeerStatusByCoordinatorParams) error + UpdateTaskWorkspaceID(ctx context.Context, arg UpdateTaskWorkspaceIDParams) (TaskTable, error) UpdateTemplateACLByID(ctx context.Context, arg UpdateTemplateACLByIDParams) error UpdateTemplateAccessControlByID(ctx context.Context, arg UpdateTemplateAccessControlByIDParams) error UpdateTemplateActiveVersionByID(ctx context.Context, arg UpdateTemplateActiveVersionByIDParams) error @@ -729,6 +761,7 @@ type sqlcQuerier interface { UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (TailnetCoordinator, error) UpsertTailnetPeer(ctx context.Context, arg UpsertTailnetPeerParams) (TailnetPeer, error) UpsertTailnetTunnel(ctx context.Context, arg UpsertTailnetTunnelParams) (TailnetTunnel, error) + UpsertTaskWorkspaceApp(ctx context.Context, arg UpsertTaskWorkspaceAppParams) (TaskWorkspaceApp, error) UpsertTelemetryItem(ctx context.Context, arg UpsertTelemetryItemParams) error // This query aggregates the workspace_agent_stats and workspace_app_stats data // into a single table for efficient storage and querying. 
Half-hour buckets are diff --git a/coderd/database/querier_test.go b/coderd/database/querier_test.go index c7daaaed356d3..773f944756576 100644 --- a/coderd/database/querier_test.go +++ b/coderd/database/querier_test.go @@ -3742,9 +3742,6 @@ func TestGetProvisionerJobsByIDsWithQueuePosition(t *testing.T) { func TestGetProvisionerJobsByIDsWithQueuePosition_MixedStatuses(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } db, _ := dbtestutil.NewDB(t) now := dbtime.Now() @@ -4084,10 +4081,6 @@ func TestGetUserStatusCounts(t *testing.T) { t.Parallel() t.Skip("https://github.com/coder/internal/issues/464") - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } - timezones := []string{ "Canada/Newfoundland", "Africa/Johannesburg", @@ -4625,10 +4618,6 @@ func TestGetUserStatusCounts(t *testing.T) { func TestOrganizationDeleteTrigger(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } - t.Run("WorkspaceExists", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) @@ -4942,9 +4931,6 @@ func createPrebuiltWorkspace( func TestWorkspacePrebuildsView(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } now := dbtime.Now() orgID := uuid.New() @@ -5046,9 +5032,6 @@ func TestWorkspacePrebuildsView(t *testing.T) { func TestGetPresetsBackoff(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } now := dbtime.Now() orgID := uuid.New() @@ -5565,9 +5548,6 @@ func TestGetPresetsBackoff(t *testing.T) { func TestGetPresetsAtFailureLimit(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.SkipNow() - } now := dbtime.Now() hourBefore := now.Add(-time.Hour) @@ -5871,10 +5851,6 @@ func TestGetPresetsAtFailureLimit(t *testing.T) { func TestWorkspaceAgentNameUniqueTrigger(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test makes use of a database trigger not implemented in dbmem") - } - createWorkspaceWithAgent 
:= func(t *testing.T, db database.Store, org database.Organization, agentName string) (database.WorkspaceBuild, database.WorkspaceResource, database.WorkspaceAgent) { t.Helper() @@ -6141,10 +6117,6 @@ func requireUsersMatch(t testing.TB, expected []database.User, found []database. func TestGetRunningPrebuiltWorkspaces(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("Test requires PostgreSQL for complex queries") - } - ctx := testutil.Context(t, testutil.WaitLong) db, _ := dbtestutil.NewDB(t) now := dbtime.Now() @@ -6653,6 +6625,652 @@ func TestGetLatestWorkspaceBuildsByWorkspaceIDs(t *testing.T) { } } +func TestTasksWithStatusView(t *testing.T) { + t.Parallel() + + createProvisionerJob := func(t *testing.T, db database.Store, org database.Organization, user database.User, buildStatus database.ProvisionerJobStatus) database.ProvisionerJob { + t.Helper() + + var jobParams database.ProvisionerJob + + switch buildStatus { + case database.ProvisionerJobStatusPending: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + } + case database.ProvisionerJobStatusRunning: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + } + case database.ProvisionerJobStatusFailed: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + Error: sql.NullString{Valid: true, String: "job failed"}, + } + case database.ProvisionerJobStatusSucceeded: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + StartedAt: sql.NullTime{Valid: true, Time: 
dbtime.Now()}, + CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + } + default: + t.Errorf("invalid build status: %v", buildStatus) + } + + return dbgen.ProvisionerJob(t, db, nil, jobParams) + } + + createTask := func( + ctx context.Context, + t *testing.T, + db database.Store, + org database.Organization, + user database.User, + buildStatus database.ProvisionerJobStatus, + buildTransition database.WorkspaceTransition, + agentState database.WorkspaceAgentLifecycleState, + appHealths []database.WorkspaceAppHealth, + ) database.Task { + t.Helper() + + template := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + + if buildStatus == "" { + return dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: "test-task", + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + } + + job := createProvisionerJob(t, db, org, user, buildStatus) + + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + TemplateID: template.ID, + OwnerID: user.ID, + }) + workspaceID := uuid.NullUUID{Valid: true, UUID: workspace.ID} + + task := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: "test-task", + WorkspaceID: workspaceID, + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + + workspaceBuild := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + WorkspaceID: workspace.ID, + TemplateVersionID: templateVersion.ID, + BuildNumber: 1, + Transition: buildTransition, + InitiatorID: user.ID, + JobID: job.ID, + }) + workspaceBuildNumber := workspaceBuild.BuildNumber + + _, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: 
workspaceBuildNumber, + }) + require.NoError(t, err) + + resource := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{ + JobID: job.ID, + }) + + if agentState != "" { + agent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{ + ResourceID: resource.ID, + }) + workspaceAgentID := agent.ID + + _, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: workspaceBuildNumber, + WorkspaceAgentID: uuid.NullUUID{UUID: workspaceAgentID, Valid: true}, + }) + require.NoError(t, err) + + err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.ID, + LifecycleState: agentState, + }) + require.NoError(t, err) + + for i, health := range appHealths { + app := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ + AgentID: workspaceAgentID, + Slug: fmt.Sprintf("test-app-%d", i), + DisplayName: fmt.Sprintf("Test App %d", i+1), + Health: health, + }) + if i == 0 { + // Assume the first app is the tasks app. 
+ _, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: workspaceBuildNumber, + WorkspaceAgentID: uuid.NullUUID{UUID: workspaceAgentID, Valid: true}, + WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, + }) + require.NoError(t, err) + } + } + } + + return task + } + + tests := []struct { + name string + buildStatus database.ProvisionerJobStatus + buildTransition database.WorkspaceTransition + agentState database.WorkspaceAgentLifecycleState + appHealths []database.WorkspaceAppHealth + expectedStatus database.TaskStatus + description string + expectBuildNumberValid bool + expectBuildNumber int32 + expectWorkspaceAgentValid bool + expectWorkspaceAppValid bool + }{ + { + name: "NoWorkspace", + expectedStatus: "pending", + description: "Task with no workspace assigned", + expectBuildNumberValid: false, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "FailedBuild", + buildStatus: database.ProvisionerJobStatusFailed, + buildTransition: database.WorkspaceTransitionStart, + expectedStatus: database.TaskStatusError, + description: "Latest workspace build failed", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "StoppedWorkspace", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStop, + expectedStatus: database.TaskStatusPaused, + description: "Workspace is stopped", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "DeletedWorkspace", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionDelete, + expectedStatus: database.TaskStatusPaused, + description: "Workspace is deleted", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + 
expectWorkspaceAppValid: false, + }, + { + name: "PendingStart", + buildStatus: database.ProvisionerJobStatusPending, + buildTransition: database.WorkspaceTransitionStart, + expectedStatus: database.TaskStatusInitializing, + description: "Workspace build is starting (pending)", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "RunningStart", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + expectedStatus: database.TaskStatusInitializing, + description: "Workspace build is starting (running)", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "StartingAgent", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateStarting, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing}, + expectedStatus: database.TaskStatusInitializing, + description: "Workspace is running but agent is starting", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "CreatedAgent", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateCreated, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing}, + expectedStatus: database.TaskStatusInitializing, + description: "Workspace is running but agent is created", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "ReadyAgentInitializingApp", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: 
database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing}, + expectedStatus: database.TaskStatusInitializing, + description: "Agent is ready but app is initializing", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "ReadyAgentHealthyApp", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy}, + expectedStatus: database.TaskStatusActive, + description: "Agent is ready and app is healthy", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "ReadyAgentDisabledApp", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthDisabled}, + expectedStatus: database.TaskStatusActive, + description: "Agent is ready and app health checking is disabled", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "ReadyAgentUnhealthyApp", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthUnhealthy}, + expectedStatus: database.TaskStatusError, + description: "Agent is ready but app is unhealthy", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "AgentStartTimeout", + buildStatus: 
database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateStartTimeout, + expectedStatus: database.TaskStatusUnknown, + description: "Agent start timed out", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: false, + }, + { + name: "AgentStartError", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateStartError, + expectedStatus: database.TaskStatusUnknown, + description: "Agent failed to start", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: false, + }, + { + name: "AgentShuttingDown", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateShuttingDown, + expectedStatus: database.TaskStatusUnknown, + description: "Agent is shutting down", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: false, + }, + { + name: "AgentOff", + buildStatus: database.ProvisionerJobStatusSucceeded, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateOff, + expectedStatus: database.TaskStatusUnknown, + description: "Agent is off", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: false, + }, + { + name: "RunningJobReadyAgentHealthyApp", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy}, + expectedStatus: database.TaskStatusActive, + description: "Running job with ready agent and 
healthy app should be active", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "RunningJobReadyAgentInitializingApp", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing}, + expectedStatus: database.TaskStatusInitializing, + description: "Running job with ready agent but initializing app should be initializing", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "RunningJobReadyAgentUnhealthyApp", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthUnhealthy}, + expectedStatus: database.TaskStatusError, + description: "Running job with ready agent but unhealthy app should be error", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "RunningJobConnectingAgent", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateStarting, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing}, + expectedStatus: database.TaskStatusInitializing, + description: "Running job with connecting agent should be initializing", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "RunningJobReadyAgentDisabledApp", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + 
agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthDisabled}, + expectedStatus: database.TaskStatusActive, + description: "Running job with ready agent and disabled app health checking should be active", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + { + name: "RunningJobReadyAgentHealthyTaskAppUnhealthyOtherAppIsOK", + buildStatus: database.ProvisionerJobStatusRunning, + buildTransition: database.WorkspaceTransitionStart, + agentState: database.WorkspaceAgentLifecycleStateReady, + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy, database.WorkspaceAppHealthUnhealthy}, + expectedStatus: database.TaskStatusActive, + description: "Running job with ready agent and multiple healthy apps should be active", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: true, + expectWorkspaceAppValid: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitLong) + + org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + + task := createTask(ctx, t, db, org, user, tt.buildStatus, tt.buildTransition, tt.agentState, tt.appHealths) + + got, err := db.GetTaskByID(ctx, task.ID) + require.NoError(t, err) + + require.Equal(t, tt.expectedStatus, got.Status) + + require.Equal(t, tt.expectBuildNumberValid, got.WorkspaceBuildNumber.Valid) + if tt.expectBuildNumberValid { + require.Equal(t, tt.expectBuildNumber, got.WorkspaceBuildNumber.Int32) + } + + require.Equal(t, tt.expectWorkspaceAgentValid, got.WorkspaceAgentID.Valid) + if tt.expectWorkspaceAgentValid { + require.NotEqual(t, uuid.Nil, got.WorkspaceAgentID.UUID) + } + + require.Equal(t, tt.expectWorkspaceAppValid, got.WorkspaceAppID.Valid) + if 
tt.expectWorkspaceAppValid { + require.NotEqual(t, uuid.Nil, got.WorkspaceAppID.UUID) + } + }) + } +} + +func TestGetTaskByWorkspaceID(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setupTask func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable) + wantErr bool + }{ + { + name: "task doesn't exist", + wantErr: true, + }, + { + name: "task with no workspace id", + setupTask: func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable) { + dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: "test-task", + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + wantErr: true, + }, + { + name: "task with workspace id", + setupTask: func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable) { + workspaceID := uuid.NullUUID{Valid: true, UUID: workspace.ID} + dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: "test-task", + WorkspaceID: workspaceID, + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + wantErr: false, + }, + } + + db, _ := dbtestutil.NewDB(t) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + template := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + TemplateID: uuid.NullUUID{Valid: true, UUID: template.ID}, + CreatedBy: user.ID, + }) + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + 
OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: template.ID, + }) + + if tt.setupTask != nil { + tt.setupTask(t, db, org, user, templateVersion, workspace) + } + + ctx := testutil.Context(t, testutil.WaitLong) + + task, err := db.GetTaskByWorkspaceID(ctx, workspace.ID) + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.False(t, task.WorkspaceBuildNumber.Valid) + require.False(t, task.WorkspaceAgentID.Valid) + require.False(t, task.WorkspaceAppID.Valid) + } + }) + } +} + +func TestTaskNameUniqueness(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + + org := dbgen.Organization(t, db, database.Organization{}) + user1 := dbgen.User(t, db, database.User{}) + user2 := dbgen.User(t, db, database.User{}) + template := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user1.ID, + }) + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + OrganizationID: org.ID, + CreatedBy: user1.ID, + }) + + taskName := "my-task" + + // Create initial task for user1. 
+ task1 := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user1.ID, + Name: taskName, + TemplateVersionID: tv.ID, + Prompt: "Test prompt", + }) + require.NotEqual(t, uuid.Nil, task1.ID) + + tests := []struct { + name string + ownerID uuid.UUID + taskName string + wantErr bool + }{ + { + name: "duplicate task name same user", + ownerID: user1.ID, + taskName: taskName, + wantErr: true, + }, + { + name: "duplicate task name different case same user", + ownerID: user1.ID, + taskName: "MY-TASK", + wantErr: true, + }, + { + name: "same task name different user", + ownerID: user2.ID, + taskName: taskName, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + taskID := uuid.New() + task, err := db.InsertTask(ctx, database.InsertTaskParams{ + ID: taskID, + OrganizationID: org.ID, + OwnerID: tt.ownerID, + Name: tt.taskName, + TemplateVersionID: tv.ID, + TemplateParameters: json.RawMessage("{}"), + Prompt: "Test prompt", + CreatedAt: dbtime.Now(), + }) + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.NotEqual(t, uuid.Nil, task.ID) + require.NotEqual(t, task1.ID, task.ID) + require.Equal(t, taskID, task.ID) + } + }) + } +} + func TestUsageEventsTrigger(t *testing.T) { t.Parallel() @@ -6780,3 +7398,397 @@ func TestUsageEventsTrigger(t *testing.T) { require.Len(t, rows, 0) }) } + +func TestListTasks(t *testing.T) { + t.Parallel() + + db, ps := dbtestutil.NewDB(t) + + // Given: two organizations and two users, one of which is a member of both + org1 := dbgen.Organization(t, db, database.Organization{}) + org2 := dbgen.Organization(t, db, database.Organization{}) + user1 := dbgen.User(t, db, database.User{}) + user2 := dbgen.User(t, db, database.User{}) + _ = dbgen.OrganizationMember(t, db, database.OrganizationMember{ + OrganizationID: org1.ID, + UserID: user1.ID, + }) + _ = dbgen.OrganizationMember(t, 
db, database.OrganizationMember{ + OrganizationID: org2.ID, + UserID: user2.ID, + }) + + // Given: a template with an active version + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user1.ID, + OrganizationID: org1.ID, + }) + tpl := dbgen.Template(t, db, database.Template{ + CreatedBy: user1.ID, + OrganizationID: org1.ID, + ActiveVersionID: tv.ID, + }) + + // Helper function to create a task + createTask := func(orgID, ownerID uuid.UUID) database.Task { + ws := dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: orgID, + OwnerID: ownerID, + TemplateID: tpl.ID, + }) + pj := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{}) + sidebarAppID := uuid.New() + wb := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + JobID: pj.ID, + TemplateVersionID: tv.ID, + WorkspaceID: ws.ID, + }) + wr := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{ + JobID: pj.ID, + }) + agt := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{ + ResourceID: wr.ID, + }) + wa := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ + ID: sidebarAppID, + AgentID: agt.ID, + }) + tsk := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: orgID, + OwnerID: ownerID, + Prompt: testutil.GetRandomName(t), + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: ws.ID, Valid: true}, + }) + _ = dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{ + TaskID: tsk.ID, + WorkspaceBuildNumber: wb.BuildNumber, + WorkspaceAgentID: uuid.NullUUID{Valid: true, UUID: agt.ID}, + WorkspaceAppID: uuid.NullUUID{Valid: true, UUID: wa.ID}, + }) + t.Logf("task_id:%s owner_id:%s org_id:%s", tsk.ID, ownerID, orgID) + return tsk + } + + // Given: user1 has one task, user2 has one task, user3 has two tasks (one in each org) + task1 := createTask(org1.ID, user1.ID) + task2 := createTask(org1.ID, user2.ID) + task3 := createTask(org2.ID, user2.ID) + + // Then: run various filters and assert expected results + for _, tc := range []struct { + name string + filter 
database.ListTasksParams + expectIDs []uuid.UUID + }{ + { + name: "no filter", + filter: database.ListTasksParams{ + OwnerID: uuid.Nil, + OrganizationID: uuid.Nil, + }, + expectIDs: []uuid.UUID{task3.ID, task2.ID, task1.ID}, + }, + { + name: "filter by user ID", + filter: database.ListTasksParams{ + OwnerID: user1.ID, + OrganizationID: uuid.Nil, + }, + expectIDs: []uuid.UUID{task1.ID}, + }, + { + name: "filter by organization ID", + filter: database.ListTasksParams{ + OwnerID: uuid.Nil, + OrganizationID: org1.ID, + }, + expectIDs: []uuid.UUID{task2.ID, task1.ID}, + }, + { + name: "filter by user and organization ID", + filter: database.ListTasksParams{ + OwnerID: user2.ID, + OrganizationID: org2.ID, + }, + expectIDs: []uuid.UUID{task3.ID}, + }, + { + name: "no results", + filter: database.ListTasksParams{ + OwnerID: user1.ID, + OrganizationID: org2.ID, + }, + expectIDs: nil, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + tasks, err := db.ListTasks(ctx, tc.filter) + require.NoError(t, err) + require.Len(t, tasks, len(tc.expectIDs)) + + for idx, eid := range tc.expectIDs { + task := tasks[idx] + assert.Equal(t, eid, task.ID, "task ID mismatch at index %d", idx) + + require.True(t, task.WorkspaceBuildNumber.Valid) + require.Greater(t, task.WorkspaceBuildNumber.Int32, int32(0)) + require.True(t, task.WorkspaceAgentID.Valid) + require.NotEqual(t, uuid.Nil, task.WorkspaceAgentID.UUID) + require.True(t, task.WorkspaceAppID.Valid) + require.NotEqual(t, uuid.Nil, task.WorkspaceAppID.UUID) + } + }) + } +} + +func TestUpdateTaskWorkspaceID(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + + // Create organization, users, template, and template version. 
+ org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + template := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + TemplateID: uuid.NullUUID{Valid: true, UUID: template.ID}, + CreatedBy: user.ID, + }) + + // Create another template for mismatch test. + template2 := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + + tests := []struct { + name string + setupTask func(t *testing.T) database.Task + setupWS func(t *testing.T) database.WorkspaceTable + wantErr bool + wantNoRow bool + }{ + { + name: "successful update with matching template", + setupTask: func(t *testing.T) database.Task { + return dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: testutil.GetRandomName(t), + WorkspaceID: uuid.NullUUID{}, + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + setupWS: func(t *testing.T) database.WorkspaceTable { + return dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: template.ID, + }) + }, + wantErr: false, + wantNoRow: false, + }, + { + name: "task already has workspace_id", + setupTask: func(t *testing.T) database.Task { + existingWS := dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: template.ID, + }) + return dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: testutil.GetRandomName(t), + WorkspaceID: uuid.NullUUID{Valid: true, UUID: existingWS.ID}, + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + setupWS: func(t *testing.T) database.WorkspaceTable { + return dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: 
template.ID, + }) + }, + wantErr: false, + wantNoRow: true, // No row should be returned because WHERE condition fails. + }, + { + name: "template mismatch between task and workspace", + setupTask: func(t *testing.T) database.Task { + return dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: testutil.GetRandomName(t), + WorkspaceID: uuid.NullUUID{}, // NULL workspace_id + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + setupWS: func(t *testing.T) database.WorkspaceTable { + return dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: template2.ID, // Different template, JOIN will fail. + }) + }, + wantErr: false, + wantNoRow: true, // No row should be returned because JOIN condition fails. + }, + { + name: "task does not exist", + setupTask: func(t *testing.T) database.Task { + return database.Task{ + ID: uuid.New(), // Non-existent task ID. + } + }, + setupWS: func(t *testing.T) database.WorkspaceTable { + return dbgen.Workspace(t, db, database.WorkspaceTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateID: template.ID, + }) + }, + wantErr: false, + wantNoRow: true, + }, + { + name: "workspace does not exist", + setupTask: func(t *testing.T) database.Task { + return dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + OwnerID: user.ID, + Name: testutil.GetRandomName(t), + WorkspaceID: uuid.NullUUID{}, + TemplateVersionID: templateVersion.ID, + Prompt: "Test prompt", + }) + }, + setupWS: func(t *testing.T) database.WorkspaceTable { + return database.WorkspaceTable{ + ID: uuid.New(), // Non-existent workspace ID. 
+ } + }, + wantErr: false, + wantNoRow: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + task := tt.setupTask(t) + workspace := tt.setupWS(t) + + updatedTask, err := db.UpdateTaskWorkspaceID(ctx, database.UpdateTaskWorkspaceIDParams{ + ID: task.ID, + WorkspaceID: uuid.NullUUID{Valid: true, UUID: workspace.ID}, + }) + + if tt.wantErr { + require.Error(t, err) + return + } + + if tt.wantNoRow { + require.ErrorIs(t, err, sql.ErrNoRows) + return + } + + require.NoError(t, err) + require.Equal(t, task.ID, updatedTask.ID) + require.True(t, updatedTask.WorkspaceID.Valid) + require.Equal(t, workspace.ID, updatedTask.WorkspaceID.UUID) + require.Equal(t, task.OrganizationID, updatedTask.OrganizationID) + require.Equal(t, task.OwnerID, updatedTask.OwnerID) + require.Equal(t, task.Name, updatedTask.Name) + require.Equal(t, task.TemplateVersionID, updatedTask.TemplateVersionID) + + // Verify the update persisted by fetching the task again. 
+ fetchedTask, err := db.GetTaskByID(ctx, task.ID) + require.NoError(t, err) + require.True(t, fetchedTask.WorkspaceID.Valid) + require.Equal(t, workspace.ID, fetchedTask.WorkspaceID.UUID) + }) + } +} + +func TestUpdateAIBridgeInterceptionEnded(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + + t.Run("NonExistingInterception", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + got, err := db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: uuid.New(), + EndedAt: time.Now(), + }) + require.ErrorContains(t, err, "no rows in result set") + require.EqualValues(t, database.AIBridgeInterception{}, got) + }) + + t.Run("OK", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + + user := dbgen.User(t, db, database.User{}) + interceptions := []database.AIBridgeInterception{} + + for _, uid := range []uuid.UUID{{1}, {2}, {3}} { + insertParams := database.InsertAIBridgeInterceptionParams{ + ID: uid, + InitiatorID: user.ID, + Metadata: json.RawMessage("{}"), + } + + intc, err := db.InsertAIBridgeInterception(ctx, insertParams) + require.NoError(t, err) + require.Equal(t, uid, intc.ID) + require.False(t, intc.EndedAt.Valid) + interceptions = append(interceptions, intc) + } + + intc0 := interceptions[0] + endedAt := time.Now() + // Mark first interception as done + updated, err := db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: intc0.ID, + EndedAt: endedAt, + }) + require.NoError(t, err) + require.EqualValues(t, updated.ID, intc0.ID) + require.True(t, updated.EndedAt.Valid) + require.WithinDuration(t, endedAt, updated.EndedAt.Time, 5*time.Second) + + // Updating first interception again should fail + updated, err = db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: intc0.ID, + EndedAt: endedAt.Add(time.Hour), + }) + require.ErrorIs(t, err, sql.ErrNoRows) + + // Other 
interceptions should not have ended_at set + for _, intc := range interceptions[1:] { + got, err := db.GetAIBridgeInterceptionByID(ctx, intc.ID) + require.NoError(t, err) + require.False(t, got.EndedAt.Valid) + } + }) +} diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 6bf1a3e25d8a0..ff32a1126792d 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -111,9 +111,222 @@ func (q *sqlQuerier) ActivityBumpWorkspace(ctx context.Context, arg ActivityBump return err } +const calculateAIBridgeInterceptionsTelemetrySummary = `-- name: CalculateAIBridgeInterceptionsTelemetrySummary :one +WITH interceptions_in_range AS ( + -- Get all matching interceptions in the given timeframe. + SELECT + id, + initiator_id, + (ended_at - started_at) AS duration + FROM + aibridge_interceptions + WHERE + provider = $1::text + AND model = $2::text + -- TODO: use the client value once we have it (see https://github.com/coder/aibridge/issues/31) + AND 'unknown' = $3::text + AND ended_at IS NOT NULL -- incomplete interceptions are not included in summaries + AND ended_at >= $4::timestamptz + AND ended_at < $5::timestamptz +), +interception_counts AS ( + SELECT + COUNT(id) AS interception_count, + COUNT(DISTINCT initiator_id) AS unique_initiator_count + FROM + interceptions_in_range +), +duration_percentiles AS ( + SELECT + (COALESCE(PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p50_millis, + (COALESCE(PERCENTILE_CONT(0.90) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p90_millis, + (COALESCE(PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p95_millis, + (COALESCE(PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p99_millis + FROM + interceptions_in_range +), 
+token_aggregates AS ( + SELECT + COALESCE(SUM(tu.input_tokens), 0) AS token_count_input, + COALESCE(SUM(tu.output_tokens), 0) AS token_count_output, + -- Cached tokens are stored in metadata JSON, extract if available. + -- Read tokens may be stored in: + -- - cache_read_input (Anthropic) + -- - prompt_cached (OpenAI) + COALESCE(SUM( + COALESCE((tu.metadata->>'cache_read_input')::bigint, 0) + + COALESCE((tu.metadata->>'prompt_cached')::bigint, 0) + ), 0) AS token_count_cached_read, + -- Written tokens may be stored in: + -- - cache_creation_input (Anthropic) + -- Note that cache_ephemeral_5m_input and cache_ephemeral_1h_input on + -- Anthropic are included in the cache_creation_input field. + COALESCE(SUM( + COALESCE((tu.metadata->>'cache_creation_input')::bigint, 0) + ), 0) AS token_count_cached_written, + COUNT(tu.id) AS token_usages_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_token_usages tu ON i.id = tu.interception_id +), +prompt_aggregates AS ( + SELECT + COUNT(up.id) AS user_prompts_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_user_prompts up ON i.id = up.interception_id +), +tool_aggregates AS ( + SELECT + COUNT(tu.id) FILTER (WHERE tu.injected = true) AS tool_calls_count_injected, + COUNT(tu.id) FILTER (WHERE tu.injected = false) AS tool_calls_count_non_injected, + COUNT(tu.id) FILTER (WHERE tu.injected = true AND tu.invocation_error IS NOT NULL) AS injected_tool_call_error_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_tool_usages tu ON i.id = tu.interception_id +) +SELECT + ic.interception_count::bigint AS interception_count, + dp.interception_duration_p50_millis::bigint AS interception_duration_p50_millis, + dp.interception_duration_p90_millis::bigint AS interception_duration_p90_millis, + dp.interception_duration_p95_millis::bigint AS interception_duration_p95_millis, + dp.interception_duration_p99_millis::bigint AS interception_duration_p99_millis, + ic.unique_initiator_count::bigint AS 
unique_initiator_count, + pa.user_prompts_count::bigint AS user_prompts_count, + tok_agg.token_usages_count::bigint AS token_usages_count, + tok_agg.token_count_input::bigint AS token_count_input, + tok_agg.token_count_output::bigint AS token_count_output, + tok_agg.token_count_cached_read::bigint AS token_count_cached_read, + tok_agg.token_count_cached_written::bigint AS token_count_cached_written, + tool_agg.tool_calls_count_injected::bigint AS tool_calls_count_injected, + tool_agg.tool_calls_count_non_injected::bigint AS tool_calls_count_non_injected, + tool_agg.injected_tool_call_error_count::bigint AS injected_tool_call_error_count +FROM + interception_counts ic, + duration_percentiles dp, + token_aggregates tok_agg, + prompt_aggregates pa, + tool_aggregates tool_agg +` + +type CalculateAIBridgeInterceptionsTelemetrySummaryParams struct { + Provider string `db:"provider" json:"provider"` + Model string `db:"model" json:"model"` + Client string `db:"client" json:"client"` + EndedAtAfter time.Time `db:"ended_at_after" json:"ended_at_after"` + EndedAtBefore time.Time `db:"ended_at_before" json:"ended_at_before"` +} + +type CalculateAIBridgeInterceptionsTelemetrySummaryRow struct { + InterceptionCount int64 `db:"interception_count" json:"interception_count"` + InterceptionDurationP50Millis int64 `db:"interception_duration_p50_millis" json:"interception_duration_p50_millis"` + InterceptionDurationP90Millis int64 `db:"interception_duration_p90_millis" json:"interception_duration_p90_millis"` + InterceptionDurationP95Millis int64 `db:"interception_duration_p95_millis" json:"interception_duration_p95_millis"` + InterceptionDurationP99Millis int64 `db:"interception_duration_p99_millis" json:"interception_duration_p99_millis"` + UniqueInitiatorCount int64 `db:"unique_initiator_count" json:"unique_initiator_count"` + UserPromptsCount int64 `db:"user_prompts_count" json:"user_prompts_count"` + TokenUsagesCount int64 `db:"token_usages_count" json:"token_usages_count"` + 
TokenCountInput int64 `db:"token_count_input" json:"token_count_input"` + TokenCountOutput int64 `db:"token_count_output" json:"token_count_output"` + TokenCountCachedRead int64 `db:"token_count_cached_read" json:"token_count_cached_read"` + TokenCountCachedWritten int64 `db:"token_count_cached_written" json:"token_count_cached_written"` + ToolCallsCountInjected int64 `db:"tool_calls_count_injected" json:"tool_calls_count_injected"` + ToolCallsCountNonInjected int64 `db:"tool_calls_count_non_injected" json:"tool_calls_count_non_injected"` + InjectedToolCallErrorCount int64 `db:"injected_tool_call_error_count" json:"injected_tool_call_error_count"` +} + +// Calculates the telemetry summary for a given provider, model, and client +// combination for telemetry reporting. +func (q *sqlQuerier) CalculateAIBridgeInterceptionsTelemetrySummary(ctx context.Context, arg CalculateAIBridgeInterceptionsTelemetrySummaryParams) (CalculateAIBridgeInterceptionsTelemetrySummaryRow, error) { + row := q.db.QueryRowContext(ctx, calculateAIBridgeInterceptionsTelemetrySummary, + arg.Provider, + arg.Model, + arg.Client, + arg.EndedAtAfter, + arg.EndedAtBefore, + ) + var i CalculateAIBridgeInterceptionsTelemetrySummaryRow + err := row.Scan( + &i.InterceptionCount, + &i.InterceptionDurationP50Millis, + &i.InterceptionDurationP90Millis, + &i.InterceptionDurationP95Millis, + &i.InterceptionDurationP99Millis, + &i.UniqueInitiatorCount, + &i.UserPromptsCount, + &i.TokenUsagesCount, + &i.TokenCountInput, + &i.TokenCountOutput, + &i.TokenCountCachedRead, + &i.TokenCountCachedWritten, + &i.ToolCallsCountInjected, + &i.ToolCallsCountNonInjected, + &i.InjectedToolCallErrorCount, + ) + return i, err +} + +const countAIBridgeInterceptions = `-- name: CountAIBridgeInterceptions :one +SELECT + COUNT(*) +FROM + aibridge_interceptions +WHERE + -- Filter by time frame + CASE + WHEN $1::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= $1::timestamptz + ELSE true 
+ END + AND CASE + WHEN $2::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at <= $2::timestamptz + ELSE true + END + -- Filter initiator_id + AND CASE + WHEN $3::uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN aibridge_interceptions.initiator_id = $3::uuid + ELSE true + END + -- Filter provider + AND CASE + WHEN $4::text != '' THEN aibridge_interceptions.provider = $4::text + ELSE true + END + -- Filter model + AND CASE + WHEN $5::text != '' THEN aibridge_interceptions.model = $5::text + ELSE true + END + -- Authorize Filter clause will be injected below in ListAuthorizedAIBridgeInterceptions + -- @authorize_filter +` + +type CountAIBridgeInterceptionsParams struct { + StartedAfter time.Time `db:"started_after" json:"started_after"` + StartedBefore time.Time `db:"started_before" json:"started_before"` + InitiatorID uuid.UUID `db:"initiator_id" json:"initiator_id"` + Provider string `db:"provider" json:"provider"` + Model string `db:"model" json:"model"` +} + +func (q *sqlQuerier) CountAIBridgeInterceptions(ctx context.Context, arg CountAIBridgeInterceptionsParams) (int64, error) { + row := q.db.QueryRowContext(ctx, countAIBridgeInterceptions, + arg.StartedAfter, + arg.StartedBefore, + arg.InitiatorID, + arg.Provider, + arg.Model, + ) + var count int64 + err := row.Scan(&count) + return count, err +} + const getAIBridgeInterceptionByID = `-- name: GetAIBridgeInterceptionByID :one SELECT - id, initiator_id, provider, model, started_at, metadata + id, initiator_id, provider, model, started_at, metadata, ended_at FROM aibridge_interceptions WHERE @@ -130,13 +343,14 @@ func (q *sqlQuerier) GetAIBridgeInterceptionByID(ctx context.Context, id uuid.UU &i.Model, &i.StartedAt, &i.Metadata, + &i.EndedAt, ) return i, err } const getAIBridgeInterceptions = `-- name: GetAIBridgeInterceptions :many SELECT - id, initiator_id, provider, model, started_at, metadata + id, initiator_id, provider, model, started_at, metadata, ended_at 
FROM aibridge_interceptions ` @@ -157,6 +371,7 @@ func (q *sqlQuerier) GetAIBridgeInterceptions(ctx context.Context) ([]AIBridgeIn &i.Model, &i.StartedAt, &i.Metadata, + &i.EndedAt, ); err != nil { return nil, err } @@ -306,7 +521,7 @@ INSERT INTO aibridge_interceptions ( ) VALUES ( $1, $2, $3, $4, COALESCE($5::jsonb, '{}'::jsonb), $6 ) -RETURNING id, initiator_id, provider, model, started_at, metadata +RETURNING id, initiator_id, provider, model, started_at, metadata, ended_at ` type InsertAIBridgeInterceptionParams struct { @@ -335,6 +550,7 @@ func (q *sqlQuerier) InsertAIBridgeInterception(ctx context.Context, arg InsertA &i.Model, &i.StartedAt, &i.Metadata, + &i.EndedAt, ) return i, err } @@ -473,9 +689,12 @@ func (q *sqlQuerier) InsertAIBridgeUserPrompt(ctx context.Context, arg InsertAIB const listAIBridgeInterceptions = `-- name: ListAIBridgeInterceptions :many SELECT - id, initiator_id, provider, model, started_at, metadata + aibridge_interceptions.id, aibridge_interceptions.initiator_id, aibridge_interceptions.provider, aibridge_interceptions.model, aibridge_interceptions.started_at, aibridge_interceptions.metadata, aibridge_interceptions.ended_at, + visible_users.id, visible_users.username, visible_users.name, visible_users.avatar_url FROM aibridge_interceptions +JOIN + visible_users ON visible_users.id = aibridge_interceptions.initiator_id WHERE -- Filter by time frame CASE @@ -522,7 +741,8 @@ WHERE ORDER BY aibridge_interceptions.started_at DESC, aibridge_interceptions.id DESC -LIMIT COALESCE(NULLIF($7::integer, 0), 100) +LIMIT COALESCE(NULLIF($8::integer, 0), 100) +OFFSET $7 ` type ListAIBridgeInterceptionsParams struct { @@ -532,10 +752,16 @@ type ListAIBridgeInterceptionsParams struct { Provider string `db:"provider" json:"provider"` Model string `db:"model" json:"model"` AfterID uuid.UUID `db:"after_id" json:"after_id"` + Offset int32 `db:"offset_" json:"offset_"` Limit int32 `db:"limit_" json:"limit_"` } -func (q *sqlQuerier) 
ListAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams) ([]AIBridgeInterception, error) { +type ListAIBridgeInterceptionsRow struct { + AIBridgeInterception AIBridgeInterception `db:"aibridge_interception" json:"aibridge_interception"` + VisibleUser VisibleUser `db:"visible_user" json:"visible_user"` +} + +func (q *sqlQuerier) ListAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams) ([]ListAIBridgeInterceptionsRow, error) { rows, err := q.db.QueryContext(ctx, listAIBridgeInterceptions, arg.StartedAfter, arg.StartedBefore, @@ -543,22 +769,28 @@ func (q *sqlQuerier) ListAIBridgeInterceptions(ctx context.Context, arg ListAIBr arg.Provider, arg.Model, arg.AfterID, + arg.Offset, arg.Limit, ) if err != nil { return nil, err } defer rows.Close() - var items []AIBridgeInterception + var items []ListAIBridgeInterceptionsRow for rows.Next() { - var i AIBridgeInterception + var i ListAIBridgeInterceptionsRow if err := rows.Scan( - &i.ID, - &i.InitiatorID, - &i.Provider, - &i.Model, - &i.StartedAt, - &i.Metadata, + &i.AIBridgeInterception.ID, + &i.AIBridgeInterception.InitiatorID, + &i.AIBridgeInterception.Provider, + &i.AIBridgeInterception.Model, + &i.AIBridgeInterception.StartedAt, + &i.AIBridgeInterception.Metadata, + &i.AIBridgeInterception.EndedAt, + &i.VisibleUser.ID, + &i.VisibleUser.Username, + &i.VisibleUser.Name, + &i.VisibleUser.AvatarURL, ); err != nil { return nil, err } @@ -573,6 +805,57 @@ func (q *sqlQuerier) ListAIBridgeInterceptions(ctx context.Context, arg ListAIBr return items, nil } +const listAIBridgeInterceptionsTelemetrySummaries = `-- name: ListAIBridgeInterceptionsTelemetrySummaries :many +SELECT + DISTINCT ON (provider, model, client) + provider, + model, + -- TODO: use the client value once we have it (see https://github.com/coder/aibridge/issues/31) + 'unknown' AS client +FROM + aibridge_interceptions +WHERE + ended_at IS NOT NULL -- incomplete interceptions are not included in summaries + AND 
ended_at >= $1::timestamptz + AND ended_at < $2::timestamptz +` + +type ListAIBridgeInterceptionsTelemetrySummariesParams struct { + EndedAtAfter time.Time `db:"ended_at_after" json:"ended_at_after"` + EndedAtBefore time.Time `db:"ended_at_before" json:"ended_at_before"` +} + +type ListAIBridgeInterceptionsTelemetrySummariesRow struct { + Provider string `db:"provider" json:"provider"` + Model string `db:"model" json:"model"` + Client string `db:"client" json:"client"` +} + +// Finds all unique AIBridge interception telemetry summaries combinations +// (provider, model, client) in the given timeframe for telemetry reporting. +func (q *sqlQuerier) ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg ListAIBridgeInterceptionsTelemetrySummariesParams) ([]ListAIBridgeInterceptionsTelemetrySummariesRow, error) { + rows, err := q.db.QueryContext(ctx, listAIBridgeInterceptionsTelemetrySummaries, arg.EndedAtAfter, arg.EndedAtBefore) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListAIBridgeInterceptionsTelemetrySummariesRow + for rows.Next() { + var i ListAIBridgeInterceptionsTelemetrySummariesRow + if err := rows.Scan(&i.Provider, &i.Model, &i.Client); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const listAIBridgeTokenUsagesByInterceptionIDs = `-- name: ListAIBridgeTokenUsagesByInterceptionIDs :many SELECT id, interception_id, provider_response_id, input_tokens, output_tokens, metadata, created_at @@ -704,6 +987,35 @@ func (q *sqlQuerier) ListAIBridgeUserPromptsByInterceptionIDs(ctx context.Contex return items, nil } +const updateAIBridgeInterceptionEnded = `-- name: UpdateAIBridgeInterceptionEnded :one +UPDATE aibridge_interceptions + SET ended_at = $1::timestamptz +WHERE + id = $2::uuid + AND ended_at IS NULL +RETURNING id, initiator_id, provider, model, 
started_at, metadata, ended_at +` + +type UpdateAIBridgeInterceptionEndedParams struct { + EndedAt time.Time `db:"ended_at" json:"ended_at"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) UpdateAIBridgeInterceptionEnded(ctx context.Context, arg UpdateAIBridgeInterceptionEndedParams) (AIBridgeInterception, error) { + row := q.db.QueryRowContext(ctx, updateAIBridgeInterceptionEnded, arg.EndedAt, arg.ID) + var i AIBridgeInterception + err := row.Scan( + &i.ID, + &i.InitiatorID, + &i.Provider, + &i.Model, + &i.StartedAt, + &i.Metadata, + &i.EndedAt, + ) + return i, err +} + const deleteAPIKeyByID = `-- name: DeleteAPIKeyByID :exec DELETE FROM api_keys @@ -6132,7 +6444,7 @@ const getOAuth2ProviderAppByRegistrationToken = `-- name: GetOAuth2ProviderAppBy SELECT id, created_at, updated_at, name, icon, callback_url, redirect_uris, client_type, dynamically_registered, client_id_issued_at, client_secret_expires_at, grant_types, response_types, token_endpoint_auth_method, scope, contacts, client_uri, logo_uri, tos_uri, policy_uri, jwks_uri, jwks, software_id, software_version, registration_access_token, registration_client_uri FROM oauth2_provider_apps WHERE registration_access_token = $1 ` -func (q *sqlQuerier) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken sql.NullString) (OAuth2ProviderApp, error) { +func (q *sqlQuerier) GetOAuth2ProviderAppByRegistrationToken(ctx context.Context, registrationAccessToken []byte) (OAuth2ProviderApp, error) { row := q.db.QueryRowContext(ctx, getOAuth2ProviderAppByRegistrationToken, registrationAccessToken) var i OAuth2ProviderApp err := row.Scan( @@ -6533,7 +6845,7 @@ type InsertOAuth2ProviderAppParams struct { Jwks pqtype.NullRawMessage `db:"jwks" json:"jwks"` SoftwareID sql.NullString `db:"software_id" json:"software_id"` SoftwareVersion sql.NullString `db:"software_version" json:"software_version"` - RegistrationAccessToken sql.NullString `db:"registration_access_token" 
json:"registration_access_token"` + RegistrationAccessToken []byte `db:"registration_access_token" json:"registration_access_token"` RegistrationClientUri sql.NullString `db:"registration_client_uri" json:"registration_client_uri"` } @@ -7850,7 +8162,7 @@ type CountInProgressPrebuildsRow struct { } // CountInProgressPrebuilds returns the number of in-progress prebuilds, grouped by preset ID and transition. -// Prebuild considered in-progress if it's in the "starting", "stopping", or "deleting" state. +// Prebuild considered in-progress if it's in the "pending", "starting", "stopping", or "deleting" state. func (q *sqlQuerier) CountInProgressPrebuilds(ctx context.Context) ([]CountInProgressPrebuildsRow, error) { rows, err := q.db.QueryContext(ctx, countInProgressPrebuilds) if err != nil { @@ -7880,6 +8192,58 @@ func (q *sqlQuerier) CountInProgressPrebuilds(ctx context.Context) ([]CountInPro return items, nil } +const countPendingNonActivePrebuilds = `-- name: CountPendingNonActivePrebuilds :many +SELECT + wpb.template_version_preset_id AS preset_id, + COUNT(*)::int AS count +FROM workspace_prebuild_builds wpb +INNER JOIN provisioner_jobs pj ON pj.id = wpb.job_id +INNER JOIN workspaces w ON w.id = wpb.workspace_id +INNER JOIN templates t ON t.id = w.template_id +WHERE + wpb.template_version_id != t.active_version_id + -- Only considers initial builds, i.e. 
created by the reconciliation loop + AND wpb.build_number = 1 + -- Only consider 'start' transitions (provisioning), not 'stop'/'delete' (deprovisioning) + -- Deprovisioning jobs should complete naturally as they're already cleaning up resources + AND wpb.transition = 'start'::workspace_transition + -- Pending jobs that have not yet been picked up by a provisioner + AND pj.job_status = 'pending'::provisioner_job_status + AND pj.worker_id IS NULL + AND pj.canceled_at IS NULL + AND pj.completed_at IS NULL +GROUP BY wpb.template_version_preset_id +` + +type CountPendingNonActivePrebuildsRow struct { + PresetID uuid.NullUUID `db:"preset_id" json:"preset_id"` + Count int32 `db:"count" json:"count"` +} + +// CountPendingNonActivePrebuilds returns the number of pending prebuilds for non-active template versions +func (q *sqlQuerier) CountPendingNonActivePrebuilds(ctx context.Context) ([]CountPendingNonActivePrebuildsRow, error) { + rows, err := q.db.QueryContext(ctx, countPendingNonActivePrebuilds) + if err != nil { + return nil, err + } + defer rows.Close() + var items []CountPendingNonActivePrebuildsRow + for rows.Next() { + var i CountPendingNonActivePrebuildsRow + if err := rows.Scan(&i.PresetID, &i.Count); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const findMatchingPresetID = `-- name: FindMatchingPresetID :one WITH provided_params AS ( SELECT @@ -7921,6 +8285,93 @@ func (q *sqlQuerier) FindMatchingPresetID(ctx context.Context, arg FindMatchingP return template_version_preset_id, err } +const getOrganizationsWithPrebuildStatus = `-- name: GetOrganizationsWithPrebuildStatus :many +WITH orgs_with_prebuilds AS ( + -- Get unique organizations that have presets with prebuilds configured + SELECT DISTINCT o.id, o.name + FROM organizations o + INNER JOIN templates t ON t.organization_id = o.id + INNER JOIN 
template_versions tv ON tv.template_id = t.id + INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id + WHERE tvp.desired_instances IS NOT NULL +), +prebuild_user_membership AS ( + -- Check if the user is a member of the organizations + SELECT om.organization_id + FROM organization_members om + INNER JOIN orgs_with_prebuilds owp ON owp.id = om.organization_id + WHERE om.user_id = $1::uuid +), +prebuild_groups AS ( + -- Check if the organizations have the prebuilds group + SELECT g.organization_id, g.id as group_id + FROM groups g + INNER JOIN orgs_with_prebuilds owp ON owp.id = g.organization_id + WHERE g.name = $2::text +), +prebuild_group_membership AS ( + -- Check if the user is in the prebuilds group + SELECT pg.organization_id + FROM prebuild_groups pg + INNER JOIN group_members gm ON gm.group_id = pg.group_id + WHERE gm.user_id = $1::uuid +) +SELECT + owp.id AS organization_id, + owp.name AS organization_name, + (pum.organization_id IS NOT NULL)::boolean AS has_prebuild_user, + pg.group_id AS prebuilds_group_id, + (pgm.organization_id IS NOT NULL)::boolean AS has_prebuild_user_in_group +FROM orgs_with_prebuilds owp +LEFT JOIN prebuild_groups pg ON pg.organization_id = owp.id +LEFT JOIN prebuild_user_membership pum ON pum.organization_id = owp.id +LEFT JOIN prebuild_group_membership pgm ON pgm.organization_id = owp.id +` + +type GetOrganizationsWithPrebuildStatusParams struct { + UserID uuid.UUID `db:"user_id" json:"user_id"` + GroupName string `db:"group_name" json:"group_name"` +} + +type GetOrganizationsWithPrebuildStatusRow struct { + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + OrganizationName string `db:"organization_name" json:"organization_name"` + HasPrebuildUser bool `db:"has_prebuild_user" json:"has_prebuild_user"` + PrebuildsGroupID uuid.NullUUID `db:"prebuilds_group_id" json:"prebuilds_group_id"` + HasPrebuildUserInGroup bool `db:"has_prebuild_user_in_group" json:"has_prebuild_user_in_group"` +} + 
+// GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their +// membership status for the prebuilds system user (org membership, group existence, group membership). +func (q *sqlQuerier) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg GetOrganizationsWithPrebuildStatusParams) ([]GetOrganizationsWithPrebuildStatusRow, error) { + rows, err := q.db.QueryContext(ctx, getOrganizationsWithPrebuildStatus, arg.UserID, arg.GroupName) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetOrganizationsWithPrebuildStatusRow + for rows.Next() { + var i GetOrganizationsWithPrebuildStatusRow + if err := rows.Scan( + &i.OrganizationID, + &i.OrganizationName, + &i.HasPrebuildUser, + &i.PrebuildsGroupID, + &i.HasPrebuildUserInGroup, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getPrebuildMetrics = `-- name: GetPrebuildMetrics :many SELECT t.name as template_name, @@ -8322,6 +8773,79 @@ func (q *sqlQuerier) GetTemplatePresetsWithPrebuilds(ctx context.Context, templa return items, nil } +const updatePrebuildProvisionerJobWithCancel = `-- name: UpdatePrebuildProvisionerJobWithCancel :many +WITH jobs_to_cancel AS ( + SELECT pj.id, w.id AS workspace_id, w.template_id, wpb.template_version_preset_id + FROM provisioner_jobs pj + INNER JOIN workspace_prebuild_builds wpb ON wpb.job_id = pj.id + INNER JOIN workspaces w ON w.id = wpb.workspace_id + INNER JOIN templates t ON t.id = w.template_id + WHERE + wpb.template_version_id != t.active_version_id + AND wpb.template_version_preset_id = $2 + -- Only considers initial builds, i.e. 
created by the reconciliation loop + AND wpb.build_number = 1 + -- Only consider 'start' transitions (provisioning), not 'stop'/'delete' (deprovisioning) + -- Deprovisioning jobs should complete naturally as they're already cleaning up resources + AND wpb.transition = 'start'::workspace_transition + -- Pending jobs that have not yet been picked up by a provisioner + AND pj.job_status = 'pending'::provisioner_job_status + AND pj.worker_id IS NULL + AND pj.canceled_at IS NULL + AND pj.completed_at IS NULL +) +UPDATE provisioner_jobs +SET + canceled_at = $1::timestamptz, + completed_at = $1::timestamptz +FROM jobs_to_cancel +WHERE provisioner_jobs.id = jobs_to_cancel.id +RETURNING jobs_to_cancel.id, jobs_to_cancel.workspace_id, jobs_to_cancel.template_id, jobs_to_cancel.template_version_preset_id +` + +type UpdatePrebuildProvisionerJobWithCancelParams struct { + Now time.Time `db:"now" json:"now"` + PresetID uuid.NullUUID `db:"preset_id" json:"preset_id"` +} + +type UpdatePrebuildProvisionerJobWithCancelRow struct { + ID uuid.UUID `db:"id" json:"id"` + WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` + TemplateID uuid.UUID `db:"template_id" json:"template_id"` + TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` +} + +// Cancels all pending provisioner jobs for prebuilt workspaces on a specific preset from an +// inactive template version. +// This is an optimization to clean up stale pending jobs. 
+func (q *sqlQuerier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]UpdatePrebuildProvisionerJobWithCancelRow, error) { + rows, err := q.db.QueryContext(ctx, updatePrebuildProvisionerJobWithCancel, arg.Now, arg.PresetID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UpdatePrebuildProvisionerJobWithCancelRow + for rows.Next() { + var i UpdatePrebuildProvisionerJobWithCancelRow + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.TemplateID, + &i.TemplateVersionPresetID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getActivePresetPrebuildSchedules = `-- name: GetActivePresetPrebuildSchedules :many SELECT tvpps.id, tvpps.preset_id, tvpps.cron_expression, tvpps.desired_instances @@ -8966,7 +9490,7 @@ WHERE -- Filter by max age if provided AND ( $7::bigint IS NULL - OR pd.last_seen_at IS NULL + OR pd.last_seen_at IS NULL OR pd.last_seen_at >= (NOW() - ($7::bigint || ' ms')::interval) ) AND ( @@ -9291,11 +9815,11 @@ func (q *sqlQuerier) InsertProvisionerJobLogs(ctx context.Context, arg InsertPro } const updateProvisionerJobLogsLength = `-- name: UpdateProvisionerJobLogsLength :exec -UPDATE +UPDATE provisioner_jobs -SET +SET logs_length = logs_length + $2 -WHERE +WHERE id = $1 ` @@ -9310,11 +9834,11 @@ func (q *sqlQuerier) UpdateProvisionerJobLogsLength(ctx context.Context, arg Upd } const updateProvisionerJobLogsOverflowed = `-- name: UpdateProvisionerJobLogsOverflowed :exec -UPDATE +UPDATE provisioner_jobs -SET +SET logs_overflowed = $2 -WHERE +WHERE id = $1 ` @@ -9834,6 +10358,7 @@ WHERE AND (COALESCE(array_length($2::uuid[], 1), 0) = 0 OR pj.id = ANY($2::uuid[])) AND (COALESCE(array_length($3::provisioner_job_status[], 1), 0) = 0 OR pj.job_status = ANY($3::provisioner_job_status[])) AND ($4::tagset 
= 'null'::tagset OR provisioner_tagset_contains(pj.tags::tagset, $4::tagset)) + AND ($5::uuid = '00000000-0000-0000-0000-000000000000'::uuid OR pj.initiator_id = $5::uuid) GROUP BY pj.id, qp.queue_position, @@ -9849,7 +10374,7 @@ GROUP BY ORDER BY pj.created_at DESC LIMIT - $5::int + $6::int ` type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams struct { @@ -9857,6 +10382,7 @@ type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerPar IDs []uuid.UUID `db:"ids" json:"ids"` Status []ProvisionerJobStatus `db:"status" json:"status"` Tags StringMap `db:"tags" json:"tags"` + InitiatorID uuid.UUID `db:"initiator_id" json:"initiator_id"` Limit sql.NullInt32 `db:"limit" json:"limit"` } @@ -9881,6 +10407,7 @@ func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionA pq.Array(arg.IDs), pq.Array(arg.Status), arg.Tags, + arg.InitiatorID, arg.Limit, ) if err != nil { @@ -10373,7 +10900,7 @@ FROM provisioner_keys WHERE organization_id = $1 -AND +AND lower(name) = lower($2) ` @@ -10489,10 +11016,10 @@ WHERE AND -- exclude reserved built-in key id != '00000000-0000-0000-0000-000000000001'::uuid -AND +AND -- exclude reserved user-auth key id != '00000000-0000-0000-0000-000000000002'::uuid -AND +AND -- exclude reserved psk key id != '00000000-0000-0000-0000-000000000003'::uuid ` @@ -12504,6 +13031,280 @@ func (q *sqlQuerier) UpsertTailnetTunnel(ctx context.Context, arg UpsertTailnetT return i, err } +const deleteTask = `-- name: DeleteTask :one +UPDATE tasks +SET + deleted_at = $1::timestamptz +WHERE + id = $2::uuid + AND deleted_at IS NULL +RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at +` + +type DeleteTaskParams struct { + DeletedAt time.Time `db:"deleted_at" json:"deleted_at"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) DeleteTask(ctx context.Context, arg DeleteTaskParams) (TaskTable, error) { + row := 
q.db.QueryRowContext(ctx, deleteTask, arg.DeletedAt, arg.ID) + var i TaskTable + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + ) + return i, err +} + +const getTaskByID = `-- name: GetTaskByID :one +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status WHERE id = $1::uuid +` + +func (q *sqlQuerier) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error) { + row := q.db.QueryRowContext(ctx, getTaskByID, id) + var i Task + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + &i.Status, + &i.WorkspaceBuildNumber, + &i.WorkspaceAgentID, + &i.WorkspaceAppID, + &i.OwnerUsername, + &i.OwnerName, + &i.OwnerAvatarUrl, + ) + return i, err +} + +const getTaskByWorkspaceID = `-- name: GetTaskByWorkspaceID :one +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status WHERE workspace_id = $1::uuid +` + +func (q *sqlQuerier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error) { + row := q.db.QueryRowContext(ctx, getTaskByWorkspaceID, workspaceID) + var i Task + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + &i.Status, + &i.WorkspaceBuildNumber, + &i.WorkspaceAgentID, + &i.WorkspaceAppID, + &i.OwnerUsername, + &i.OwnerName, 
+ &i.OwnerAvatarUrl, + ) + return i, err +} + +const insertTask = `-- name: InsertTask :one +INSERT INTO tasks + (id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9) +RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at +` + +type InsertTaskParams struct { + ID uuid.UUID `db:"id" json:"id"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + Name string `db:"name" json:"name"` + WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"` + Prompt string `db:"prompt" json:"prompt"` + CreatedAt time.Time `db:"created_at" json:"created_at"` +} + +func (q *sqlQuerier) InsertTask(ctx context.Context, arg InsertTaskParams) (TaskTable, error) { + row := q.db.QueryRowContext(ctx, insertTask, + arg.ID, + arg.OrganizationID, + arg.OwnerID, + arg.Name, + arg.WorkspaceID, + arg.TemplateVersionID, + arg.TemplateParameters, + arg.Prompt, + arg.CreatedAt, + ) + var i TaskTable + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + ) + return i, err +} + +const listTasks = `-- name: ListTasks :many +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status tws +WHERE tws.deleted_at IS NULL +AND CASE WHEN $1::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.owner_id = $1::UUID ELSE TRUE END +AND CASE 
WHEN $2::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.organization_id = $2::UUID ELSE TRUE END +AND CASE WHEN $3::text != '' THEN tws.status = $3::task_status ELSE TRUE END +ORDER BY tws.created_at DESC +` + +type ListTasksParams struct { + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + Status string `db:"status" json:"status"` +} + +func (q *sqlQuerier) ListTasks(ctx context.Context, arg ListTasksParams) ([]Task, error) { + rows, err := q.db.QueryContext(ctx, listTasks, arg.OwnerID, arg.OrganizationID, arg.Status) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Task + for rows.Next() { + var i Task + if err := rows.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + &i.Status, + &i.WorkspaceBuildNumber, + &i.WorkspaceAgentID, + &i.WorkspaceAppID, + &i.OwnerUsername, + &i.OwnerName, + &i.OwnerAvatarUrl, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateTaskWorkspaceID = `-- name: UpdateTaskWorkspaceID :one +UPDATE + tasks +SET + workspace_id = $2 +FROM + workspaces w +JOIN + template_versions tv +ON + tv.template_id = w.template_id +WHERE + tasks.id = $1 + AND tasks.workspace_id IS NULL + AND w.id = $2 + AND tv.id = tasks.template_version_id +RETURNING + tasks.id, tasks.organization_id, tasks.owner_id, tasks.name, tasks.workspace_id, tasks.template_version_id, tasks.template_parameters, tasks.prompt, tasks.created_at, tasks.deleted_at +` + +type UpdateTaskWorkspaceIDParams struct { + ID uuid.UUID `db:"id" json:"id"` + WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` +} + +func (q *sqlQuerier) UpdateTaskWorkspaceID(ctx context.Context, arg 
UpdateTaskWorkspaceIDParams) (TaskTable, error) { + row := q.db.QueryRowContext(ctx, updateTaskWorkspaceID, arg.ID, arg.WorkspaceID) + var i TaskTable + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + ) + return i, err +} + +const upsertTaskWorkspaceApp = `-- name: UpsertTaskWorkspaceApp :one +INSERT INTO task_workspace_apps + (task_id, workspace_build_number, workspace_agent_id, workspace_app_id) +VALUES + ($1, $2, $3, $4) +ON CONFLICT (task_id, workspace_build_number) +DO UPDATE SET + workspace_agent_id = EXCLUDED.workspace_agent_id, + workspace_app_id = EXCLUDED.workspace_app_id +RETURNING task_id, workspace_agent_id, workspace_app_id, workspace_build_number +` + +type UpsertTaskWorkspaceAppParams struct { + TaskID uuid.UUID `db:"task_id" json:"task_id"` + WorkspaceBuildNumber int32 `db:"workspace_build_number" json:"workspace_build_number"` + WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"` + WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"` +} + +func (q *sqlQuerier) UpsertTaskWorkspaceApp(ctx context.Context, arg UpsertTaskWorkspaceAppParams) (TaskWorkspaceApp, error) { + row := q.db.QueryRowContext(ctx, upsertTaskWorkspaceApp, + arg.TaskID, + arg.WorkspaceBuildNumber, + arg.WorkspaceAgentID, + arg.WorkspaceAppID, + ) + var i TaskWorkspaceApp + err := row.Scan( + &i.TaskID, + &i.WorkspaceAgentID, + &i.WorkspaceAppID, + &i.WorkspaceBuildNumber, + ) + return i, err +} + const getTelemetryItem = `-- name: GetTelemetryItem :one SELECT key, value, created_at, updated_at FROM telemetry_items WHERE key = $1 ` @@ -12584,6 +13385,41 @@ func (q *sqlQuerier) UpsertTelemetryItem(ctx context.Context, arg UpsertTelemetr return err } +const deleteOldTelemetryLocks = `-- name: DeleteOldTelemetryLocks :exec +DELETE FROM + telemetry_locks +WHERE + period_ending_at < $1::timestamptz 
+` + +// Deletes old telemetry locks from the telemetry_locks table. +func (q *sqlQuerier) DeleteOldTelemetryLocks(ctx context.Context, periodEndingAtBefore time.Time) error { + _, err := q.db.ExecContext(ctx, deleteOldTelemetryLocks, periodEndingAtBefore) + return err +} + +const insertTelemetryLock = `-- name: InsertTelemetryLock :exec +INSERT INTO + telemetry_locks (event_type, period_ending_at) +VALUES + ($1, $2) +` + +type InsertTelemetryLockParams struct { + EventType string `db:"event_type" json:"event_type"` + PeriodEndingAt time.Time `db:"period_ending_at" json:"period_ending_at"` +} + +// Inserts a new lock row into the telemetry_locks table. Replicas should call +// this function prior to attempting to generate or publish a heartbeat event to +// the telemetry service. +// If the query returns a duplicate primary key error, the replica should not +// attempt to generate or publish the event to the telemetry service. +func (q *sqlQuerier) InsertTelemetryLock(ctx context.Context, arg InsertTelemetryLockParams) error { + _, err := q.db.ExecContext(ctx, insertTelemetryLock, arg.EventType, arg.PeriodEndingAt) + return err +} + const getTemplateAverageBuildTime = `-- name: GetTemplateAverageBuildTime :one WITH build_times AS ( SELECT @@ -14286,14 +15122,14 @@ DO $$ DECLARE table_record record; BEGIN - FOR table_record IN - SELECT table_schema, table_name - FROM information_schema.tables + FOR table_record IN + SELECT table_schema, table_name + FROM information_schema.tables WHERE table_schema NOT IN ('pg_catalog', 'information_schema') AND table_type = 'BASE TABLE' LOOP - EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL', - table_record.table_schema, + EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL', + table_record.table_schema, table_record.table_name); END LOOP; END; @@ -17457,6 +18293,102 @@ func (q *sqlQuerier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, created return items, nil } +const getWorkspaceAgentsForMetrics = `-- name: 
GetWorkspaceAgentsForMetrics :many +SELECT + w.id as workspace_id, + w.name as workspace_name, + u.username as owner_username, + t.name as template_name, + tv.name as template_version_name, + workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope, workspace_agents.deleted +FROM workspaces w +JOIN users u ON w.owner_id = u.id +JOIN templates t ON w.template_id = t.id +JOIN workspace_builds wb ON w.id = wb.workspace_id +LEFT JOIN template_versions tv ON wb.template_version_id = tv.id +JOIN workspace_resources wr ON wb.job_id = wr.job_id +JOIN workspace_agents ON wr.id = workspace_agents.resource_id +WHERE w.deleted = false +AND wb.build_number = ( + SELECT MAX(wb2.build_number) + FROM workspace_builds wb2 + WHERE wb2.workspace_id = w.id +) +AND workspace_agents.deleted = FALSE +` + +type GetWorkspaceAgentsForMetricsRow struct { + WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` + WorkspaceName string `db:"workspace_name" json:"workspace_name"` + OwnerUsername string 
`db:"owner_username" json:"owner_username"` + TemplateName string `db:"template_name" json:"template_name"` + TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` + WorkspaceAgent WorkspaceAgent `db:"workspace_agent" json:"workspace_agent"` +} + +func (q *sqlQuerier) GetWorkspaceAgentsForMetrics(ctx context.Context) ([]GetWorkspaceAgentsForMetricsRow, error) { + rows, err := q.db.QueryContext(ctx, getWorkspaceAgentsForMetrics) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetWorkspaceAgentsForMetricsRow + for rows.Next() { + var i GetWorkspaceAgentsForMetricsRow + if err := rows.Scan( + &i.WorkspaceID, + &i.WorkspaceName, + &i.OwnerUsername, + &i.TemplateName, + &i.TemplateVersionName, + &i.WorkspaceAgent.ID, + &i.WorkspaceAgent.CreatedAt, + &i.WorkspaceAgent.UpdatedAt, + &i.WorkspaceAgent.Name, + &i.WorkspaceAgent.FirstConnectedAt, + &i.WorkspaceAgent.LastConnectedAt, + &i.WorkspaceAgent.DisconnectedAt, + &i.WorkspaceAgent.ResourceID, + &i.WorkspaceAgent.AuthToken, + &i.WorkspaceAgent.AuthInstanceID, + &i.WorkspaceAgent.Architecture, + &i.WorkspaceAgent.EnvironmentVariables, + &i.WorkspaceAgent.OperatingSystem, + &i.WorkspaceAgent.InstanceMetadata, + &i.WorkspaceAgent.ResourceMetadata, + &i.WorkspaceAgent.Directory, + &i.WorkspaceAgent.Version, + &i.WorkspaceAgent.LastConnectedReplicaID, + &i.WorkspaceAgent.ConnectionTimeoutSeconds, + &i.WorkspaceAgent.TroubleshootingURL, + &i.WorkspaceAgent.MOTDFile, + &i.WorkspaceAgent.LifecycleState, + &i.WorkspaceAgent.ExpandedDirectory, + &i.WorkspaceAgent.LogsLength, + &i.WorkspaceAgent.LogsOverflowed, + &i.WorkspaceAgent.StartedAt, + &i.WorkspaceAgent.ReadyAt, + pq.Array(&i.WorkspaceAgent.Subsystems), + pq.Array(&i.WorkspaceAgent.DisplayApps), + &i.WorkspaceAgent.APIVersion, + &i.WorkspaceAgent.DisplayOrder, + &i.WorkspaceAgent.ParentID, + &i.WorkspaceAgent.APIKeyScope, + &i.WorkspaceAgent.Deleted, + ); err != nil { + return nil, err + } + items = 
append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getWorkspaceAgentsInLatestBuildByWorkspaceID = `-- name: GetWorkspaceAgentsInLatestBuildByWorkspaceID :many SELECT workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope, workspace_agents.deleted @@ -18278,7 +19210,7 @@ WITH agent_stats AS ( coalesce((PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_95 FROM workspace_agent_stats -- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms. 
- WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0 + WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0 GROUP BY user_id, agent_id, workspace_id, template_id ), latest_agent_stats AS ( SELECT @@ -20995,7 +21927,7 @@ func (q *sqlQuerier) GetWorkspaceACLByID(ctx context.Context, id uuid.UUID) (Get const getWorkspaceByAgentID = `-- name: GetWorkspaceByAgentID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21056,13 +21988,14 @@ func (q *sqlQuerier) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUI &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByID = `-- name: GetWorkspaceByID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, 
template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded WHERE @@ -21104,13 +22037,14 @@ func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Worksp &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByOwnerIDAndName = `-- name: GetWorkspaceByOwnerIDAndName :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21159,13 +22093,14 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByResourceID = `-- name: GetWorkspaceByResourceID :one SELECT - id, created_at, 
updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21221,13 +22156,14 @@ func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uu &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, 
template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21295,6 +22231,7 @@ func (q *sqlQuerier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspace &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } @@ -21344,7 +22281,7 @@ SELECT ), filtered_workspaces AS ( SELECT - workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, + workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, workspaces.task_id, latest_build.template_version_id, latest_build.template_version_name, latest_build.completed_at as latest_build_completed_at, @@ 
-21635,7 +22572,7 @@ WHERE -- @authorize_filter ), filtered_workspaces_order AS ( SELECT - fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.group_acl, fw.user_acl, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_ai_task, fw.latest_build_has_external_agent + fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.group_acl, fw.user_acl, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.task_id, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_ai_task, fw.latest_build_has_external_agent FROM filtered_workspaces fw ORDER BY @@ -21656,7 +22593,7 @@ WHERE $25 ), filtered_workspaces_order_with_summary AS ( SELECT - fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, 
fwo.next_start_at, fwo.group_acl, fwo.user_acl, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_ai_task, fwo.latest_build_has_external_agent + fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, fwo.next_start_at, fwo.group_acl, fwo.user_acl, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.task_id, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_ai_task, fwo.latest_build_has_external_agent FROM filtered_workspaces_order fwo -- Return a technical summary row with total count of workspaces. 
@@ -21692,6 +22629,7 @@ WHERE '', -- template_display_name '', -- template_icon '', -- template_description + '00000000-0000-0000-0000-000000000000'::uuid, -- task_id -- Extra columns added to ` + "`" + `filtered_workspaces` + "`" + ` '00000000-0000-0000-0000-000000000000'::uuid, -- template_version_id '', -- template_version_name @@ -21711,7 +22649,7 @@ WHERE filtered_workspaces ) SELECT - fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.group_acl, fwos.user_acl, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, fwos.latest_build_status, fwos.latest_build_has_ai_task, fwos.latest_build_has_external_agent, + fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.group_acl, fwos.user_acl, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.task_id, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, 
fwos.latest_build_status, fwos.latest_build_has_ai_task, fwos.latest_build_has_external_agent, tc.count FROM filtered_workspaces_order_with_summary fwos @@ -21779,6 +22717,7 @@ type GetWorkspacesRow struct { TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` TemplateIcon string `db:"template_icon" json:"template_icon"` TemplateDescription string `db:"template_description" json:"template_description"` + TaskID uuid.NullUUID `db:"task_id" json:"task_id"` TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` LatestBuildCompletedAt sql.NullTime `db:"latest_build_completed_at" json:"latest_build_completed_at"` @@ -21861,6 +22800,7 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, &i.TemplateVersionID, &i.TemplateVersionName, &i.LatestBuildCompletedAt, @@ -22166,6 +23106,64 @@ func (q *sqlQuerier) GetWorkspacesEligibleForTransition(ctx context.Context, now return items, nil } +const getWorkspacesForWorkspaceMetrics = `-- name: GetWorkspacesForWorkspaceMetrics :many +SELECT + u.username as owner_username, + t.name as template_name, + tv.name as template_version_name, + pj.job_status as latest_build_status, + wb.transition as latest_build_transition +FROM workspaces w +JOIN users u ON w.owner_id = u.id +JOIN templates t ON w.template_id = t.id +JOIN workspace_builds wb ON w.id = wb.workspace_id +JOIN provisioner_jobs pj ON wb.job_id = pj.id +LEFT JOIN template_versions tv ON wb.template_version_id = tv.id +WHERE w.deleted = false +AND wb.build_number = ( + SELECT MAX(wb2.build_number) + FROM workspace_builds wb2 + WHERE wb2.workspace_id = w.id +) +` + +type GetWorkspacesForWorkspaceMetricsRow struct { + OwnerUsername string `db:"owner_username" json:"owner_username"` + TemplateName string `db:"template_name" 
json:"template_name"` + TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` + LatestBuildStatus ProvisionerJobStatus `db:"latest_build_status" json:"latest_build_status"` + LatestBuildTransition WorkspaceTransition `db:"latest_build_transition" json:"latest_build_transition"` +} + +func (q *sqlQuerier) GetWorkspacesForWorkspaceMetrics(ctx context.Context) ([]GetWorkspacesForWorkspaceMetricsRow, error) { + rows, err := q.db.QueryContext(ctx, getWorkspacesForWorkspaceMetrics) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetWorkspacesForWorkspaceMetricsRow + for rows.Next() { + var i GetWorkspacesForWorkspaceMetricsRow + if err := rows.Scan( + &i.OwnerUsername, + &i.TemplateName, + &i.TemplateVersionName, + &i.LatestBuildStatus, + &i.LatestBuildTransition, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const insertWorkspace = `-- name: InsertWorkspace :one INSERT INTO workspaces ( diff --git a/coderd/database/queries/aibridge.sql b/coderd/database/queries/aibridge.sql index 79d41defd54af..fd5a9868bbaa8 100644 --- a/coderd/database/queries/aibridge.sql +++ b/coderd/database/queries/aibridge.sql @@ -6,6 +6,14 @@ INSERT INTO aibridge_interceptions ( ) RETURNING *; +-- name: UpdateAIBridgeInterceptionEnded :one +UPDATE aibridge_interceptions + SET ended_at = @ended_at::timestamptz +WHERE + id = @id::uuid + AND ended_at IS NULL +RETURNING *; + -- name: InsertAIBridgeTokenUsage :one INSERT INTO aibridge_token_usages ( id, interception_id, provider_response_id, input_tokens, output_tokens, metadata, created_at @@ -75,11 +83,48 @@ ORDER BY created_at ASC, id ASC; +-- name: CountAIBridgeInterceptions :one +SELECT + COUNT(*) +FROM + aibridge_interceptions +WHERE + -- Filter by time frame + CASE + WHEN @started_after::timestamptz != 
'0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= @started_after::timestamptz + ELSE true + END + AND CASE + WHEN @started_before::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at <= @started_before::timestamptz + ELSE true + END + -- Filter initiator_id + AND CASE + WHEN @initiator_id::uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN aibridge_interceptions.initiator_id = @initiator_id::uuid + ELSE true + END + -- Filter provider + AND CASE + WHEN @provider::text != '' THEN aibridge_interceptions.provider = @provider::text + ELSE true + END + -- Filter model + AND CASE + WHEN @model::text != '' THEN aibridge_interceptions.model = @model::text + ELSE true + END + -- Authorize Filter clause will be injected below in ListAuthorizedAIBridgeInterceptions + -- @authorize_filter +; + -- name: ListAIBridgeInterceptions :many SELECT - * + sqlc.embed(aibridge_interceptions), + sqlc.embed(visible_users) FROM aibridge_interceptions +JOIN + visible_users ON visible_users.id = aibridge_interceptions.initiator_id WHERE -- Filter by time frame CASE @@ -127,6 +172,7 @@ ORDER BY aibridge_interceptions.started_at DESC, aibridge_interceptions.id DESC LIMIT COALESCE(NULLIF(@limit_::integer, 0), 100) +OFFSET @offset_ ; -- name: ListAIBridgeTokenUsagesByInterceptionIDs :many @@ -161,3 +207,122 @@ WHERE ORDER BY created_at ASC, id ASC; + +-- name: ListAIBridgeInterceptionsTelemetrySummaries :many +-- Finds all unique AIBridge interception telemetry summaries combinations +-- (provider, model, client) in the given timeframe for telemetry reporting. 
+SELECT + DISTINCT ON (provider, model, client) + provider, + model, + -- TODO: use the client value once we have it (see https://github.com/coder/aibridge/issues/31) + 'unknown' AS client +FROM + aibridge_interceptions +WHERE + ended_at IS NOT NULL -- incomplete interceptions are not included in summaries + AND ended_at >= @ended_at_after::timestamptz + AND ended_at < @ended_at_before::timestamptz; + +-- name: CalculateAIBridgeInterceptionsTelemetrySummary :one +-- Calculates the telemetry summary for a given provider, model, and client +-- combination for telemetry reporting. +WITH interceptions_in_range AS ( + -- Get all matching interceptions in the given timeframe. + SELECT + id, + initiator_id, + (ended_at - started_at) AS duration + FROM + aibridge_interceptions + WHERE + provider = @provider::text + AND model = @model::text + -- TODO: use the client value once we have it (see https://github.com/coder/aibridge/issues/31) + AND 'unknown' = @client::text + AND ended_at IS NOT NULL -- incomplete interceptions are not included in summaries + AND ended_at >= @ended_at_after::timestamptz + AND ended_at < @ended_at_before::timestamptz +), +interception_counts AS ( + SELECT + COUNT(id) AS interception_count, + COUNT(DISTINCT initiator_id) AS unique_initiator_count + FROM + interceptions_in_range +), +duration_percentiles AS ( + SELECT + (COALESCE(PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p50_millis, + (COALESCE(PERCENTILE_CONT(0.90) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p90_millis, + (COALESCE(PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p95_millis, + (COALESCE(PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY EXTRACT(EPOCH FROM duration)), 0) * 1000)::bigint AS interception_duration_p99_millis + FROM + interceptions_in_range +), +token_aggregates AS ( + SELECT 
+ COALESCE(SUM(tu.input_tokens), 0) AS token_count_input, + COALESCE(SUM(tu.output_tokens), 0) AS token_count_output, + -- Cached tokens are stored in metadata JSON, extract if available. + -- Read tokens may be stored in: + -- - cache_read_input (Anthropic) + -- - prompt_cached (OpenAI) + COALESCE(SUM( + COALESCE((tu.metadata->>'cache_read_input')::bigint, 0) + + COALESCE((tu.metadata->>'prompt_cached')::bigint, 0) + ), 0) AS token_count_cached_read, + -- Written tokens may be stored in: + -- - cache_creation_input (Anthropic) + -- Note that cache_ephemeral_5m_input and cache_ephemeral_1h_input on + -- Anthropic are included in the cache_creation_input field. + COALESCE(SUM( + COALESCE((tu.metadata->>'cache_creation_input')::bigint, 0) + ), 0) AS token_count_cached_written, + COUNT(tu.id) AS token_usages_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_token_usages tu ON i.id = tu.interception_id +), +prompt_aggregates AS ( + SELECT + COUNT(up.id) AS user_prompts_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_user_prompts up ON i.id = up.interception_id +), +tool_aggregates AS ( + SELECT + COUNT(tu.id) FILTER (WHERE tu.injected = true) AS tool_calls_count_injected, + COUNT(tu.id) FILTER (WHERE tu.injected = false) AS tool_calls_count_non_injected, + COUNT(tu.id) FILTER (WHERE tu.injected = true AND tu.invocation_error IS NOT NULL) AS injected_tool_call_error_count + FROM + interceptions_in_range i + LEFT JOIN + aibridge_tool_usages tu ON i.id = tu.interception_id +) +SELECT + ic.interception_count::bigint AS interception_count, + dp.interception_duration_p50_millis::bigint AS interception_duration_p50_millis, + dp.interception_duration_p90_millis::bigint AS interception_duration_p90_millis, + dp.interception_duration_p95_millis::bigint AS interception_duration_p95_millis, + dp.interception_duration_p99_millis::bigint AS interception_duration_p99_millis, + ic.unique_initiator_count::bigint AS unique_initiator_count, + 
pa.user_prompts_count::bigint AS user_prompts_count, + tok_agg.token_usages_count::bigint AS token_usages_count, + tok_agg.token_count_input::bigint AS token_count_input, + tok_agg.token_count_output::bigint AS token_count_output, + tok_agg.token_count_cached_read::bigint AS token_count_cached_read, + tok_agg.token_count_cached_written::bigint AS token_count_cached_written, + tool_agg.tool_calls_count_injected::bigint AS tool_calls_count_injected, + tool_agg.tool_calls_count_non_injected::bigint AS tool_calls_count_non_injected, + tool_agg.injected_tool_call_error_count::bigint AS injected_tool_call_error_count +FROM + interception_counts ic, + duration_percentiles dp, + token_aggregates tok_agg, + prompt_aggregates pa, + tool_aggregates tool_agg +; diff --git a/coderd/database/queries/prebuilds.sql b/coderd/database/queries/prebuilds.sql index 2ad7f41d41fea..ae70593b269d9 100644 --- a/coderd/database/queries/prebuilds.sql +++ b/coderd/database/queries/prebuilds.sql @@ -121,7 +121,7 @@ ORDER BY latest_prebuilds.id; -- name: CountInProgressPrebuilds :many -- CountInProgressPrebuilds returns the number of in-progress prebuilds, grouped by preset ID and transition. --- Prebuild considered in-progress if it's in the "starting", "stopping", or "deleting" state. +-- Prebuild considered in-progress if it's in the "pending", "starting", "stopping", or "deleting" state. 
SELECT t.id AS template_id, wpb.template_version_id, wpb.transition, COUNT(wpb.transition)::int AS count, wlb.template_version_preset_id as preset_id FROM workspace_latest_builds wlb INNER JOIN workspace_prebuild_builds wpb ON wpb.id = wlb.id @@ -272,3 +272,102 @@ FROM preset_matches pm WHERE pm.total_preset_params = pm.matching_params -- All preset parameters must match ORDER BY pm.total_preset_params DESC -- Return the preset with the most parameters LIMIT 1; + +-- name: CountPendingNonActivePrebuilds :many +-- CountPendingNonActivePrebuilds returns the number of pending prebuilds for non-active template versions +SELECT + wpb.template_version_preset_id AS preset_id, + COUNT(*)::int AS count +FROM workspace_prebuild_builds wpb +INNER JOIN provisioner_jobs pj ON pj.id = wpb.job_id +INNER JOIN workspaces w ON w.id = wpb.workspace_id +INNER JOIN templates t ON t.id = w.template_id +WHERE + wpb.template_version_id != t.active_version_id + -- Only considers initial builds, i.e. created by the reconciliation loop + AND wpb.build_number = 1 + -- Only consider 'start' transitions (provisioning), not 'stop'/'delete' (deprovisioning) + -- Deprovisioning jobs should complete naturally as they're already cleaning up resources + AND wpb.transition = 'start'::workspace_transition + -- Pending jobs that have not yet been picked up by a provisioner + AND pj.job_status = 'pending'::provisioner_job_status + AND pj.worker_id IS NULL + AND pj.canceled_at IS NULL + AND pj.completed_at IS NULL +GROUP BY wpb.template_version_preset_id; + +-- name: UpdatePrebuildProvisionerJobWithCancel :many +-- Cancels all pending provisioner jobs for prebuilt workspaces on a specific preset from an +-- inactive template version. +-- This is an optimization to clean up stale pending jobs. 
+WITH jobs_to_cancel AS ( + SELECT pj.id, w.id AS workspace_id, w.template_id, wpb.template_version_preset_id + FROM provisioner_jobs pj + INNER JOIN workspace_prebuild_builds wpb ON wpb.job_id = pj.id + INNER JOIN workspaces w ON w.id = wpb.workspace_id + INNER JOIN templates t ON t.id = w.template_id + WHERE + wpb.template_version_id != t.active_version_id + AND wpb.template_version_preset_id = @preset_id + -- Only considers initial builds, i.e. created by the reconciliation loop + AND wpb.build_number = 1 + -- Only consider 'start' transitions (provisioning), not 'stop'/'delete' (deprovisioning) + -- Deprovisioning jobs should complete naturally as they're already cleaning up resources + AND wpb.transition = 'start'::workspace_transition + -- Pending jobs that have not yet been picked up by a provisioner + AND pj.job_status = 'pending'::provisioner_job_status + AND pj.worker_id IS NULL + AND pj.canceled_at IS NULL + AND pj.completed_at IS NULL +) +UPDATE provisioner_jobs +SET + canceled_at = @now::timestamptz, + completed_at = @now::timestamptz +FROM jobs_to_cancel +WHERE provisioner_jobs.id = jobs_to_cancel.id +RETURNING jobs_to_cancel.id, jobs_to_cancel.workspace_id, jobs_to_cancel.template_id, jobs_to_cancel.template_version_preset_id; + +-- name: GetOrganizationsWithPrebuildStatus :many +-- GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their +-- membership status for the prebuilds system user (org membership, group existence, group membership). 
+WITH orgs_with_prebuilds AS ( + -- Get unique organizations that have presets with prebuilds configured + SELECT DISTINCT o.id, o.name + FROM organizations o + INNER JOIN templates t ON t.organization_id = o.id + INNER JOIN template_versions tv ON tv.template_id = t.id + INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id + WHERE tvp.desired_instances IS NOT NULL +), +prebuild_user_membership AS ( + -- Check if the user is a member of the organizations + SELECT om.organization_id + FROM organization_members om + INNER JOIN orgs_with_prebuilds owp ON owp.id = om.organization_id + WHERE om.user_id = @user_id::uuid +), +prebuild_groups AS ( + -- Check if the organizations have the prebuilds group + SELECT g.organization_id, g.id as group_id + FROM groups g + INNER JOIN orgs_with_prebuilds owp ON owp.id = g.organization_id + WHERE g.name = @group_name::text +), +prebuild_group_membership AS ( + -- Check if the user is in the prebuilds group + SELECT pg.organization_id + FROM prebuild_groups pg + INNER JOIN group_members gm ON gm.group_id = pg.group_id + WHERE gm.user_id = @user_id::uuid +) +SELECT + owp.id AS organization_id, + owp.name AS organization_name, + (pum.organization_id IS NOT NULL)::boolean AS has_prebuild_user, + pg.group_id AS prebuilds_group_id, + (pgm.organization_id IS NOT NULL)::boolean AS has_prebuild_user_in_group +FROM orgs_with_prebuilds owp +LEFT JOIN prebuild_groups pg ON pg.organization_id = owp.id +LEFT JOIN prebuild_user_membership pum ON pum.organization_id = owp.id +LEFT JOIN prebuild_group_membership pgm ON pgm.organization_id = owp.id; diff --git a/coderd/database/queries/provisionerdaemons.sql b/coderd/database/queries/provisionerdaemons.sql index ad6c0948eb448..03997c504cb1a 100644 --- a/coderd/database/queries/provisionerdaemons.sql +++ b/coderd/database/queries/provisionerdaemons.sql @@ -113,7 +113,7 @@ WHERE -- Filter by max age if provided AND ( sqlc.narg('max_age_ms')::bigint IS NULL - OR pd.last_seen_at IS 
NULL + OR pd.last_seen_at IS NULL OR pd.last_seen_at >= (NOW() - (sqlc.narg('max_age_ms')::bigint || ' ms')::interval) ) AND ( diff --git a/coderd/database/queries/provisionerjoblogs.sql b/coderd/database/queries/provisionerjoblogs.sql index c0ef188bdd382..14b9ccda9b1ff 100644 --- a/coderd/database/queries/provisionerjoblogs.sql +++ b/coderd/database/queries/provisionerjoblogs.sql @@ -19,19 +19,19 @@ SELECT unnest(@level :: log_level [ ]) AS LEVEL, unnest(@stage :: VARCHAR(128) [ ]) AS stage, unnest(@output :: VARCHAR(1024) [ ]) AS output RETURNING *; - + -- name: UpdateProvisionerJobLogsOverflowed :exec -UPDATE +UPDATE provisioner_jobs -SET +SET logs_overflowed = $2 -WHERE +WHERE id = $1; - + -- name: UpdateProvisionerJobLogsLength :exec -UPDATE +UPDATE provisioner_jobs -SET +SET logs_length = logs_length + $2 -WHERE +WHERE id = $1; diff --git a/coderd/database/queries/provisionerjobs.sql b/coderd/database/queries/provisionerjobs.sql index dfc95a0bb4570..02d67d628a861 100644 --- a/coderd/database/queries/provisionerjobs.sql +++ b/coderd/database/queries/provisionerjobs.sql @@ -224,6 +224,7 @@ WHERE AND (COALESCE(array_length(@ids::uuid[], 1), 0) = 0 OR pj.id = ANY(@ids::uuid[])) AND (COALESCE(array_length(@status::provisioner_job_status[], 1), 0) = 0 OR pj.job_status = ANY(@status::provisioner_job_status[])) AND (@tags::tagset = 'null'::tagset OR provisioner_tagset_contains(pj.tags::tagset, @tags::tagset)) + AND (@initiator_id::uuid = '00000000-0000-0000-0000-000000000000'::uuid OR pj.initiator_id = @initiator_id::uuid) GROUP BY pj.id, qp.queue_position, diff --git a/coderd/database/queries/provisionerkeys.sql b/coderd/database/queries/provisionerkeys.sql index 3fb05a8d0f613..0bf95069ddfe6 100644 --- a/coderd/database/queries/provisionerkeys.sql +++ b/coderd/database/queries/provisionerkeys.sql @@ -34,7 +34,7 @@ FROM provisioner_keys WHERE organization_id = $1 -AND +AND lower(name) = lower(@name); -- name: ListProvisionerKeysByOrganizationExcludeReserved :many @@ 
-47,10 +47,10 @@ WHERE AND -- exclude reserved built-in key id != '00000000-0000-0000-0000-000000000001'::uuid -AND +AND -- exclude reserved user-auth key id != '00000000-0000-0000-0000-000000000002'::uuid -AND +AND -- exclude reserved psk key id != '00000000-0000-0000-0000-000000000003'::uuid; diff --git a/coderd/database/queries/tasks.sql b/coderd/database/queries/tasks.sql new file mode 100644 index 0000000000000..d0617ad39f4dc --- /dev/null +++ b/coderd/database/queries/tasks.sql @@ -0,0 +1,59 @@ +-- name: InsertTask :one +INSERT INTO tasks + (id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9) +RETURNING *; + +-- name: UpdateTaskWorkspaceID :one +UPDATE + tasks +SET + workspace_id = $2 +FROM + workspaces w +JOIN + template_versions tv +ON + tv.template_id = w.template_id +WHERE + tasks.id = $1 + AND tasks.workspace_id IS NULL + AND w.id = $2 + AND tv.id = tasks.template_version_id +RETURNING + tasks.*; + +-- name: UpsertTaskWorkspaceApp :one +INSERT INTO task_workspace_apps + (task_id, workspace_build_number, workspace_agent_id, workspace_app_id) +VALUES + ($1, $2, $3, $4) +ON CONFLICT (task_id, workspace_build_number) +DO UPDATE SET + workspace_agent_id = EXCLUDED.workspace_agent_id, + workspace_app_id = EXCLUDED.workspace_app_id +RETURNING *; + +-- name: GetTaskByID :one +SELECT * FROM tasks_with_status WHERE id = @id::uuid; + +-- name: GetTaskByWorkspaceID :one +SELECT * FROM tasks_with_status WHERE workspace_id = @workspace_id::uuid; + +-- name: ListTasks :many +SELECT * FROM tasks_with_status tws +WHERE tws.deleted_at IS NULL +AND CASE WHEN @owner_id::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.owner_id = @owner_id::UUID ELSE TRUE END +AND CASE WHEN @organization_id::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.organization_id = @organization_id::UUID ELSE TRUE END +AND CASE WHEN @status::text != '' THEN tws.status = 
@status::task_status ELSE TRUE END +ORDER BY tws.created_at DESC; + +-- name: DeleteTask :one +UPDATE tasks +SET + deleted_at = @deleted_at::timestamptz +WHERE + id = @id::uuid + AND deleted_at IS NULL +RETURNING *; diff --git a/coderd/database/queries/telemetrylocks.sql b/coderd/database/queries/telemetrylocks.sql new file mode 100644 index 0000000000000..14e9730a69394 --- /dev/null +++ b/coderd/database/queries/telemetrylocks.sql @@ -0,0 +1,17 @@ +-- name: InsertTelemetryLock :exec +-- Inserts a new lock row into the telemetry_locks table. Replicas should call +-- this function prior to attempting to generate or publish a heartbeat event to +-- the telemetry service. +-- If the query returns a duplicate primary key error, the replica should not +-- attempt to generate or publish the event to the telemetry service. +INSERT INTO + telemetry_locks (event_type, period_ending_at) +VALUES + ($1, $2); + +-- name: DeleteOldTelemetryLocks :exec +-- Deletes old telemetry locks from the telemetry_locks table. 
+DELETE FROM + telemetry_locks +WHERE + period_ending_at < @period_ending_at_before::timestamptz; diff --git a/coderd/database/queries/testadmin.sql b/coderd/database/queries/testadmin.sql index 77d39ce52768c..9cbaf67d2273c 100644 --- a/coderd/database/queries/testadmin.sql +++ b/coderd/database/queries/testadmin.sql @@ -6,14 +6,14 @@ DO $$ DECLARE table_record record; BEGIN - FOR table_record IN - SELECT table_schema, table_name - FROM information_schema.tables + FOR table_record IN + SELECT table_schema, table_name + FROM information_schema.tables WHERE table_schema NOT IN ('pg_catalog', 'information_schema') AND table_type = 'BASE TABLE' LOOP - EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL', - table_record.table_schema, + EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL', + table_record.table_schema, table_record.table_name); END LOOP; END; diff --git a/coderd/database/queries/workspaceagents.sql b/coderd/database/queries/workspaceagents.sql index c67435d7cbd06..cc59e96544778 100644 --- a/coderd/database/queries/workspaceagents.sql +++ b/coderd/database/queries/workspaceagents.sql @@ -365,3 +365,26 @@ WHERE id = $1 AND parent_id IS NOT NULL AND deleted = FALSE; + +-- name: GetWorkspaceAgentsForMetrics :many +SELECT + w.id as workspace_id, + w.name as workspace_name, + u.username as owner_username, + t.name as template_name, + tv.name as template_version_name, + sqlc.embed(workspace_agents) +FROM workspaces w +JOIN users u ON w.owner_id = u.id +JOIN templates t ON w.template_id = t.id +JOIN workspace_builds wb ON w.id = wb.workspace_id +LEFT JOIN template_versions tv ON wb.template_version_id = tv.id +JOIN workspace_resources wr ON wb.job_id = wr.job_id +JOIN workspace_agents ON wr.id = workspace_agents.resource_id +WHERE w.deleted = false +AND wb.build_number = ( + SELECT MAX(wb2.build_number) + FROM workspace_builds wb2 + WHERE wb2.workspace_id = w.id +) +AND workspace_agents.deleted = FALSE; diff --git 
a/coderd/database/queries/workspaceagentstats.sql b/coderd/database/queries/workspaceagentstats.sql index f2f2bdbe2824e..9c49b281f6e87 100644 --- a/coderd/database/queries/workspaceagentstats.sql +++ b/coderd/database/queries/workspaceagentstats.sql @@ -189,7 +189,7 @@ WITH agent_stats AS ( coalesce((PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_95 FROM workspace_agent_stats -- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms. - WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0 + WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0 GROUP BY user_id, agent_id, workspace_id, template_id ), latest_agent_stats AS ( SELECT diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql index 7ab67e98e41e6..d48285bb7de9c 100644 --- a/coderd/database/queries/workspaces.sql +++ b/coderd/database/queries/workspaces.sql @@ -457,6 +457,7 @@ WHERE '', -- template_display_name '', -- template_icon '', -- template_description + '00000000-0000-0000-0000-000000000000'::uuid, -- task_id -- Extra columns added to `filtered_workspaces` '00000000-0000-0000-0000-000000000000'::uuid, -- template_version_id '', -- template_version_name @@ -983,3 +984,23 @@ WHERE AND fsb.initiator_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid GROUP BY t.name, COALESCE(tvp.name, ''), o.name ORDER BY t.name, preset_name, o.name; + +-- name: GetWorkspacesForWorkspaceMetrics :many +SELECT + u.username as owner_username, + t.name as template_name, + tv.name as template_version_name, + pj.job_status as latest_build_status, + wb.transition as latest_build_transition +FROM workspaces w +JOIN users u ON w.owner_id = u.id +JOIN templates t ON w.template_id = t.id +JOIN workspace_builds wb ON w.id = wb.workspace_id +JOIN provisioner_jobs pj ON wb.job_id = pj.id +LEFT JOIN template_versions tv ON 
wb.template_version_id = tv.id +WHERE w.deleted = false +AND wb.build_number = ( + SELECT MAX(wb2.build_number) + FROM workspace_builds wb2 + WHERE wb2.workspace_id = w.id +); diff --git a/coderd/database/sqlc.yaml b/coderd/database/sqlc.yaml index 702064ecf260d..af700c14519be 100644 --- a/coderd/database/sqlc.yaml +++ b/coderd/database/sqlc.yaml @@ -103,6 +103,9 @@ sql: - column: "user_links.claims" go_type: type: "UserLinkClaims" + # Workaround for sqlc not interpreting the left join correctly. + - column: "tasks_with_status.workspace_build_number" + go_type: "database/sql.NullInt32" rename: group_member: GroupMemberTable group_members_expanded: GroupMember @@ -112,6 +115,8 @@ sql: workspace_build_with_user: WorkspaceBuild workspace: WorkspaceTable workspaces_expanded: Workspace + task: TaskTable + tasks_with_status: Task template_version: TemplateVersionTable template_version_with_user: TemplateVersion api_key: APIKey diff --git a/coderd/database/types.go b/coderd/database/types.go index b534e9e269195..fefba8acb747e 100644 --- a/coderd/database/types.go +++ b/coderd/database/types.go @@ -163,9 +163,7 @@ func (m StringMapOfInt) Value() (driver.Value, error) { type CustomRolePermissions []CustomRolePermission -// APIKeyScopes implements sql.Scanner and driver.Valuer so it can be read from -// and written to the Postgres api_key_scope[] enum array column. -func (s *APIKeyScopes) Scan(src interface{}) error { +func (s *APIKeyScopes) Scan(src any) error { var arr []string if err := pq.Array(&arr).Scan(src); err != nil { return err @@ -314,36 +312,11 @@ func ParseIP(ipStr string) pqtype.Inet { } } -// AllowListTarget represents a single scope allow-list entry. -// It encodes a resource tuple (type, id) and provides helpers for -// consistent string and JSON representations across the codebase. -type AllowListTarget struct { - Type string `json:"type"` - ID string `json:"id"` -} - -// String returns the canonical database representation "type:id". 
-func (t AllowListTarget) String() string { - return t.Type + ":" + t.ID -} - -// ParseAllowListTarget parses the canonical string form "type:id". -func ParseAllowListTarget(s string) (AllowListTarget, error) { - targetType, id, ok := rbac.ParseResourceAction(s) - if !ok { - return AllowListTarget{}, xerrors.Errorf("invalid allow list target: %q", s) - } - return AllowListTarget{Type: targetType, ID: id}, nil -} - -// AllowListWildcard returns the wildcard allow-list entry {"*","*"}. -func AllowListWildcard() AllowListTarget { return AllowListTarget{Type: "*", ID: "*"} } - // AllowList is a typed wrapper around a list of AllowListTarget entries. // It implements sql.Scanner and driver.Valuer so it can be stored in and // loaded from a Postgres text[] column that stores each entry in the // canonical form "type:id". -type AllowList []AllowListTarget +type AllowList []rbac.AllowListElement // Scan implements sql.Scanner. It supports inputs that pq.Array can decode // into []string, and then converts each element to an AllowListTarget. 
@@ -352,13 +325,13 @@ func (a *AllowList) Scan(src any) error { if err := pq.Array(&raw).Scan(src); err != nil { return err } - out := make([]AllowListTarget, len(raw)) + out := make([]rbac.AllowListElement, len(raw)) for i, s := range raw { - t, err := ParseAllowListTarget(s) + e, err := rbac.ParseAllowListEntry(s) if err != nil { return err } - out[i] = t + out[i] = e } *a = out return nil diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index 36fca8f058135..b804d9a73071e 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -59,8 +59,10 @@ const ( UniqueTailnetCoordinatorsPkey UniqueConstraint = "tailnet_coordinators_pkey" // ALTER TABLE ONLY tailnet_coordinators ADD CONSTRAINT tailnet_coordinators_pkey PRIMARY KEY (id); UniqueTailnetPeersPkey UniqueConstraint = "tailnet_peers_pkey" // ALTER TABLE ONLY tailnet_peers ADD CONSTRAINT tailnet_peers_pkey PRIMARY KEY (id, coordinator_id); UniqueTailnetTunnelsPkey UniqueConstraint = "tailnet_tunnels_pkey" // ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_pkey PRIMARY KEY (coordinator_id, src_id, dst_id); + UniqueTaskWorkspaceAppsPkey UniqueConstraint = "task_workspace_apps_pkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_pkey PRIMARY KEY (task_id, workspace_build_number); UniqueTasksPkey UniqueConstraint = "tasks_pkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_pkey PRIMARY KEY (id); UniqueTelemetryItemsPkey UniqueConstraint = "telemetry_items_pkey" // ALTER TABLE ONLY telemetry_items ADD CONSTRAINT telemetry_items_pkey PRIMARY KEY (key); + UniqueTelemetryLocksPkey UniqueConstraint = "telemetry_locks_pkey" // ALTER TABLE ONLY telemetry_locks ADD CONSTRAINT telemetry_locks_pkey PRIMARY KEY (event_type, period_ending_at); UniqueTemplateUsageStatsPkey UniqueConstraint = "template_usage_stats_pkey" // ALTER TABLE ONLY template_usage_stats ADD CONSTRAINT template_usage_stats_pkey PRIMARY KEY 
(start_time, template_id, user_id); UniqueTemplateVersionParametersTemplateVersionIDNameKey UniqueConstraint = "template_version_parameters_template_version_id_name_key" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT template_version_parameters_template_version_id_name_key UNIQUE (template_version_id, name); UniqueTemplateVersionPresetParametersPkey UniqueConstraint = "template_version_preset_parameters_pkey" // ALTER TABLE ONLY template_version_preset_parameters ADD CONSTRAINT template_version_preset_parameters_pkey PRIMARY KEY (id); @@ -120,6 +122,7 @@ const ( UniqueNotificationMessagesDedupeHashIndex UniqueConstraint = "notification_messages_dedupe_hash_idx" // CREATE UNIQUE INDEX notification_messages_dedupe_hash_idx ON notification_messages USING btree (dedupe_hash); UniqueOrganizationsSingleDefaultOrg UniqueConstraint = "organizations_single_default_org" // CREATE UNIQUE INDEX organizations_single_default_org ON organizations USING btree (is_default) WHERE (is_default = true); UniqueProvisionerKeysOrganizationIDNameIndex UniqueConstraint = "provisioner_keys_organization_id_name_idx" // CREATE UNIQUE INDEX provisioner_keys_organization_id_name_idx ON provisioner_keys USING btree (organization_id, lower((name)::text)); + UniqueTasksOwnerIDNameUniqueIndex UniqueConstraint = "tasks_owner_id_name_unique_idx" // CREATE UNIQUE INDEX tasks_owner_id_name_unique_idx ON tasks USING btree (owner_id, lower(name)) WHERE (deleted_at IS NULL); UniqueTemplateUsageStatsStartTimeTemplateIDUserIDIndex UniqueConstraint = "template_usage_stats_start_time_template_id_user_id_idx" // CREATE UNIQUE INDEX template_usage_stats_start_time_template_id_user_id_idx ON template_usage_stats USING btree (start_time, template_id, user_id); UniqueTemplatesOrganizationIDNameIndex UniqueConstraint = "templates_organization_id_name_idx" // CREATE UNIQUE INDEX templates_organization_id_name_idx ON templates USING btree (organization_id, lower((name)::text)) WHERE (deleted = false); 
UniqueUserLinksLinkedIDLoginTypeIndex UniqueConstraint = "user_links_linked_id_login_type_idx" // CREATE UNIQUE INDEX user_links_linked_id_login_type_idx ON user_links USING btree (linked_id, login_type) WHERE (linked_id <> ''::text); diff --git a/coderd/debug.go b/coderd/debug.go index 64c7c9e632d0a..4c0eff7f3366f 100644 --- a/coderd/debug.go +++ b/coderd/debug.go @@ -325,3 +325,57 @@ func loadDismissedHealthchecks(ctx context.Context, db database.Store, logger sl } return dismissedHealthchecks } + +// @Summary Debug pprof index +// @ID debug-pprof-index +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router /debug/pprof [get] +// @x-apidocgen {"skip": true} +func _debugPprofIndex(http.ResponseWriter, *http.Request) {} //nolint:unused + +// @Summary Debug pprof cmdline +// @ID debug-pprof-cmdline +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router /debug/pprof/cmdline [get] +// @x-apidocgen {"skip": true} +func _debugPprofCmdline(http.ResponseWriter, *http.Request) {} //nolint:unused + +// @Summary Debug pprof profile +// @ID debug-pprof-profile +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router /debug/pprof/profile [get] +// @x-apidocgen {"skip": true} +func _debugPprofProfile(http.ResponseWriter, *http.Request) {} //nolint:unused + +// @Summary Debug pprof symbol +// @ID debug-pprof-symbol +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router /debug/pprof/symbol [get] +// @x-apidocgen {"skip": true} +func _debugPprofSymbol(http.ResponseWriter, *http.Request) {} //nolint:unused + +// @Summary Debug pprof trace +// @ID debug-pprof-trace +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router /debug/pprof/trace [get] +// @x-apidocgen {"skip": true} +func _debugPprofTrace(http.ResponseWriter, *http.Request) {} //nolint:unused + +// @Summary Debug metrics +// @ID debug-metrics +// @Security CoderSessionToken +// @Success 200 +// @Tags Debug +// @Router 
/debug/metrics [get] +// @x-apidocgen {"skip": true} +func _debugMetrics(http.ResponseWriter, *http.Request) {} //nolint:unused diff --git a/coderd/externalauth/externalauth.go b/coderd/externalauth/externalauth.go index 41797706eeff1..f33a9d36700b8 100644 --- a/coderd/externalauth/externalauth.go +++ b/coderd/externalauth/externalauth.go @@ -781,6 +781,9 @@ func applyDefaultsToConfig(config *codersdk.ExternalAuthConfig) { // Dynamic defaults switch codersdk.EnhancedExternalAuthProvider(config.Type) { + case codersdk.EnhancedExternalAuthProviderGitHub: + copyDefaultSettings(config, gitHubDefaults(config)) + return case codersdk.EnhancedExternalAuthProviderGitLab: copyDefaultSettings(config, gitlabDefaults(config)) return @@ -814,6 +817,9 @@ func copyDefaultSettings(config *codersdk.ExternalAuthConfig, defaults codersdk. if config.ValidateURL == "" { config.ValidateURL = defaults.ValidateURL } + if config.RevokeURL == "" { + config.RevokeURL = defaults.RevokeURL + } if config.AppInstallURL == "" { config.AppInstallURL = defaults.AppInstallURL } @@ -852,6 +858,29 @@ func copyDefaultSettings(config *codersdk.ExternalAuthConfig, defaults codersdk. } } +// gitHubDefaults returns default config values for GitHub. +// The only dynamic value is the revocation URL which depends on client ID. +func gitHubDefaults(config *codersdk.ExternalAuthConfig) codersdk.ExternalAuthConfig { + defaults := codersdk.ExternalAuthConfig{ + AuthURL: xgithub.Endpoint.AuthURL, + TokenURL: xgithub.Endpoint.TokenURL, + ValidateURL: "https://api.github.com/user", + DisplayName: "GitHub", + DisplayIcon: "/icon/github.svg", + Regex: `^(https?://)?github\.com(/.*)?$`, + // "workflow" is required for managing GitHub Actions in a repository. 
+ Scopes: []string{"repo", "workflow"}, + DeviceCodeURL: "https://github.com/login/device/code", + AppInstallationsURL: "https://api.github.com/user/installations", + } + + if config.RevokeURL == "" && config.ClientID != "" { + defaults.RevokeURL = fmt.Sprintf("https://api.github.com/applications/%s/grant", config.ClientID) + } + + return defaults +} + func bitbucketServerDefaults(config *codersdk.ExternalAuthConfig) codersdk.ExternalAuthConfig { defaults := codersdk.ExternalAuthConfig{ DisplayName: "Bitbucket Server", @@ -1050,18 +1079,6 @@ var staticDefaults = map[codersdk.EnhancedExternalAuthProvider]codersdk.External Regex: `^(https?://)?bitbucket\.org(/.*)?$`, Scopes: []string{"account", "repository:write"}, }, - codersdk.EnhancedExternalAuthProviderGitHub: { - AuthURL: xgithub.Endpoint.AuthURL, - TokenURL: xgithub.Endpoint.TokenURL, - ValidateURL: "https://api.github.com/user", - DisplayName: "GitHub", - DisplayIcon: "/icon/github.svg", - Regex: `^(https?://)?github\.com(/.*)?$`, - // "workflow" is required for managing GitHub Actions in a repository. 
- Scopes: []string{"repo", "workflow"}, - DeviceCodeURL: "https://github.com/login/device/code", - AppInstallationsURL: "https://api.github.com/user/installations", - }, codersdk.EnhancedExternalAuthProviderSlack: { AuthURL: "https://slack.com/oauth/v2/authorize", TokenURL: "https://slack.com/api/oauth.v2.access", diff --git a/coderd/externalauth/externalauth_internal_test.go b/coderd/externalauth/externalauth_internal_test.go index f50593c019b4f..65bb5ee7deb62 100644 --- a/coderd/externalauth/externalauth_internal_test.go +++ b/coderd/externalauth/externalauth_internal_test.go @@ -19,6 +19,7 @@ func TestGitlabDefaults(t *testing.T) { AuthURL: "https://gitlab.com/oauth/authorize", TokenURL: "https://gitlab.com/oauth/token", ValidateURL: "https://gitlab.com/oauth/token/info", + RevokeURL: "https://gitlab.com/oauth/revoke", DisplayName: "GitLab", DisplayIcon: "/icon/gitlab.svg", Regex: `^(https?://)?gitlab\.com(/.*)?$`, @@ -79,6 +80,7 @@ func TestGitlabDefaults(t *testing.T) { config.AuthURL = "https://gitlab.company.org/oauth/authorize?foo=bar" config.ValidateURL = "https://gitlab.company.org/oauth/token/info" config.TokenURL = "https://gitlab.company.org/oauth/token" + config.RevokeURL = "https://gitlab.company.org/oauth/revoke" config.Regex = `^(https?://)?gitlab\.company\.org(/.*)?$` }, }, @@ -90,6 +92,7 @@ func TestGitlabDefaults(t *testing.T) { AuthURL: "https://auth.com/auth", ValidateURL: "https://validate.com/validate", TokenURL: "https://token.com/token", + RevokeURL: "https://token.com/revoke", Regex: "random", }, expected: cloud, @@ -97,6 +100,7 @@ func TestGitlabDefaults(t *testing.T) { config.AuthURL = "https://auth.com/auth" config.ValidateURL = "https://validate.com/validate" config.TokenURL = "https://token.com/token" + config.RevokeURL = "https://token.com/revoke" config.Regex = `random` }, }, diff --git a/coderd/healthcheck/accessurl_test.go b/coderd/healthcheck/accessurl_test.go index 29bf008346b37..85f362959718e 100644 --- 
a/coderd/healthcheck/accessurl_test.go +++ b/coderd/healthcheck/accessurl_test.go @@ -55,7 +55,7 @@ func TestAccessURL(t *testing.T) { defer cancel() report.Run(ctx, &healthcheck.AccessURLReportOptions{ - Client: nil, // defaults to http.DefaultClient + Client: &http.Client{}, AccessURL: nil, }) diff --git a/coderd/healthcheck/derphealth/derp_test.go b/coderd/healthcheck/derphealth/derp_test.go index c009ea982d620..08dc7db97f982 100644 --- a/coderd/healthcheck/derphealth/derp_test.go +++ b/coderd/healthcheck/derphealth/derp_test.go @@ -511,7 +511,8 @@ func tsDERPMap(ctx context.Context, t testing.TB) *tailcfg.DERPMap { req, err := http.NewRequestWithContext(ctx, "GET", ipn.DefaultControlURL+"/derpmap/default", nil) require.NoError(t, err) - res, err := http.DefaultClient.Do(req) + client := &http.Client{} + res, err := client.Do(req) require.NoError(t, err) defer res.Body.Close() require.Equal(t, http.StatusOK, res.StatusCode) diff --git a/coderd/httpmw/apikey.go b/coderd/httpmw/apikey.go index b534a124ddb95..29296fea59f5b 100644 --- a/coderd/httpmw/apikey.go +++ b/coderd/httpmw/apikey.go @@ -2,8 +2,6 @@ package httpmw import ( "context" - "crypto/sha256" - "crypto/subtle" "database/sql" "errors" "fmt" @@ -20,6 +18,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" @@ -188,8 +187,7 @@ func APIKeyFromRequest(ctx context.Context, db database.Store, sessionTokenFunc } // Checking to see if the secret is valid. 
- hashedSecret := sha256.Sum256([]byte(keySecret)) - if subtle.ConstantTimeCompare(key.HashedSecret, hashedSecret[:]) != 1 { + if !apikey.ValidateHash(key.HashedSecret, keySecret) { return nil, codersdk.Response{ Message: SignedOutErrorMessage, Detail: "API key secret is invalid.", @@ -434,7 +432,7 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon // If the key is valid, we also fetch the user roles and status. // The roles are used for RBAC authorize checks, and the status // is to block 'suspended' users from accessing the platform. - actor, userStatus, err := UserRBACSubject(ctx, cfg.DB, key.UserID, key.Scopes) + actor, userStatus, err := UserRBACSubject(ctx, cfg.DB, key.UserID, key.ScopeSet()) if err != nil { return write(http.StatusUnauthorized, codersdk.Response{ Message: internalErrorMessage, diff --git a/coderd/httpmw/apikey_test.go b/coderd/httpmw/apikey_test.go index 6e00b7a4535e2..020dc28e60139 100644 --- a/coderd/httpmw/apikey_test.go +++ b/coderd/httpmw/apikey_test.go @@ -19,6 +19,7 @@ import ( "golang.org/x/exp/slices" "golang.org/x/oauth2" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" @@ -32,10 +33,10 @@ import ( "github.com/coder/coder/v2/testutil" ) -func randomAPIKeyParts() (id string, secret string) { +func randomAPIKeyParts() (id string, secret string, hashedSecret []byte) { id, _ = cryptorand.String(10) - secret, _ = cryptorand.String(22) - return id, secret + secret, hashedSecret, _ = apikey.GenerateSecret(22) + return id, secret, hashedSecret } func TestAPIKey(t *testing.T) { @@ -171,10 +172,10 @@ func TestAPIKey(t *testing.T) { t.Run("NotFound", func(t *testing.T) { t.Parallel() var ( - db, _ = dbtestutil.NewDB(t) - id, secret = randomAPIKeyParts() - r = httptest.NewRequest("GET", "/", nil) - rw = httptest.NewRecorder() + db, _ = dbtestutil.NewDB(t) + id, secret, _ = 
randomAPIKeyParts() + r = httptest.NewRequest("GET", "/", nil) + rw = httptest.NewRecorder() ) r.Header.Set(codersdk.SessionTokenHeader, fmt.Sprintf("%s-%s", id, secret)) diff --git a/coderd/httpmw/authorize_test.go b/coderd/httpmw/authorize_test.go index 3ec4ca5a4c8f2..529ba94774539 100644 --- a/coderd/httpmw/authorize_test.go +++ b/coderd/httpmw/authorize_test.go @@ -2,7 +2,6 @@ package httpmw_test import ( "context" - "crypto/sha256" "fmt" "net" "net/http" @@ -20,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -141,10 +141,7 @@ func TestExtractUserRoles(t *testing.T) { } func addUser(t *testing.T, db database.Store, roles ...string) (database.User, string) { - var ( - id, secret = randomAPIKeyParts() - hashed = sha256.Sum256([]byte(secret)) - ) + id, secret, hashed := randomAPIKeyParts() if roles == nil { roles = []string{} } @@ -168,12 +165,14 @@ func addUser(t *testing.T, db database.Store, roles ...string) (database.User, s _, err = db.InsertAPIKey(context.Background(), database.InsertAPIKeyParams{ ID: id, UserID: user.ID, - HashedSecret: hashed[:], + HashedSecret: hashed, LastUsed: dbtime.Now(), ExpiresAt: dbtime.Now().Add(time.Minute), LoginType: database.LoginTypePassword, Scopes: database.APIKeyScopes{database.ApiKeyScopeCoderAll}, - AllowList: database.AllowList{database.AllowListWildcard()}, + AllowList: database.AllowList{ + {Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}, + }, IPAddress: pqtype.Inet{ IPNet: net.IPNet{ IP: net.ParseIP("0.0.0.0"), diff --git a/coderd/httpmw/authz.go b/coderd/httpmw/authz.go index 9f1f397c858e0..758f95cad28a9 100644 --- a/coderd/httpmw/authz.go +++ b/coderd/httpmw/authz.go @@ -4,6 +4,7 @@ package httpmw import ( "net/http" + "strconv" "github.com/go-chi/chi/v5" @@ -39,14 +40,24 @@ func AsAuthzSystem(mws 
...func(http.Handler) http.Handler) func(http.Handler) ht } } -// RecordAuthzChecks enables recording all of the authorization checks that +// RecordAuthzChecks enables recording all the authorization checks that // occurred in the processing of a request. This is mostly helpful for debugging // and understanding what permissions are required for a given action. // +// Can either be toggled on by a deployment wide configuration value, or opt-in on +// a per-request basis by setting the `x-record-authz-checks` header to a truthy value. +// // Requires using a Recorder Authorizer. -func RecordAuthzChecks(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - r = r.WithContext(rbac.WithAuthzCheckRecorder(r.Context())) - next.ServeHTTP(rw, r) - }) +// +//nolint:revive +func RecordAuthzChecks(always bool) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if enabled, _ := strconv.ParseBool(r.Header.Get("x-record-authz-checks")); enabled || always { + r = r.WithContext(rbac.WithAuthzCheckRecorder(r.Context())) + } + + next.ServeHTTP(rw, r) + }) + } } diff --git a/coderd/httpmw/taskparam.go b/coderd/httpmw/taskparam.go new file mode 100644 index 0000000000000..6ecc888b378fe --- /dev/null +++ b/coderd/httpmw/taskparam.go @@ -0,0 +1,57 @@ +package httpmw + +import ( + "context" + "net/http" + + "cdr.dev/slog" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/httpmw/loggermw" + "github.com/coder/coder/v2/codersdk" +) + +type taskParamContextKey struct{} + +// TaskParam returns the task from the ExtractTaskParam handler. 
+func TaskParam(r *http.Request) database.Task { + task, ok := r.Context().Value(taskParamContextKey{}).(database.Task) + if !ok { + panic("developer error: task param middleware not provided") + } + return task +} + +// ExtractTaskParam grabs a task from the "task" URL parameter by UUID. +func ExtractTaskParam(db database.Store) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + taskID, parsed := ParseUUIDParam(rw, r, "task") + if !parsed { + return + } + task, err := db.GetTaskByID(ctx, taskID) + if err != nil { + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task.", + Detail: err.Error(), + }) + return + } + + ctx = context.WithValue(ctx, taskParamContextKey{}, task) + + if rlogger := loggermw.RequestLoggerFromContext(ctx); rlogger != nil { + rlogger.WithFields(slog.F("task_id", task.ID), slog.F("task_name", task.Name)) + } + + next.ServeHTTP(rw, r.WithContext(ctx)) + }) + } +} diff --git a/coderd/httpmw/taskparam_test.go b/coderd/httpmw/taskparam_test.go new file mode 100644 index 0000000000000..559ccc2a2df2d --- /dev/null +++ b/coderd/httpmw/taskparam_test.go @@ -0,0 +1,120 @@ +package httpmw_test + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/codersdk" +) + +func TestTaskParam(t *testing.T) { + t.Parallel() + + setup := func(db database.Store) (*http.Request, database.User) { + user := dbgen.User(t, db, database.User{}) + _, token := dbgen.APIKey(t, 
db, database.APIKey{ + UserID: user.ID, + }) + + r := httptest.NewRequest("GET", "/", nil) + r.Header.Set(codersdk.SessionTokenHeader, token) + + ctx := chi.NewRouteContext() + ctx.URLParams.Add("user", "me") + r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, ctx)) + return r, user + } + + t.Run("None", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + rtr := chi.NewRouter() + rtr.Use(httpmw.ExtractTaskParam(db)) + rtr.Get("/", nil) + r, _ := setup(db) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusBadRequest, res.StatusCode) + }) + + t.Run("NotFound", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + rtr := chi.NewRouter() + rtr.Use(httpmw.ExtractTaskParam(db)) + rtr.Get("/", nil) + r, _ := setup(db) + chi.RouteContext(r.Context()).URLParams.Add("task", uuid.NewString()) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("Found", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ + DB: db, + RedirectToLogin: false, + }), + httpmw.ExtractTaskParam(db), + ) + rtr.Get("/", func(rw http.ResponseWriter, r *http.Request) { + _ = httpmw.TaskParam(r) + rw.WriteHeader(http.StatusOK) + }) + r, user := setup(db) + org := dbgen.Organization(t, db, database.Organization{}) + tpl := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{ + UUID: tpl.ID, + Valid: true, + }, + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + Name: "test-workspace", + OrganizationID: org.ID, + TemplateID: tpl.ID, + }) + task 
:= dbgen.Task(t, db, database.TaskTable{ + Name: "test-task", + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: workspace.ID, Valid: true}, + Prompt: "test prompt", + }) + chi.RouteContext(r.Context()).URLParams.Add("task", task.ID.String()) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) +} diff --git a/coderd/httpmw/workspaceparam_test.go b/coderd/httpmw/workspaceparam_test.go index 78e0929df7bca..e83cbe437e9ac 100644 --- a/coderd/httpmw/workspaceparam_test.go +++ b/coderd/httpmw/workspaceparam_test.go @@ -2,7 +2,6 @@ package httpmw_test import ( "context" - "crypto/sha256" "encoding/json" "fmt" "net" @@ -22,6 +21,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" ) @@ -30,10 +30,7 @@ func TestWorkspaceParam(t *testing.T) { t.Parallel() setup := func(db database.Store) (*http.Request, database.User) { - var ( - id, secret = randomAPIKeyParts() - hashed = sha256.Sum256([]byte(secret)) - ) + id, secret, hashed := randomAPIKeyParts() r := httptest.NewRequest("GET", "/", nil) r.Header.Set(codersdk.SessionTokenHeader, fmt.Sprintf("%s-%s", id, secret)) @@ -43,7 +40,7 @@ func TestWorkspaceParam(t *testing.T) { user, err := db.InsertUser(r.Context(), database.InsertUserParams{ ID: userID, Email: "testaccount@coder.com", - HashedPassword: hashed[:], + HashedPassword: hashed, Username: username, CreatedAt: dbtime.Now(), UpdatedAt: dbtime.Now(), @@ -62,12 +59,14 @@ func TestWorkspaceParam(t *testing.T) { _, err = db.InsertAPIKey(r.Context(), database.InsertAPIKeyParams{ ID: id, UserID: user.ID, - HashedSecret: hashed[:], + HashedSecret: hashed, LastUsed: dbtime.Now(), 
ExpiresAt: dbtime.Now().Add(time.Minute), LoginType: database.LoginTypePassword, Scopes: database.APIKeyScopes{database.ApiKeyScopeCoderAll}, - AllowList: database.AllowList{database.AllowListWildcard()}, + AllowList: database.AllowList{ + {Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}, + }, IPAddress: pqtype.Inet{ IPNet: net.IPNet{ IP: net.IPv4(127, 0, 0, 1), diff --git a/coderd/httpmw/workspaceproxy.go b/coderd/httpmw/workspaceproxy.go index 1f2de1ed46160..39f665210b66f 100644 --- a/coderd/httpmw/workspaceproxy.go +++ b/coderd/httpmw/workspaceproxy.go @@ -2,8 +2,6 @@ package httpmw import ( "context" - "crypto/sha256" - "crypto/subtle" "database/sql" "net/http" "strings" @@ -12,6 +10,7 @@ import ( "github.com/google/uuid" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" @@ -125,8 +124,7 @@ func ExtractWorkspaceProxy(opts ExtractWorkspaceProxyConfig) func(http.Handler) } // Do a subtle constant time comparison of the hash of the secret. - hashedSecret := sha256.Sum256([]byte(secret)) - if subtle.ConstantTimeCompare(proxy.TokenHashedSecret, hashedSecret[:]) != 1 { + if !apikey.ValidateHash(proxy.TokenHashedSecret, secret) { httpapi.Write(ctx, w, http.StatusUnauthorized, codersdk.Response{ Message: "Invalid external proxy token", Detail: "Invalid proxy token secret.", diff --git a/coderd/jobreaper/detector_test.go b/coderd/jobreaper/detector_test.go index 4078f92c03a36..9d3b7054fcc3c 100644 --- a/coderd/jobreaper/detector_test.go +++ b/coderd/jobreaper/detector_test.go @@ -533,6 +533,108 @@ func TestDetectorPendingWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testin detector.Wait() } +// TestDetectorWorkspaceBuildForDormantWorkspace ensures that the jobreaper has +// enough permissions to fix dormant workspaces. 
+// +// Dormant workspaces are treated as rbac.ResourceWorkspaceDormant rather than +// rbac.ResourceWorkspace, which resulted in a bug where the jobreaper would +// be able to see but not fix dormant workspaces. +func TestDetectorWorkspaceBuildForDormantWorkspace(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitLong) + db, pubsub = dbtestutil.NewDB(t) + log = testutil.Logger(t) + tickCh = make(chan time.Time) + statsCh = make(chan jobreaper.Stats) + ) + + var ( + now = time.Now() + tenMinAgo = now.Add(-time.Minute * 10) + sixMinAgo = now.Add(-time.Minute * 6) + org = dbgen.Organization(t, db, database.Organization{}) + user = dbgen.User(t, db, database.User{}) + file = dbgen.File(t, db, database.File{}) + template = dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + templateVersion = dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + TemplateID: uuid.NullUUID{ + UUID: template.ID, + Valid: true, + }, + CreatedBy: user.ID, + }) + workspace = dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: template.ID, + DormantAt: sql.NullTime{ + Time: now.Add(-time.Hour), + Valid: true, + }, + }) + + // First build. 
+ expectedWorkspaceBuildState = []byte(`{"dean":"cool","colin":"also cool"}`) + currentWorkspaceBuildJob = dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + CreatedAt: tenMinAgo, + UpdatedAt: sixMinAgo, + StartedAt: sql.NullTime{ + Time: tenMinAgo, + Valid: true, + }, + OrganizationID: org.ID, + InitiatorID: user.ID, + Provisioner: database.ProvisionerTypeEcho, + StorageMethod: database.ProvisionerStorageMethodFile, + FileID: file.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + Input: []byte("{}"), + }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + WorkspaceID: workspace.ID, + TemplateVersionID: templateVersion.ID, + BuildNumber: 1, + JobID: currentWorkspaceBuildJob.ID, + // Should not be overridden. + ProvisionerState: expectedWorkspaceBuildState, + }) + ) + + t.Log("current job ID: ", currentWorkspaceBuildJob.ID) + + // Ensure the RBAC is the dormant type to ensure we're testing the right + // thing. + require.Equal(t, rbac.ResourceWorkspaceDormant.Type, workspace.RBACObject().Type) + + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector.Start() + tickCh <- now + + stats := <-statsCh + require.NoError(t, stats.Error) + require.Len(t, stats.TerminatedJobIDs, 1) + require.Equal(t, currentWorkspaceBuildJob.ID, stats.TerminatedJobIDs[0]) + + // Check that the current provisioner job was updated. 
+ job, err := db.GetProvisionerJobByID(ctx, currentWorkspaceBuildJob.ID) + require.NoError(t, err) + require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second) + require.True(t, job.CompletedAt.Valid) + require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second) + require.True(t, job.Error.Valid) + require.Contains(t, job.Error.String, "Build has been detected as hung") + require.False(t, job.ErrorCode.Valid) + + detector.Close() + detector.Wait() +} + func TestDetectorHungOtherJobTypes(t *testing.T) { t.Parallel() diff --git a/coderd/mcp/mcp_e2e_test.go b/coderd/mcp/mcp_e2e_test.go index 2813757a50310..f101cfbdd5b65 100644 --- a/coderd/mcp/mcp_e2e_test.go +++ b/coderd/mcp/mcp_e2e_test.go @@ -141,7 +141,8 @@ func TestMCPHTTP_E2E_UnauthenticatedAccess(t *testing.T) { require.NoError(t, err, "Should be able to create HTTP request") req.Header.Set("Content-Type", "application/json") - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) require.NoError(t, err, "Should be able to make HTTP request") defer resp.Body.Close() @@ -613,7 +614,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) tokenReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") - tokenResp, err := http.DefaultClient.Do(tokenReq) + tokenResp, err := client.Do(tokenReq) require.NoError(t, err) defer tokenResp.Body.Close() @@ -711,7 +712,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) refreshReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") - refreshResp, err := http.DefaultClient.Do(refreshReq) + refreshResp, err := client.Do(refreshReq) require.NoError(t, err) defer refreshResp.Body.Close() @@ -846,7 +847,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { regReq.Header.Set("Content-Type", "application/json") // Dynamic client registration should not require authentication (public endpoint) - regResp, err := http.DefaultClient.Do(regReq) + 
regResp, err := client.Do(regReq) require.NoError(t, err) defer regResp.Body.Close() @@ -936,7 +937,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) tokenReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") - tokenResp, err := http.DefaultClient.Do(tokenReq) + tokenResp, err := client.Do(tokenReq) require.NoError(t, err) defer tokenResp.Body.Close() @@ -1037,7 +1038,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) refreshReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") - refreshResp, err := http.DefaultClient.Do(refreshReq) + refreshResp, err := client.Do(refreshReq) require.NoError(t, err) defer refreshResp.Body.Close() @@ -1151,7 +1152,8 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) regReq1.Header.Set("Content-Type", "application/json") - regResp1, err := http.DefaultClient.Do(regReq1) + client := &http.Client{} + regResp1, err := client.Do(regReq1) require.NoError(t, err) defer regResp1.Body.Close() @@ -1181,7 +1183,7 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { require.NoError(t, err) regReq2.Header.Set("Content-Type", "application/json") - regResp2, err := http.DefaultClient.Do(regReq2) + regResp2, err := client.Do(regReq2) require.NoError(t, err) defer regResp2.Body.Close() diff --git a/coderd/notifications/events.go b/coderd/notifications/events.go index 12adcfbb08b37..83e8e990a338a 100644 --- a/coderd/notifications/events.go +++ b/coderd/notifications/events.go @@ -55,6 +55,8 @@ var ( // Task-related events. 
var ( - TemplateTaskWorking = uuid.MustParse("bd4b7168-d05e-4e19-ad0f-3593b77aa90f") - TemplateTaskIdle = uuid.MustParse("d4a6271c-cced-4ed0-84ad-afd02a9c7799") + TemplateTaskWorking = uuid.MustParse("bd4b7168-d05e-4e19-ad0f-3593b77aa90f") + TemplateTaskIdle = uuid.MustParse("d4a6271c-cced-4ed0-84ad-afd02a9c7799") + TemplateTaskCompleted = uuid.MustParse("8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c") + TemplateTaskFailed = uuid.MustParse("3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e") ) diff --git a/coderd/notifications/metrics_test.go b/coderd/notifications/metrics_test.go index 6ba6635a50c4c..975a6db0dd02b 100644 --- a/coderd/notifications/metrics_test.go +++ b/coderd/notifications/metrics_test.go @@ -33,9 +33,6 @@ func TestMetrics(t *testing.T) { t.Parallel() // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) @@ -390,13 +387,6 @@ func TestInflightDispatchesMetric(t *testing.T) { func TestCustomMethodMetricCollection(t *testing.T) { t.Parallel() - - // SETUP - if !dbtestutil.WillUsePostgres() { - // UpdateNotificationTemplateMethodByID only makes sense with a real database. 
- t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) diff --git a/coderd/notifications/notifications_test.go b/coderd/notifications/notifications_test.go index 9689e6467db97..d395bd748cd5a 100644 --- a/coderd/notifications/notifications_test.go +++ b/coderd/notifications/notifications_test.go @@ -66,11 +66,6 @@ func TestMain(m *testing.M) { func TestBasicNotificationRoundtrip(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -278,11 +273,6 @@ func TestWebhookDispatch(t *testing.T) { func TestBackpressure(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitShort)) @@ -407,11 +397,6 @@ func TestBackpressure(t *testing.T) { func TestRetries(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - const maxAttempts = 3 ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) @@ -507,11 +492,6 @@ func TestRetries(t *testing.T) { func TestExpiredLeaseIsRequeued(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - ctx := 
dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -751,10 +731,6 @@ func enumerateAllTemplates(t *testing.T) ([]string, error) { func TestNotificationTemplates_Golden(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on the notification templates added by migrations in the database") - } - const ( username = "bob" password = "🀫" @@ -1301,6 +1277,34 @@ func TestNotificationTemplates_Golden(t *testing.T) { Data: map[string]any{}, }, }, + { + name: "TemplateTaskCompleted", + id: notifications.TemplateTaskCompleted, + payload: types.MessagePayload{ + UserName: "Bobby", + UserEmail: "bobby@coder.com", + UserUsername: "bobby", + Labels: map[string]string{ + "task": "my-task", + "workspace": "my-workspace", + }, + Data: map[string]any{}, + }, + }, + { + name: "TemplateTaskFailed", + id: notifications.TemplateTaskFailed, + payload: types.MessagePayload{ + UserName: "Bobby", + UserEmail: "bobby@coder.com", + UserUsername: "bobby", + Labels: map[string]string{ + "task": "my-task", + "workspace": "my-workspace", + }, + Data: map[string]any{}, + }, + }, } // We must have a test case for every notification_template. 
This is enforced below: @@ -1677,10 +1681,6 @@ func normalizeGoldenWebhook(content []byte) []byte { func TestDisabledByDefaultBeforeEnqueue(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it is testing business-logic implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logbuf := strings.Builder{} @@ -1704,11 +1704,6 @@ func TestDisabledByDefaultBeforeEnqueue(t *testing.T) { func TestDisabledBeforeEnqueue(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it is testing business-logic implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logbuf := strings.Builder{} @@ -1743,11 +1738,6 @@ func TestDisabledBeforeEnqueue(t *testing.T) { func TestDisabledAfterEnqueue(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it is testing business-logic implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1799,11 +1789,6 @@ func TestDisabledAfterEnqueue(t *testing.T) { func TestCustomNotificationMethod(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1901,12 +1886,6 @@ func TestCustomNotificationMethod(t *testing.T) { func TestNotificationsTemplates(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - // Notification system templates are only served from the database and not dbmem at this time. 
- t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) api := coderdtest.New(t, createOpts(t)) @@ -1938,11 +1917,6 @@ func createOpts(t *testing.T) *coderdtest.Options { func TestNotificationDuplicates(t *testing.T) { t.Parallel() - // SETUP - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it is testing the dedupe hash trigger in the database") - } - ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logbuf := strings.Builder{} diff --git a/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskCompleted.html.golden b/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskCompleted.html.golden new file mode 100644 index 0000000000000..769d5595dbc3e --- /dev/null +++ b/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskCompleted.html.golden @@ -0,0 +1,84 @@ +From: system@coder.com +To: bobby@coder.com +Subject: Task 'my-workspace' completed +Message-Id: 02ee4935-73be-4fa1-a290-ff9999026b13@blush-whale-48 +Date: Fri, 11 Oct 2024 09:03:06 +0000 +Content-Type: multipart/alternative; boundary=bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +MIME-Version: 1.0 + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +Content-Transfer-Encoding: quoted-printable +Content-Type: text/plain; charset=UTF-8 + +Hi Bobby, + +The task 'my-task' has completed successfully. + + +View task: http://test.com/tasks/bobby/my-workspace + +View workspace: http://test.com/@bobby/my-workspace + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +Content-Transfer-Encoding: quoted-printable +Content-Type: text/html; charset=UTF-8 + + + + + + + Task 'my-workspace' completed + + +
+
+ 3D"Cod= +
+

+ Task 'my-workspace' completed +

+
+

Hi Bobby,

+

The task ‘my-task’ has completed successfully.

+
+ + +
+ + + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4-- diff --git a/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskFailed.html.golden b/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskFailed.html.golden new file mode 100644 index 0000000000000..5d0879bc82da2 --- /dev/null +++ b/coderd/notifications/testdata/rendered-templates/smtp/TemplateTaskFailed.html.golden @@ -0,0 +1,85 @@ +From: system@coder.com +To: bobby@coder.com +Subject: Task 'my-workspace' failed +Message-Id: 02ee4935-73be-4fa1-a290-ff9999026b13@blush-whale-48 +Date: Fri, 11 Oct 2024 09:03:06 +0000 +Content-Type: multipart/alternative; boundary=bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +MIME-Version: 1.0 + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +Content-Transfer-Encoding: quoted-printable +Content-Type: text/plain; charset=UTF-8 + +Hi Bobby, + +The task 'my-task' has failed. Check the logs for more details. + + +View task: http://test.com/tasks/bobby/my-workspace + +View workspace: http://test.com/@bobby/my-workspace + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4 +Content-Transfer-Encoding: quoted-printable +Content-Type: text/html; charset=UTF-8 + + + + + + + Task 'my-workspace' failed + + +
+
+ 3D"Cod= +
+

+ Task 'my-workspace' failed +

+
+

Hi Bobby,

+

The task ‘my-task’ has failed. Check the logs for mo= +re details.

+
+ + +
+ + + +--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4-- diff --git a/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskCompleted.json.golden b/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskCompleted.json.golden new file mode 100644 index 0000000000000..2336bf3162f59 --- /dev/null +++ b/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskCompleted.json.golden @@ -0,0 +1,33 @@ +{ + "_version": "1.1", + "msg_id": "00000000-0000-0000-0000-000000000000", + "payload": { + "_version": "1.2", + "notification_name": "Task Completed", + "notification_template_id": "00000000-0000-0000-0000-000000000000", + "user_id": "00000000-0000-0000-0000-000000000000", + "user_email": "bobby@coder.com", + "user_name": "Bobby", + "user_username": "bobby", + "actions": [ + { + "label": "View task", + "url": "http://test.com/tasks/bobby/my-workspace" + }, + { + "label": "View workspace", + "url": "http://test.com/@bobby/my-workspace" + } + ], + "labels": { + "task": "my-task", + "workspace": "my-workspace" + }, + "data": {}, + "targets": null + }, + "title": "Task 'my-workspace' completed", + "title_markdown": "Task 'my-workspace' completed", + "body": "The task 'my-task' has completed successfully.", + "body_markdown": "The task 'my-task' has completed successfully." 
+} \ No newline at end of file diff --git a/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskFailed.json.golden b/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskFailed.json.golden new file mode 100644 index 0000000000000..44788581a02b3 --- /dev/null +++ b/coderd/notifications/testdata/rendered-templates/webhook/TemplateTaskFailed.json.golden @@ -0,0 +1,33 @@ +{ + "_version": "1.1", + "msg_id": "00000000-0000-0000-0000-000000000000", + "payload": { + "_version": "1.2", + "notification_name": "Task Failed", + "notification_template_id": "00000000-0000-0000-0000-000000000000", + "user_id": "00000000-0000-0000-0000-000000000000", + "user_email": "bobby@coder.com", + "user_name": "Bobby", + "user_username": "bobby", + "actions": [ + { + "label": "View task", + "url": "http://test.com/tasks/bobby/my-workspace" + }, + { + "label": "View workspace", + "url": "http://test.com/@bobby/my-workspace" + } + ], + "labels": { + "task": "my-task", + "workspace": "my-workspace" + }, + "data": {}, + "targets": null + }, + "title": "Task 'my-workspace' failed", + "title_markdown": "Task 'my-workspace' failed", + "body": "The task 'my-task' has failed. Check the logs for more details.", + "body_markdown": "The task 'my-task' has failed. Check the logs for more details." +} \ No newline at end of file diff --git a/coderd/oauth2.go b/coderd/oauth2.go index 1e28f9b65bbb8..ac0c87545ead9 100644 --- a/coderd/oauth2.go +++ b/coderd/oauth2.go @@ -160,6 +160,19 @@ func (api *API) deleteOAuth2ProviderAppTokens() http.HandlerFunc { return oauth2provider.RevokeApp(api.Database) } +// @Summary Revoke OAuth2 tokens (RFC 7009). 
+// @ID oauth2-token-revocation +// @Accept x-www-form-urlencoded +// @Tags Enterprise +// @Param client_id formData string true "Client ID for authentication" +// @Param token formData string true "The token to revoke" +// @Param token_type_hint formData string false "Hint about token type (access_token or refresh_token)" +// @Success 200 "Token successfully revoked" +// @Router /oauth2/revoke [post] +func (api *API) revokeOAuth2Token() http.HandlerFunc { + return oauth2provider.RevokeToken(api.Database, api.Logger) +} + // @Summary OAuth2 authorization server metadata. // @ID oauth2-authorization-server-metadata // @Produce json diff --git a/coderd/oauth2_metadata_test.go b/coderd/oauth2_metadata_test.go index a3e8ec1f50571..0e7ff4b1a8743 100644 --- a/coderd/oauth2_metadata_test.go +++ b/coderd/oauth2_metadata_test.go @@ -29,7 +29,8 @@ func TestOAuth2AuthorizationServerMetadata(t *testing.T) { req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + httpClient := &http.Client{} + resp, err := httpClient.Do(req) require.NoError(t, err) defer resp.Body.Close() @@ -65,7 +66,8 @@ func TestOAuth2ProtectedResourceMetadata(t *testing.T) { req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + httpClient := &http.Client{} + resp, err := httpClient.Do(req) require.NoError(t, err) defer resp.Body.Close() diff --git a/coderd/oauth2_test.go b/coderd/oauth2_test.go index d5755b695d393..72564a2a0d85e 100644 --- a/coderd/oauth2_test.go +++ b/coderd/oauth2_test.go @@ -620,7 +620,7 @@ func TestOAuth2ProviderTokenRefresh(t *testing.T) { CreatedAt: dbtime.Now(), ExpiresAt: expires, HashPrefix: []byte(token.Prefix), - RefreshHash: []byte(token.Hashed), + RefreshHash: token.Hashed, AppSecretID: secret.ID, APIKeyID: newKey.ID, UserID: user.ID, @@ -720,7 +720,7 @@ func TestOAuth2ProviderRevoke(t *testing.T) { }, }, 
{ - name: "DeleteToken", + name: "DeleteApp", fn: func(ctx context.Context, client *codersdk.Client, s exchangeSetup) { err := client.RevokeOAuth2ProviderApp(ctx, s.app.ID) require.NoError(t, err) @@ -1603,5 +1603,80 @@ func TestOAuth2RegistrationAccessToken(t *testing.T) { }) } +// TestOAuth2CoderClient verfies a codersdk client can be used with an oauth client. +func TestOAuth2CoderClient(t *testing.T) { + t.Parallel() + + owner := coderdtest.New(t, nil) + first := coderdtest.CreateFirstUser(t, owner) + + // Setup an oauth app + ctx := testutil.Context(t, testutil.WaitLong) + app, err := owner.PostOAuth2ProviderApp(ctx, codersdk.PostOAuth2ProviderAppRequest{ + Name: "new-app", + CallbackURL: "http://localhost", + }) + require.NoError(t, err) + + appsecret, err := owner.PostOAuth2ProviderAppSecret(ctx, app.ID) + require.NoError(t, err) + + cfg := &oauth2.Config{ + ClientID: app.ID.String(), + ClientSecret: appsecret.ClientSecretFull, + Endpoint: oauth2.Endpoint{ + AuthURL: app.Endpoints.Authorization, + DeviceAuthURL: app.Endpoints.DeviceAuth, + TokenURL: app.Endpoints.Token, + AuthStyle: oauth2.AuthStyleInParams, + }, + RedirectURL: app.CallbackURL, + Scopes: []string{}, + } + + // Make a new user + client, user := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID) + + // Do an OAuth2 token exchange and get a new client with an oauth token + state := uuid.NewString() + + // Get an OAuth2 code for a token exchange + code, err := oidctest.OAuth2GetCode( + cfg.AuthCodeURL(state), + func(req *http.Request) (*http.Response, error) { + // Change to POST to simulate the form submission + req.Method = http.MethodPost + + // Prevent automatic redirect following + client.HTTPClient.CheckRedirect = func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + } + return client.Request(ctx, req.Method, req.URL.String(), nil) + }, + ) + require.NoError(t, err) + + token, err := cfg.Exchange(ctx, code) + require.NoError(t, err) + + // Use 
the oauth client's authentication + // TODO: The SDK could probably support this with a better syntax/api. + oauthClient := oauth2.NewClient(ctx, oauth2.StaticTokenSource(token)) + usingOauth := codersdk.New(owner.URL) + usingOauth.HTTPClient = oauthClient + + me, err := usingOauth.User(ctx, codersdk.Me) + require.NoError(t, err) + require.Equal(t, user.ID, me.ID) + + // Revoking the refresh token should prevent further access + // Revoking the refresh also invalidates the associated access token. + err = usingOauth.RevokeOAuth2Token(ctx, app.ID, token.RefreshToken) + require.NoError(t, err) + + _, err = usingOauth.User(ctx, codersdk.Me) + require.Error(t, err) +} + // NOTE: OAuth2 client registration validation tests have been migrated to // oauth2provider/validation_test.go for better separation of concerns diff --git a/coderd/oauth2provider/app_secrets.go b/coderd/oauth2provider/app_secrets.go index 5549ece4266f2..3eff684123c0e 100644 --- a/coderd/oauth2provider/app_secrets.go +++ b/coderd/oauth2provider/app_secrets.go @@ -66,7 +66,7 @@ func CreateAppSecret(db database.Store, auditor *audit.Auditor, logger slog.Logg ID: uuid.New(), CreatedAt: dbtime.Now(), SecretPrefix: []byte(secret.Prefix), - HashedSecret: []byte(secret.Hashed), + HashedSecret: secret.Hashed, // DisplaySecret is the last six characters of the original unhashed secret. // This is done so they can be differentiated and it matches how GitHub // displays their client secrets. 
diff --git a/coderd/oauth2provider/apps.go b/coderd/oauth2provider/apps.go index 74bafb851ef1a..81ff8b0e24095 100644 --- a/coderd/oauth2provider/apps.go +++ b/coderd/oauth2provider/apps.go @@ -110,7 +110,7 @@ func CreateApp(db database.Store, accessURL *url.URL, auditor *audit.Auditor, lo Jwks: pqtype.NullRawMessage{}, SoftwareID: sql.NullString{}, SoftwareVersion: sql.NullString{}, - RegistrationAccessToken: sql.NullString{}, + RegistrationAccessToken: nil, RegistrationClientUri: sql.NullString{}, }) if err != nil { diff --git a/coderd/oauth2provider/authorize.go b/coderd/oauth2provider/authorize.go index 4100b82306384..d738e781e8a34 100644 --- a/coderd/oauth2provider/authorize.go +++ b/coderd/oauth2provider/authorize.go @@ -40,7 +40,7 @@ func extractAuthorizeParams(r *http.Request, callbackURL *url.URL) (authorizePar clientID: p.String(vals, "", "client_id"), redirectURL: p.RedirectURL(vals, callbackURL, "redirect_uri"), responseType: httpapi.ParseCustom(p, vals, "", "response_type", httpapi.ParseEnum[codersdk.OAuth2ProviderResponseType]), - scope: p.Strings(vals, []string{}, "scope"), + scope: strings.Fields(strings.TrimSpace(p.String(vals, "", "scope"))), state: p.String(vals, "", "state"), resource: p.String(vals, "", "resource"), codeChallenge: p.String(vals, "", "code_challenge"), @@ -165,7 +165,7 @@ func ProcessAuthorize(db database.Store) http.HandlerFunc { // has left) then they can just retry immediately and get a new code. 
ExpiresAt: dbtime.Now().Add(time.Duration(10) * time.Minute), SecretPrefix: []byte(code.Prefix), - HashedSecret: []byte(code.Hashed), + HashedSecret: code.Hashed, AppID: app.ID, UserID: apiKey.UserID, ResourceUri: sql.NullString{String: params.resource, Valid: params.resource != ""}, diff --git a/coderd/oauth2provider/registration.go b/coderd/oauth2provider/registration.go index 63d2de4f48394..807c39371d8a4 100644 --- a/coderd/oauth2provider/registration.go +++ b/coderd/oauth2provider/registration.go @@ -15,21 +15,14 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" - "github.com/coder/coder/v2/coderd/userpassword" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/cryptorand" -) - -// Constants for OAuth2 secret generation (RFC 7591) -const ( - secretLength = 40 // Length of the actual secret part - displaySecretLength = 6 // Length of visible part in UI (last 6 characters) ) // CreateDynamicClientRegistration returns an http.HandlerFunc that handles POST /oauth2/register @@ -106,7 +99,7 @@ func CreateDynamicClientRegistration(db database.Store, accessURL *url.URL, audi Jwks: pqtype.NullRawMessage{RawMessage: req.JWKS, Valid: len(req.JWKS) > 0}, SoftwareID: sql.NullString{String: req.SoftwareID, Valid: req.SoftwareID != ""}, SoftwareVersion: sql.NullString{String: req.SoftwareVersion, Valid: req.SoftwareVersion != ""}, - RegistrationAccessToken: sql.NullString{String: hashedRegToken, Valid: true}, + RegistrationAccessToken: hashedRegToken, RegistrationClientUri: sql.NullString{String: fmt.Sprintf("%s/oauth2/clients/%s", accessURL.String(), clientID), Valid: true}, }) if err != nil { @@ -121,7 +114,7 @@ func CreateDynamicClientRegistration(db database.Store, accessURL *url.URL, 
audi } // Create client secret - parse the formatted secret to get components - parsedSecret, err := parseFormattedSecret(clientSecret) + parsedSecret, err := ParseFormattedSecret(clientSecret) if err != nil { writeOAuth2RegistrationError(ctx, rw, http.StatusInternalServerError, "server_error", "Failed to parse generated secret") @@ -132,8 +125,8 @@ func CreateDynamicClientRegistration(db database.Store, accessURL *url.URL, audi _, err = db.InsertOAuth2ProviderAppSecret(dbauthz.AsSystemRestricted(ctx), database.InsertOAuth2ProviderAppSecretParams{ ID: uuid.New(), CreatedAt: now, - SecretPrefix: []byte(parsedSecret.prefix), - HashedSecret: []byte(hashedSecret), + SecretPrefix: []byte(parsedSecret.Prefix), + HashedSecret: hashedSecret, DisplaySecret: createDisplaySecret(clientSecret), AppID: clientID, }) @@ -230,7 +223,7 @@ func GetClientConfiguration(db database.Store) http.HandlerFunc { TokenEndpointAuthMethod: app.TokenEndpointAuthMethod.String, Scope: app.Scope.String, Contacts: app.Contacts, - RegistrationAccessToken: "", // RFC 7592: Not returned in GET responses for security + RegistrationAccessToken: nil, // RFC 7592: Not returned in GET responses for security RegistrationClientURI: app.RegistrationClientUri.String, } @@ -354,7 +347,7 @@ func UpdateClientConfiguration(db database.Store, auditor *audit.Auditor, logger TokenEndpointAuthMethod: updatedApp.TokenEndpointAuthMethod.String, Scope: updatedApp.Scope.String, Contacts: updatedApp.Contacts, - RegistrationAccessToken: updatedApp.RegistrationAccessToken.String, + RegistrationAccessToken: updatedApp.RegistrationAccessToken, RegistrationClientURI: updatedApp.RegistrationClientUri.String, } @@ -482,20 +475,14 @@ func RequireRegistrationAccessToken(db database.Store) func(http.Handler) http.H } // Verify the registration access token - if !app.RegistrationAccessToken.Valid { + if len(app.RegistrationAccessToken) == 0 { writeOAuth2RegistrationError(ctx, rw, http.StatusInternalServerError, "server_error", 
"Client has no registration access token") return } // Compare the provided token with the stored hash - valid, err := userpassword.Compare(app.RegistrationAccessToken.String, token) - if err != nil { - writeOAuth2RegistrationError(ctx, rw, http.StatusInternalServerError, - "server_error", "Failed to verify registration access token") - return - } - if !valid { + if !apikey.ValidateHash(app.RegistrationAccessToken, token) { writeOAuth2RegistrationError(ctx, rw, http.StatusUnauthorized, "invalid_token", "Invalid registration access token") return @@ -510,30 +497,19 @@ func RequireRegistrationAccessToken(db database.Store) func(http.Handler) http.H // Helper functions for RFC 7591 Dynamic Client Registration // generateClientCredentials generates a client secret for OAuth2 apps -func generateClientCredentials() (plaintext, hashed string, err error) { +func generateClientCredentials() (plaintext string, hashed []byte, err error) { // Use the same pattern as existing OAuth2 app secrets secret, err := GenerateSecret() if err != nil { - return "", "", xerrors.Errorf("generate secret: %w", err) + return "", nil, xerrors.Errorf("generate secret: %w", err) } return secret.Formatted, secret.Hashed, nil } // generateRegistrationAccessToken generates a registration access token for RFC 7592 -func generateRegistrationAccessToken() (plaintext, hashed string, err error) { - token, err := cryptorand.String(secretLength) - if err != nil { - return "", "", xerrors.Errorf("generate registration token: %w", err) - } - - // Hash the token for storage - hashedToken, err := userpassword.Hash(token) - if err != nil { - return "", "", xerrors.Errorf("hash registration token: %w", err) - } - - return token, hashedToken, nil +func generateRegistrationAccessToken() (plaintext string, hashed []byte, err error) { + return apikey.GenerateSecret(secretLength) } // writeOAuth2RegistrationError writes RFC 7591 compliant error responses @@ -551,27 +527,6 @@ func writeOAuth2RegistrationError(_ 
context.Context, rw http.ResponseWriter, sta _ = json.NewEncoder(rw).Encode(errorResponse) } -// parsedSecret represents the components of a formatted OAuth2 secret -type parsedSecret struct { - prefix string - secret string -} - -// parseFormattedSecret parses a formatted secret like "coder_prefix_secret" -func parseFormattedSecret(secret string) (parsedSecret, error) { - parts := strings.Split(secret, "_") - if len(parts) != 3 { - return parsedSecret{}, xerrors.Errorf("incorrect number of parts: %d", len(parts)) - } - if parts[0] != "coder" { - return parsedSecret{}, xerrors.Errorf("incorrect scheme: %s", parts[0]) - } - return parsedSecret{ - prefix: parts[1], - secret: parts[2], - }, nil -} - // createDisplaySecret creates a display version of the secret showing only the last few characters func createDisplaySecret(secret string) string { if len(secret) <= displaySecretLength { diff --git a/coderd/oauth2provider/revoke.go b/coderd/oauth2provider/revoke.go index 243ce750288bb..19f3fb803a88c 100644 --- a/coderd/oauth2provider/revoke.go +++ b/coderd/oauth2provider/revoke.go @@ -1,15 +1,211 @@ package oauth2provider import ( + "context" + "crypto/sha256" + "crypto/subtle" "database/sql" "errors" "net/http" + "strings" + "golang.org/x/xerrors" + + "github.com/google/uuid" + + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" ) +var ( + // ErrTokenNotBelongsToClient is returned when a token does not belong to the requesting client + ErrTokenNotBelongsToClient = xerrors.New("token does not belong to requesting client") + // ErrInvalidTokenFormat is returned when a token has an invalid format + ErrInvalidTokenFormat = xerrors.New("invalid token format") +) + +// RevokeToken implements RFC 7009 OAuth2 Token Revocation +// Authentication is unique for this endpoint in that it does 
not use the +// standard token authentication middleware. Instead, it expects the token that +// is being revoked to be valid. +// TODO: Currently the token validation occurs in the revocation logic itself. +// This code should be refactored to share token validation logic with other parts +// of the OAuth2 provider/http middleware. +func RevokeToken(db database.Store, logger slog.Logger) http.HandlerFunc { + return func(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + app := httpmw.OAuth2ProviderApp(r) + + // RFC 7009 requires POST method with application/x-www-form-urlencoded + if r.Method != http.MethodPost { + httpapi.WriteOAuth2Error(ctx, rw, http.StatusMethodNotAllowed, "invalid_request", "Method not allowed") + return + } + + if err := r.ParseForm(); err != nil { + httpapi.WriteOAuth2Error(ctx, rw, http.StatusBadRequest, "invalid_request", "Invalid form data") + return + } + + // RFC 7009 requires 'token' parameter + token := r.Form.Get("token") + if token == "" { + httpapi.WriteOAuth2Error(ctx, rw, http.StatusBadRequest, "invalid_request", "Missing token parameter") + return + } + + // Determine if this is a refresh token (starts with "coder_") or API key + // APIKeys do not have the SecretIdentifier prefix. 
+ const coderPrefix = SecretIdentifier + "_" + isRefreshToken := strings.HasPrefix(token, coderPrefix) + + // Revoke the token with ownership verification + err := db.InTx(func(tx database.Store) error { + if isRefreshToken { + // Handle refresh token revocation + return revokeRefreshTokenInTx(ctx, tx, token, app.ID) + } + // Handle API key revocation + return revokeAPIKeyInTx(ctx, tx, token, app.ID) + }, nil) + if err != nil { + if errors.Is(err, ErrTokenNotBelongsToClient) { + // RFC 7009: Return success even if token doesn't belong to client (don't reveal token existence) + logger.Debug(ctx, "token revocation failed: token does not belong to requesting client", + slog.F("client_id", app.ID.String()), + slog.F("app_name", app.Name)) + rw.WriteHeader(http.StatusOK) + return + } + if errors.Is(err, ErrInvalidTokenFormat) { + // Invalid token format should return 400 bad request + logger.Debug(ctx, "token revocation failed: invalid token format", + slog.F("client_id", app.ID.String()), + slog.F("app_name", app.Name)) + httpapi.WriteOAuth2Error(ctx, rw, http.StatusBadRequest, "invalid_request", "Invalid token format") + return + } + logger.Error(ctx, "token revocation failed with internal server error", + slog.Error(err), + slog.F("client_id", app.ID.String()), + slog.F("app_name", app.Name)) + httpapi.WriteOAuth2Error(ctx, rw, http.StatusInternalServerError, "server_error", "Internal server error") + return + } + + // RFC 7009: successful revocation returns HTTP 200 + rw.WriteHeader(http.StatusOK) + } +} + +func revokeRefreshTokenInTx(ctx context.Context, db database.Store, token string, appID uuid.UUID) error { + // Parse the refresh token using the existing function + parsedToken, err := ParseFormattedSecret(token) + if err != nil { + return ErrInvalidTokenFormat + } + + // Try to find refresh token by prefix + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + dbToken, err := 
db.GetOAuth2ProviderAppTokenByPrefix(dbauthz.AsSystemOAuth2(ctx), []byte(parsedToken.Prefix)) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + // Token not found - return success per RFC 7009 (don't reveal token existence) + return nil + } + return xerrors.Errorf("get oauth2 provider app token by prefix: %w", err) + } + + equal := apikey.ValidateHash(dbToken.RefreshHash, parsedToken.Secret) + if !equal { + return xerrors.Errorf("invalid refresh token") + } + + // Verify ownership + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + appSecret, err := db.GetOAuth2ProviderAppSecretByID(dbauthz.AsSystemOAuth2(ctx), dbToken.AppSecretID) + if err != nil { + return xerrors.Errorf("get oauth2 provider app secret: %w", err) + } + if appSecret.AppID != appID { + return ErrTokenNotBelongsToClient + } + + // Delete the associated API key, which should cascade to remove the refresh token + // According to RFC 7009, when a refresh token is revoked, associated access tokens should be invalidated + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + err = db.DeleteAPIKeyByID(dbauthz.AsSystemOAuth2(ctx), dbToken.APIKeyID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("delete api key: %w", err) + } + + return nil +} + +func revokeAPIKeyInTx(ctx context.Context, db database.Store, token string, appID uuid.UUID) error { + keyID, secret, err := httpmw.SplitAPIToken(token) + if err != nil { + return ErrInvalidTokenFormat + } + + // Get the API key + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + apiKey, err := db.GetAPIKeyByID(dbauthz.AsSystemOAuth2(ctx), keyID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + // API key not found - return success per RFC 7009 (don't reveal token existence) + return nil + } + return xerrors.Errorf("get api key by id: %w", err) + } + + // Checking to see if the provided secret matches the stored 
hashed secret + hashedSecret := sha256.Sum256([]byte(secret)) + if subtle.ConstantTimeCompare(apiKey.HashedSecret, hashedSecret[:]) != 1 { + return xerrors.Errorf("invalid api key") + } + + // Verify the API key was created by OAuth2 + if apiKey.LoginType != database.LoginTypeOAuth2ProviderApp { + return xerrors.New("api key is not an oauth2 token") + } + + // Find the associated OAuth2 token to verify ownership + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + dbToken, err := db.GetOAuth2ProviderAppTokenByAPIKeyID(dbauthz.AsSystemOAuth2(ctx), apiKey.ID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + // No associated OAuth2 token - return success per RFC 7009 + return nil + } + return xerrors.Errorf("get oauth2 provider app token by api key id: %w", err) + } + + // Verify the token belongs to the requesting app + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + appSecret, err := db.GetOAuth2ProviderAppSecretByID(dbauthz.AsSystemOAuth2(ctx), dbToken.AppSecretID) + if err != nil { + return xerrors.Errorf("get oauth2 provider app secret for api key verification: %w", err) + } + + if appSecret.AppID != appID { + return ErrTokenNotBelongsToClient + } + + // Delete the API key + //nolint:gocritic // Using AsSystemOAuth2 for OAuth2 public token revocation endpoint + err = db.DeleteAPIKeyByID(dbauthz.AsSystemOAuth2(ctx), apiKey.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("delete api key for revocation: %w", err) + } + + return nil +} + func RevokeApp(db database.Store) http.HandlerFunc { return func(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/coderd/oauth2provider/secrets.go b/coderd/oauth2provider/secrets.go index a360c0b325c89..ee6a7b315d843 100644 --- a/coderd/oauth2provider/secrets.go +++ b/coderd/oauth2provider/secrets.go @@ -2,32 +2,68 @@ package oauth2provider import ( "fmt" + "strings" - 
"github.com/coder/coder/v2/coderd/userpassword" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/cryptorand" ) +const ( + // SecretIdentifier is the prefix added to all generated secrets. + SecretIdentifier = "coder" +) + +// Constants for OAuth2 secret generation +const ( + secretLength = 40 // Length of the actual secret part + displaySecretLength = 6 // Length of visible part in UI (last 6 characters) +) + +type HashedAppSecret struct { + AppSecret + // Hashed is the server stored hash(secret,salt,...). Used for verifying a + // secret. + Hashed []byte +} + type AppSecret struct { // Formatted contains the secret. This value is owned by the client, not the // server. It is formatted to include the prefix. Formatted string + // Secret is the raw secret value. This value should only be known to the client. + Secret string // Prefix is the ID of this secret owned by the server. When a client uses a // secret, this is the matching string to do a lookup on the hashed value. We // cannot use the hashed value directly because the server does not store the // salt. Prefix string - // Hashed is the server stored hash(secret,salt,...). Used for verifying a - // secret. - Hashed string +} + +// ParseFormattedSecret parses a formatted secret like "coder__ 0 { - newToken.Expiry = time.Now().Add(time.Duration(secs) * time.Second) + expiresIn, convertErr := tokenRes.ExpiresIn.Int64() + if convertErr != nil { + return nil, xerrors.Errorf("oauth2: cannot convert expires_in to int64: %w", convertErr) + } + + if expiresIn > 0 { + newToken.Expiry = time.Now().Add(time.Duration(expiresIn) * time.Second) } // ID token is a JWT token. We can decode it to get the expiry. 
diff --git a/coderd/parameters.go b/coderd/parameters.go index 4b8b13486934f..cb24dcd4312ec 100644 --- a/coderd/parameters.go +++ b/coderd/parameters.go @@ -139,6 +139,8 @@ func (api *API) handleParameterWebsocket(rw http.ResponseWriter, r *http.Request }) return } + go httpapi.Heartbeat(ctx, conn) + stream := wsjson.NewStream[codersdk.DynamicParametersRequest, codersdk.DynamicParametersResponse]( conn, websocket.MessageText, diff --git a/coderd/prebuilds/api.go b/coderd/prebuilds/api.go index 1bedeb10130c8..0deab99416fd5 100644 --- a/coderd/prebuilds/api.go +++ b/coderd/prebuilds/api.go @@ -37,13 +37,18 @@ type ReconciliationOrchestrator interface { TrackResourceReplacement(ctx context.Context, workspaceID, buildID uuid.UUID, replacements []*sdkproto.ResourceReplacement) } +// ReconcileStats contains statistics about a reconciliation cycle. +type ReconcileStats struct { + Elapsed time.Duration +} + type Reconciler interface { StateSnapshotter // ReconcileAll orchestrates the reconciliation of all prebuilds across all templates. // It takes a global snapshot of the system state and then reconciles each preset // in parallel, creating or deleting prebuilds as needed to reach their desired states. - ReconcileAll(ctx context.Context) error + ReconcileAll(ctx context.Context) (ReconcileStats, error) } // StateSnapshotter defines the operations necessary to capture workspace prebuilds state. 
@@ -66,5 +71,4 @@ type Claimer interface { nextStartAt sql.NullTime, ttl sql.NullInt64, ) (*uuid.UUID, error) - Initiator() uuid.UUID } diff --git a/coderd/prebuilds/global_snapshot.go b/coderd/prebuilds/global_snapshot.go index f8fb873739ae3..3c7ec24f5644b 100644 --- a/coderd/prebuilds/global_snapshot.go +++ b/coderd/prebuilds/global_snapshot.go @@ -8,10 +8,9 @@ import ( "cdr.dev/slog" - "github.com/coder/quartz" - "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/util/slice" + "github.com/coder/quartz" ) // GlobalSnapshot represents a full point-in-time snapshot of state relating to prebuilds across all templates. @@ -20,6 +19,7 @@ type GlobalSnapshot struct { PrebuildSchedules []database.TemplateVersionPresetPrebuildSchedule RunningPrebuilds []database.GetRunningPrebuiltWorkspacesRow PrebuildsInProgress []database.CountInProgressPrebuildsRow + PendingPrebuilds []database.CountPendingNonActivePrebuildsRow Backoffs []database.GetPresetsBackoffRow HardLimitedPresetsMap map[uuid.UUID]database.GetPresetsAtFailureLimitRow clock quartz.Clock @@ -31,6 +31,7 @@ func NewGlobalSnapshot( prebuildSchedules []database.TemplateVersionPresetPrebuildSchedule, runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow, prebuildsInProgress []database.CountInProgressPrebuildsRow, + pendingPrebuilds []database.CountPendingNonActivePrebuildsRow, backoffs []database.GetPresetsBackoffRow, hardLimitedPresets []database.GetPresetsAtFailureLimitRow, clock quartz.Clock, @@ -46,6 +47,7 @@ func NewGlobalSnapshot( PrebuildSchedules: prebuildSchedules, RunningPrebuilds: runningPrebuilds, PrebuildsInProgress: prebuildsInProgress, + PendingPrebuilds: pendingPrebuilds, Backoffs: backoffs, HardLimitedPresetsMap: hardLimitedPresetsMap, clock: clock, @@ -76,10 +78,20 @@ func (s GlobalSnapshot) FilterByPreset(presetID uuid.UUID) (*PresetSnapshot, err // Separate running workspaces into non-expired and expired based on the preset's TTL nonExpired, expired := 
filterExpiredWorkspaces(preset, running) + // Includes in-progress prebuilds only for active template versions. + // In-progress prebuilds correspond to workspace statuses: 'pending', 'starting', 'stopping', and 'deleting' inProgress := slice.Filter(s.PrebuildsInProgress, func(prebuild database.CountInProgressPrebuildsRow) bool { return prebuild.PresetID.UUID == preset.ID }) + // Includes count of pending prebuilds only for non-active template versions + pendingCount := 0 + if found, ok := slice.Find(s.PendingPrebuilds, func(prebuild database.CountPendingNonActivePrebuildsRow) bool { + return prebuild.PresetID.UUID == preset.ID + }); ok { + pendingCount = int(found.Count) + } + var backoffPtr *database.GetPresetsBackoffRow backoff, found := slice.Find(s.Backoffs, func(row database.GetPresetsBackoffRow) bool { return row.PresetID == preset.ID @@ -96,6 +108,7 @@ func (s GlobalSnapshot) FilterByPreset(presetID uuid.UUID) (*PresetSnapshot, err nonExpired, expired, inProgress, + pendingCount, backoffPtr, isHardLimited, s.clock, diff --git a/coderd/prebuilds/noop.go b/coderd/prebuilds/noop.go index ebb6d6964214e..0859d428b4796 100644 --- a/coderd/prebuilds/noop.go +++ b/coderd/prebuilds/noop.go @@ -17,7 +17,11 @@ func (NoopReconciler) Run(context.Context) {} func (NoopReconciler) Stop(context.Context, error) {} func (NoopReconciler) TrackResourceReplacement(context.Context, uuid.UUID, uuid.UUID, []*sdkproto.ResourceReplacement) { } -func (NoopReconciler) ReconcileAll(context.Context) error { return nil } + +func (NoopReconciler) ReconcileAll(context.Context) (ReconcileStats, error) { + return ReconcileStats{}, nil +} + func (NoopReconciler) SnapshotState(context.Context, database.Store) (*GlobalSnapshot, error) { return &GlobalSnapshot{}, nil } @@ -35,8 +39,4 @@ func (NoopClaimer) Claim(context.Context, time.Time, uuid.UUID, string, uuid.UUI return nil, ErrAGPLDoesNotSupportPrebuiltWorkspaces } -func (NoopClaimer) Initiator() uuid.UUID { - return uuid.Nil -} - var 
DefaultClaimer Claimer = NoopClaimer{} diff --git a/coderd/prebuilds/preset_snapshot.go b/coderd/prebuilds/preset_snapshot.go index be9299c8f5bdf..04f4cd1a83ff1 100644 --- a/coderd/prebuilds/preset_snapshot.go +++ b/coderd/prebuilds/preset_snapshot.go @@ -34,6 +34,9 @@ const ( // ActionTypeBackoff indicates that prebuild creation should be delayed. ActionTypeBackoff + + // ActionTypeCancelPending indicates that pending prebuilds should be canceled. + ActionTypeCancelPending ) // PresetSnapshot is a filtered view of GlobalSnapshot focused on a single preset. @@ -49,6 +52,7 @@ type PresetSnapshot struct { Running []database.GetRunningPrebuiltWorkspacesRow Expired []database.GetRunningPrebuiltWorkspacesRow InProgress []database.CountInProgressPrebuildsRow + PendingCount int Backoff *database.GetPresetsBackoffRow IsHardLimited bool clock quartz.Clock @@ -61,6 +65,7 @@ func NewPresetSnapshot( running []database.GetRunningPrebuiltWorkspacesRow, expired []database.GetRunningPrebuiltWorkspacesRow, inProgress []database.CountInProgressPrebuildsRow, + pendingCount int, backoff *database.GetPresetsBackoffRow, isHardLimited bool, clock quartz.Clock, @@ -72,6 +77,7 @@ func NewPresetSnapshot( Running: running, Expired: expired, InProgress: inProgress, + PendingCount: pendingCount, Backoff: backoff, IsHardLimited: isHardLimited, clock: clock, @@ -115,7 +121,7 @@ type ReconciliationActions struct { } func (ra *ReconciliationActions) IsNoop() bool { - return ra.Create == 0 && len(ra.DeleteIDs) == 0 && ra.BackoffUntil.IsZero() + return ra.ActionType != ActionTypeCancelPending && ra.Create == 0 && len(ra.DeleteIDs) == 0 && ra.BackoffUntil.IsZero() } // MatchesCron interprets a cron spec as a continuous time range, @@ -345,18 +351,30 @@ func (p PresetSnapshot) handleActiveTemplateVersion() (actions []*Reconciliation return actions, nil } -// handleInactiveTemplateVersion deletes all running prebuilds except those already being deleted -// to avoid duplicate deletion attempts. 
-func (p PresetSnapshot) handleInactiveTemplateVersion() ([]*ReconciliationActions, error) { - prebuildsToDelete := len(p.Running) - deleteIDs := p.getOldestPrebuildIDs(prebuildsToDelete) +// handleInactiveTemplateVersion handles prebuilds from inactive template versions: +// 1. If the preset has pending prebuild jobs from an inactive template version, create a cancel reconciliation action. +// This cancels all pending prebuild jobs for this preset's template version. +// 2. If the preset has prebuilt workspaces currently running from an inactive template version, +// create a delete reconciliation action to remove all running prebuilt workspaces. +func (p PresetSnapshot) handleInactiveTemplateVersion() (actions []*ReconciliationActions, err error) { + // Cancel pending initial prebuild jobs from inactive version + if p.PendingCount > 0 { + actions = append(actions, + &ReconciliationActions{ + ActionType: ActionTypeCancelPending, + }) + } - return []*ReconciliationActions{ - { - ActionType: ActionTypeDelete, - DeleteIDs: deleteIDs, - }, - }, nil + // Delete prebuilds running in inactive version + deleteIDs := p.getOldestPrebuildIDs(len(p.Running)) + if len(deleteIDs) > 0 { + actions = append(actions, + &ReconciliationActions{ + ActionType: ActionTypeDelete, + DeleteIDs: deleteIDs, + }) + } + return actions, nil } // needsBackoffPeriod checks if we should delay prebuild creation due to recent failures. 
diff --git a/coderd/prebuilds/preset_snapshot_test.go b/coderd/prebuilds/preset_snapshot_test.go index 8a1a10451323a..c32a84777d069 100644 --- a/coderd/prebuilds/preset_snapshot_test.go +++ b/coderd/prebuilds/preset_snapshot_test.go @@ -6,16 +6,14 @@ import ( "testing" "time" - "github.com/coder/coder/v2/testutil" - "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/coder/quartz" - "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/prebuilds" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) type options struct { @@ -86,7 +84,7 @@ func TestNoPrebuilds(t *testing.T) { preset(true, 0, current), } - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, nil, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, nil, nil, nil, clock, testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -108,7 +106,7 @@ func TestNetNew(t *testing.T) { preset(true, 1, current), } - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, nil, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, nil, nil, nil, clock, testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -150,7 +148,7 @@ func TestOutdatedPrebuilds(t *testing.T) { var inProgress []database.CountInProgressPrebuildsRow // WHEN: calculating the outdated preset's state. - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, quartz.NewMock(t), testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, nil, quartz.NewMock(t), testutil.Logger(t)) ps, err := snapshot.FilterByPreset(outdated.presetID) require.NoError(t, err) @@ -216,7 +214,7 @@ func TestDeleteOutdatedPrebuilds(t *testing.T) { } // WHEN: calculating the outdated preset's state. 
- snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, quartz.NewMock(t), testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, nil, quartz.NewMock(t), testutil.Logger(t)) ps, err := snapshot.FilterByPreset(outdated.presetID) require.NoError(t, err) @@ -238,6 +236,74 @@ func TestDeleteOutdatedPrebuilds(t *testing.T) { }, actions) } +func TestCancelPendingPrebuilds(t *testing.T) { + t.Parallel() + + // Setup + current := opts[optionSet3] + clock := quartz.NewMock(t) + + t.Run("CancelPendingPrebuildsNonActiveVersion", func(t *testing.T) { + t.Parallel() + + // Given: a preset from a non-active version + defaultPreset := preset(false, 0, current) + presets := []database.GetTemplatePresetsWithPrebuildsRow{ + defaultPreset, + } + + // Given: 2 pending prebuilt workspaces for the preset + pending := []database.CountPendingNonActivePrebuildsRow{{ + PresetID: uuid.NullUUID{ + UUID: defaultPreset.ID, + Valid: true, + }, + Count: 2, + }} + + // When: calculating the current preset's state + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, pending, nil, nil, clock, testutil.Logger(t)) + ps, err := snapshot.FilterByPreset(current.presetID) + require.NoError(t, err) + + // Then: it should create a cancel reconciliation action + actions, err := ps.CalculateActions(backoffInterval) + require.NoError(t, err) + expectedAction := []*prebuilds.ReconciliationActions{{ActionType: prebuilds.ActionTypeCancelPending}} + require.Equal(t, expectedAction, actions) + }) + + t.Run("NotCancelPendingPrebuildsActiveVersion", func(t *testing.T) { + t.Parallel() + + // Given: a preset from an active version + defaultPreset := preset(true, 0, current) + presets := []database.GetTemplatePresetsWithPrebuildsRow{ + defaultPreset, + } + + // Given: 2 pending prebuilt workspaces for the preset + pending := []database.CountPendingNonActivePrebuildsRow{{ + PresetID: uuid.NullUUID{ + UUID: defaultPreset.ID, + 
Valid: true, + }, + Count: 2, + }} + + // When: calculating the current preset's state + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, nil, pending, nil, nil, clock, testutil.Logger(t)) + ps, err := snapshot.FilterByPreset(current.presetID) + require.NoError(t, err) + + // Then: it should not create a cancel reconciliation action + actions, err := ps.CalculateActions(backoffInterval) + require.NoError(t, err) + var expectedAction []*prebuilds.ReconciliationActions + require.Equal(t, expectedAction, actions) + }) +} + // A new template version is created with a preset with prebuilds configured; while a prebuild is provisioning up or down, // the calculated actions should indicate the state correctly. func TestInProgressActions(t *testing.T) { @@ -460,7 +526,7 @@ func TestInProgressActions(t *testing.T) { } // WHEN: calculating the current preset's state. - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, quartz.NewMock(t), testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, nil, quartz.NewMock(t), testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -503,7 +569,7 @@ func TestExtraneous(t *testing.T) { var inProgress []database.CountInProgressPrebuildsRow // WHEN: calculating the current preset's state. - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, quartz.NewMock(t), testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, nil, quartz.NewMock(t), testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -683,7 +749,7 @@ func TestExpiredPrebuilds(t *testing.T) { } // WHEN: calculating the current preset's state. 
- snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, nil, nil, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, nil, nil, nil, nil, clock, testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -719,7 +785,7 @@ func TestDeprecated(t *testing.T) { var inProgress []database.CountInProgressPrebuildsRow // WHEN: calculating the current preset's state. - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, quartz.NewMock(t), testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, nil, nil, quartz.NewMock(t), testutil.Logger(t)) ps, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -772,7 +838,7 @@ func TestLatestBuildFailed(t *testing.T) { } // WHEN: calculating the current preset's state. - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, backoffs, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, running, inProgress, nil, backoffs, nil, clock, testutil.Logger(t)) psCurrent, err := snapshot.FilterByPreset(current.presetID) require.NoError(t, err) @@ -865,7 +931,7 @@ func TestMultiplePresetsPerTemplateVersion(t *testing.T) { }, } - snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, inProgress, nil, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, nil, nil, inProgress, nil, nil, nil, clock, testutil.Logger(t)) // Nothing has to be created for preset 1. { @@ -985,7 +1051,7 @@ func TestPrebuildScheduling(t *testing.T) { schedule(presets[1].ID, "* 14-16 * * 1-5", 5), } - snapshot := prebuilds.NewGlobalSnapshot(presets, schedules, nil, nil, nil, nil, clock, testutil.Logger(t)) + snapshot := prebuilds.NewGlobalSnapshot(presets, schedules, nil, nil, nil, nil, nil, clock, testutil.Logger(t)) // Check 1st preset. 
{ @@ -1093,6 +1159,7 @@ func TestCalculateDesiredInstances(t *testing.T) { nil, nil, nil, + 0, nil, false, quartz.NewMock(t), diff --git a/coderd/prometheusmetrics/prometheusmetrics.go b/coderd/prometheusmetrics/prometheusmetrics.go index ed55e4598dc21..525ec66c5a78a 100644 --- a/coderd/prometheusmetrics/prometheusmetrics.go +++ b/coderd/prometheusmetrics/prometheusmetrics.go @@ -181,10 +181,8 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R done := make(chan struct{}) updateWorkspaceMetrics := func() { - ws, err := db.GetWorkspaces(ctx, database.GetWorkspacesParams{ - Deleted: false, - WithSummary: false, - }) + // Don't count deleted workspaces as part of these metrics. + ws, err := db.GetWorkspacesForWorkspaceMetrics(ctx) if err != nil { if errors.Is(err, sql.ErrNoRows) { workspaceLatestBuildTotals.Reset() @@ -346,87 +344,66 @@ func Agents(ctx context.Context, logger slog.Logger, registerer prometheus.Regis timer := prometheus.NewTimer(metricsCollectorAgents) derpMap := derpMapFn() - workspaceRows, err := db.GetWorkspaces(ctx, database.GetWorkspacesParams{ - AgentInactiveDisconnectTimeoutSeconds: int64(agentInactiveDisconnectTimeout.Seconds()), - }) + workspaceAgents, err := db.GetWorkspaceAgentsForMetrics(ctx) if err != nil { - logger.Error(ctx, "can't get workspace rows", slog.Error(err)) + logger.Error(ctx, "can't get workspace agents", slog.Error(err)) goto done } - for _, workspace := range workspaceRows { - templateName := workspace.TemplateName - templateVersionName := workspace.TemplateVersionName.String - if !workspace.TemplateVersionName.Valid { + for _, agent := range workspaceAgents { + // Collect information about agents + templateVersionName := agent.TemplateVersionName.String + if !agent.TemplateVersionName.Valid { templateVersionName = "unknown" } + agentsGauge.WithLabelValues(VectorOperationAdd, 1, agent.OwnerUsername, agent.WorkspaceName, agent.TemplateName, templateVersionName) - // username := + 
connectionStatus := agent.WorkspaceAgent.Status(agentInactiveDisconnectTimeout) + node := (*coordinator.Load()).Node(agent.WorkspaceAgent.ID) - agents, err := db.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspace.ID) - if err != nil { - logger.Error(ctx, "can't get workspace agents", slog.F("workspace_id", workspace.ID), slog.Error(err)) - agentsGauge.WithLabelValues(VectorOperationAdd, 0, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) - continue + tailnetNode := "unknown" + if node != nil { + tailnetNode = node.ID.String() } - if len(agents) == 0 { - logger.Debug(ctx, "workspace agents are unavailable", slog.F("workspace_id", workspace.ID)) - agentsGauge.WithLabelValues(VectorOperationAdd, 0, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) - continue - } - - for _, agent := range agents { - // Collect information about agents - agentsGauge.WithLabelValues(VectorOperationAdd, 1, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) - - connectionStatus := agent.Status(agentInactiveDisconnectTimeout) - node := (*coordinator.Load()).Node(agent.ID) - - tailnetNode := "unknown" - if node != nil { - tailnetNode = node.ID.String() - } - - agentsConnectionsGauge.WithLabelValues(VectorOperationSet, 1, agent.Name, workspace.OwnerUsername, workspace.Name, string(connectionStatus.Status), string(agent.LifecycleState), tailnetNode) - - if node == nil { - logger.Debug(ctx, "can't read in-memory node for agent", slog.F("agent_id", agent.ID)) - } else { - // Collect information about connection latencies - for rawRegion, latency := range node.DERPLatency { - regionParts := strings.SplitN(rawRegion, "-", 2) - regionID, err := strconv.Atoi(regionParts[0]) - if err != nil { - logger.Error(ctx, "can't convert DERP region", slog.F("agent_id", agent.ID), slog.F("raw_region", rawRegion), slog.Error(err)) - continue - } + agentsConnectionsGauge.WithLabelValues(VectorOperationSet, 1, 
agent.WorkspaceAgent.Name, agent.OwnerUsername, agent.WorkspaceName, string(connectionStatus.Status), string(agent.WorkspaceAgent.LifecycleState), tailnetNode) + + if node == nil { + logger.Debug(ctx, "can't read in-memory node for agent", slog.F("agent_id", agent.WorkspaceAgent.ID)) + } else { + // Collect information about connection latencies + for rawRegion, latency := range node.DERPLatency { + regionParts := strings.SplitN(rawRegion, "-", 2) + regionID, err := strconv.Atoi(regionParts[0]) + if err != nil { + logger.Error(ctx, "can't convert DERP region", slog.F("agent_id", agent.WorkspaceAgent.ID), slog.F("raw_region", rawRegion), slog.Error(err)) + continue + } - region, found := derpMap.Regions[regionID] - if !found { - // It's possible that a workspace agent is using an old DERPMap - // and reports regions that do not exist. If that's the case, - // report the region as unknown! - region = &tailcfg.DERPRegion{ - RegionID: regionID, - RegionName: fmt.Sprintf("Unnamed %d", regionID), - } + region, found := derpMap.Regions[regionID] + if !found { + // It's possible that a workspace agent is using an old DERPMap + // and reports regions that do not exist. If that's the case, + // report the region as unknown! 
+ region = &tailcfg.DERPRegion{ + RegionID: regionID, + RegionName: fmt.Sprintf("Unnamed %d", regionID), } - - agentsConnectionLatenciesGauge.WithLabelValues(VectorOperationSet, latency, agent.Name, workspace.OwnerUsername, workspace.Name, region.RegionName, fmt.Sprintf("%v", node.PreferredDERP == regionID)) } - } - // Collect information about registered applications - apps, err := db.GetWorkspaceAppsByAgentID(ctx, agent.ID) - if err != nil && !errors.Is(err, sql.ErrNoRows) { - logger.Error(ctx, "can't get workspace apps", slog.F("agent_id", agent.ID), slog.Error(err)) - continue + agentsConnectionLatenciesGauge.WithLabelValues(VectorOperationSet, latency, agent.WorkspaceAgent.Name, agent.OwnerUsername, agent.WorkspaceName, region.RegionName, fmt.Sprintf("%v", node.PreferredDERP == regionID)) } + } - for _, app := range apps { - agentsAppsGauge.WithLabelValues(VectorOperationAdd, 1, agent.Name, workspace.OwnerUsername, workspace.Name, app.DisplayName, string(app.Health)) - } + // Collect information about registered applications + apps, err := db.GetWorkspaceAppsByAgentID(ctx, agent.WorkspaceAgent.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + logger.Error(ctx, "can't get workspace apps", slog.F("agent_id", agent.WorkspaceAgent.ID), slog.Error(err)) + continue + } + + for _, app := range apps { + agentsAppsGauge.WithLabelValues(VectorOperationAdd, 1, agent.WorkspaceAgent.Name, agent.OwnerUsername, agent.WorkspaceName, app.DisplayName, string(app.Health)) } } diff --git a/coderd/promoauth/oauth2_test.go b/coderd/promoauth/oauth2_test.go index 5aeec4f0fb949..ab8e7c33146f7 100644 --- a/coderd/promoauth/oauth2_test.go +++ b/coderd/promoauth/oauth2_test.go @@ -94,7 +94,8 @@ func TestInstrument(t *testing.T) { must[*url.URL](t)(idp.IssuerURL().Parse("/.well-known/openid-configuration")).String(), nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) require.NoError(t, err) _ = 
resp.Body.Close() diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go index 190be34a7ab5a..2e00796d1cd64 100644 --- a/coderd/provisionerdserver/provisionerdserver.go +++ b/coderd/provisionerdserver/provisionerdserver.go @@ -597,6 +597,11 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo return nil, failJob(fmt.Sprintf("get workspace build parameters: %s", err)) } + task, err := s.Database.GetTaskByWorkspaceID(ctx, workspaceBuild.WorkspaceID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, xerrors.Errorf("get task by workspace id: %w", err) + } + dbExternalAuthProviders := []database.ExternalAuthProvider{} err = json.Unmarshal(templateVersion.ExternalAuthProviders, &dbExternalAuthProviders) if err != nil { @@ -721,6 +726,8 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo WorkspaceOwnerRbacRoles: ownerRbacRoles, RunningAgentAuthTokens: runningAgentAuthTokens, PrebuiltWorkspaceBuildStage: input.PrebuiltWorkspaceBuildStage, + TaskId: task.ID.String(), + TaskPrompt: task.Prompt, }, LogLevel: input.LogLevel, }, @@ -1954,18 +1961,41 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } appIDs := make([]string, 0) + agentIDByAppID := make(map[string]uuid.UUID) agentTimeouts := make(map[time.Duration]bool) // A set of agent timeouts. // This could be a bulk insert to improve performance. for _, protoResource := range jobType.WorkspaceBuild.Resources { - for _, protoAgent := range protoResource.Agents { + for _, protoAgent := range protoResource.GetAgents() { + if protoAgent == nil { + continue + } + // By default InsertWorkspaceResource ignores the protoAgent.Id + // and generates a new one, but we will insert these using the + // InsertWorkspaceResourceWithAgentIDsFromProto option so that + // we can properly map agent IDs to app IDs. This is needed for + // task linking. 
+ agentID := uuid.New() + protoAgent.Id = agentID.String() + dur := time.Duration(protoAgent.GetConnectionTimeoutSeconds()) * time.Second agentTimeouts[dur] = true for _, app := range protoAgent.GetApps() { appIDs = append(appIDs, app.GetId()) + agentIDByAppID[app.GetId()] = agentID } } - err = InsertWorkspaceResource(ctx, db, job.ID, workspaceBuild.Transition, protoResource, telemetrySnapshot) + err = InsertWorkspaceResource( + ctx, + db, + job.ID, + workspaceBuild.Transition, + protoResource, + telemetrySnapshot, + // Ensure that the agent IDs we set previously + // are written to the database. + InsertWorkspaceResourceWithAgentIDsFromProto(), + ) if err != nil { return xerrors.Errorf("insert provisioner job: %w", err) } @@ -1976,27 +2006,38 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } } - var sidebarAppID uuid.NullUUID + var taskAppID uuid.NullUUID + var taskAgentID uuid.NullUUID var hasAITask bool - var warnUnknownSidebarAppID bool + var warnUnknownTaskAppID bool if tasks := jobType.WorkspaceBuild.GetAiTasks(); len(tasks) > 0 { hasAITask = true task := tasks[0] - if task == nil || task.GetSidebarApp() == nil || len(task.GetSidebarApp().GetId()) == 0 { - return xerrors.Errorf("update ai task: sidebar app is nil or empty") + if task == nil { + return xerrors.Errorf("update ai task: task is nil") } - sidebarTaskID := task.GetSidebarApp().GetId() - if !slices.Contains(appIDs, sidebarTaskID) { - warnUnknownSidebarAppID = true + appID := task.GetAppId() + if appID == "" && task.GetSidebarApp() != nil { + appID = task.GetSidebarApp().GetId() + } + if appID == "" { + return xerrors.Errorf("update ai task: app id is empty") } - id, err := uuid.Parse(task.GetSidebarApp().GetId()) + if !slices.Contains(appIDs, appID) { + warnUnknownTaskAppID = true + } + + id, err := uuid.Parse(appID) if err != nil { - return xerrors.Errorf("parse sidebar app id: %w", err) + return xerrors.Errorf("parse app id: %w", err) } - sidebarAppID = 
uuid.NullUUID{UUID: id, Valid: true} + taskAppID = uuid.NullUUID{UUID: id, Valid: true} + + agentID, ok := agentIDByAppID[appID] + taskAgentID = uuid.NullUUID{UUID: agentID, Valid: ok} } // This is a hacky workaround for the issue with tasks 'disappearing' on stop: @@ -2008,19 +2049,19 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro BuildNumber: workspaceBuild.BuildNumber - 1, }); err == nil { hasAITask = prevBuild.HasAITask.Bool - sidebarAppID = prevBuild.AITaskSidebarAppID - warnUnknownSidebarAppID = false - s.Logger.Debug(ctx, "task workaround: reused has_ai_task and sidebar_app_id from previous build to keep track of task", + taskAppID = prevBuild.AITaskSidebarAppID + warnUnknownTaskAppID = false + s.Logger.Debug(ctx, "task workaround: reused has_ai_task and app_id from previous build to keep track of task", slog.F("job_id", job.ID.String()), slog.F("build_number", prevBuild.BuildNumber), slog.F("workspace_id", workspace.ID), slog.F("workspace_build_id", workspaceBuild.ID), slog.F("transition", string(workspaceBuild.Transition)), - slog.F("sidebar_app_id", sidebarAppID.UUID), + slog.F("sidebar_app_id", taskAppID.UUID), slog.F("has_ai_task", hasAITask), ) } else { - s.Logger.Error(ctx, "task workaround: tracking via has_ai_task and sidebar_app from previous build failed", + s.Logger.Error(ctx, "task workaround: tracking via has_ai_task and app_id from previous build failed", slog.Error(err), slog.F("job_id", job.ID.String()), slog.F("workspace_id", workspace.ID), @@ -2030,14 +2071,14 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } } - if warnUnknownSidebarAppID { + if warnUnknownTaskAppID { // Ref: https://github.com/coder/coder/issues/18776 // This can happen for a number of reasons: // 1. Misconfigured template // 2. Count=0 on the agent due to stop transition, meaning the associated coder_app was not inserted. // Failing the build at this point is not ideal, so log a warning instead. 
- s.Logger.Warn(ctx, "unknown ai_task_sidebar_app_id", - slog.F("ai_task_sidebar_app_id", sidebarAppID.UUID.String()), + s.Logger.Warn(ctx, "unknown ai_task_app_id", + slog.F("ai_task_app_id", taskAppID.UUID.String()), slog.F("job_id", job.ID.String()), slog.F("workspace_id", workspace.ID), slog.F("workspace_build_id", workspaceBuild.ID), @@ -2051,13 +2092,13 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro Level: []database.LogLevel{database.LogLevelWarn, database.LogLevelWarn, database.LogLevelWarn, database.LogLevelWarn}, Stage: []string{"Cleaning Up", "Cleaning Up", "Cleaning Up", "Cleaning Up"}, Output: []string{ - fmt.Sprintf("Unknown ai_task_sidebar_app_id %q. This workspace will be unable to run AI tasks. This may be due to a template configuration issue, please check with the template author.", sidebarAppID.UUID.String()), + fmt.Sprintf("Unknown ai_task_app_id %q. This workspace will be unable to run AI tasks. This may be due to a template configuration issue, please check with the template author.", taskAppID.UUID.String()), "Template author: double-check the following:", " - You have associated the coder_ai_task with a valid coder_app in your template (ref: https://registry.terraform.io/providers/coder/coder/latest/docs/resources/ai_task).", " - You have associated the coder_agent with at least one other compute resource. Agents with no other associated resources are not inserted into the database.", }, }); err != nil { - s.Logger.Error(ctx, "insert provisioner job log for ai task sidebar app id warning", + s.Logger.Error(ctx, "insert provisioner job log for ai task app id warning", slog.F("job_id", jobID), slog.F("workspace_id", workspace.ID), slog.F("workspace_build_id", workspaceBuild.ID), @@ -2066,7 +2107,7 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } // Important: reset hasAITask and sidebarAppID so that we don't run into a fk constraint violation. 
hasAITask = false - sidebarAppID = uuid.NullUUID{} + taskAppID = uuid.NullUUID{} } if hasAITask && workspaceBuild.Transition == database.WorkspaceTransitionStart { @@ -2091,6 +2132,27 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } } + if task, err := db.GetTaskByWorkspaceID(ctx, workspace.ID); err == nil { + // Irrespective of whether the agent or sidebar app is present, + // perform the upsert to ensure a link between the task and + // workspace build. Linking the task to the build is typically + // already established by wsbuilder. + _, err = db.UpsertTaskWorkspaceApp( + ctx, + database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: workspaceBuild.BuildNumber, + WorkspaceAgentID: taskAgentID, + WorkspaceAppID: taskAppID, + }, + ) + if err != nil { + return xerrors.Errorf("upsert task workspace app: %w", err) + } + } else if !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("get task by workspace id: %w", err) + } + // Regardless of whether there is an AI task or not, update the field to indicate one way or the other since it // always defaults to nil. ONLY if has_ai_task=true MUST ai_task_sidebar_app_id be set. 
if err := db.UpdateWorkspaceBuildFlagsByID(ctx, database.UpdateWorkspaceBuildFlagsByIDParams{ @@ -2103,7 +2165,7 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro Bool: hasExternalAgent, Valid: true, }, - SidebarAppID: sidebarAppID, + SidebarAppID: taskAppID, UpdatedAt: now, }); err != nil { return xerrors.Errorf("update workspace build ai tasks and external agent flag: %w", err) @@ -2216,6 +2278,14 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro if err != nil { return xerrors.Errorf("update workspace deleted: %w", err) } + if workspace.TaskID.Valid { + if _, err := db.DeleteTask(ctx, database.DeleteTaskParams{ + ID: workspace.TaskID.UUID, + DeletedAt: dbtime.Now(), + }); err != nil && !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("delete task related to workspace: %w", err) + } + } return nil }, nil) @@ -2561,7 +2631,28 @@ func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store, return nil } -func InsertWorkspaceResource(ctx context.Context, db database.Store, jobID uuid.UUID, transition database.WorkspaceTransition, protoResource *sdkproto.Resource, snapshot *telemetry.Snapshot) error { +type insertWorkspaceResourceOptions struct { + useAgentIDsFromProto bool +} + +// InsertWorkspaceResourceOption represents a functional option for +// InsertWorkspaceResource. +type InsertWorkspaceResourceOption func(*insertWorkspaceResourceOptions) + +// InsertWorkspaceResourceWithAgentIDsFromProto allows inserting agents into the +// database using the agent IDs defined in the proto resource. 
+func InsertWorkspaceResourceWithAgentIDsFromProto() InsertWorkspaceResourceOption { + return func(opts *insertWorkspaceResourceOptions) { + opts.useAgentIDsFromProto = true + } +} + +func InsertWorkspaceResource(ctx context.Context, db database.Store, jobID uuid.UUID, transition database.WorkspaceTransition, protoResource *sdkproto.Resource, snapshot *telemetry.Snapshot, opt ...InsertWorkspaceResourceOption) error { + opts := &insertWorkspaceResourceOptions{} + for _, o := range opt { + o(opts) + } + resource, err := db.InsertWorkspaceResource(ctx, database.InsertWorkspaceResourceParams{ ID: uuid.New(), CreatedAt: dbtime.Now(), @@ -2658,6 +2749,12 @@ func InsertWorkspaceResource(ctx context.Context, db database.Store, jobID uuid. } agentID := uuid.New() + if opts.useAgentIDsFromProto { + agentID, err = uuid.Parse(prAgent.Id) + if err != nil { + return xerrors.Errorf("invalid agent ID format; must be uuid: %w", err) + } + } dbAgent, err := db.InsertWorkspaceAgent(ctx, database.InsertWorkspaceAgentParams{ ID: agentID, ParentID: uuid.NullUUID{}, diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go index 6409ba9b1d0df..8d55e1529289f 100644 --- a/coderd/provisionerdserver/provisionerdserver_test.go +++ b/coderd/provisionerdserver/provisionerdserver_test.go @@ -334,6 +334,16 @@ func TestAcquireJob(t *testing.T) { Transition: database.WorkspaceTransitionStart, Reason: database.BuildReasonInitiator, }) + task := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: pd.OrganizationID, + OwnerID: user.ID, + WorkspaceID: uuid.NullUUID{Valid: true, UUID: workspace.ID}, + TemplateVersionID: version.ID, + TemplateParameters: json.RawMessage("{}"), + Prompt: "Build me a REST API", + CreatedAt: dbtime.Now(), + DeletedAt: sql.NullTime{}, + }) var agent database.WorkspaceAgent if prebuiltWorkspaceBuildStage == sdkproto.PrebuiltWorkspaceBuildStage_CLAIM { @@ -446,6 +456,8 @@ func TestAcquireJob(t *testing.T) { 
WorkspaceBuildId: build.ID.String(), WorkspaceOwnerLoginType: string(user.LoginType), WorkspaceOwnerRbacRoles: []*sdkproto.Role{{Name: rbac.RoleOrgMember(), OrgId: pd.OrganizationID.String()}, {Name: "member", OrgId: ""}, {Name: rbac.RoleOrgAuditor(), OrgId: pd.OrganizationID.String()}}, + TaskId: task.ID.String(), + TaskPrompt: task.Prompt, } if prebuiltWorkspaceBuildStage == sdkproto.PrebuiltWorkspaceBuildStage_CLAIM { // For claimed prebuilds, we expect the prebuild state to be set to CLAIM @@ -2850,6 +2862,8 @@ func TestCompleteJob(t *testing.T) { seedFunc func(context.Context, testing.TB, database.Store) error // If you need to insert other resources transition database.WorkspaceTransition input *proto.CompletedJob_WorkspaceBuild + isTask bool + expectTaskStatus database.TaskStatus expectHasAiTask bool expectUsageEvent bool } @@ -2862,6 +2876,7 @@ func TestCompleteJob(t *testing.T) { input: &proto.CompletedJob_WorkspaceBuild{ // No AiTasks defined. }, + isTask: false, expectHasAiTask: false, expectUsageEvent: false, }, @@ -2894,6 +2909,8 @@ func TestCompleteJob(t *testing.T) { }, }, }, + isTask: true, + expectTaskStatus: database.TaskStatusInitializing, expectHasAiTask: true, expectUsageEvent: true, }, @@ -2912,6 +2929,8 @@ func TestCompleteJob(t *testing.T) { }, }, }, + isTask: true, + expectTaskStatus: database.TaskStatusInitializing, expectHasAiTask: false, expectUsageEvent: false, }, @@ -2944,6 +2963,8 @@ func TestCompleteJob(t *testing.T) { }, }, }, + isTask: true, + expectTaskStatus: database.TaskStatusPaused, expectHasAiTask: true, expectUsageEvent: false, }, @@ -2955,6 +2976,8 @@ func TestCompleteJob(t *testing.T) { AiTasks: []*sdkproto.AITask{}, Resources: []*sdkproto.Resource{}, }, + isTask: true, + expectTaskStatus: database.TaskStatusPaused, expectHasAiTask: true, expectUsageEvent: false, }, @@ -2992,6 +3015,15 @@ func TestCompleteJob(t *testing.T) { OwnerID: user.ID, OrganizationID: pd.OrganizationID, }) + var genTask database.Task + if tc.isTask 
{ + genTask = dbgen.Task(t, db, database.TaskTable{ + OwnerID: user.ID, + OrganizationID: pd.OrganizationID, + WorkspaceID: uuid.NullUUID{UUID: workspaceTable.ID, Valid: true}, + TemplateVersionID: version.ID, + }) + } ctx := testutil.Context(t, testutil.WaitShort) if tc.seedFunc != nil { @@ -3060,6 +3092,12 @@ func TestCompleteJob(t *testing.T) { require.True(t, build.HasAITask.Valid) // We ALWAYS expect a value to be set, therefore not nil, i.e. valid = true. require.Equal(t, tc.expectHasAiTask, build.HasAITask.Bool) + if tc.isTask { + task, err := db.GetTaskByID(ctx, genTask.ID) + require.NoError(t, err) + require.Equal(t, tc.expectTaskStatus, task.Status) + } + if tc.expectHasAiTask && build.Transition != database.WorkspaceTransitionStop { require.Equal(t, sidebarAppID, build.AITaskSidebarAppID.UUID.String()) } diff --git a/coderd/provisionerjobs.go b/coderd/provisionerjobs.go index e9ab5260988d4..68f2207f2f90c 100644 --- a/coderd/provisionerjobs.go +++ b/coderd/provisionerjobs.go @@ -76,6 +76,7 @@ func (api *API) provisionerJob(rw http.ResponseWriter, r *http.Request) { // @Param ids query []string false "Filter results by job IDs" format(uuid) // @Param status query codersdk.ProvisionerJobStatus false "Filter results by status" enums(pending,running,succeeded,canceling,canceled,failed) // @Param tags query object false "Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'})" +// @Param initiator query string false "Filter results by initiator" format(uuid) // @Success 200 {array} codersdk.ProvisionerJob // @Router /organizations/{organization}/provisionerjobs [get] func (api *API) provisionerJobs(rw http.ResponseWriter, r *http.Request) { @@ -110,6 +111,7 @@ func (api *API) handleAuthAndFetchProvisionerJobs(rw http.ResponseWriter, r *htt ids = p.UUIDs(qp, nil, "ids") } tags := p.JSONStringMap(qp, database.StringMap{}, "tags") + initiatorID := p.UUID(qp, uuid.Nil, "initiator") p.ErrorExcessParams(qp) if len(p.Errors) > 0 { 
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ @@ -125,6 +127,7 @@ func (api *API) handleAuthAndFetchProvisionerJobs(rw http.ResponseWriter, r *htt Limit: sql.NullInt32{Int32: limit, Valid: limit > 0}, IDs: ids, Tags: tags, + InitiatorID: initiatorID, }) if err != nil { if httpapi.Is404Error(err) { @@ -355,6 +358,7 @@ func convertProvisionerJob(pj database.GetProvisionerJobsByIDsWithQueuePositionR job := codersdk.ProvisionerJob{ ID: provisionerJob.ID, OrganizationID: provisionerJob.OrganizationID, + InitiatorID: provisionerJob.InitiatorID, CreatedAt: provisionerJob.CreatedAt, Type: codersdk.ProvisionerJobType(provisionerJob.Type), Error: provisionerJob.Error.String, diff --git a/coderd/provisionerjobs_test.go b/coderd/provisionerjobs_test.go index 98da3ae5584e6..91096e3b64905 100644 --- a/coderd/provisionerjobs_test.go +++ b/coderd/provisionerjobs_test.go @@ -58,6 +58,8 @@ func TestProvisionerJobs(t *testing.T) { StartedAt: sql.NullTime{Time: dbtime.Now(), Valid: true}, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: json.RawMessage(`{"workspace_build_id":"` + wbID.String() + `"}`), + InitiatorID: member.ID, + Tags: database.StringMap{"initiatorTest": "true"}, }) dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ ID: wbID, @@ -71,6 +73,7 @@ func TestProvisionerJobs(t *testing.T) { dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{ OrganizationID: owner.OrganizationID, Tags: database.StringMap{"count": strconv.Itoa(i)}, + InitiatorID: owner.UserID, }) } @@ -165,6 +168,94 @@ func TestProvisionerJobs(t *testing.T) { require.Len(t, jobs, 1) }) + t.Run("Initiator", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: member.ID.String(), + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(jobs), 1) + require.Equal(t, member.ID, 
jobs[0].InitiatorID) + }) + + t.Run("InitiatorWithOtherFilters", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Test filtering by initiator ID combined with status filter + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: owner.UserID.String(), + Status: []codersdk.ProvisionerJobStatus{codersdk.ProvisionerJobSucceeded}, + }) + require.NoError(t, err) + + // Verify all returned jobs have the correct initiator and status + for _, job := range jobs { + require.Equal(t, owner.UserID, job.InitiatorID) + require.Equal(t, codersdk.ProvisionerJobSucceeded, job.Status) + } + }) + + t.Run("InitiatorWithLimit", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Test filtering by initiator ID with limit + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: owner.UserID.String(), + Limit: 1, + }) + require.NoError(t, err) + require.Len(t, jobs, 1) + + // Verify the returned job has the correct initiator + require.Equal(t, owner.UserID, jobs[0].InitiatorID) + }) + + t.Run("InitiatorWithTags", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Test filtering by initiator ID combined with tags + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: member.ID.String(), + Tags: map[string]string{"initiatorTest": "true"}, + }) + require.NoError(t, err) + require.Len(t, jobs, 1) + + // Verify the returned job has the correct initiator and tags + require.Equal(t, member.ID, jobs[0].InitiatorID) + require.Equal(t, "true", jobs[0].Tags["initiatorTest"]) + }) + + t.Run("InitiatorNotFound", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // 
Test with non-existent initiator ID + nonExistentID := uuid.New() + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: nonExistentID.String(), + }) + require.NoError(t, err) + require.Len(t, jobs, 0) + }) + + t.Run("InitiatorNil", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Test with nil initiator ID (should return all jobs) + jobs, err := templateAdminClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: "", + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(jobs), 50) // Should return all jobs (up to default limit) + }) + t.Run("Limit", func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitMedium) @@ -185,6 +276,17 @@ func TestProvisionerJobs(t *testing.T) { require.Error(t, err) require.Len(t, jobs, 0) }) + + t.Run("MemberDeniedWithInitiator", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + // Member should not be able to access jobs even with initiator filter + jobs, err := memberClient.OrganizationProvisionerJobs(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerJobsOptions{ + Initiator: member.ID.String(), + }) + require.Error(t, err) + require.Len(t, jobs, 0) + }) }) // Ensures that when a provisioner job is in the succeeded state, diff --git a/coderd/provisionerkey/provisionerkey.go b/coderd/provisionerkey/provisionerkey.go index bfd70fb0295e0..046222658eb2e 100644 --- a/coderd/provisionerkey/provisionerkey.go +++ b/coderd/provisionerkey/provisionerkey.go @@ -1,15 +1,14 @@ package provisionerkey import ( - "crypto/sha256" "crypto/subtle" "github.com/google/uuid" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" - "github.com/coder/coder/v2/cryptorand" ) const 
( @@ -17,7 +16,7 @@ const ( ) func New(organizationID uuid.UUID, name string, tags map[string]string) (database.InsertProvisionerKeyParams, string, error) { - secret, err := cryptorand.String(secretLength) + secret, hashed, err := apikey.GenerateSecret(secretLength) if err != nil { return database.InsertProvisionerKeyParams{}, "", xerrors.Errorf("generate secret: %w", err) } @@ -31,7 +30,7 @@ func New(organizationID uuid.UUID, name string, tags map[string]string) (databas CreatedAt: dbtime.Now(), OrganizationID: organizationID, Name: name, - HashedSecret: HashSecret(secret), + HashedSecret: hashed, Tags: tags, }, secret, nil } @@ -45,8 +44,7 @@ func Validate(token string) error { } func HashSecret(secret string) []byte { - h := sha256.Sum256([]byte(secret)) - return h[:] + return apikey.HashSecret(secret) } func Compare(a []byte, b []byte) bool { diff --git a/coderd/rbac/POLICY.md b/coderd/rbac/POLICY.md new file mode 100644 index 0000000000000..b3ebdfe9d939f --- /dev/null +++ b/coderd/rbac/POLICY.md @@ -0,0 +1,104 @@ +# Rego authorization policy + +## Code style + +It's a good idea to consult the [Rego style guide](https://docs.styra.com/opa/rego-style-guide). The "Variables and Data Types" section in particular has some helpful and non-obvious advice in it. + +## Debugging + +Open Policy Agent provides a CLI and a playground that can be used for evaluating, formatting, testing, and linting policies. + +### CLI + +Below are some helpful commands you can use for debugging. 
Each of these checks gets a "vote", which must be one of three values:
This is to allow issuing tokens for a subject that has a subset of the full subject's permissions.
Any permission granted or denied at the site-wide level is absolute. After checking the site-wide level, depending on whether the resource is owned by an organization, it will check the other levels.
The table varies depending on whether the resource belongs to an organization.
+ +If the resource is owned by an organization, such as a template or a workspace: + +| Role (example) | Site | Org | OrgMember | Result | +|--------------------------|------|------|-----------|--------| +| site-admin | Y | YN\_ | YN\_ | Y | +| negative-site-permission | N | YN\_ | YN\_ | N | +| org-admin | \_ | Y | YN\_ | Y | +| non-org-member | \_ | N | YN\_ | N | +| member-owned | \_ | \_ | Y | Y | +| not-member-owned | \_ | \_ | N | N | +| unauthenticated | \_ | \_ | \_ | N | + +If the resource is not owned by an organization: -| Role (example) | Site | Org | User | Result | -|-----------------|------|------|------|--------| -| site-admin | Y | YN\_ | YN\_ | Y | -| no-permission | N | YN\_ | YN\_ | N | -| org-admin | \_ | Y | YN\_ | Y | -| non-org-member | \_ | N | YN\_ | N | -| user | \_ | \_ | Y | Y | -| | \_ | \_ | N | N | -| unauthenticated | \_ | \_ | \_ | N | +| Role (example) | Site | User | Result | +|--------------------------|------|------|--------| +| site-admin | Y | YN\_ | Y | +| negative-site-permission | N | YN\_ | N | +| user-owned | \_ | Y | Y | +| not-user-owned | \_ | N | N | +| unauthenticated | \_ | \_ | N | ## Scopes @@ -91,15 +137,17 @@ The use case for specifying this type of permission in a role is limited, and do Example of a scope for a workspace agent token, using an `allow_list` containing a single resource id. ```javascript - "scope": { - "name": "workspace_agent", - "display_name": "Workspace_Agent", - // The ID of the given workspace the agent token correlates to. - "allow_list": ["10d03e62-7703-4df5-a358-4f76577d4e2f"], - "site": [/* ... perms ... */], - "org": {/* ... perms ... */}, - "user": [/* ... perms ... */] - } +{ + "scope": { + "name": "workspace_agent", + "display_name": "Workspace_Agent", + // The ID of the given workspace the agent token correlates to. + "allow_list": ["10d03e62-7703-4df5-a358-4f76577d4e2f"], + "site": [/* ... perms ... */], + "org": {/* ... perms ... */}, + "user": [/* ... perms ... 
*/] + } +} ``` ## OPA (Open Policy Agent) @@ -124,31 +172,31 @@ To learn more about OPA and Rego, see https://www.openpolicyagent.org/docs. There are two types of evaluation in OPA: - **Full evaluation**: Produces a decision that can be enforced. -This is the default evaluation mode, where OPA evaluates the policy using `input` data that contains all known values and returns output data with the `allow` variable. + This is the default evaluation mode, where OPA evaluates the policy using `input` data that contains all known values and returns output data with the `allow` variable. - **Partial evaluation**: Produces a new policy that can be evaluated later when the _unknowns_ become _known_. -This is an optimization in OPA where it evaluates as much of the policy as possible without resolving expressions that depend on _unknown_ values from the `input`. -To learn more about partial evaluation, see this [OPA blog post](https://blog.openpolicyagent.org/partial-evaluation-162750eaf422). + This is an optimization in OPA where it evaluates as much of the policy as possible without resolving expressions that depend on _unknown_ values from the `input`. + To learn more about partial evaluation, see this [OPA blog post](https://blog.openpolicyagent.org/partial-evaluation-162750eaf422). Application of Full and Partial evaluation in `rbac` package: - **Full Evaluation** is handled by the `RegoAuthorizer.Authorize()` method in [`authz.go`](authz.go). -This method determines whether a subject (user) can perform a specific action on an object. -It performs a full evaluation of the Rego policy, which returns the `allow` variable to decide whether access is granted (`true`) or denied (`false` or undefined). + This method determines whether a subject (user) can perform a specific action on an object. + It performs a full evaluation of the Rego policy, which returns the `allow` variable to decide whether access is granted (`true`) or denied (`false` or undefined). 
- **Partial Evaluation** is handled by the `RegoAuthorizer.Prepare()` method in [`authz.go`](authz.go). -This method compiles OPA’s partial evaluation queries into `SQL WHERE` clauses. -These clauses are then used to enforce authorization directly in database queries, rather than in application code. + This method compiles OPA’s partial evaluation queries into `SQL WHERE` clauses. + These clauses are then used to enforce authorization directly in database queries, rather than in application code. Authorization Patterns: - Fetch-then-authorize: an object is first retrieved from the database, and a single authorization check is performed using full evaluation via `Authorize()`. - Authorize-while-fetching: Partial evaluation via `Prepare()` is used to inject SQL filters directly into queries, allowing efficient authorization of many objects of the same type. -`dbauthz` methods that enforce authorization directly in the SQL query are prefixed with `Authorized`, for example, `GetAuthorizedWorkspaces`. + `dbauthz` methods that enforce authorization directly in the SQL query are prefixed with `Authorized`, for example, `GetAuthorizedWorkspaces`. ## Testing - OPA Playground: https://play.openpolicyagent.org/ - OPA CLI (`opa eval`): useful for experimenting with different inputs and understanding how the policy behaves under various conditions. -`opa eval` returns the constraints that must be satisfied for a rule to evaluate to `true`. + `opa eval` returns the constraints that must be satisfied for a rule to evaluate to `true`. - `opa eval` requires an `input.json` file containing the input data to run the policy against. You can generate this file using the [gen_input.go](../../scripts/rbac-authz/gen_input.go) script. Note: the script currently produces a fixed input. You may need to tweak it for your specific use case. 
@@ -196,12 +244,12 @@ The script [`benchmark_authz.sh`](../../scripts/rbac-authz/benchmark_authz.sh) r - To run benchmark on the current branch: - ```bash - benchmark_authz.sh --single - ``` + ```bash + benchmark_authz.sh --single + ``` - To compare benchmarks between 2 branches: - ```bash - benchmark_authz.sh --compare main prebuild_policy - ``` + ```bash + benchmark_authz.sh --compare main prebuild_policy + ``` diff --git a/coderd/rbac/allowlist.go b/coderd/rbac/allowlist.go new file mode 100644 index 0000000000000..387d84ee2cab9 --- /dev/null +++ b/coderd/rbac/allowlist.go @@ -0,0 +1,304 @@ +package rbac + +import ( + "slices" + "sort" + "strings" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac/policy" +) + +// maxAllowListEntries caps normalized allow lists to a manageable size. This +// limit is intentionally arbitrary—just high enough for current use cases—so we +// can revisit it without implying any semantic contract. +const maxAllowListEntries = 128 + +// ParseAllowListEntry parses a single allow-list entry string in the form +// "*:*", "<resource_type>:*", or "<resource_type>:<id>" into an +// AllowListElement with validation. 
+func ParseAllowListEntry(s string) (AllowListElement, error) { + s = strings.TrimSpace(strings.ToLower(s)) + res, id, ok := ParseResourceAction(s) + if !ok { + return AllowListElement{}, xerrors.Errorf("invalid allow_list entry %q: want <resource_type>:<id>", s) + } + + return NewAllowListElement(res, id) +} + +func NewAllowListElement(resourceType string, id string) (AllowListElement, error) { + if resourceType != policy.WildcardSymbol { + if _, ok := policy.RBACPermissions[resourceType]; !ok { + return AllowListElement{}, xerrors.Errorf("unknown resource type %q", resourceType) + } + } + if id != policy.WildcardSymbol { + if _, err := uuid.Parse(id); err != nil { + return AllowListElement{}, xerrors.Errorf("invalid %s ID (must be UUID): %q", resourceType, id) + } + } + + return AllowListElement{Type: resourceType, ID: id}, nil +} + +// ParseAllowList parses, validates, normalizes, and deduplicates a list of +// allow-list entries. Lists with more than maxEntries entries are rejected. +func ParseAllowList(inputs []string, maxEntries int) ([]AllowListElement, error) { + if len(inputs) == 0 { + return nil, nil + } + if len(inputs) > maxEntries { + return nil, xerrors.Errorf("allow_list has %d entries; max allowed is %d", len(inputs), maxEntries) + } + + elems := make([]AllowListElement, 0, len(inputs)) + for _, s := range inputs { + e, err := ParseAllowListEntry(s) + if err != nil { + return nil, err + } + // Global wildcard short-circuits + if e.Type == policy.WildcardSymbol && e.ID == policy.WildcardSymbol { + return []AllowListElement{AllowListAll()}, nil + } + elems = append(elems, e) + } + + return NormalizeAllowList(elems) +} + +// NormalizeAllowList enforces max entry limits, collapses typed wildcards, and +// produces a deterministic, deduplicated allow list. A global wildcard returns +// early with a single `[*:*]` entry, typed wildcards shadow specific IDs, and +// the final slice is sorted to keep downstream comparisons stable. 
When the +// input is empty we return an empty (non-nil) slice so callers can differentiate +// between "no restriction" and "not provided" cases. +func NormalizeAllowList(inputs []AllowListElement) ([]AllowListElement, error) { + if len(inputs) == 0 { + return []AllowListElement{}, nil + } + if len(inputs) > maxAllowListEntries { + return nil, xerrors.Errorf("allow_list has %d entries; max allowed is %d", len(inputs), maxAllowListEntries) + } + + // Collapse typed wildcards and drop shadowed IDs + typedWildcard := map[string]struct{}{} + idsByType := map[string]map[string]struct{}{} + for _, e := range inputs { + // Global wildcard short-circuits + if e.Type == policy.WildcardSymbol && e.ID == policy.WildcardSymbol { + return []AllowListElement{AllowListAll()}, nil + } + + if e.ID == policy.WildcardSymbol { + typedWildcard[e.Type] = struct{}{} + continue + } + if idsByType[e.Type] == nil { + idsByType[e.Type] = map[string]struct{}{} + } + idsByType[e.Type][e.ID] = struct{}{} + } + + out := make([]AllowListElement, 0) + for t := range typedWildcard { + out = append(out, AllowListElement{Type: t, ID: policy.WildcardSymbol}) + } + for t, ids := range idsByType { + if _, ok := typedWildcard[t]; ok { + continue + } + for id := range ids { + out = append(out, AllowListElement{Type: t, ID: id}) + } + } + + sort.Slice(out, func(i, j int) bool { + if out[i].Type == out[j].Type { + return out[i].ID < out[j].ID + } + return out[i].Type < out[j].Type + }) + return out, nil +} + +// UnionAllowLists merges multiple allow lists, returning the set of resources +// permitted by any input. A global wildcard short-circuits the merge. When no +// entries are present across all inputs, the result is an empty allow list. 
+func UnionAllowLists(lists ...[]AllowListElement) ([]AllowListElement, error) { + union := make([]AllowListElement, 0) + seen := make(map[string]struct{}) + + for _, list := range lists { + for _, elem := range list { + if elem.Type == policy.WildcardSymbol && elem.ID == policy.WildcardSymbol { + return []AllowListElement{AllowListAll()}, nil + } + key := elem.String() + if _, ok := seen[key]; ok { + continue + } + seen[key] = struct{}{} + union = append(union, elem) + } + } + + return NormalizeAllowList(union) +} + +// IntersectAllowLists combines the allow list produced by RBAC expansion with the +// API key's stored allow list. The result enforces both constraints: any +// resource must be allowed by the scope *and* the database filter. Wildcards in +// either list are respected and short-circuit appropriately. +// +// Intuition: scope definitions provide the *ceiling* of what a key could touch, +// while the DB allow list can narrow that set. Technically, since this is +// an intersection, both can narrow each other. +// +// A few illustrative cases: +// +// | Scope AllowList | DB AllowList | Result | +// | ----------------- | ------------------------------------- | ----------------- | +// | `[*:*]` | `[workspace:A]` | `[workspace:A]` | +// | `[workspace:*]` | `[workspace:A, workspace:B]` | `[workspace:A, workspace:B]` | +// | `[workspace:A]` | `[workspace:A, workspace:B]` | `[workspace:A]` | +// | `[]` | `[workspace:A]` | `[workspace:A]` | +// +// Today most API key scopes expand with an empty allow list (meaning "no +// scope-level restriction"), so the merge simply mirrors what the database +// stored. Only scopes that intentionally embed resource filters would trim the +// DB entries. +// +// NOTE(review): the `[]` scope row above assumes an empty scope list means +// "no scope-level restriction", but the implementation returns an empty +// result for an empty scope list; confirm which behavior is intended. +func IntersectAllowLists(scopeList []AllowListElement, dbList []AllowListElement) []AllowListElement { + // Empty DB list: fail closed (deny), per the defensive check below. 
+ if len(dbList) == 0 { + // Defensive: API keys should always persist a non-empty allow list; an + // empty one is invalid, so we fail closed. + return nil + } + + // If scope already allows everything, the db list is authoritative. + scopeAll := allowListContainsAll(scopeList) + dbAll := allowListContainsAll(dbList) + + switch { + case scopeAll && dbAll: + return []AllowListElement{AllowListAll()} + case scopeAll: + return dbList + case dbAll: + return scopeList + } + + // Otherwise compute intersection. + resultSet := make(map[string]AllowListElement) + for _, scopeElem := range scopeList { + matching := intersectAllow(scopeElem, dbList) + for _, elem := range matching { + resultSet[elem.String()] = elem + } + } + + if len(resultSet) == 0 { + return []AllowListElement{} + } + + result := make([]AllowListElement, 0, len(resultSet)) + for _, elem := range resultSet { + result = append(result, elem) + } + + slices.SortFunc(result, func(a, b AllowListElement) int { + if a.Type == b.Type { + return strings.Compare(a.ID, b.ID) + } + return strings.Compare(a.Type, b.Type) + }) + + normalized, err := NormalizeAllowList(result) + if err != nil { + return result + } + if normalized == nil { + return []AllowListElement{} + } + return normalized +} + +func allowListContainsAll(elements []AllowListElement) bool { + if len(elements) == 0 { + return false + } + for _, e := range elements { + if e.Type == policy.WildcardSymbol && e.ID == policy.WildcardSymbol { + return true + } + } + return false +} + +// intersectAllow returns the set of permit entries that satisfy both the scope +// element and the database allow list. +func intersectAllow(scopeElem AllowListElement, dbList []AllowListElement) []AllowListElement { + // Scope element is wildcard -> intersection is db list. 
+ if scopeElem.Type == policy.WildcardSymbol && scopeElem.ID == policy.WildcardSymbol { + return dbList + } + + result := make([]AllowListElement, 0) + for _, dbElem := range dbList { + // DB entry wildcard -> keep scope element. + if dbElem.Type == policy.WildcardSymbol && dbElem.ID == policy.WildcardSymbol { + result = append(result, scopeElem) + continue + } + + if !typeMatches(scopeElem.Type, dbElem.Type) { + continue + } + + if !idMatches(scopeElem.ID, dbElem.ID) { + continue + } + + result = append(result, AllowListElement{ + Type: intersectType(scopeElem.Type, dbElem.Type), + ID: intersectID(scopeElem.ID, dbElem.ID), + }) + } + return result +} + +func typeMatches(scopeType, dbType string) bool { + return scopeType == dbType || scopeType == policy.WildcardSymbol || dbType == policy.WildcardSymbol +} + +func idMatches(scopeID, dbID string) bool { + return scopeID == dbID || scopeID == policy.WildcardSymbol || dbID == policy.WildcardSymbol +} + +func intersectType(scopeType, dbType string) string { + if scopeType == dbType { + return scopeType + } + if scopeType == policy.WildcardSymbol { + return dbType + } + return scopeType +} + +func intersectID(scopeID, dbID string) string { + switch { + case scopeID == dbID: + return scopeID + case scopeID == policy.WildcardSymbol: + return dbID + case dbID == policy.WildcardSymbol: + return scopeID + default: + // Should not happen when intersecting with matching IDs; fallback to scope ID. 
+ return scopeID + } +} diff --git a/coderd/rbac/allowlist_test.go b/coderd/rbac/allowlist_test.go new file mode 100644 index 0000000000000..3db5c4096b244 --- /dev/null +++ b/coderd/rbac/allowlist_test.go @@ -0,0 +1,231 @@ +package rbac_test + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" +) + +func TestParseAllowListEntry(t *testing.T) { + t.Parallel() + e, err := rbac.ParseAllowListEntry("*:*") + require.NoError(t, err) + require.Equal(t, rbac.AllowListElement{Type: "*", ID: "*"}, e) + + e, err = rbac.ParseAllowListEntry("workspace:*") + require.NoError(t, err) + require.Equal(t, rbac.AllowListElement{Type: "workspace", ID: "*"}, e) + + id := uuid.New().String() + e, err = rbac.ParseAllowListEntry("template:" + id) + require.NoError(t, err) + require.Equal(t, rbac.AllowListElement{Type: "template", ID: id}, e) + + _, err = rbac.ParseAllowListEntry("unknown:*") + require.Error(t, err) + _, err = rbac.ParseAllowListEntry("workspace:bad-uuid") + require.Error(t, err) + _, err = rbac.ParseAllowListEntry(":") + require.Error(t, err) +} + +func TestParseAllowListNormalize(t *testing.T) { + t.Parallel() + id1 := uuid.New().String() + id2 := uuid.New().String() + + // Global wildcard short-circuits + out, err := rbac.ParseAllowList([]string{"workspace:" + id1, "*:*", "template:" + id2}, 128) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{{Type: "*", ID: "*"}}, out) + + // Typed wildcard collapses typed ids + out, err = rbac.ParseAllowList([]string{"workspace:*", "workspace:" + id1, "workspace:" + id2}, 128) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{{Type: "workspace", ID: "*"}}, out) + + // Typed wildcard entries persist even without explicit IDs + out, err = rbac.ParseAllowList([]string{"template:*"}, 128) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{{Type: 
"template", ID: "*"}}, out) + + // Dedup ids and sort deterministically + out, err = rbac.ParseAllowList([]string{"template:" + id2, "template:" + id2, "template:" + id1}, 128) + require.NoError(t, err) + require.Len(t, out, 2) + require.Equal(t, "template", out[0].Type) + require.Equal(t, "template", out[1].Type) +} + +func TestParseAllowListLimit(t *testing.T) { + t.Parallel() + inputs := make([]string, 0, 130) + for range 130 { + inputs = append(inputs, "workspace:"+uuid.New().String()) + } + _, err := rbac.ParseAllowList(inputs, 128) + require.Error(t, err) +} + +func TestIntersectAllowLists(t *testing.T) { + t.Parallel() + + id := uuid.NewString() + id2 := uuid.NewString() + + t.Run("scope_all_db_specific", func(t *testing.T) { + t.Parallel() + out := rbac.IntersectAllowLists( + []rbac.AllowListElement{rbac.AllowListAll()}, + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}, + ) + require.Equal(t, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}, out) + }) + + t.Run("db_all_keeps_scope", func(t *testing.T) { + t.Parallel() + scopeList := []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: policy.WildcardSymbol}} + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}) + require.Equal(t, scopeList, out) + }) + + t.Run("typed_wildcard_intersection", func(t *testing.T) { + t.Parallel() + scopeList := []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: policy.WildcardSymbol}} + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}) + require.Equal(t, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}, out) + }) + + t.Run("db_wildcard_type_specific", func(t *testing.T) { + t.Parallel() + scopeList := []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}} + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{{Type: 
rbac.ResourceWorkspace.Type, ID: policy.WildcardSymbol}}) + require.Equal(t, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}, out) + }) + + t.Run("disjoint_types", func(t *testing.T) { + t.Parallel() + scopeList := []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}} + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{{Type: rbac.ResourceTemplate.Type, ID: id}}) + require.Empty(t, out) + }) + + t.Run("different_ids", func(t *testing.T) { + t.Parallel() + scopeList := []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: uuid.NewString()}} + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id}}) + require.Empty(t, out) + }) + + t.Run("multi_entry_overlap", func(t *testing.T) { + t.Parallel() + templateSpecific := uuid.NewString() + scopeList := []rbac.AllowListElement{ + {Type: rbac.ResourceWorkspace.Type, ID: id}, + {Type: rbac.ResourceWorkspace.Type, ID: id2}, + {Type: rbac.ResourceTemplate.Type, ID: policy.WildcardSymbol}, + } + out := rbac.IntersectAllowLists(scopeList, []rbac.AllowListElement{ + {Type: rbac.ResourceWorkspace.Type, ID: id2}, + {Type: rbac.ResourceTemplate.Type, ID: templateSpecific}, + {Type: rbac.ResourceTemplate.Type, ID: policy.WildcardSymbol}, + }) + require.Equal(t, []rbac.AllowListElement{ + {Type: rbac.ResourceTemplate.Type, ID: policy.WildcardSymbol}, + {Type: rbac.ResourceWorkspace.Type, ID: id2}, + }, out) + }) + + t.Run("multi_entry_db_wildcards", func(t *testing.T) { + t.Parallel() + templateID := uuid.NewString() + dbList := []rbac.AllowListElement{ + {Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}, + {Type: rbac.ResourceWorkspace.Type, ID: id}, + {Type: rbac.ResourceTemplate.Type, ID: policy.WildcardSymbol}, + } + out := rbac.IntersectAllowLists([]rbac.AllowListElement{ + {Type: rbac.ResourceWorkspace.Type, ID: id}, + {Type: rbac.ResourceTemplate.Type, ID: templateID}, + }, dbList) + require.Equal(t, 
[]rbac.AllowListElement{ + {Type: rbac.ResourceWorkspace.Type, ID: id}, + {Type: rbac.ResourceTemplate.Type, ID: templateID}, + }, out) + }) +} + +func TestUnionAllowLists(t *testing.T) { + t.Parallel() + + id1 := uuid.NewString() + id2 := uuid.NewString() + + t.Run("wildcard_short_circuit", func(t *testing.T) { + t.Parallel() + out, err := rbac.UnionAllowLists( + []rbac.AllowListElement{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}, + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + ) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{rbac.AllowListAll()}, out) + }) + + t.Run("merge_unique_entries", func(t *testing.T) { + t.Parallel() + out, err := rbac.UnionAllowLists( + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id2}}, + ) + require.NoError(t, err) + require.Len(t, out, 2) + require.ElementsMatch(t, []rbac.AllowListElement{ + {Type: rbac.ResourceWorkspace.Type, ID: id1}, + {Type: rbac.ResourceWorkspace.Type, ID: id2}, + }, out) + }) + + t.Run("typed_wildcard_collapse", func(t *testing.T) { + t.Parallel() + out, err := rbac.UnionAllowLists( + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: policy.WildcardSymbol}}, + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + ) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: policy.WildcardSymbol}}, out) + }) + + t.Run("deduplicate_across_inputs", func(t *testing.T) { + t.Parallel() + out, err := rbac.UnionAllowLists( + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + ) + require.NoError(t, err) + require.Equal(t, []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, out) + }) + + t.Run("combine_multiple_types", func(t *testing.T) { + t.Parallel() + out, err := 
rbac.UnionAllowLists( + []rbac.AllowListElement{{Type: rbac.ResourceWorkspace.Type, ID: id1}}, + []rbac.AllowListElement{{Type: rbac.ResourceTemplate.Type, ID: id2}}, + ) + require.NoError(t, err) + require.ElementsMatch(t, []rbac.AllowListElement{ + {Type: rbac.ResourceTemplate.Type, ID: id2}, + {Type: rbac.ResourceWorkspace.Type, ID: id1}, + }, out) + }) + + t.Run("empty_returns_empty", func(t *testing.T) { + t.Parallel() + out, err := rbac.UnionAllowLists(nil, []rbac.AllowListElement{}) + require.NoError(t, err) + require.Empty(t, out) + }) +} diff --git a/coderd/rbac/astvalue.go b/coderd/rbac/astvalue.go index a125b6bf7a4ff..bbbbb03622532 100644 --- a/coderd/rbac/astvalue.go +++ b/coderd/rbac/astvalue.go @@ -157,23 +157,34 @@ func (role Role) regoValue() ast.Value { if role.cachedRegoValue != nil { return role.cachedRegoValue } - orgMap := ast.NewObject() - for k, p := range role.Org { - orgMap.Insert(ast.StringTerm(k), ast.NewTerm(regoSlice(p))) + byOrgIDMap := ast.NewObject() + for k, p := range role.ByOrgID { + byOrgIDMap.Insert(ast.StringTerm(k), ast.NewTerm( + ast.NewObject( + [2]*ast.Term{ + ast.StringTerm("org"), + ast.NewTerm(regoSlice(p.Org)), + }, + [2]*ast.Term{ + ast.StringTerm("member"), + ast.NewTerm(regoSlice(p.Member)), + }, + ), + )) } return ast.NewObject( [2]*ast.Term{ ast.StringTerm("site"), ast.NewTerm(regoSlice(role.Site)), }, - [2]*ast.Term{ - ast.StringTerm("org"), - ast.NewTerm(orgMap), - }, [2]*ast.Term{ ast.StringTerm("user"), ast.NewTerm(regoSlice(role.User)), }, + [2]*ast.Term{ + ast.StringTerm("by_org_id"), + ast.NewTerm(byOrgIDMap), + }, ) } diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go index 0715a8ead7783..2f39cf32a7df9 100644 --- a/coderd/rbac/authz.go +++ b/coderd/rbac/authz.go @@ -73,6 +73,7 @@ const ( SubjectTypePrebuildsOrchestrator SubjectType = "prebuilds_orchestrator" SubjectTypeSystemReadProvisionerDaemons SubjectType = "system_read_provisioner_daemons" SubjectTypeSystemRestricted SubjectType = 
"system_restricted" + SubjectTypeSystemOAuth SubjectType = "system_oauth" SubjectTypeNotifier SubjectType = "notifier" SubjectTypeSubAgentAPI SubjectType = "sub_agent_api" SubjectTypeFileReader SubjectType = "file_reader" @@ -710,7 +711,7 @@ func (a *authorizedSQLFilter) SQLString() string { type authCache struct { // cache is a cache of hashed Authorize inputs to the result of the Authorize // call. - // determistic function. + // deterministic function. cache *tlru.Cache[[32]byte, error] authz Authorizer diff --git a/coderd/rbac/authz_internal_test.go b/coderd/rbac/authz_internal_test.go index 284045b11fba0..c409655d0c4f1 100644 --- a/coderd/rbac/authz_internal_test.go +++ b/coderd/rbac/authz_internal_test.go @@ -287,7 +287,7 @@ func TestFilter(t *testing.T) { func TestAuthorizeDomain(t *testing.T) { t.Parallel() defOrg := uuid.New() - unuseID := uuid.New() + unusedID := uuid.New() allUsersGroup := "Everyone" // orphanedUser has no organization @@ -318,21 +318,21 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "UserACLList", user, []authTestCase{ { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ + resource: ResourceWorkspace.WithOwner(unusedID.String()).InOrg(unusedID).WithACLUserList(map[string][]policy.Action{ user.ID: ResourceWorkspace.AvailableActions(), }), actions: ResourceWorkspace.AvailableActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ + resource: ResourceWorkspace.WithOwner(unusedID.String()).InOrg(unusedID).WithACLUserList(map[string][]policy.Action{ user.ID: {policy.WildcardSymbol}, }), actions: ResourceWorkspace.AvailableActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ + resource: 
ResourceWorkspace.WithOwner(unusedID.String()).InOrg(unusedID).WithACLUserList(map[string][]policy.Action{ user.ID: {policy.ActionRead, policy.ActionUpdate}, }), actions: []policy.Action{policy.ActionCreate, policy.ActionDelete}, @@ -350,21 +350,21 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "GroupACLList", user, []authTestCase{ { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ + resource: ResourceWorkspace.WithOwner(unusedID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ allUsersGroup: ResourceWorkspace.AvailableActions(), }), actions: ResourceWorkspace.AvailableActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ + resource: ResourceWorkspace.WithOwner(unusedID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ allUsersGroup: {policy.WildcardSymbol}, }), actions: ResourceWorkspace.AvailableActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ + resource: ResourceWorkspace.WithOwner(unusedID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ allUsersGroup: {policy.ActionRead, policy.ActionUpdate}, }), actions: []policy.Action{policy.ActionCreate, policy.ActionDelete}, @@ -389,13 +389,14 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.AnyOrganization().WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, {resource: ResourceTemplate.AnyOrganization(), actions: []policy.Action{policy.ActionCreate}, allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + // No org + me + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, {resource: ResourceWorkspace.All(), actions: 
ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, @@ -403,8 +404,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other us - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) @@ -435,8 +436,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), actions: 
ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, @@ -444,8 +445,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) @@ -455,6 +456,7 @@ func TestAuthorizeDomain(t *testing.T) { Scope: must(ExpandScope(ScopeAll)), Roles: Roles{ must(RoleByName(ScopedRoleOrgAdmin(defOrg))), + must(RoleByName(ScopedRoleOrgMember(defOrg))), must(RoleByName(RoleMember())), }, } @@ -469,13 +471,14 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.InOrg(defOrg), actions: workspaceExceptConnect, allow: true}, {resource: ResourceWorkspace.InOrg(defOrg), actions: workspaceConnect, allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + // No org + me + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: 
ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: workspaceExceptConnect, allow: true}, @@ -483,9 +486,9 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + // Other org + other user + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: false}, {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) @@ -512,8 +515,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: 
ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, @@ -521,8 +524,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unusedID), actions: ResourceWorkspace.AvailableActions(), allow: true}, {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, }) @@ -546,13 +549,14 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), allow: true}, {resource: ResourceWorkspace.InOrg(defOrg), allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), allow: true}, + // No org + me + {resource: ResourceWorkspace.WithOwner(user.ID), allow: false}, {resource: ResourceWorkspace.All(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), allow: false}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), allow: false}, @@ -560,8 +564,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, // Other org + other use - 
{resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), allow: false}, {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, }), @@ -580,8 +584,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.All()}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID)}, - {resource: ResourceWorkspace.InOrg(unuseID)}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID)}, + {resource: ResourceWorkspace.InOrg(unusedID)}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me")}, @@ -589,8 +593,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me")}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me")}, - {resource: ResourceWorkspace.InOrg(unuseID)}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me")}, + {resource: ResourceWorkspace.InOrg(unusedID)}, {resource: ResourceWorkspace.WithOwner("not-me")}, }), @@ -609,8 +613,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceTemplate.All()}, // Other org + me - {resource: ResourceTemplate.InOrg(unuseID).WithOwner(user.ID)}, - {resource: ResourceTemplate.InOrg(unuseID)}, + {resource: ResourceTemplate.InOrg(unusedID).WithOwner(user.ID)}, + {resource: ResourceTemplate.InOrg(unusedID)}, // Other org + other user {resource: ResourceTemplate.InOrg(defOrg).WithOwner("not-me")}, @@ -618,8 +622,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceTemplate.WithOwner("not-me")}, // Other org + other use - {resource: ResourceTemplate.InOrg(unuseID).WithOwner("not-me")}, - {resource: ResourceTemplate.InOrg(unuseID)}, + {resource: ResourceTemplate.InOrg(unusedID).WithOwner("not-me")}, + {resource: 
ResourceTemplate.InOrg(unusedID)}, {resource: ResourceTemplate.WithOwner("not-me")}, }), @@ -633,13 +637,6 @@ func TestAuthorizeDomain(t *testing.T) { { Identifier: RoleIdentifier{Name: "ReadOnlyOrgAndUser"}, Site: []Permission{}, - Org: map[string][]Permission{ - defOrg.String(): {{ - Negate: false, - ResourceType: "*", - Action: policy.ActionRead, - }}, - }, User: []Permission{ { Negate: false, @@ -647,6 +644,16 @@ func TestAuthorizeDomain(t *testing.T) { Action: policy.ActionRead, }, }, + ByOrgID: map[string]OrgPermissions{ + defOrg.String(): { + Org: []Permission{{ + Negate: false, + ResourceType: "*", + Action: policy.ActionRead, + }}, + Member: []Permission{}, + }, + }, }, }, } @@ -666,8 +673,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.All(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), allow: false}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), allow: true}, @@ -675,8 +682,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID), allow: false}, {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, }), @@ -697,8 +704,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.All()}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID)}, - {resource: ResourceWorkspace.InOrg(unuseID)}, + {resource: 
ResourceWorkspace.InOrg(unusedID).WithOwner(user.ID)}, + {resource: ResourceWorkspace.InOrg(unusedID)}, // Other org + other user {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me")}, @@ -706,8 +713,8 @@ func TestAuthorizeDomain(t *testing.T) { {resource: ResourceWorkspace.WithOwner("not-me")}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me")}, - {resource: ResourceWorkspace.InOrg(unuseID)}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me")}, + {resource: ResourceWorkspace.InOrg(unusedID)}, {resource: ResourceWorkspace.WithOwner("not-me")}, })) @@ -726,13 +733,16 @@ func TestAuthorizeLevels(t *testing.T) { must(RoleByName(RoleOwner())), { Identifier: RoleIdentifier{Name: "org-deny:", OrganizationID: defOrg}, - Org: map[string][]Permission{ + ByOrgID: map[string]OrgPermissions{ defOrg.String(): { - { - Negate: true, - ResourceType: "*", - Action: "*", + Org: []Permission{ + { + Negate: true, + ResourceType: "*", + Action: "*", + }, }, + Member: []Permission{}, }, }, }, @@ -926,8 +936,8 @@ func TestAuthorizeScope(t *testing.T) { // Only read access for workspaces. ResourceWorkspace.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, AllowIDList: []AllowListElement{{Type: ResourceWorkspace.Type, ID: workspaceID.String()}}, }, @@ -1015,8 +1025,8 @@ func TestAuthorizeScope(t *testing.T) { // Only read access for workspaces. 
ResourceWorkspace.Type: {policy.ActionCreate}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, // Empty string allow_list is allowed for actions like 'create' AllowIDList: []AllowListElement{{ @@ -1138,14 +1148,17 @@ func TestAuthorizeScope(t *testing.T) { }, DisplayName: "OrgAndUserScope", Site: nil, - Org: map[string][]Permission{ - defOrg.String(): Permissions(map[string][]policy.Action{ - ResourceWorkspace.Type: {policy.ActionRead}, - }), - }, User: Permissions(map[string][]policy.Action{ ResourceUser.Type: {policy.ActionRead}, }), + ByOrgID: map[string]OrgPermissions{ + defOrg.String(): { + Org: Permissions(map[string][]policy.Action{ + ResourceWorkspace.Type: {policy.ActionRead}, + }), + Member: []Permission{}, + }, + }, }, AllowIDList: []AllowListElement{AllowListAll()}, }, @@ -1310,9 +1323,9 @@ type authTestCase struct { func testAuthorize(t *testing.T, name string, subject Subject, sets ...[]authTestCase) { t.Helper() authorizer := NewAuthorizer(prometheus.NewRegistry()) - for _, cases := range sets { - for i, c := range cases { - caseName := fmt.Sprintf("%s/%d", name, i) + for i, cases := range sets { + for j, c := range cases { + caseName := fmt.Sprintf("%s/Set%d/Case%d", name, i, j) t.Run(caseName, func(t *testing.T) { t.Parallel() for _, a := range c.actions { diff --git a/coderd/rbac/authz_test.go b/coderd/rbac/authz_test.go index cd2bbb808add9..25955131242a8 100644 --- a/coderd/rbac/authz_test.go +++ b/coderd/rbac/authz_test.go @@ -187,7 +187,7 @@ func BenchmarkRBACAuthorizeGroups(b *testing.B) { uuid.MustParse("0632b012-49e0-4d70-a5b3-f4398f1dcd52"), uuid.MustParse("70dbaa7a-ea9c-4f68-a781-97b08af8461d"), ) - authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + authorizer := rbac.NewAuthorizer(prometheus.NewRegistry()) // Same benchmark cases, but this time groups will be used to match. 
// Some '*' permissions will still match, but using a fake action reduces diff --git a/coderd/rbac/input.json b/coderd/rbac/input.json index b71590c789aa0..5b8f1ad98c58c 100644 --- a/coderd/rbac/input.json +++ b/coderd/rbac/input.json @@ -23,8 +23,13 @@ "action": "*" } ], - "org": {}, - "user": [] + "user": [], + "by_org_id": { + "bf7b72bd-a2b1-4ef2-962c-1d698e0483f6": { + "org": [], + "member": [] + } + } } ], "groups": ["b617a647-b5d0-4cbe-9e40-26f89710bf18"], @@ -38,13 +43,19 @@ "action": "*" } ], - "org": {}, "user": [], + "by_org_id": { + "bf7b72bd-a2b1-4ef2-962c-1d698e0483f6": { + "org": [], + "member": [] + } + }, "allow_list": [ { "type": "workspace", "id": "*" - }] + } + ] } } } diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index d0c78bd480766..c71b74d496330 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -286,6 +286,16 @@ var ( Type: "tailnet_coordinator", } + // ResourceTask + // Valid Actions + // - "ActionCreate" :: create a new task + // - "ActionDelete" :: delete task + // - "ActionRead" :: read task data or output to view on the UI or CLI + // - "ActionUpdate" :: edit task settings or send input to an existing task + ResourceTask = Object{ + Type: "task", + } + // ResourceTemplate // Valid Actions // - "ActionCreate" :: create a template @@ -346,6 +356,7 @@ var ( // - "ActionDelete" :: delete workspace // - "ActionDeleteAgent" :: delete an existing workspace agent // - "ActionRead" :: read workspace data to view on the UI + // - "ActionShare" :: share a workspace with other users or groups // - "ActionSSH" :: ssh into a given workspace // - "ActionWorkspaceStart" :: allows starting a workspace // - "ActionWorkspaceStop" :: allows stopping a workspace @@ -378,6 +389,7 @@ var ( // - "ActionDelete" :: delete workspace // - "ActionDeleteAgent" :: delete an existing workspace agent // - "ActionRead" :: read workspace data to view on the UI + // - "ActionShare" :: share a workspace with other users or groups 
// - "ActionSSH" :: ssh into a given workspace // - "ActionWorkspaceStart" :: allows starting a workspace // - "ActionWorkspaceStop" :: allows stopping a workspace @@ -430,6 +442,7 @@ func AllResources() []Objecter { ResourceReplicas, ResourceSystem, ResourceTailnetCoordinator, + ResourceTask, ResourceTemplate, ResourceUsageEvent, ResourceUser, @@ -454,6 +467,7 @@ func AllActions() []policy.Action { policy.ActionRead, policy.ActionReadPersonal, policy.ActionSSH, + policy.ActionShare, policy.ActionUnassign, policy.ActionUpdate, policy.ActionUpdatePersonal, diff --git a/coderd/rbac/policy.rego b/coderd/rbac/policy.rego index eb9187338c29b..e8844a22bdbd8 100644 --- a/coderd/rbac/policy.rego +++ b/coderd/rbac/policy.rego @@ -2,392 +2,426 @@ package authz import rego.v1 -# A great playground: https://play.openpolicyagent.org/ -# Helpful cli commands to debug. -# opa eval --format=pretty 'data.authz.allow' -d policy.rego -i input.json -# opa eval --partial --format=pretty 'data.authz.allow' -d policy.rego --unknowns input.object.owner --unknowns input.object.org_owner --unknowns input.object.acl_user_list --unknowns input.object.acl_group_list -i input.json - +# Check the POLICY.md file before editing this! # -# This policy is specifically constructed to compress to a set of queries if the -# object's 'owner' and 'org_owner' fields are unknown. There is no specific set -# of rules that will guarantee that this policy has this property. However, there -# are some tricks. A unit test will enforce this property, so any edits that pass -# the unit test will be ok. +# https://play.openpolicyagent.org/ # -# Tricks: (It's hard to really explain this, fiddling is required) -# 1. Do not use unknown fields in any comprehension or iteration. -# 2. Use the unknown fields as minimally as possible. -# 3. Avoid making code branches based on the value of the unknown field. -# Unknown values are like a "set" of possible values. 
-# (This is why rule 1 usually breaks things) -# For example: -# In the org section, we calculate the 'allow' number for all orgs, rather -# than just the input.object.org_owner. This is because if the org_owner -# changes, then we don't need to recompute any 'allow' sets. We already have -# the 'allow' for the changed value. So the answer is in a lookup table. -# The final statement 'num := allow[input.object.org_owner]' does not have -# different code branches based on the org_owner. 'num's value does, but -# that is the whole point of partial evaluation. - -# bool_flip(b) returns the logical negation of a boolean value 'b'. -# You cannot do 'x := !false', but you can do 'x := bool_flip(false)' -bool_flip(b) := false if { - b -} -bool_flip(b) := true if { - not b -} +#==============================================================================# +# Site level rules # +#==============================================================================# -# number(set) maps a set of boolean values to one of the following numbers: -# -1: deny (if 'false' value is in the set) => set is {true, false} or {false} -# 0: no decision (if the set is empty) => set is {} -# 1: allow (if only 'true' values are in the set) => set is {true} +# Site level permissions allow the subject to use that permission on any object. +# For example, a site-level workspace.read permission means that the subject can +# see every workspace in the deployment, regardless of organization or owner. 
-# Return -1 if the set contains any 'false' value (i.e., an explicit deny) -number(set) := -1 if { - false in set -} +default site := 0 -# Return 0 if the set is empty (no matching permissions) -number(set) := 0 if { - count(set) == 0 -} +site := check_site_permissions(input.subject.roles) -# Return 1 if the set is non-empty and contains no 'false' values (i.e., only allows) -number(set) := 1 if { - not false in set - set[_] +default scope_site := 0 + +scope_site := check_site_permissions([input.subject.scope]) + +check_site_permissions(roles) := vote if { + allow := {is_allowed | + # Iterate over all site permissions in all roles, and check which ones match + # the action and object type. + perm := roles[_].site[_] + perm.action in [input.action, "*"] + perm.resource_type in [input.object.type, "*"] + + # If a negative matching permission was found, then we vote to disallow it. + # If the permission is not negative, then we vote to allow it. + is_allowed := bool_flip(perm.negate) + } + vote := to_vote(allow) } -# Permission evaluation is structured into three levels: site, org, and user. -# For each level, two variables are computed: -# - : the decision based on the subject's full set of roles for that level -# - scope_: the decision based on the subject's scoped roles for that level -# -# Each of these variables is assigned one of three values: -# -1 => negative (deny) -# 0 => abstain (no matching permission) -# 1 => positive (allow) -# -# These values are computed by calling the corresponding _allow functions. -# The final decision is derived from combining these values (see 'allow' rule). +#==============================================================================# +# User level rules # +#==============================================================================# -# ------------------- -# Site Level Rules -# ------------------- +# User level rules apply to all objects owned by the subject which are not also +# owned by an org. 
Permissions for objects which are "jointly" owned by an org +# instead defer to the org member level rules. -default site := 0 -site := site_allow(input.subject.roles) +default user := 0 + +user := check_user_permissions(input.subject.roles) + +default scope_user := 0 + +scope_user := check_user_permissions([input.subject.scope]) + +check_user_permissions(roles) := vote if { + # The object must be owned by the subject. + input.subject.id = input.object.owner + + # If there is an org, use org_member permissions instead + input.object.org_owner == "" + not input.object.any_org -default scope_site := 0 -scope_site := site_allow([input.subject.scope]) - -# site_allow receives a list of roles and returns a single number: -# -1 if any matching permission denies access -# 1 if there's at least one allow and no denies -# 0 if there are no matching permissions -site_allow(roles) := num if { - # allow is a set of boolean values (sets don't contain duplicates) allow := {is_allowed | - # Iterate over all site permissions in all roles - perm := roles[_].site[_] + # Iterate over all user permissions in all roles, and check which ones match + # the action and object type. + perm := roles[_].user[_] perm.action in [input.action, "*"] perm.resource_type in [input.object.type, "*"] - # is_allowed is either 'true' or 'false' if a matching permission exists. + # If a negative matching permission was found, then we vote to disallow it. + # If the permission is not negative, then we vote to allow it. is_allowed := bool_flip(perm.negate) } - num := number(allow) + vote := to_vote(allow) } -# ------------------- -# Org Level Rules -# ------------------- - -# org_members is the list of organizations the actor is apart of. -# TODO: Should there be an org_members for the scope too? Without it, -# the membership is determined by the user's roles, not their scope permissions. -# So if an owner (who is not an org member) has an org scope, that org scope -# will fail to return '1'. 
Since we assume all non members return '-1' for org -# level permissions. -# Adding a second org_members set might affect the partial evaluation. -# This is being left until org scopes are used. -org_members := {orgID | - input.subject.roles[_].org[orgID] +#==============================================================================# +# Org level rules # +#==============================================================================# + +# Org level permissions are similar to `site`, except we need to iterate over +# each organization that the subject is a member of, and check against the +# organization that the object belongs to. +# For example, an organization-level workspace.read permission means that the +# subject can see every workspace in the organization, regardless of owner. + +# org_memberships is the set of organizations the subject is apart of. +org_memberships := {org_id | + input.subject.roles[_].by_org_id[org_id] } -# 'org' is the same as 'site' except we need to iterate over each organization -# that the actor is a member of. +# TODO: Should there be a scope_org_memberships too? Without it, the membership +# is determined by the user's roles, not their scope permissions. +# +# If an owner (who is not an org member) has an org scope, that org scope will +# fail to return '1', since we assume all non-members return '-1' for org level +# permissions. Adding a second set of org memberships might affect the partial +# evaluation. This is being left until org scopes are used. + default org := 0 -org := org_allow(input.subject.roles) + +org := check_org_permissions(input.subject.roles, "org") default scope_org := 0 -scope_org := org_allow([input.subject.scope]) - -# org_allow_set is a helper function that iterates over all orgs that the actor -# is a member of. For each organization it sets the numerical allow value -# for the given object + action if the object is in the organization. 
-# The resulting value is a map that looks something like: -# {"10d03e62-7703-4df5-a358-4f76577d4e2f": 1, "5750d635-82e0-4681-bd44-815b18669d65": 1} -# The caller can use this output[] to get the final allow value. + +scope_org := check_org_permissions([input.subject.scope], "org") + +# check_all_org_permissions creates a map from org ids to votes at each org +# level, for each org that the subject is a member of. It doesn't actually check +# if the object is in the same org. Instead we look up the correct vote from +# this map based on the object's org id in `check_org_permissions`. +# For example, the `org_map` will look something like this: +# +# {"": 1, "": 0, "": -1} +# +# The caller then uses `output[input.object.org_owner]` to get the correct vote. # -# The reason we calculate this for all orgs, and not just the input.object.org_owner -# is that sometimes the input.object.org_owner is unknown. In those cases -# we have a list of org_ids that can we use in a SQL 'WHERE' clause. -org_allow_set(roles) := allow_set if { - allow_set := {id: num | - id := org_members[_] - set := {is_allowed | - # Iterate over all org permissions in all roles - perm := roles[_].org[id][_] - perm.action in [input.action, "*"] - perm.resource_type in [input.object.type, "*"] - - # is_allowed is either 'true' or 'false' if a matching permission exists. - is_allowed := bool_flip(perm.negate) - } - num := number(set) +# We have to create this map, rather than just getting the vote of the object's +# org id because the org id _might_ be unknown. In order to make sure that this +# policy compresses down to simple queries we need to keep unknown values out of +# comprehensions. +check_all_org_permissions(roles, key) := {org_id: vote | + org_id := org_memberships[_] + allow := {is_allowed | + # Iterate over all site permissions in all roles, and check which ones match + # the action and object type. 
+ perm := roles[_].by_org_id[org_id][key][_] + perm.action in [input.action, "*"] + perm.resource_type in [input.object.type, "*"] + + # If a negative matching permission was found, then we vote to disallow it. + # If the permission is not negative, then we vote to allow it. + is_allowed := bool_flip(perm.negate) } + vote := to_vote(allow) } -org_allow(roles) := num if { - # If the object has "any_org" set to true, then use the other - # org_allow block. +# This check handles the case where the org id is known. +check_org_permissions(roles, key) := vote if { + # Disallow setting any_org at the same time as an org id. not input.object.any_org - allow := org_allow_set(roles) - - # Return only the org value of the input's org. - # The reason why we do not do this up front, is that we need to make sure - # this policy compresses down to simple queries. One way to ensure this is - # to keep unknown values out of comprehensions. - # (https://www.openpolicyagent.org/docs/latest/policy-language/#comprehensions) - num := allow[input.object.org_owner] -} -# This block states if "object.any_org" is set to true, then disregard the -# organization id the object is associated with. Instead, we check if the user -# can do the action on any organization. -# This is useful for UI elements when we want to conclude, "Can the user create -# a new template in any organization?" -# It is easier than iterating over every organization the user is apart of. -org_allow(roles) := num if { - input.object.any_org # if this is false, this code block is not used - allow := org_allow_set(roles) - - # allow is a map of {"": }. We only care about values - # that are 1, and ignore the rest. - num := number([ - keep | - # for every value in the mapping - value := allow[_] - - # only keep values > 0. - # 1 = allow, 0 = abstain, -1 = deny - # We only need 1 explicit allow to allow the action. - # deny's and abstains are intentionally ignored. - value > 0 - - # result set is a set of [true,false,...] 
- # which "number()" will convert to a number. - keep := true - ]) -} + allow_map := check_all_org_permissions(roles, key) -# 'org_mem' is set to true if the user is an org member -# If 'any_org' is set to true, use the other block to determine org membership. -org_mem if { - not input.object.any_org - input.object.org_owner != "" - input.object.org_owner in org_members + # Return only the vote of the object's org. + vote := allow_map[input.object.org_owner] } -org_mem if { +# This check handles the case where we want to know if the user has the +# appropriate permission for any organization, without needing to know which. +# This is used in several places in the UI to determine if certain parts of the +# app should be accessible. +# For example, can the user create a new template in any organization? If yes, +# then we should show the "New template" button. +check_org_permissions(roles, key) := vote if { + # Require `any_org` to be set input.object.any_org - count(org_members) > 0 -} -org_ok if { - org_mem + allow_map := check_all_org_permissions(roles, key) + + # Since we're checking if the subject has the permission in _any_ org, we're + # essentially trying to find the highest vote from any org. + vote := max({vote | + some vote in allow_map + }) } -# If the object has no organization, then the user is also considered part of -# the non-existent org. -org_ok if { - input.object.org_owner == "" +# is_org_member checks if the subject belong to the same organization as the +# object. +is_org_member if { not input.object.any_org + input.object.org_owner != "" + input.object.org_owner in org_memberships } -# ------------------- -# User Level Rules -# ------------------- +# ...if 'any_org' is set to true, we check if the subject is a member of any +# org. 
+is_org_member if { + input.object.any_org + count(org_memberships) > 0 +} -# 'user' is the same as 'site', except it only applies if the user owns the object and -# the user is apart of the org (if the object has an org). -default user := 0 -user := user_allow(input.subject.roles) +#==============================================================================# +# Org member level rules # +#==============================================================================# -default scope_user := 0 -scope_user := user_allow([input.subject.scope]) +# Org member level permissions apply to all objects owned by the subject _and_ +# the corresponding org. Permissions for objects which are not owned by an +# organization instead defer to the user level rules. +# +# The rules for this level are very similar to the rules for the organization +# level, and so we reuse the `check_org_permissions` function from those rules. + +default org_member := 0 -user_allow(roles) := num if { +org_member := vote if { + # Object must be jointly owned by the user input.object.owner != "" input.subject.id = input.object.owner - - allow := {is_allowed | - # Iterate over all user permissions in all roles - perm := roles[_].user[_] - perm.action in [input.action, "*"] - perm.resource_type in [input.object.type, "*"] - - # is_allowed is either 'true' or 'false' if a matching permission exists. - is_allowed := bool_flip(perm.negate) - } - num := number(allow) + vote := check_org_permissions(input.subject.roles, "member") } -# Scope allow_list is a list of resource (Type, ID) tuples explicitly allowed by the scope. -# If the list contains `(*,*)`, then all resources are allowed. -scope_allow_list if { - input.subject.scope.allow_list[_] == {"type": "*", "id": "*"} -} +default scope_org_member := 0 -# This is a shortcut if the allow_list contains (type, *), then allow all IDs of that type. 
-scope_allow_list if { - input.subject.scope.allow_list[_] == {"type": input.object.type, "id": "*"} +scope_org_member := vote if { + # Object must be jointly owned by the user + input.object.owner != "" + input.subject.id = input.object.owner + vote := check_org_permissions([input.subject.scope], "member") } -# A comprehension that iterates over the allow_list and checks if the -# (object.type, object.id) is in the allowed ids. -scope_allow_list if { - # If the wildcard is listed in the allow_list, we do not care about the - # object.id. This line is included to prevent partial compilations from - # ever needing to include the object.id. - not {"type": "*", "id": "*"} in input.subject.scope.allow_list - # This is equivalent to the above line, as `type` is known at partial query time. - not {"type": input.object.type, "id": "*"} in input.subject.scope.allow_list - - # allows_ids is the set of all ids allowed for the given object.type - allowed_ids := {allowed_id | - # Iterate over all allow list elements - ele := input.subject.scope.allow_list[_] - ele.type in [input.object.type, "*"] - allowed_id := ele.id - } - - # Return if the object.id is in the allowed ids - # This rule is evaluated at the end so the partial query can use the object.id - # against this precomputed set of allowed ids. - input.object.id in allowed_ids -} +#==============================================================================# +# Role rules # +#==============================================================================# -# ------------------- -# Role-Specific Rules -# ------------------- +# role_allow specifies all of the conditions under which a role can grant +# permission. These rules intentionally use the "unification" operator rather +# than the equality and inequality operators, because those operators do not +# work on partial values. 
+# https://www.openpolicyagent.org/docs/policy-language#unification- +# Site level authorization role_allow if { site = 1 } +# User level authorization +role_allow if { + not site = -1 + + user = 1 +} + +# Org level authorization role_allow if { not site = -1 + org = 1 } +# Org member authorization role_allow if { not site = -1 not org = -1 - # If we are not a member of an org, and the object has an org, then we are - # not authorized. This is an "implied -1" for not being in the org. - org_ok - user = 1 + org_member = 1 } -# ------------------- -# Scope-Specific Rules -# ------------------- +#==============================================================================# +# Scope rules # +#==============================================================================# + +# scope_allow specifies all of the conditions under which a scope can grant +# permission. These rules intentionally use the "unification" (=) operator +# rather than the equality (==) and inequality (!=) operators, because those +# operators do not work on partial values. +# https://www.openpolicyagent.org/docs/policy-language#unification- +# Site level scope enforcement scope_allow if { - scope_allow_list + object_is_included_in_scope_allow_list scope_site = 1 } +# User level scope enforcement scope_allow if { - scope_allow_list + # User scope permissions must be allowed by the scope, and not denied + # by the site. The object *must not* be owned by an organization. + object_is_included_in_scope_allow_list not scope_site = -1 + + scope_user = 1 +} + +# Org level scope enforcement +scope_allow if { + # Org member scope permissions must be allowed by the scope, and not denied + # by the site. The object *must* be owned by an organization. + object_is_included_in_scope_allow_list + not scope_site = -1 + scope_org = 1 } +# Org member level scope enforcement scope_allow if { - scope_allow_list + # Org member scope permissions must be allowed by the scope, and not denied + # by the site or org. 
The object *must* be owned by an organization. + object_is_included_in_scope_allow_list not scope_site = -1 not scope_org = -1 - # If we are not a member of an org, and the object has an org, then we are - # not authorized. This is an "implied -1" for not being in the org. - org_ok - scope_user = 1 + scope_org_member = 1 } -# ------------------- -# ACL-Specific Rules -# Access Control List -# ------------------- +# If *.* is allowed, then all objects are in scope. +object_is_included_in_scope_allow_list if { + {"type": "*", "id": "*"} in input.subject.scope.allow_list +} + +# If .* is allowed, then all objects of that type are in scope. +object_is_included_in_scope_allow_list if { + {"type": input.object.type, "id": "*"} in input.subject.scope.allow_list +} + +# Check if the object type and ID match one of the allow list entries. +object_is_included_in_scope_allow_list if { + # Check that the wildcard rules do not apply. This prevents partial inputs + # from needing to include `input.object.id`. + not {"type": "*", "id": "*"} in input.subject.scope.allow_list + not {"type": input.object.type, "id": "*"} in input.subject.scope.allow_list + + # Check which IDs from the allow list match the object type + allowed_ids_for_object_type := {it.id | + some it in input.subject.scope.allow_list + it.type in [input.object.type, "*"] + } + + # Check if the input object ID is in the set of allowed IDs for the same + # object type. We do this at the end to keep `input.object.id` out of the + # comprehension because it might be unknown. + input.object.id in allowed_ids_for_object_type +} + +#==============================================================================# +# ACL rules # +#==============================================================================# # ACL for users acl_allow if { - # Should you have to be a member of the org too? + # TODO: Should you have to be a member of the org too? 
perms := input.object.acl_user_list[input.subject.id] - # Either the input action or wildcard - [input.action, "*"][_] in perms + # Check if either the action or * is allowed + some action in [input.action, "*"] + action in perms } # ACL for groups acl_allow if { # If there is no organization owner, the object cannot be owned by an - # org_scoped team. - org_mem - group := input.subject.groups[_] + # org-scoped group. + is_org_member + some group in input.subject.groups perms := input.object.acl_group_list[group] - # Either the input action or wildcard - [input.action, "*"][_] in perms + # Check if either the action or * is allowed + some action in [input.action, "*"] + action in perms } -# ACL for 'all_users' special group +# ACL for the special "Everyone" groups acl_allow if { - org_mem + # If there is no organization owner, the object cannot be owned by an + # org-scoped group. + is_org_member perms := input.object.acl_group_list[input.object.org_owner] - [input.action, "*"][_] in perms + + # Check if either the action or * is allowed + some action in [input.action, "*"] + action in perms } -# ------------------- -# Final Allow -# -# The 'allow' block is quite simple. Any set with `-1` cascades down in levels. -# Authorization looks for any `allow` statement that is true. Multiple can be true! -# Note that the absence of `allow` means "unauthorized". -# An explicit `"allow": true` is required. -# -# Scope is also applied. The default scope is "wildcard:wildcard" allowing -# all actions. If the scope is not "1", then the action is not authorized. +#==============================================================================# +# Allow # +#==============================================================================# + +# The `allow` block is quite simple. Any check that voted no will cascade down. +# Authorization looks for any `allow` statement that is true. Multiple can be +# true! Note that the absence of `allow` means "unauthorized". 
An explicit +# `"allow": true` is required. # -# Allow query: -# data.authz.role_allow = true -# data.authz.scope_allow = true -# ------------------- +# We check both the subject's permissions (given by their roles or by ACL) and +# the subject's scope. (The default scope is "*:*", allowing all actions.) Both +# a permission check (either from roles or ACL) and the scope check must vote to +# allow or the action is not authorized. + +# A subject can be given permission by a role +permission_allow if role_allow + +# A subject can be given permission by ACL +permission_allow if acl_allow -# The role or the ACL must allow the action. Scopes can be used to limit, -# so scope_allow must always be true. allow if { - role_allow + # Must be allowed by the subject's permissions + permission_allow + + # ...and allowed by the scope scope_allow } -# ACL list must also have the scope_allow to pass -allow if { - acl_allow - scope_allow +#==============================================================================# +# Utilities # +#==============================================================================# + +# bool_flip returns the logical negation of a boolean value. You can't do +# 'x := not false', but you can do 'x := bool_flip(false)' +bool_flip(b) := false if { + b +} + +bool_flip(b) if { + not b +} + +# to_vote gives you a voting value from a set or list of booleans. +# {false,..} => deny (-1) +# {} => abstain (0) +# {true} => allow (1) + +# Any set which contains a `false` should be considered a vote to deny. +to_vote(set) := -1 if { + false in set +} + +# A set which is empty should be considered abstaining. +to_vote(set) := 0 if { + count(set) == 0 +} + +# A set which only contains true should be considered a vote to allow. 
+to_vote(set) := 1 if { + not false in set + true in set } diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index 93b0ba4e76215..8c4e2abaaad2d 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -27,6 +27,8 @@ const ( ActionCreateAgent Action = "create_agent" ActionDeleteAgent Action = "delete_agent" + + ActionShare Action = "share" ) type PermissionDefinition struct { @@ -61,6 +63,16 @@ var workspaceActions = map[Action]ActionDefinition{ ActionCreateAgent: "create a new workspace agent", ActionDeleteAgent: "delete an existing workspace agent", + + // Sharing a workspace + ActionShare: "share a workspace with other users or groups", +} + +var taskActions = map[Action]ActionDefinition{ + ActionCreate: "create a new task", + ActionRead: "read task data or output to view on the UI or CLI", + ActionUpdate: "edit task settings or send input to an existing task", + ActionDelete: "delete task", } // RBACPermissions is indexed by the type @@ -86,6 +98,9 @@ var RBACPermissions = map[string]PermissionDefinition{ "workspace": { Actions: workspaceActions, }, + "task": { + Actions: taskActions, + }, // Dormant workspaces have the same perms as workspaces. "workspace_dormant": { Actions: workspaceActions, diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index e5f822b91e193..91061f1647020 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -282,8 +282,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // Note: even without PrebuiltWorkspace permissions, access is still granted via Workspace permissions. 
ResourcePrebuiltWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete}, })...), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }.withCachedRegoValue() memberRole := Role{ @@ -295,20 +295,16 @@ func ReloadBuiltinRoles(opts *RoleOptions) { ResourceOauth2App.Type: {policy.ActionRead}, ResourceWorkspaceProxy.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{}, - User: append(allPermsExcept(ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceUser, ResourceOrganizationMember), + User: append(allPermsExcept(ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceWorkspace, ResourceUser, ResourceOrganizationMember, ResourceOrganizationMember), Permissions(map[string][]policy.Action{ - // Reduced permission set on dormant workspaces. No build, ssh, or exec - ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, // Users cannot do create/update/delete on themselves, but they // can read their own details. ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, - // Can read their own organization member record - ResourceOrganizationMember.Type: {policy.ActionRead}, // Users can create provisioner daemons scoped to themselves. ResourceProvisionerDaemon.Type: {policy.ActionRead, policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, })..., ), + ByOrgID: map[string]OrgPermissions{}, }.withCachedRegoValue() auditorRole := Role{ @@ -331,8 +327,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // Allow auditors to query aibridge interceptions. 
ResourceAibridgeInterception.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }.withCachedRegoValue() templateAdminRole := Role{ @@ -354,8 +350,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) { ResourceOrganization.Type: {policy.ActionRead}, ResourceOrganizationMember.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }.withCachedRegoValue() userAdminRole := Role{ @@ -378,8 +374,8 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // Manage org membership based on OIDC claims ResourceIdpsyncSettings.Type: {policy.ActionRead, policy.ActionUpdate}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }.withCachedRegoValue() builtInRoles = map[string]func(orgID uuid.UUID) Role{ @@ -419,18 +415,21 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // users at the site wide to know they exist. ResourceUser.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{ + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ // Org admins should not have workspace exec perms. - organizationID.String(): append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceAssignRole, ResourceUserSecret), Permissions(map[string][]policy.Action{ - ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, - ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH), - // PrebuiltWorkspaces are a subset of Workspaces. - // Explicitly setting PrebuiltWorkspace permissions for clarity. 
- // Note: even without PrebuiltWorkspace permissions, access is still granted via Workspace permissions. - ResourcePrebuiltWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete}, - })...), + organizationID.String(): { + Org: append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceAssignRole, ResourceUserSecret), Permissions(map[string][]policy.Action{ + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, + ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH), + // PrebuiltWorkspaces are a subset of Workspaces. + // Explicitly setting PrebuiltWorkspace permissions for clarity. + // Note: even without PrebuiltWorkspace permissions, access is still granted via Workspace permissions. + ResourcePrebuiltWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete}, + })...), + Member: []Permission{}, + }, }, - User: []Permission{}, } }, @@ -440,18 +439,30 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Identifier: RoleIdentifier{Name: orgMember, OrganizationID: organizationID}, DisplayName: "", Site: []Permission{}, - Org: map[string][]Permission{ - organizationID.String(): Permissions(map[string][]policy.Action{ - // All users can see the provisioner daemons for workspace - // creation. - ResourceProvisionerDaemon.Type: {policy.ActionRead}, - // All org members can read the organization - ResourceOrganization.Type: {policy.ActionRead}, - // Can read available roles. - ResourceAssignOrgRole.Type: {policy.ActionRead}, - }), + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ + organizationID.String(): { + Org: Permissions(map[string][]policy.Action{ + // All users can see the provisioner daemons for workspace + // creation. 
+ ResourceProvisionerDaemon.Type: {policy.ActionRead}, + // All org members can read the organization + ResourceOrganization.Type: {policy.ActionRead}, + // Can read available roles. + ResourceAssignOrgRole.Type: {policy.ActionRead}, + }), + Member: append(allPermsExcept(ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceUser, ResourceOrganizationMember), + Permissions(map[string][]policy.Action{ + // Reduced permission set on dormant workspaces. No build, ssh, or exec + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, + // Can read their own organization member record + ResourceOrganizationMember.Type: {policy.ActionRead}, + // Users can create provisioner daemons scoped to themselves. + ResourceProvisionerDaemon.Type: {policy.ActionRead, policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, + })..., + ), + }, }, - User: []Permission{}, } }, orgAuditor: func(organizationID uuid.UUID) Role { @@ -459,19 +470,22 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Identifier: RoleIdentifier{Name: orgAuditor, OrganizationID: organizationID}, DisplayName: "Organization Auditor", Site: []Permission{}, - Org: map[string][]Permission{ - organizationID.String(): Permissions(map[string][]policy.Action{ - ResourceAuditLog.Type: {policy.ActionRead}, - ResourceConnectionLog.Type: {policy.ActionRead}, - // Allow auditors to see the resources that audit logs reflect. 
- ResourceTemplate.Type: {policy.ActionRead, policy.ActionViewInsights}, - ResourceGroup.Type: {policy.ActionRead}, - ResourceGroupMember.Type: {policy.ActionRead}, - ResourceOrganization.Type: {policy.ActionRead}, - ResourceOrganizationMember.Type: {policy.ActionRead}, - }), + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ + organizationID.String(): { + Org: Permissions(map[string][]policy.Action{ + ResourceAuditLog.Type: {policy.ActionRead}, + ResourceConnectionLog.Type: {policy.ActionRead}, + // Allow auditors to see the resources that audit logs reflect. + ResourceTemplate.Type: {policy.ActionRead, policy.ActionViewInsights}, + ResourceGroup.Type: {policy.ActionRead}, + ResourceGroupMember.Type: {policy.ActionRead}, + ResourceOrganization.Type: {policy.ActionRead}, + ResourceOrganizationMember.Type: {policy.ActionRead}, + }), + Member: []Permission{}, + }, }, - User: []Permission{}, } }, orgUserAdmin: func(organizationID uuid.UUID) Role { @@ -484,18 +498,21 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // users at the site wide to know they exist. ResourceUser.Type: {policy.ActionRead}, }), - Org: map[string][]Permission{ - organizationID.String(): Permissions(map[string][]policy.Action{ - // Assign, remove, and read roles in the organization. - ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionUnassign, policy.ActionRead}, - ResourceOrganization.Type: {policy.ActionRead}, - ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceGroup.Type: ResourceGroup.AvailableActions(), - ResourceGroupMember.Type: ResourceGroupMember.AvailableActions(), - ResourceIdpsyncSettings.Type: {policy.ActionRead, policy.ActionUpdate}, - }), - }, User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ + organizationID.String(): { + Org: Permissions(map[string][]policy.Action{ + // Assign, remove, and read roles in the organization. 
+ ResourceAssignOrgRole.Type: {policy.ActionAssign, policy.ActionUnassign, policy.ActionRead}, + ResourceOrganization.Type: {policy.ActionRead}, + ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceGroup.Type: ResourceGroup.AvailableActions(), + ResourceGroupMember.Type: ResourceGroupMember.AvailableActions(), + ResourceIdpsyncSettings.Type: {policy.ActionRead, policy.ActionUpdate}, + }), + Member: []Permission{}, + }, + }, } }, orgTemplateAdmin: func(organizationID uuid.UUID) Role { @@ -504,25 +521,28 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Identifier: RoleIdentifier{Name: orgTemplateAdmin, OrganizationID: organizationID}, DisplayName: "Organization Template Admin", Site: []Permission{}, - Org: map[string][]Permission{ - organizationID.String(): Permissions(map[string][]policy.Action{ - ResourceTemplate.Type: ResourceTemplate.AvailableActions(), - ResourceFile.Type: {policy.ActionCreate, policy.ActionRead}, - ResourceWorkspace.Type: {policy.ActionRead}, - ResourcePrebuiltWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete}, - // Assigning template perms requires this permission. - ResourceOrganization.Type: {policy.ActionRead}, - ResourceOrganizationMember.Type: {policy.ActionRead}, - ResourceGroup.Type: {policy.ActionRead}, - ResourceGroupMember.Type: {policy.ActionRead}, - // Since templates have to correlate with provisioners, - // the ability to create templates and provisioners has - // a lot of overlap. 
- ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, - }), + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ + organizationID.String(): { + Org: Permissions(map[string][]policy.Action{ + ResourceTemplate.Type: ResourceTemplate.AvailableActions(), + ResourceFile.Type: {policy.ActionCreate, policy.ActionRead}, + ResourceWorkspace.Type: {policy.ActionRead}, + ResourcePrebuiltWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete}, + // Assigning template perms requires this permission. + ResourceOrganization.Type: {policy.ActionRead}, + ResourceOrganizationMember.Type: {policy.ActionRead}, + ResourceGroup.Type: {policy.ActionRead}, + ResourceGroupMember.Type: {policy.ActionRead}, + // Since templates have to correlate with provisioners, + // the ability to create templates and provisioners has + // a lot of overlap. + ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, + }), + Member: []Permission{}, + }, }, - User: []Permission{}, } }, // orgWorkspaceCreationBan prevents creating & deleting workspaces. 
This @@ -533,31 +553,34 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Identifier: RoleIdentifier{Name: orgWorkspaceCreationBan, OrganizationID: organizationID}, DisplayName: "Organization Workspace Creation Ban", Site: []Permission{}, - Org: map[string][]Permission{ + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{ organizationID.String(): { - { - Negate: true, - ResourceType: ResourceWorkspace.Type, - Action: policy.ActionCreate, - }, - { - Negate: true, - ResourceType: ResourceWorkspace.Type, - Action: policy.ActionDelete, - }, - { - Negate: true, - ResourceType: ResourceWorkspace.Type, - Action: policy.ActionCreateAgent, - }, - { - Negate: true, - ResourceType: ResourceWorkspace.Type, - Action: policy.ActionDeleteAgent, + Org: []Permission{ + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionCreate, + }, + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionDelete, + }, + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionCreateAgent, + }, + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionDeleteAgent, + }, }, + Member: []Permission{}, }, }, - User: []Permission{}, } }, } @@ -668,9 +691,10 @@ func (perm Permission) Valid() error { } // Role is a set of permissions at multiple levels: -// - Site level permissions apply EVERYWHERE -// - Org level permissions apply to EVERYTHING in a given ORG -// - User level permissions are the lowest +// - Site permissions apply EVERYWHERE +// - Org permissions apply to EVERYTHING in a given ORG +// - User permissions apply to all resources the user owns +// - OrgMember permissions apply to resources in the given org that the user owns // This is the type passed into the rego as a json payload. // Users of this package should instead **only** use the role names, and // this package will expand the role names into their json payloads. 
@@ -680,17 +704,21 @@ type Role struct { // that means the UI should never display it. DisplayName string `json:"display_name"` Site []Permission `json:"site"` - // Org is a map of orgid to permissions. We represent orgid as a string. - // We scope the organizations in the role so we can easily combine all the - // roles. - Org map[string][]Permission `json:"org"` - User []Permission `json:"user"` + User []Permission `json:"user"` + // ByOrgID is a map of organization IDs to permissions. Grouping by + // organization makes roles easy to combine. + ByOrgID map[string]OrgPermissions `json:"by_org_id"` // cachedRegoValue can be used to cache the rego value for this role. // This is helpful for static roles that never change. cachedRegoValue ast.Value } +type OrgPermissions struct { + Org []Permission `json:"org"` + Member []Permission `json:"member"` +} + // Valid will check all it's permissions and ensure they are all correct // according to the policy. This verifies every action specified make sense // for the given resource. @@ -702,10 +730,15 @@ func (role Role) Valid() error { } } - for orgID, permissions := range role.Org { - for _, perm := range permissions { + for orgID, orgPermissions := range role.ByOrgID { + for _, perm := range orgPermissions.Org { + if err := perm.Valid(); err != nil { + errs = append(errs, xerrors.Errorf("org=%q: org %w", orgID, err)) + } + } + for _, perm := range orgPermissions.Member { if err := perm.Valid(); err != nil { - errs = append(errs, xerrors.Errorf("org=%q: %w", orgID, err)) + errs = append(errs, xerrors.Errorf("org=%q: member: %w", orgID, err)) } } } @@ -774,7 +807,7 @@ func RoleByName(name RoleIdentifier) (Role, error) { // Ensure all org roles are properly scoped a non-empty organization id. // This is just some defensive programming. 
role := roleFunc(name.OrganizationID) - if len(role.Org) > 0 && name.OrganizationID == uuid.Nil { + if len(role.ByOrgID) > 0 && name.OrganizationID == uuid.Nil { return Role{}, xerrors.Errorf("expect a org id for role %q", name.String()) } diff --git a/coderd/rbac/roles_internal_test.go b/coderd/rbac/roles_internal_test.go index 5c7a00d899a40..b99791b5a1f5b 100644 --- a/coderd/rbac/roles_internal_test.go +++ b/coderd/rbac/roles_internal_test.go @@ -33,10 +33,11 @@ func BenchmarkRBACValueAllocation(b *testing.B) { uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, - }).WithACLUserList(map[string][]policy.Action{ - uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, - uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, - }) + }). + WithACLUserList(map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + }) jsonSubject := authSubject{ ID: actor.ID, @@ -107,7 +108,7 @@ func TestRegoInputValue(t *testing.T) { t.Parallel() // This is the input that would be passed to the rego policy. - jsonInput := map[string]interface{}{ + jsonInput := map[string]any{ "subject": authSubject{ ID: actor.ID, Roles: must(actor.Roles.Expand()), @@ -138,7 +139,7 @@ func TestRegoInputValue(t *testing.T) { t.Parallel() // This is the input that would be passed to the rego policy. 
- jsonInput := map[string]interface{}{ + jsonInput := map[string]any{ "subject": authSubject{ ID: actor.ID, Roles: must(actor.Roles.Expand()), @@ -146,7 +147,7 @@ func TestRegoInputValue(t *testing.T) { Scope: must(actor.Scope.Expand()), }, "action": action, - "object": map[string]interface{}{ + "object": map[string]any{ "type": obj.Type, }, } @@ -270,17 +271,18 @@ func TestDeduplicatePermissions(t *testing.T) { require.Equal(t, want, got) } -// SameAs compares 2 roles for equality. +// equalRoles compares 2 roles for equality. func equalRoles(t *testing.T, a, b Role) { require.Equal(t, a.Identifier, b.Identifier, "role names") require.Equal(t, a.DisplayName, b.DisplayName, "role display names") require.ElementsMatch(t, a.Site, b.Site, "site permissions") require.ElementsMatch(t, a.User, b.User, "user permissions") - require.Equal(t, len(a.Org), len(b.Org), "same number of org roles") + require.Equal(t, len(a.ByOrgID), len(b.ByOrgID), "same number of org roles") - for ak, av := range a.Org { - bv, ok := b.Org[ak] + for ak, av := range a.ByOrgID { + bv, ok := b.ByOrgID[ak] require.True(t, ok, "org permissions missing: %s", ak) - require.ElementsMatchf(t, av, bv, "org %s permissions", ak) + require.ElementsMatchf(t, av.Org, bv.Org, "org %s permissions", ak) + require.ElementsMatchf(t, av.Member, bv.Member, "member %s permissions", ak) } } diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index 18d521d4748eb..8ea0a9642f035 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -235,6 +235,39 @@ func TestRolePermissions(t *testing.T) { false: {setOtherOrg, memberMe, userAdmin, templateAdmin, orgTemplateAdmin, orgUserAdmin, orgAuditor, orgMemberMeBanWorkspace}, }, }, + { + Name: "ShareMyWorkspace", + Actions: []policy.Action{policy.ActionShare}, + Resource: rbac.ResourceWorkspace. + WithID(workspaceID). + InOrg(orgID). 
+ WithOwner(currentUser.String()), + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {owner, orgMemberMe, orgAdmin, orgMemberMeBanWorkspace}, + false: { + memberMe, setOtherOrg, + templateAdmin, userAdmin, + orgTemplateAdmin, orgUserAdmin, orgAuditor, + }, + }, + }, + { + Name: "ShareWorkspaceDormant", + Actions: []policy.Action{policy.ActionShare}, + Resource: rbac.ResourceWorkspaceDormant. + WithID(uuid.New()). + InOrg(orgID). + WithOwner(memberMe.Actor.ID), + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {}, + false: { + orgMemberMe, orgAdmin, owner, setOtherOrg, + userAdmin, memberMe, + templateAdmin, orgTemplateAdmin, orgUserAdmin, orgAuditor, + orgMemberMeBanWorkspace, + }, + }, + }, { Name: "Templates", Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, @@ -505,6 +538,15 @@ func TestRolePermissions(t *testing.T) { false: {setOtherOrg, userAdmin, memberMe, orgUserAdmin, orgAuditor, orgMemberMe}, }, }, + { + Name: "Task", + Actions: crud, + Resource: rbac.ResourceTask.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {owner, orgAdmin, orgMemberMe}, + false: {setOtherOrg, userAdmin, templateAdmin, memberMe, orgTemplateAdmin, orgUserAdmin, orgAuditor}, + }, + }, // Some admin style resources { Name: "Licenses", diff --git a/coderd/rbac/rolestore/rolestore.go b/coderd/rbac/rolestore/rolestore.go index 610b04c06aa19..c2189c13b0c1f 100644 --- a/coderd/rbac/rolestore/rolestore.go +++ b/coderd/rbac/rolestore/rolestore.go @@ -124,7 +124,6 @@ func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { Identifier: dbRole.RoleIdentifier(), DisplayName: dbRole.DisplayName, Site: convertPermissions(dbRole.SitePermissions), - Org: nil, User: convertPermissions(dbRole.UserPermissions), } @@ -134,8 +133,10 @@ func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { } if dbRole.OrganizationID.UUID != uuid.Nil { - role.Org = 
map[string][]rbac.Permission{ - dbRole.OrganizationID.UUID.String(): convertPermissions(dbRole.OrgPermissions), + role.ByOrgID = map[string]rbac.OrgPermissions{ + dbRole.OrganizationID.UUID.String(): { + Org: convertPermissions(dbRole.OrgPermissions), + }, } } diff --git a/coderd/rbac/scopes.go b/coderd/rbac/scopes.go index 72a825003ccfb..5c8c80305679c 100644 --- a/coderd/rbac/scopes.go +++ b/coderd/rbac/scopes.go @@ -78,8 +78,8 @@ var builtinScopes = map[ScopeName]Scope{ Site: Permissions(map[string][]policy.Action{ ResourceWildcard.Type: {policy.WildcardSymbol}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, AllowIDList: []AllowListElement{AllowListAll()}, }, @@ -91,8 +91,8 @@ var builtinScopes = map[ScopeName]Scope{ Site: Permissions(map[string][]policy.Action{ ResourceWorkspace.Type: {policy.ActionApplicationConnect}, }), - Org: map[string][]Permission{}, - User: []Permission{}, + User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, AllowIDList: []AllowListElement{AllowListAll()}, }, @@ -102,8 +102,8 @@ var builtinScopes = map[ScopeName]Scope{ Identifier: RoleIdentifier{Name: fmt.Sprintf("Scope_%s", ScopeNoUserData)}, DisplayName: "Scope without access to user data", Site: allPermsExcept(ResourceUser), - Org: map[string][]Permission{}, User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, AllowIDList: []AllowListElement{AllowListAll()}, }, @@ -232,11 +232,11 @@ func ExpandScope(scope ScopeName) (Scope, error) { Identifier: RoleIdentifier{Name: fmt.Sprintf("Scope_%s", scope)}, DisplayName: string(scope), Site: site, - Org: map[string][]Permission{}, User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, // Composites are site-level; allow-list empty by default - AllowIDList: []AllowListElement{}, + AllowIDList: []AllowListElement{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}, }, nil } if res, act, ok := parseLowLevelScope(scope); ok { @@ 
-289,10 +289,10 @@ func expandLowLevel(resource string, action policy.Action) Scope { Identifier: RoleIdentifier{Name: fmt.Sprintf("Scope_%s:%s", resource, action)}, DisplayName: fmt.Sprintf("%s:%s", resource, action), Site: []Permission{{ResourceType: resource, Action: action}}, - Org: map[string][]Permission{}, User: []Permission{}, + ByOrgID: map[string]OrgPermissions{}, }, - // Low-level scopes intentionally return an empty allow list. - AllowIDList: []AllowListElement{}, + // Low-level scopes intentionally return a wildcard allow list. + AllowIDList: []AllowListElement{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}, } } diff --git a/coderd/rbac/scopes_catalog.go b/coderd/rbac/scopes_catalog.go index f8deb57b46fcb..ef4f3186de4fd 100644 --- a/coderd/rbac/scopes_catalog.go +++ b/coderd/rbac/scopes_catalog.go @@ -50,6 +50,13 @@ var externalLowLevel = map[ScopeName]struct{}{ "user_secret:update": {}, "user_secret:delete": {}, "user_secret:*": {}, + + // Tasks + "task:create": {}, + "task:read": {}, + "task:update": {}, + "task:delete": {}, + "task:*": {}, } // Public composite coder:* scopes exposed to users. 
diff --git a/coderd/rbac/scopes_catalog_internal_test.go b/coderd/rbac/scopes_catalog_internal_test.go index 4530447e993a1..37de001fae2ea 100644 --- a/coderd/rbac/scopes_catalog_internal_test.go +++ b/coderd/rbac/scopes_catalog_internal_test.go @@ -36,7 +36,7 @@ func TestExternalScopeNames(t *testing.T) { expected, ok := CompositeSitePermissions(ScopeName(name)) require.Truef(t, ok, "expected composite scope definition: %s", name) require.ElementsMatchf(t, expected, s.Site, "unexpected expanded permissions for %s", name) - require.Empty(t, s.Org) + require.Empty(t, s.ByOrgID) require.Empty(t, s.User) continue } @@ -50,7 +50,7 @@ func TestExternalScopeNames(t *testing.T) { require.Len(t, s.Site, 1) require.Equal(t, res, s.Site[0].ResourceType) require.Equal(t, act, s.Site[0].Action) - require.Empty(t, s.Org) + require.Empty(t, s.ByOrgID) require.Empty(t, s.User) } } diff --git a/coderd/rbac/scopes_constants_gen.go b/coderd/rbac/scopes_constants_gen.go index ccd9622cd4df4..2bd058b5b1007 100644 --- a/coderd/rbac/scopes_constants_gen.go +++ b/coderd/rbac/scopes_constants_gen.go @@ -95,6 +95,10 @@ const ( ScopeTailnetCoordinatorDelete ScopeName = "tailnet_coordinator:delete" ScopeTailnetCoordinatorRead ScopeName = "tailnet_coordinator:read" ScopeTailnetCoordinatorUpdate ScopeName = "tailnet_coordinator:update" + ScopeTaskCreate ScopeName = "task:create" + ScopeTaskDelete ScopeName = "task:delete" + ScopeTaskRead ScopeName = "task:read" + ScopeTaskUpdate ScopeName = "task:update" ScopeTemplateCreate ScopeName = "template:create" ScopeTemplateDelete ScopeName = "template:delete" ScopeTemplateRead ScopeName = "template:read" @@ -123,6 +127,7 @@ const ( ScopeWorkspaceDelete ScopeName = "workspace:delete" ScopeWorkspaceDeleteAgent ScopeName = "workspace:delete_agent" ScopeWorkspaceRead ScopeName = "workspace:read" + ScopeWorkspaceShare ScopeName = "workspace:share" ScopeWorkspaceSsh ScopeName = "workspace:ssh" ScopeWorkspaceStart ScopeName = "workspace:start" 
ScopeWorkspaceStop ScopeName = "workspace:stop" @@ -137,6 +142,7 @@ const ( ScopeWorkspaceDormantDelete ScopeName = "workspace_dormant:delete" ScopeWorkspaceDormantDeleteAgent ScopeName = "workspace_dormant:delete_agent" ScopeWorkspaceDormantRead ScopeName = "workspace_dormant:read" + ScopeWorkspaceDormantShare ScopeName = "workspace_dormant:share" ScopeWorkspaceDormantSsh ScopeName = "workspace_dormant:ssh" ScopeWorkspaceDormantStart ScopeName = "workspace_dormant:start" ScopeWorkspaceDormantStop ScopeName = "workspace_dormant:stop" @@ -244,6 +250,10 @@ func (e ScopeName) Valid() bool { ScopeTailnetCoordinatorDelete, ScopeTailnetCoordinatorRead, ScopeTailnetCoordinatorUpdate, + ScopeTaskCreate, + ScopeTaskDelete, + ScopeTaskRead, + ScopeTaskUpdate, ScopeTemplateCreate, ScopeTemplateDelete, ScopeTemplateRead, @@ -272,6 +282,7 @@ func (e ScopeName) Valid() bool { ScopeWorkspaceDelete, ScopeWorkspaceDeleteAgent, ScopeWorkspaceRead, + ScopeWorkspaceShare, ScopeWorkspaceSsh, ScopeWorkspaceStart, ScopeWorkspaceStop, @@ -286,6 +297,7 @@ func (e ScopeName) Valid() bool { ScopeWorkspaceDormantDelete, ScopeWorkspaceDormantDeleteAgent, ScopeWorkspaceDormantRead, + ScopeWorkspaceDormantShare, ScopeWorkspaceDormantSsh, ScopeWorkspaceDormantStart, ScopeWorkspaceDormantStop, @@ -394,6 +406,10 @@ func AllScopeNameValues() []ScopeName { ScopeTailnetCoordinatorDelete, ScopeTailnetCoordinatorRead, ScopeTailnetCoordinatorUpdate, + ScopeTaskCreate, + ScopeTaskDelete, + ScopeTaskRead, + ScopeTaskUpdate, ScopeTemplateCreate, ScopeTemplateDelete, ScopeTemplateRead, @@ -422,6 +438,7 @@ func AllScopeNameValues() []ScopeName { ScopeWorkspaceDelete, ScopeWorkspaceDeleteAgent, ScopeWorkspaceRead, + ScopeWorkspaceShare, ScopeWorkspaceSsh, ScopeWorkspaceStart, ScopeWorkspaceStop, @@ -436,6 +453,7 @@ func AllScopeNameValues() []ScopeName { ScopeWorkspaceDormantDelete, ScopeWorkspaceDormantDeleteAgent, ScopeWorkspaceDormantRead, + ScopeWorkspaceDormantShare, ScopeWorkspaceDormantSsh, 
ScopeWorkspaceDormantStart, ScopeWorkspaceDormantStop, diff --git a/coderd/rbac/scopes_test.go b/coderd/rbac/scopes_test.go index d3c1bf8cfb1eb..270f6ff02854f 100644 --- a/coderd/rbac/scopes_test.go +++ b/coderd/rbac/scopes_test.go @@ -34,10 +34,10 @@ func TestExpandScope(t *testing.T) { require.Len(t, s.Site, 1) require.Equal(t, tc.resource, s.Site[0].ResourceType) require.Equal(t, tc.action, s.Site[0].Action) - require.Empty(t, s.Org) + require.Empty(t, s.ByOrgID) require.Empty(t, s.User) - require.Len(t, s.AllowIDList, 0) + require.Equal(t, []rbac.AllowListElement{rbac.AllowListAll()}, s.AllowIDList) }) } }) diff --git a/coderd/searchquery/search.go b/coderd/searchquery/search.go index d07203a0293c0..59ec3e04923ff 100644 --- a/coderd/searchquery/search.go +++ b/coderd/searchquery/search.go @@ -355,6 +355,8 @@ func AIBridgeInterceptions(ctx context.Context, db database.Store, query string, AfterID: page.AfterID, // #nosec G115 - Safe conversion for pagination limit which is expected to be within int32 range Limit: int32(page.Limit), + // #nosec G115 - Safe conversion for pagination offset which is expected to be within int32 range + Offset: int32(page.Offset), } if query == "" { @@ -363,7 +365,7 @@ func AIBridgeInterceptions(ctx context.Context, db database.Store, query string, values, errors := searchTerms(query, func(term string, values url.Values) error { // Default to the initiating user - values.Add("user", term) + values.Add("initiator", term) return nil }) if len(errors) > 0 { @@ -389,6 +391,43 @@ func AIBridgeInterceptions(ctx context.Context, db database.Store, query string, return filter, parser.Errors } +// Tasks parses a search query for tasks. 
+// +// Supported query parameters: +// - owner: string (username, UUID, or 'me' for current user) +// - organization: string (organization UUID or name) +// - status: string (pending, initializing, active, paused, error, unknown) +func Tasks(ctx context.Context, db database.Store, query string, actorID uuid.UUID) (database.ListTasksParams, []codersdk.ValidationError) { + filter := database.ListTasksParams{ + OwnerID: uuid.Nil, + OrganizationID: uuid.Nil, + Status: "", + } + + if query == "" { + return filter, nil + } + + // Always lowercase for all searches. + query = strings.ToLower(query) + values, errors := searchTerms(query, func(term string, values url.Values) error { + // Default unqualified terms to owner + values.Add("owner", term) + return nil + }) + if len(errors) > 0 { + return filter, errors + } + + parser := httpapi.NewQueryParamParser() + filter.OwnerID = parseUser(ctx, db, parser, values, "owner", actorID) + filter.OrganizationID = parseOrganization(ctx, db, parser, values, "organization") + filter.Status = parser.String(values, "", "status") + + parser.ErrorExcessParams(values) + return filter, parser.Errors +} + func searchTerms(query string, defaultKey func(term string, values url.Values) error) (url.Values, []codersdk.ValidationError) { searchValues := make(url.Values) diff --git a/coderd/searchquery/search_test.go b/coderd/searchquery/search_test.go index 84d4509d0ec60..44ae9d1021159 100644 --- a/coderd/searchquery/search_test.go +++ b/coderd/searchquery/search_test.go @@ -944,3 +944,199 @@ func TestSearchTemplates(t *testing.T) { }) } } + +func TestSearchTasks(t *testing.T) { + t.Parallel() + + userID := uuid.MustParse("10000000-0000-0000-0000-000000000001") + orgID := uuid.MustParse("20000000-0000-0000-0000-000000000001") + + testCases := []struct { + Name string + Query string + ActorID uuid.UUID + Expected database.ListTasksParams + ExpectedErrorContains string + Setup func(t *testing.T, db database.Store) + }{ + { + Name: "Empty", + Query: 
"", + Expected: database.ListTasksParams{}, + }, + { + Name: "OwnerUsername", + Query: "owner:alice", + Setup: func(t *testing.T, db database.Store) { + dbgen.User(t, db, database.User{ + ID: userID, + Username: "alice", + }) + }, + Expected: database.ListTasksParams{ + OwnerID: userID, + }, + }, + { + Name: "OwnerMe", + Query: "owner:me", + ActorID: userID, + Expected: database.ListTasksParams{ + OwnerID: userID, + }, + }, + { + Name: "OwnerUUID", + Query: fmt.Sprintf("owner:%s", userID), + Expected: database.ListTasksParams{ + OwnerID: userID, + }, + }, + { + Name: "StatusActive", + Query: "status:active", + Expected: database.ListTasksParams{ + Status: "active", + }, + }, + { + Name: "StatusPending", + Query: "status:pending", + Expected: database.ListTasksParams{ + Status: "pending", + }, + }, + { + Name: "Organization", + Query: "organization:acme", + Setup: func(t *testing.T, db database.Store) { + dbgen.Organization(t, db, database.Organization{ + ID: orgID, + Name: "acme", + }) + }, + Expected: database.ListTasksParams{ + OrganizationID: orgID, + }, + }, + { + Name: "OrganizationUUID", + Query: fmt.Sprintf("organization:%s", orgID), + Expected: database.ListTasksParams{ + OrganizationID: orgID, + }, + }, + { + Name: "Combined", + Query: "owner:alice organization:acme status:active", + Setup: func(t *testing.T, db database.Store) { + dbgen.Organization(t, db, database.Organization{ + ID: orgID, + Name: "acme", + }) + dbgen.User(t, db, database.User{ + ID: userID, + Username: "alice", + }) + }, + Expected: database.ListTasksParams{ + OwnerID: userID, + OrganizationID: orgID, + Status: "active", + }, + }, + { + Name: "QuotedOwner", + Query: `owner:"alice"`, + Setup: func(t *testing.T, db database.Store) { + dbgen.User(t, db, database.User{ + ID: userID, + Username: "alice", + }) + }, + Expected: database.ListTasksParams{ + OwnerID: userID, + }, + }, + { + Name: "QuotedStatus", + Query: `status:"pending"`, + Expected: database.ListTasksParams{ + Status: 
"pending", + }, + }, + { + Name: "DefaultToOwner", + Query: "alice", + Setup: func(t *testing.T, db database.Store) { + dbgen.User(t, db, database.User{ + ID: userID, + Username: "alice", + }) + }, + Expected: database.ListTasksParams{ + OwnerID: userID, + }, + }, + { + Name: "InvalidOwner", + Query: "owner:nonexistent", + ExpectedErrorContains: "does not exist", + }, + { + Name: "InvalidOrganization", + Query: "organization:nonexistent", + ExpectedErrorContains: "does not exist", + }, + { + Name: "ExtraParam", + Query: "owner:alice invalid:param", + Setup: func(t *testing.T, db database.Store) { + dbgen.User(t, db, database.User{ + ID: userID, + Username: "alice", + }) + }, + ExpectedErrorContains: "is not a valid query param", + }, + { + Name: "ExtraColon", + Query: "owner:alice:extra", + ExpectedErrorContains: "can only contain 1 ':'", + }, + { + Name: "PrefixColon", + Query: ":owner", + ExpectedErrorContains: "cannot start or end with ':'", + }, + { + Name: "SuffixColon", + Query: "owner:", + ExpectedErrorContains: "cannot start or end with ':'", + }, + } + + for _, c := range testCases { + t.Run(c.Name, func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + + if c.Setup != nil { + c.Setup(t, db) + } + + values, errs := searchquery.Tasks(context.Background(), db, c.Query, c.ActorID) + if c.ExpectedErrorContains != "" { + require.True(t, len(errs) > 0, "expect some errors") + var s strings.Builder + for _, err := range errs { + _, _ = s.WriteString(fmt.Sprintf("%s: %s\n", err.Field, err.Detail)) + } + require.Contains(t, s.String(), c.ExpectedErrorContains) + } else { + require.Len(t, errs, 0, "expected no error") + require.Equal(t, c.Expected, values, "expected values") + } + }) + } +} diff --git a/coderd/telemetry/telemetry.go b/coderd/telemetry/telemetry.go index 8f203126c99ba..19873f99eeb2f 100644 --- a/coderd/telemetry/telemetry.go +++ b/coderd/telemetry/telemetry.go @@ -28,7 +28,6 @@ import ( 
"google.golang.org/protobuf/types/known/wrapperspb" "cdr.dev/slog" - "github.com/coder/coder/v2/buildinfo" clitelemetry "github.com/coder/coder/v2/cli/telemetry" "github.com/coder/coder/v2/coderd/database" @@ -36,6 +35,7 @@ import ( "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/codersdk" tailnetproto "github.com/coder/coder/v2/tailnet/proto" + "github.com/coder/quartz" ) const ( @@ -48,6 +48,7 @@ type Options struct { Disabled bool Database database.Store Logger slog.Logger + Clock quartz.Clock // URL is an endpoint to direct telemetry towards! URL *url.URL Experiments codersdk.Experiments @@ -65,6 +66,9 @@ type Options struct { // Duplicate data will be sent, it's on the server-side to index by UUID. // Data is anonymized prior to being sent! func New(options Options) (Reporter, error) { + if options.Clock == nil { + options.Clock = quartz.NewReal() + } if options.SnapshotFrequency == 0 { // Report once every 30mins by default! options.SnapshotFrequency = 30 * time.Minute @@ -86,7 +90,8 @@ func New(options Options) (Reporter, error) { options: options, deploymentURL: deploymentURL, snapshotURL: snapshotURL, - startedAt: dbtime.Now(), + startedAt: dbtime.Time(options.Clock.Now()).UTC(), + client: &http.Client{}, } go reporter.runSnapshotter() return reporter, nil @@ -119,6 +124,7 @@ type remoteReporter struct { snapshotURL *url.URL startedAt time.Time shutdownAt *time.Time + client *http.Client } func (r *remoteReporter) Enabled() bool { @@ -142,7 +148,7 @@ func (r *remoteReporter) reportSync(snapshot *Snapshot) { return } req.Header.Set(VersionHeader, buildinfo.Version()) - resp, err := http.DefaultClient.Do(req) + resp, err := r.client.Do(req) if err != nil { // If the request fails it's not necessarily an error. // In an airgapped environment, it's fine if this fails! 
@@ -164,7 +170,7 @@ func (r *remoteReporter) Close() { return } close(r.closed) - now := dbtime.Now() + now := dbtime.Time(r.options.Clock.Now()).UTC() r.shutdownAt = &now if r.Enabled() { // Report a final collection of telemetry prior to close! @@ -353,7 +359,7 @@ func (r *remoteReporter) deployment() error { return xerrors.Errorf("create deployment request: %w", err) } req.Header.Set(VersionHeader, buildinfo.Version()) - resp, err := http.DefaultClient.Do(req) + resp, err := r.client.Do(req) if err != nil { return xerrors.Errorf("perform request: %w", err) } @@ -410,7 +416,7 @@ func (r *remoteReporter) createSnapshot() (*Snapshot, error) { ctx = r.ctx // For resources that grow in size very quickly (like workspace builds), // we only report events that occurred within the past hour. - createdAfter = dbtime.Now().Add(-1 * time.Hour) + createdAfter = dbtime.Time(r.options.Clock.Now().Add(-1 * time.Hour)).UTC() eg errgroup.Group snapshot = &Snapshot{ DeploymentID: r.options.DeploymentID, @@ -728,6 +734,28 @@ func (r *remoteReporter) createSnapshot() (*Snapshot, error) { } return nil }) + eg.Go(func() error { + dbTasks, err := r.options.Database.ListTasks(ctx, database.ListTasksParams{ + OwnerID: uuid.Nil, + OrganizationID: uuid.Nil, + Status: "", + }) + if err != nil { + return err + } + for _, dbTask := range dbTasks { + snapshot.Tasks = append(snapshot.Tasks, ConvertTask(dbTask)) + } + return nil + }) + eg.Go(func() error { + summaries, err := r.generateAIBridgeInterceptionsSummaries(ctx) + if err != nil { + return xerrors.Errorf("generate AIBridge interceptions telemetry summaries: %w", err) + } + snapshot.AIBridgeInterceptionsSummaries = summaries + return nil + }) err := eg.Wait() if err != nil { @@ -736,6 +764,76 @@ func (r *remoteReporter) createSnapshot() (*Snapshot, error) { return snapshot, nil } +func (r *remoteReporter) generateAIBridgeInterceptionsSummaries(ctx context.Context) ([]AIBridgeInterceptionsSummary, error) { + // Get the current timeframe, 
which is the previous hour. + now := dbtime.Time(r.options.Clock.Now()).UTC() + endedAtBefore := now.Truncate(time.Hour) + endedAtAfter := endedAtBefore.Add(-1 * time.Hour) + + // Note: we don't use a transaction for this function since we do tolerate + // some errors, like duplicate lock rows, and we also calculate + // summaries in parallel. + + // Claim the heartbeat lock row for this hour. + err := r.options.Database.InsertTelemetryLock(ctx, database.InsertTelemetryLockParams{ + EventType: "aibridge_interceptions_summary", + PeriodEndingAt: endedAtBefore, + }) + if database.IsUniqueViolation(err, database.UniqueTelemetryLocksPkey) { + // Another replica has already claimed the lock row for this hour. + r.options.Logger.Debug(ctx, "aibridge interceptions telemetry lock already claimed for this hour by another replica, skipping", slog.F("period_ending_at", endedAtBefore)) + return nil, nil + } + if err != nil { + return nil, xerrors.Errorf("insert AIBridge interceptions telemetry lock (period_ending_at=%q): %w", endedAtBefore, err) + } + + // List the summary categories that need to be calculated. + summaryCategories, err := r.options.Database.ListAIBridgeInterceptionsTelemetrySummaries(ctx, database.ListAIBridgeInterceptionsTelemetrySummariesParams{ + EndedAtAfter: endedAtAfter, // inclusive + EndedAtBefore: endedAtBefore, // exclusive + }) + if err != nil { + return nil, xerrors.Errorf("list AIBridge interceptions telemetry summaries (startedAtAfter=%q, endedAtBefore=%q): %w", endedAtAfter, endedAtBefore, err) + } + + // Calculate and convert the summaries for all categories. 
+ var ( + eg, egCtx = errgroup.WithContext(ctx) + mu sync.Mutex + summaries = make([]AIBridgeInterceptionsSummary, 0, len(summaryCategories)) + ) + for _, category := range summaryCategories { + eg.Go(func() error { + summary, err := r.options.Database.CalculateAIBridgeInterceptionsTelemetrySummary(egCtx, database.CalculateAIBridgeInterceptionsTelemetrySummaryParams{ + Provider: category.Provider, + Model: category.Model, + Client: category.Client, + EndedAtAfter: endedAtAfter, + EndedAtBefore: endedAtBefore, + }) + if err != nil { + return xerrors.Errorf("calculate AIBridge interceptions telemetry summary (provider=%q, model=%q, client=%q, startedAtAfter=%q, endedAtBefore=%q): %w", category.Provider, category.Model, category.Client, endedAtAfter, endedAtBefore, err) + } + + // Double check that at least one interception was found in the + // timeframe. + if summary.InterceptionCount == 0 { + return nil + } + + converted := ConvertAIBridgeInterceptionsSummary(endedAtBefore, category.Provider, category.Model, category.Client, summary) + + mu.Lock() + defer mu.Unlock() + summaries = append(summaries, converted) + return nil + }) + } + + return summaries, eg.Wait() +} + // ConvertAPIKey anonymizes an API key. func ConvertAPIKey(apiKey database.APIKey) APIKey { a := APIKey{ @@ -1203,9 +1301,11 @@ type Snapshot struct { Workspaces []Workspace `json:"workspaces"` NetworkEvents []NetworkEvent `json:"network_events"` Organizations []Organization `json:"organizations"` + Tasks []Task `json:"tasks"` TelemetryItems []TelemetryItem `json:"telemetry_items"` UserTailnetConnections []UserTailnetConnection `json:"user_tailnet_connections"` PrebuiltWorkspaces []PrebuiltWorkspace `json:"prebuilt_workspaces"` + AIBridgeInterceptionsSummaries []AIBridgeInterceptionsSummary `json:"aibridge_interceptions_summaries"` } // Deployment contains information about the host running Coder. 
@@ -1751,6 +1851,52 @@ type Organization struct { CreatedAt time.Time `json:"created_at"` } +type Task struct { + ID string `json:"id"` + OrganizationID string `json:"organization_id"` + OwnerID string `json:"owner_id"` + Name string `json:"name"` + WorkspaceID *string `json:"workspace_id"` + WorkspaceBuildNumber *int64 `json:"workspace_build_number"` + WorkspaceAgentID *string `json:"workspace_agent_id"` + WorkspaceAppID *string `json:"workspace_app_id"` + TemplateVersionID string `json:"template_version_id"` + PromptHash string `json:"prompt_hash"` // Prompt is hashed for privacy. + CreatedAt time.Time `json:"created_at"` + Status string `json:"status"` +} + +// ConvertTask anonymizes a Task. +func ConvertTask(task database.Task) Task { + t := &Task{ + ID: task.ID.String(), + OrganizationID: task.OrganizationID.String(), + OwnerID: task.OwnerID.String(), + Name: task.Name, + WorkspaceID: nil, + WorkspaceBuildNumber: nil, + WorkspaceAgentID: nil, + WorkspaceAppID: nil, + TemplateVersionID: task.TemplateVersionID.String(), + PromptHash: fmt.Sprintf("%x", sha256.Sum256([]byte(task.Prompt))), + CreatedAt: task.CreatedAt, + Status: string(task.Status), + } + if task.WorkspaceID.Valid { + t.WorkspaceID = ptr.Ref(task.WorkspaceID.UUID.String()) + } + if task.WorkspaceBuildNumber.Valid { + t.WorkspaceBuildNumber = ptr.Ref(int64(task.WorkspaceBuildNumber.Int32)) + } + if task.WorkspaceAgentID.Valid { + t.WorkspaceAgentID = ptr.Ref(task.WorkspaceAgentID.UUID.String()) + } + if task.WorkspaceAppID.Valid { + t.WorkspaceAppID = ptr.Ref(task.WorkspaceAppID.UUID.String()) + } + return *t +} + type telemetryItemKey string // The comment below gets rid of the warning that the name "TelemetryItemKey" has @@ -1796,6 +1942,89 @@ type PrebuiltWorkspace struct { Count int `json:"count"` } +type AIBridgeInterceptionsSummaryDurationMillis struct { + P50 int64 `json:"p50"` + P90 int64 `json:"p90"` + P95 int64 `json:"p95"` + P99 int64 `json:"p99"` +} + +type 
AIBridgeInterceptionsSummaryTokenCount struct { + Input int64 `json:"input"` + Output int64 `json:"output"` + CachedRead int64 `json:"cached_read"` + CachedWritten int64 `json:"cached_written"` +} + +type AIBridgeInterceptionsSummaryToolCallsCount struct { + Injected int64 `json:"injected"` + NonInjected int64 `json:"non_injected"` +} + +// AIBridgeInterceptionsSummary is a summary of aggregated AI Bridge +// interception data over a period of 1 hour. We send a summary each hour for +// each unique provider + model + client combination. +type AIBridgeInterceptionsSummary struct { + ID uuid.UUID `json:"id"` + + // The end of the hour for which the summary is taken. This will always be a + // UTC timestamp truncated to the hour. + Timestamp time.Time `json:"timestamp"` + Provider string `json:"provider"` + Model string `json:"model"` + Client string `json:"client"` + + InterceptionCount int64 `json:"interception_count"` + InterceptionDurationMillis AIBridgeInterceptionsSummaryDurationMillis `json:"interception_duration_millis"` + + // Map of route to number of interceptions. + // e.g. 
"/v1/chat/completions:blocking", "/v1/chat/completions:streaming" + InterceptionsByRoute map[string]int64 `json:"interceptions_by_route"` + + UniqueInitiatorCount int64 `json:"unique_initiator_count"` + + UserPromptsCount int64 `json:"user_prompts_count"` + + TokenUsagesCount int64 `json:"token_usages_count"` + TokenCount AIBridgeInterceptionsSummaryTokenCount `json:"token_count"` + + ToolCallsCount AIBridgeInterceptionsSummaryToolCallsCount `json:"tool_calls_count"` + InjectedToolCallErrorCount int64 `json:"injected_tool_call_error_count"` +} + +func ConvertAIBridgeInterceptionsSummary(endTime time.Time, provider, model, client string, summary database.CalculateAIBridgeInterceptionsTelemetrySummaryRow) AIBridgeInterceptionsSummary { + return AIBridgeInterceptionsSummary{ + ID: uuid.New(), + Timestamp: endTime, + Provider: provider, + Model: model, + Client: client, + InterceptionCount: summary.InterceptionCount, + InterceptionDurationMillis: AIBridgeInterceptionsSummaryDurationMillis{ + P50: summary.InterceptionDurationP50Millis, + P90: summary.InterceptionDurationP90Millis, + P95: summary.InterceptionDurationP95Millis, + P99: summary.InterceptionDurationP99Millis, + }, + // TODO: currently we don't track by route + InterceptionsByRoute: make(map[string]int64), + UniqueInitiatorCount: summary.UniqueInitiatorCount, + UserPromptsCount: summary.UserPromptsCount, + TokenUsagesCount: summary.TokenUsagesCount, + TokenCount: AIBridgeInterceptionsSummaryTokenCount{ + Input: summary.TokenCountInput, + Output: summary.TokenCountOutput, + CachedRead: summary.TokenCountCachedRead, + CachedWritten: summary.TokenCountCachedWritten, + }, + ToolCallsCount: AIBridgeInterceptionsSummaryToolCallsCount{ + Injected: summary.ToolCallsCountInjected, + NonInjected: summary.ToolCallsCountNonInjected, + }, + InjectedToolCallErrorCount: summary.InjectedToolCallErrorCount, + } +} + type noopReporter struct{} func (*noopReporter) Report(_ *Snapshot) {} diff --git 
a/coderd/telemetry/telemetry_test.go b/coderd/telemetry/telemetry_test.go index 5508a7d8816f5..dede229acdacf 100644 --- a/coderd/telemetry/telemetry_test.go +++ b/coderd/telemetry/telemetry_test.go @@ -28,6 +28,7 @@ import ( "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) func TestMain(m *testing.M) { @@ -44,6 +45,7 @@ func TestTelemetry(t *testing.T) { db, _ := dbtestutil.NewDB(t) ctx := testutil.Context(t, testutil.WaitMedium) + now := dbtime.Now() org, err := db.GetDefaultOrganization(ctx) require.NoError(t, err) @@ -151,6 +153,20 @@ func TestTelemetry(t *testing.T) { HasAITask: sql.NullBool{Valid: true, Bool: true}, AITaskSidebarAppID: uuid.NullUUID{Valid: true, UUID: taskWsApp.ID}, }) + task := dbgen.Task(t, db, database.TaskTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + WorkspaceID: uuid.NullUUID{Valid: true, UUID: taskWs.ID}, + TemplateVersionID: taskTV.ID, + Prompt: "example prompt", + TemplateParameters: json.RawMessage(`{"foo": "bar"}`), + }) + taskWA := dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{ + TaskID: task.ID, + WorkspaceAgentID: uuid.NullUUID{Valid: true, UUID: taskWsAgent.ID}, + WorkspaceAppID: uuid.NullUUID{Valid: true, UUID: taskWsApp.ID}, + WorkspaceBuildNumber: taskWB.BuildNumber, + }) group := dbgen.Group(t, db, database.Group{ OrganizationID: org.ID, @@ -194,12 +210,88 @@ func TestTelemetry(t *testing.T) { AgentID: wsagent.ID, }) - _, snapshot := collectSnapshot(ctx, t, db, nil) + previousAIBridgeInterceptionPeriod := now.Truncate(time.Hour) + user2 := dbgen.User(t, db, database.User{}) + aiBridgeInterception1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: user.ID, + Provider: "anthropic", + Model: "deanseek", + StartedAt: previousAIBridgeInterceptionPeriod.Add(-30 * time.Minute), + }, nil) + _ = dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ + 
InterceptionID: aiBridgeInterception1.ID, + InputTokens: 100, + OutputTokens: 200, + Metadata: json.RawMessage(`{"cache_read_input":300,"cache_creation_input":400}`), + }) + _ = dbgen.AIBridgeUserPrompt(t, db, database.InsertAIBridgeUserPromptParams{ + InterceptionID: aiBridgeInterception1.ID, + }) + _ = dbgen.AIBridgeToolUsage(t, db, database.InsertAIBridgeToolUsageParams{ + InterceptionID: aiBridgeInterception1.ID, + Injected: true, + InvocationError: sql.NullString{String: "error1", Valid: true}, + }) + _, err = db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: aiBridgeInterception1.ID, + EndedAt: aiBridgeInterception1.StartedAt.Add(1 * time.Minute), // 1 minute duration + }) + require.NoError(t, err) + aiBridgeInterception2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: user2.ID, + Provider: aiBridgeInterception1.Provider, + Model: aiBridgeInterception1.Model, + StartedAt: aiBridgeInterception1.StartedAt, + }, nil) + _ = dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ + InterceptionID: aiBridgeInterception2.ID, + InputTokens: 100, + OutputTokens: 200, + Metadata: json.RawMessage(`{"cache_read_input":300,"cache_creation_input":400}`), + }) + _ = dbgen.AIBridgeUserPrompt(t, db, database.InsertAIBridgeUserPromptParams{ + InterceptionID: aiBridgeInterception2.ID, + }) + _ = dbgen.AIBridgeToolUsage(t, db, database.InsertAIBridgeToolUsageParams{ + InterceptionID: aiBridgeInterception2.ID, + Injected: false, + }) + _, err = db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: aiBridgeInterception2.ID, + EndedAt: aiBridgeInterception2.StartedAt.Add(2 * time.Minute), // 2 minute duration + }) + require.NoError(t, err) + aiBridgeInterception3 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: user2.ID, + Provider: "openai", + Model: "gpt-5", + StartedAt: 
aiBridgeInterception1.StartedAt, + }, nil) + _, err = db.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: aiBridgeInterception3.ID, + EndedAt: aiBridgeInterception3.StartedAt.Add(3 * time.Minute), // 3 minute duration + }) + require.NoError(t, err) + _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: user2.ID, + Provider: "openai", + Model: "gpt-5", + StartedAt: aiBridgeInterception1.StartedAt, + }, nil) + // not ended, so it should not affect summaries + + clock := quartz.NewMock(t) + clock.Set(now) + + _, snapshot := collectSnapshot(ctx, t, db, func(opts telemetry.Options) telemetry.Options { + opts.Clock = clock + return opts + }) require.Len(t, snapshot.ProvisionerJobs, 2) require.Len(t, snapshot.Licenses, 1) require.Len(t, snapshot.Templates, 2) require.Len(t, snapshot.TemplateVersions, 3) - require.Len(t, snapshot.Users, 1) + require.Len(t, snapshot.Users, 2) require.Len(t, snapshot.Groups, 2) // 1 member in the everyone group + 1 member in the custom group require.Len(t, snapshot.GroupMembers, 2) @@ -220,6 +312,22 @@ func TestTelemetry(t *testing.T) { require.Len(t, wsa.Subsystems, 2) require.Equal(t, string(database.WorkspaceAgentSubsystemEnvbox), wsa.Subsystems[0]) require.Equal(t, string(database.WorkspaceAgentSubsystemExectrace), wsa.Subsystems[1]) + require.Len(t, snapshot.Tasks, 1) + for _, snapTask := range snapshot.Tasks { + assert.Equal(t, task.ID.String(), snapTask.ID) + assert.Equal(t, task.OrganizationID.String(), snapTask.OrganizationID) + assert.Equal(t, task.OwnerID.String(), snapTask.OwnerID) + assert.Equal(t, task.Name, snapTask.Name) + if assert.True(t, task.WorkspaceID.Valid) { + assert.Equal(t, task.WorkspaceID.UUID.String(), *snapTask.WorkspaceID) + } + assert.EqualValues(t, taskWA.WorkspaceBuildNumber, *snapTask.WorkspaceBuildNumber) + assert.Equal(t, taskWA.WorkspaceAgentID.UUID.String(), *snapTask.WorkspaceAgentID) + assert.Equal(t, 
taskWA.WorkspaceAppID.UUID.String(), *snapTask.WorkspaceAppID) + assert.Equal(t, task.TemplateVersionID.String(), snapTask.TemplateVersionID) + assert.Equal(t, "e196fe22e61cfa32d8c38749e0ce348108bb4cae29e2c36cdcce7e77faa9eb5f", snapTask.PromptHash) + assert.Equal(t, task.CreatedAt.UTC(), snapTask.CreatedAt.UTC()) + } require.True(t, slices.ContainsFunc(snapshot.TemplateVersions, func(ttv telemetry.TemplateVersion) bool { if ttv.ID != taskTV.ID { @@ -257,6 +365,53 @@ func TestTelemetry(t *testing.T) { for _, entity := range snapshot.Templates { require.Equal(t, entity.OrganizationID, org.ID) } + + // 2 unique provider + model + client combinations + require.Len(t, snapshot.AIBridgeInterceptionsSummaries, 2) + snapshot1 := snapshot.AIBridgeInterceptionsSummaries[0] + snapshot2 := snapshot.AIBridgeInterceptionsSummaries[1] + if snapshot1.Provider != aiBridgeInterception1.Provider { + snapshot1, snapshot2 = snapshot2, snapshot1 + } + + require.Equal(t, snapshot1.Provider, aiBridgeInterception1.Provider) + require.Equal(t, snapshot1.Model, aiBridgeInterception1.Model) + require.Equal(t, snapshot1.Client, "unknown") // no client info yet + require.EqualValues(t, snapshot1.InterceptionCount, 2) + require.EqualValues(t, snapshot1.InterceptionsByRoute, map[string]int64{}) // no route info yet + require.EqualValues(t, snapshot1.InterceptionDurationMillis.P50, 90_000) + require.EqualValues(t, snapshot1.InterceptionDurationMillis.P90, 114_000) + require.EqualValues(t, snapshot1.InterceptionDurationMillis.P95, 117_000) + require.EqualValues(t, snapshot1.InterceptionDurationMillis.P99, 119_400) + require.EqualValues(t, snapshot1.UniqueInitiatorCount, 2) + require.EqualValues(t, snapshot1.UserPromptsCount, 2) + require.EqualValues(t, snapshot1.TokenUsagesCount, 2) + require.EqualValues(t, snapshot1.TokenCount.Input, 200) + require.EqualValues(t, snapshot1.TokenCount.Output, 400) + require.EqualValues(t, snapshot1.TokenCount.CachedRead, 600) + require.EqualValues(t, 
snapshot1.TokenCount.CachedWritten, 800) + require.EqualValues(t, snapshot1.ToolCallsCount.Injected, 1) + require.EqualValues(t, snapshot1.ToolCallsCount.NonInjected, 1) + require.EqualValues(t, snapshot1.InjectedToolCallErrorCount, 1) + + require.Equal(t, snapshot2.Provider, aiBridgeInterception3.Provider) + require.Equal(t, snapshot2.Model, aiBridgeInterception3.Model) + require.Equal(t, snapshot2.Client, "unknown") // no client info yet + require.EqualValues(t, snapshot2.InterceptionCount, 1) + require.EqualValues(t, snapshot2.InterceptionsByRoute, map[string]int64{}) // no route info yet + require.EqualValues(t, snapshot2.InterceptionDurationMillis.P50, 180_000) + require.EqualValues(t, snapshot2.InterceptionDurationMillis.P90, 180_000) + require.EqualValues(t, snapshot2.InterceptionDurationMillis.P95, 180_000) + require.EqualValues(t, snapshot2.InterceptionDurationMillis.P99, 180_000) + require.EqualValues(t, snapshot2.UniqueInitiatorCount, 1) + require.EqualValues(t, snapshot2.UserPromptsCount, 0) + require.EqualValues(t, snapshot2.TokenUsagesCount, 0) + require.EqualValues(t, snapshot2.TokenCount.Input, 0) + require.EqualValues(t, snapshot2.TokenCount.Output, 0) + require.EqualValues(t, snapshot2.TokenCount.CachedRead, 0) + require.EqualValues(t, snapshot2.TokenCount.CachedWritten, 0) + require.EqualValues(t, snapshot2.ToolCallsCount.Injected, 0) + require.EqualValues(t, snapshot2.ToolCallsCount.NonInjected, 0) }) t.Run("HashedEmail", func(t *testing.T) { t.Parallel() diff --git a/coderd/templates_test.go b/coderd/templates_test.go index c470dd17c664a..df50b28ab861e 100644 --- a/coderd/templates_test.go +++ b/coderd/templates_test.go @@ -944,10 +944,6 @@ func TestPatchTemplateMeta(t *testing.T) { t.Run("AlreadyExists", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires Postgres constraints") - } - ownerClient := coderdtest.New(t, nil) owner := coderdtest.CreateFirstUser(t, ownerClient) client, _ := 
coderdtest.CreateAnotherUser(t, ownerClient, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID)) diff --git a/coderd/templateversions.go b/coderd/templateversions.go index 17a4d9b451e9c..2e959702fbde5 100644 --- a/coderd/templateversions.go +++ b/coderd/templateversions.go @@ -1958,6 +1958,7 @@ func convertTemplateVersion(version database.TemplateVersion, job codersdk.Provi CreatedBy: codersdk.MinimalUser{ ID: version.CreatedBy, Username: version.CreatedByUsername, + Name: version.CreatedByName, AvatarURL: version.CreatedByAvatarURL, }, Archived: version.Archived, diff --git a/coderd/templateversions_test.go b/coderd/templateversions_test.go index 48f690d26d2eb..f282f8420b52e 100644 --- a/coderd/templateversions_test.go +++ b/coderd/templateversions_test.go @@ -1422,9 +1422,6 @@ func TestTemplateVersionDryRun(t *testing.T) { t.Run("Pending", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closer := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ diff --git a/coderd/users.go b/coderd/users.go index 1e592d010c077..30fa7bf7cabeb 100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -753,6 +753,14 @@ func (api *API) putUserProfile(rw http.ResponseWriter, r *http.Request) { if !httpapi.Read(ctx, rw, r, ¶ms) { return } + + // If caller wants to update user's username, they need "update_users" permission. + // This is restricted to user admins only. 
+ if params.Username != user.Username && !api.Authorize(r, policy.ActionUpdate, user) { + httpapi.ResourceNotFound(rw) + return + } + existentUser, err := api.Database.GetUserByEmailOrUsername(ctx, database.GetUserByEmailOrUsernameParams{ Username: params.Username, }) @@ -1600,5 +1608,6 @@ func convertAPIKey(k database.APIKey) codersdk.APIKey { Scopes: scopes, LifetimeSeconds: k.LifetimeSeconds, TokenName: k.TokenName, + AllowList: db2sdk.List(k.AllowList, db2sdk.APIAllowListTarget), } } diff --git a/coderd/users_test.go b/coderd/users_test.go index 22c9fad5eebea..283b607e89df9 100644 --- a/coderd/users_test.go +++ b/coderd/users_test.go @@ -1051,7 +1051,7 @@ func TestUpdateUserProfile(t *testing.T) { require.Equal(t, database.AuditActionWrite, auditor.AuditLogs()[numLogs-1].Action) }) - t.Run("UpdateSelfAsMember", func(t *testing.T) { + t.Run("UpdateSelfAsMember_Name", func(t *testing.T) { t.Parallel() auditor := audit.NewMock() client := coderdtest.New(t, &coderdtest.Options{Auditor: auditor}) @@ -1060,29 +1060,82 @@ func TestUpdateUserProfile(t *testing.T) { firstUser := coderdtest.CreateFirstUser(t, client) numLogs++ // add an audit log for login - memberClient, _ := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) numLogs++ // add an audit log for user creation ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() - newUsername := coderdtest.RandomUsername(t) newName := coderdtest.RandomName(t) userProfile, err := memberClient.UpdateUserProfile(ctx, codersdk.Me, codersdk.UpdateUserProfileRequest{ - Username: newUsername, Name: newName, + Username: memberUser.Username, }) numLogs++ // add an audit log for user update numLogs++ // add an audit log for API key creation require.NoError(t, err) - require.Equal(t, newUsername, userProfile.Username) + require.Equal(t, memberUser.Username, userProfile.Username) 
require.Equal(t, newName, userProfile.Name) require.Len(t, auditor.AuditLogs(), numLogs) require.Equal(t, database.AuditActionWrite, auditor.AuditLogs()[numLogs-1].Action) }) + t.Run("UpdateSelfAsMember_Username", func(t *testing.T) { + t.Parallel() + auditor := audit.NewMock() + client := coderdtest.New(t, &coderdtest.Options{Auditor: auditor}) + + firstUser := coderdtest.CreateFirstUser(t, client) + memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + newUsername := coderdtest.RandomUsername(t) + _, err := memberClient.UpdateUserProfile(ctx, codersdk.Me, codersdk.UpdateUserProfileRequest{ + Name: memberUser.Name, + Username: newUsername, + }) + + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusNotFound, apiErr.StatusCode()) + }) + + t.Run("UpdateMemberAsAdmin_Username", func(t *testing.T) { + t.Parallel() + auditor := audit.NewMock() + adminClient := coderdtest.New(t, &coderdtest.Options{Auditor: auditor}) + numLogs := len(auditor.AuditLogs()) + + adminUser := coderdtest.CreateFirstUser(t, adminClient) + numLogs++ // add an audit log for login + + _, memberUser := coderdtest.CreateAnotherUser(t, adminClient, adminUser.OrganizationID) + numLogs++ // add an audit log for user creation + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + newUsername := coderdtest.RandomUsername(t) + userProfile, err := adminClient.UpdateUserProfile(ctx, codersdk.Me, codersdk.UpdateUserProfileRequest{ + Name: memberUser.Name, + Username: newUsername, + }) + + numLogs++ // add an audit log for user update + numLogs++ // add an audit log for API key creation + + require.NoError(t, err) + require.Equal(t, newUsername, userProfile.Username) + require.Equal(t, memberUser.Name, userProfile.Name) + + require.Len(t, auditor.AuditLogs(), numLogs) + 
require.Equal(t, database.AuditActionWrite, auditor.AuditLogs()[numLogs-1].Action) + }) + t.Run("InvalidRealUserName", func(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) diff --git a/coderd/util/strings/strings.go b/coderd/util/strings/strings.go index 49aad579e83f5..e21908d488cd8 100644 --- a/coderd/util/strings/strings.go +++ b/coderd/util/strings/strings.go @@ -23,15 +23,64 @@ func JoinWithConjunction(s []string) string { ) } -// Truncate returns the first n characters of s. -func Truncate(s string, n int) string { +type TruncateOption int + +func (o TruncateOption) String() string { + switch o { + case TruncateWithEllipsis: + return "TruncateWithEllipsis" + case TruncateWithFullWords: + return "TruncateWithFullWords" + default: + return fmt.Sprintf("TruncateOption(%d)", o) + } +} + +const ( + // TruncateWithEllipsis adds a Unicode ellipsis character to the end of the string. + TruncateWithEllipsis TruncateOption = 1 << 0 + // TruncateWithFullWords ensures that words are not split in the middle. + // As a special case, if there is no word boundary, the string is truncated. + TruncateWithFullWords TruncateOption = 1 << 1 +) + +// Truncate truncates s to n characters. +// Additional behaviors can be specified using TruncateOptions. +func Truncate(s string, n int, opts ...TruncateOption) string { + var options TruncateOption + for _, opt := range opts { + options |= opt + } if n < 1 { return "" } if len(s) <= n { return s } - return s[:n] + + maxLen := n + if options&TruncateWithEllipsis != 0 { + maxLen-- + } + var sb strings.Builder + // If we need to truncate to full words, find the last word boundary before n. + if options&TruncateWithFullWords != 0 { + lastWordBoundary := strings.LastIndexFunc(s[:maxLen], unicode.IsSpace) + if lastWordBoundary < 0 { + // We cannot find a word boundary. At this point, we'll truncate the string. + // It's better than nothing. 
+ _, _ = sb.WriteString(s[:maxLen]) + } else { // lastWordBoundary <= maxLen + _, _ = sb.WriteString(s[:lastWordBoundary]) + } + } else { + _, _ = sb.WriteString(s[:maxLen]) + } + + if options&TruncateWithEllipsis != 0 { + _, _ = sb.WriteString("…") + } + return sb.String() } var bmPolicy = bluemonday.StrictPolicy() diff --git a/coderd/util/strings/strings_test.go b/coderd/util/strings/strings_test.go index 7a20a06a25f28..000fa9efa11e5 100644 --- a/coderd/util/strings/strings_test.go +++ b/coderd/util/strings/strings_test.go @@ -1,6 +1,7 @@ package strings_test import ( + "fmt" "testing" "github.com/stretchr/testify/assert" @@ -23,17 +24,47 @@ func TestTruncate(t *testing.T) { s string n int expected string + options []strings.TruncateOption }{ - {"foo", 4, "foo"}, - {"foo", 3, "foo"}, - {"foo", 2, "fo"}, - {"foo", 1, "f"}, - {"foo", 0, ""}, - {"foo", -1, ""}, + {"foo", 4, "foo", nil}, + {"foo", 3, "foo", nil}, + {"foo", 2, "fo", nil}, + {"foo", 1, "f", nil}, + {"foo", 0, "", nil}, + {"foo", -1, "", nil}, + {"foo bar", 7, "foo bar", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 6, "foo b…", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 5, "foo …", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 4, "foo…", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 3, "fo…", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 2, "f…", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 1, "…", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 0, "", []strings.TruncateOption{strings.TruncateWithEllipsis}}, + {"foo bar", 7, "foo bar", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 6, "foo", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 5, "foo", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 4, "foo", 
[]strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 3, "foo", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 2, "fo", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 1, "f", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 0, "", []strings.TruncateOption{strings.TruncateWithFullWords}}, + {"foo bar", 7, "foo bar", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 6, "foo…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 5, "foo…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 4, "foo…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 3, "fo…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 2, "f…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 1, "…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"foo bar", 0, "", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, + {"This is a very long task prompt that should be truncated to 160 characters. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", 160, "This is a very long task prompt that should be truncated to 160 characters. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Sed do eiusmod tempor…", []strings.TruncateOption{strings.TruncateWithFullWords, strings.TruncateWithEllipsis}}, } { - t.Run(tt.expected, func(t *testing.T) { + tName := fmt.Sprintf("%s_%d", tt.s, tt.n) + for _, opt := range tt.options { + tName += fmt.Sprintf("_%v", opt) + } + t.Run(tName, func(t *testing.T) { t.Parallel() - actual := strings.Truncate(tt.s, tt.n) + actual := strings.Truncate(tt.s, tt.n, tt.options...) require.Equal(t, tt.expected, actual) }) } diff --git a/coderd/util/tz/tz_test.go b/coderd/util/tz/tz_test.go index a0e7971bd7492..57d2d660ec34a 100644 --- a/coderd/util/tz/tz_test.go +++ b/coderd/util/tz/tz_test.go @@ -35,12 +35,9 @@ func Test_TimezoneIANA(t *testing.T) { // This test can be flaky on some Windows runners :( t.Skip("This test is flaky under Windows.") } - oldEnv, found := os.LookupEnv("TZ") + _, found := os.LookupEnv("TZ") if found { require.NoError(t, os.Unsetenv("TZ")) - t.Cleanup(func() { - _ = os.Setenv("TZ", oldEnv) - }) } zone, err := tz.TimezoneIANA() diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index eddd6510b634b..23046dab28e15 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -452,6 +452,10 @@ func (api *API) enqueueAITaskStateNotification( notificationTemplate = notifications.TemplateTaskWorking case codersdk.WorkspaceAppStatusStateIdle: notificationTemplate = notifications.TemplateTaskIdle + case codersdk.WorkspaceAppStatusStateComplete: + notificationTemplate = notifications.TemplateTaskCompleted + case codersdk.WorkspaceAppStatusStateFailure: + notificationTemplate = notifications.TemplateTaskFailed default: // Not a notifiable state, do nothing return @@ -471,6 +475,13 @@ func (api *API) enqueueAITaskStateNotification( return } + // Skip the initial "Working" notification when task first starts. + // This is obvious to the user since they just created the task. + // We still notify on first "Idle" status and all subsequent transitions. 
+ if len(latestAppStatus) == 0 && newAppStatus == codersdk.WorkspaceAppStatusStateWorking { + return + } + // Use the task prompt as the "task" label, fallback to workspace name parameters, err := api.Database.GetWorkspaceBuildParameters(ctx, workspaceBuild.ID) if err != nil { @@ -484,6 +495,11 @@ func (api *API) enqueueAITaskStateNotification( } } + // As task prompt may be particularly long, truncate it to 160 characters for notifications. + if len(taskName) > 160 { + taskName = strutil.Truncate(taskName, 160, strutil.TruncateWithEllipsis, strutil.TruncateWithFullWords) + } + if _, err := api.NotificationsEnqueuer.EnqueueWithData( // nolint:gocritic // Need notifier actor to enqueue notifications dbauthz.AsNotifier(ctx), diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 9e26a28c71370..4c24cc9325e46 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -324,11 +324,19 @@ func (p *DBTokenProvider) authorizeRequest(ctx context.Context, roles *rbac.Subj // rbacResourceOwned is for the level "authenticated". We still need to // make sure the API key has permissions to connect to the actor's own // workspace. Scopes would prevent this. - rbacResourceOwned rbac.Object = rbac.ResourceWorkspace.WithOwner(roles.ID) + // TODO: This is an odd repercussion of the org_member permission level. + // This Object used to not specify an org restriction, and `InOrg` would + // actually have a significantly different meaning (only sharing with + // other authenticated users in the same org, whereas the existing behavior + // is to share with any authenticated user). Because workspaces are always + // jointly owned by an organization, there _must_ be an org restriction on + // the object to check the proper permissions. AnyOrg is almost the same, + // but technically excludes users who are not in any organization. This is + // the closest we can get though without more significant refactoring. 
+ rbacResourceOwned rbac.Object = rbac.ResourceWorkspace.WithOwner(roles.ID).AnyOrganization() ) if dbReq.AccessMethod == AccessMethodTerminal { rbacAction = policy.ActionSSH - rbacResourceOwned = rbac.ResourceWorkspace.WithOwner(roles.ID) } // Do a standard RBAC check. This accounts for share level "owner" and any @@ -355,21 +363,20 @@ func (p *DBTokenProvider) authorizeRequest(ctx context.Context, roles *rbac.Subj return true, []string{}, nil } case database.AppSharingLevelOrganization: - // Check if the user is a member of the same organization as the workspace // First check if they have permission to connect to their own workspace (enforces scopes) err := p.Authorizer.Authorize(ctx, *roles, rbacAction, rbacResourceOwned) if err != nil { return false, warnings, nil } - // Check if the user is a member of the workspace's organization + // Check if the user is a member of the same organization as the workspace workspaceOrgID := dbReq.Workspace.OrganizationID expandedRoles, err := roles.Roles.Expand() if err != nil { return false, warnings, xerrors.Errorf("expand roles: %w", err) } for _, role := range expandedRoles { - if _, ok := role.Org[workspaceOrgID.String()]; ok { + if _, ok := role.ByOrgID[workspaceOrgID.String()]; ok { return true, []string{}, nil } } diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go index b6409d8ed781d..d064a0ef3f574 100644 --- a/coderd/workspacebuilds.go +++ b/coderd/workspacebuilds.go @@ -335,6 +335,15 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) { return } + // We want to allow a delete build for a deleted workspace, but not a start or stop build. 
+ if workspace.Deleted && createBuild.Transition != codersdk.WorkspaceTransitionDelete { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: fmt.Sprintf("Cannot %s a deleted workspace!", createBuild.Transition), + Detail: "This workspace has been deleted and cannot be modified.", + }) + return + } + apiBuild, err := api.postWorkspaceBuildsInternal( ctx, apiKey, @@ -1181,9 +1190,9 @@ func (api *API) convertWorkspaceBuild( if build.HasAITask.Valid { hasAITask = &build.HasAITask.Bool } - var aiTasksSidebarAppID *uuid.UUID + var taskAppID *uuid.UUID if build.AITaskSidebarAppID.Valid { - aiTasksSidebarAppID = &build.AITaskSidebarAppID.UUID + taskAppID = &build.AITaskSidebarAppID.UUID } var hasExternalAgent *bool @@ -1218,7 +1227,7 @@ func (api *API) convertWorkspaceBuild( MatchedProvisioners: &matchedProvisioners, TemplateVersionPresetID: presetID, HasAITask: hasAITask, - AITaskSidebarAppID: aiTasksSidebarAppID, + AITaskSidebarAppID: taskAppID, HasExternalAgent: hasExternalAgent, }, nil } diff --git a/coderd/workspacebuilds_test.go b/coderd/workspacebuilds_test.go index 2c518a95e53a6..d0ab64b1aeb32 100644 --- a/coderd/workspacebuilds_test.go +++ b/coderd/workspacebuilds_test.go @@ -634,9 +634,7 @@ func TestPatchCancelWorkspaceBuild(t *testing.T) { t.Run("Cancel with expect_state=pending", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } + // Given: a coderd instance with a provisioner daemon store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -732,9 +730,7 @@ func TestPatchCancelWorkspaceBuild(t *testing.T) { t.Run("Cancel with expect_state=running when job is pending - should fail with 412", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } + // Given: a coderd instance with a provisioner daemon store, ps, db := 
dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -1731,9 +1727,7 @@ func TestPostWorkspaceBuild(t *testing.T) { t.Run("NoProvisionersAvailable", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } + // Given: a coderd instance with a provisioner daemon store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -1777,9 +1771,7 @@ func TestPostWorkspaceBuild(t *testing.T) { t.Run("AllProvisionersStale", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } + // Given: a coderd instance with a provisioner daemon store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -1848,6 +1840,68 @@ func TestPostWorkspaceBuild(t *testing.T) { require.NoError(t, err) require.Equal(t, codersdk.BuildReasonDashboard, build.Reason) }) + t.Run("DeletedWorkspace", func(t *testing.T) { + t.Parallel() + + // Given: a workspace that has already been deleted + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = slogtest.Make(t, &slogtest.Options{}).Leveled(slog.LevelError) + adminClient, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + Logger: &logger, + }) + admin = coderdtest.CreateFirstUser(t, adminClient) + workspaceOwnerClient, member1 = coderdtest.CreateAnotherUser(t, adminClient, admin.OrganizationID) + otherMemberClient, _ = coderdtest.CreateAnotherUser(t, adminClient, admin.OrganizationID) + ws = dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{OwnerID: member1.ID, OrganizationID: admin.OrganizationID}). + Seed(database.WorkspaceBuild{Transition: database.WorkspaceTransitionDelete}). 
+ Do() + ) + + // This needs to be done separately as provisionerd handles marking the workspace as deleted + // and we're skipping provisionerd here for speed. + require.NoError(t, db.UpdateWorkspaceDeletedByID(dbauthz.AsProvisionerd(ctx), database.UpdateWorkspaceDeletedByIDParams{ + ID: ws.Workspace.ID, + Deleted: true, + })) + + // Assert test invariant: Workspace should be deleted + dbWs, err := db.GetWorkspaceByID(dbauthz.AsProvisionerd(ctx), ws.Workspace.ID) + require.NoError(t, err) + require.True(t, dbWs.Deleted, "workspace should be deleted") + + for _, tc := range []struct { + user *codersdk.Client + tr codersdk.WorkspaceTransition + expectStatus int + }{ + // You should not be allowed to mess with a workspace you don't own, regardless of its deleted state. + {otherMemberClient, codersdk.WorkspaceTransitionStart, http.StatusNotFound}, + {otherMemberClient, codersdk.WorkspaceTransitionStop, http.StatusNotFound}, + {otherMemberClient, codersdk.WorkspaceTransitionDelete, http.StatusNotFound}, + // Starting or stopping a workspace is not allowed when it is deleted. + {workspaceOwnerClient, codersdk.WorkspaceTransitionStart, http.StatusConflict}, + {workspaceOwnerClient, codersdk.WorkspaceTransitionStop, http.StatusConflict}, + // We allow a delete just in case a retry is required. In most cases, this will be a no-op. + // Note: this is the last test case because it will change the state of the workspace. + {workspaceOwnerClient, codersdk.WorkspaceTransitionDelete, http.StatusOK}, + } { + // When: we create a workspace build with the given transition + _, err = tc.user.CreateWorkspaceBuild(ctx, ws.Workspace.ID, codersdk.CreateWorkspaceBuildRequest{ + Transition: tc.tr, + }) + + // Then: we allow ONLY a delete build for a deleted workspace. 
+ if tc.expectStatus < http.StatusBadRequest { + require.NoError(t, err, "creating a %s build for a deleted workspace should not error", tc.tr) + } else { + var apiError *codersdk.Error + require.Error(t, err, "creating a %s build for a deleted workspace should return an error", tc.tr) + require.ErrorAs(t, err, &apiError) + require.Equal(t, tc.expectStatus, apiError.StatusCode()) + } + } + }) } func TestWorkspaceBuildTimings(t *testing.T) { diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 8f2317fc96375..3519442c3e6bf 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -388,7 +388,7 @@ func (api *API) postWorkspacesByOrganization(rw http.ResponseWriter, r *http.Req AvatarURL: member.AvatarURL, } - w, err := createWorkspace(ctx, aReq, apiKey.UserID, api, owner, req, r) + w, err := createWorkspace(ctx, aReq, apiKey.UserID, api, owner, req, r, nil) if err != nil { httperror.WriteResponseError(ctx, rw, err) return @@ -484,7 +484,7 @@ func (api *API) postUserWorkspaces(rw http.ResponseWriter, r *http.Request) { defer commitAudit() - w, err := createWorkspace(ctx, aReq, apiKey.UserID, api, owner, req, r) + w, err := createWorkspace(ctx, aReq, apiKey.UserID, api, owner, req, r, nil) if err != nil { httperror.WriteResponseError(ctx, rw, err) return @@ -499,6 +499,15 @@ type workspaceOwner struct { AvatarURL string } +type createWorkspaceOptions struct { + // preCreateInTX is a function that is called within the transaction, before + // the workspace is created. + preCreateInTX func(ctx context.Context, tx database.Store) error + // postCreateInTX is a function that is called within the transaction, after + // the workspace is created but before the workspace build is created. 
+ postCreateInTX func(ctx context.Context, tx database.Store, workspace database.Workspace) error +} + func createWorkspace( ctx context.Context, auditReq *audit.Request[database.WorkspaceTable], @@ -507,7 +516,12 @@ func createWorkspace( owner workspaceOwner, req codersdk.CreateWorkspaceRequest, r *http.Request, + opts *createWorkspaceOptions, ) (codersdk.Workspace, error) { + if opts == nil { + opts = &createWorkspaceOptions{} + } + template, err := requestTemplate(ctx, req, api.Database) if err != nil { return codersdk.Workspace{}, err @@ -636,6 +650,16 @@ func createWorkspace( claimedWorkspace *database.Workspace ) + // If a preCreate hook is provided, execute it before creating or + // claiming the workspace. This can be used to perform additional + // setup or validation before the workspace is created (e.g. task + // creation). + if opts.preCreateInTX != nil { + if err := opts.preCreateInTX(ctx, db); err != nil { + return xerrors.Errorf("workspace preCreate failed: %w", err) + } + } + // Use injected Clock to allow time mocking in tests now := dbtime.Time(api.Clock.Now()) @@ -719,7 +743,6 @@ func createWorkspace( } else { // Prebuild found! workspaceID = claimedWorkspace.ID - initiatorID = prebuildsClaimer.Initiator() } // We have to refetch the workspace for the joined in fields. @@ -730,6 +753,15 @@ func createWorkspace( return xerrors.Errorf("get workspace by ID: %w", err) } + // If the postCreate hook is provided, execute it. This can be used to + // perform additional actions after the workspace has been created, like + // linking the workspace to a task. + if opts.postCreateInTX != nil { + if err := opts.postCreateInTX(ctx, db, workspace); err != nil { + return xerrors.Errorf("workspace postCreate failed: %w", err) + } + } + builder := wsbuilder.New(workspace, database.WorkspaceTransitionStart, *api.BuildUsageChecker.Load()). Reason(database.BuildReasonInitiator). Initiator(initiatorID). 
@@ -2622,6 +2654,7 @@ func convertWorkspace( Favorite: requesterFavorite, NextStartAt: nextStartAt, IsPrebuild: workspace.IsPrebuild(), + TaskID: workspace.TaskID, }, nil } diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index 3a24eafde1a95..51134dce27951 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -1240,9 +1240,7 @@ func TestPostWorkspacesByOrganization(t *testing.T) { t.Run("NoProvisionersAvailable", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } + // Given: a coderd instance with a provisioner daemon store, ps, db := dbtestutil.NewDBWithSQLDB(t) client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{ @@ -1283,9 +1281,6 @@ func TestPostWorkspacesByOrganization(t *testing.T) { t.Run("AllProvisionersStale", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } // Given: a coderd instance with a provisioner daemon store, ps, db := dbtestutil.NewDBWithSQLDB(t) diff --git a/coderd/workspacestats/tracker_test.go b/coderd/workspacestats/tracker_test.go index 2803e5a5322b3..fde8c9f2dad90 100644 --- a/coderd/workspacestats/tracker_test.go +++ b/coderd/workspacestats/tracker_test.go @@ -110,9 +110,6 @@ func TestTracker(t *testing.T) { // This test performs a more 'integration-style' test with multiple instances. 
func TestTracker_MultipleInstances(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test only makes sense with postgres") - } // Given we have two coderd instances connected to the same database var ( diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 223b8bec084ad..6aef8c2c2aa17 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -6,6 +6,7 @@ import ( "context" "database/sql" "encoding/json" + "errors" "fmt" "net/http" "time" @@ -488,6 +489,21 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object return BuildError{code, "insert workspace build", err} } + // If this is a task workspace, link it to the latest workspace build. + if task, err := store.GetTaskByWorkspaceID(b.ctx, b.workspace.ID); err == nil { + _, err = store.UpsertTaskWorkspaceApp(b.ctx, database.UpsertTaskWorkspaceAppParams{ + TaskID: task.ID, + WorkspaceBuildNumber: buildNum, + WorkspaceAgentID: uuid.NullUUID{}, // Updated by the provisioner upon job completion. + WorkspaceAppID: uuid.NullUUID{}, // Updated by the provisioner upon job completion. 
+ }) + if err != nil { + return BuildError{http.StatusInternalServerError, "upsert task workspace app", err} + } + } else if !errors.Is(err, sql.ErrNoRows) { + return BuildError{http.StatusInternalServerError, "get task by workspace id", err} + } + err = store.InsertWorkspaceBuildParameters(b.ctx, database.InsertWorkspaceBuildParametersParams{ WorkspaceBuildID: workspaceBuildID, Name: names, diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index b862e6459c285..3a8921dd6dcd9 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -47,6 +47,7 @@ var ( lastBuildJobID = uuid.MustParse("12341234-0000-0000-000c-000000000000") otherUserID = uuid.MustParse("12341234-0000-0000-000d-000000000000") presetID = uuid.MustParse("12341234-0000-0000-000e-000000000000") + taskID = uuid.MustParse("12341234-0000-0000-000f-000000000000") ) func TestBuilder_NoOptions(t *testing.T) { @@ -94,6 +95,7 @@ func TestBuilder_NoOptions(t *testing.T) { asrt.Equal(buildID, bld.ID) }), withBuild, + withNoTask, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { asrt.Equal(buildID, params.WorkspaceBuildID) asrt.Empty(params.Name) @@ -140,6 +142,7 @@ func TestBuilder_Initiator(t *testing.T) { expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -188,6 +191,7 @@ func TestBuilder_Baggage(t *testing.T) { expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -229,6 +233,7 @@ func TestBuilder_Reason(t *testing.T) { expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -275,6 +280,7 @@ func 
TestBuilder_ActiveVersion(t *testing.T) { expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -391,6 +397,7 @@ func TestWorkspaceBuildWithTags(t *testing.T) { expectBuildParameters(func(_ database.InsertWorkspaceBuildParametersParams) { }), withBuild, + withNoTask, expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -476,6 +483,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + withNoTask, expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -526,6 +534,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + withNoTask, expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -669,6 +678,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + withNoTask, expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -735,6 +745,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -798,6 +809,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + withNoTask, ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -860,6 +872,7 @@ func TestWorkspaceBuildWithPreset(t *testing.T) { asrt.Equal(presetID, bld.TemplateVersionPresetID.UUID) }), withBuild, + withNoTask, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { asrt.Equal(buildID, params.WorkspaceBuildID) asrt.Empty(params.Name) @@ -929,6 +942,7 @@ func TestWorkspaceBuildDeleteOrphan(t *testing.T) { 
asrt.Equal(buildID, bld.ID) }), withBuild, + withNoTask, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { asrt.Equal(buildID, params.WorkspaceBuildID) asrt.Empty(params.Name) @@ -992,6 +1006,7 @@ func TestWorkspaceBuildDeleteOrphan(t *testing.T) { asrt.Equal(buildID, bld.ID) }), withBuild, + withNoTask, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { asrt.Equal(buildID, params.WorkspaceBuildID) asrt.Empty(params.Name) @@ -1057,6 +1072,7 @@ func TestWorkspaceBuildUsageChecker(t *testing.T) { expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), withBuild, + withNoTask, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) {}), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -1133,6 +1149,59 @@ func TestWorkspaceBuildUsageChecker(t *testing.T) { } } +func TestWorkspaceBuildWithTask(t *testing.T) { + t.Parallel() + req := require.New(t) + asrt := assert.New(t) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + testTask := database.Task{ + ID: taskID, + OrganizationID: orgID, + OwnerID: userID, + Name: "test-task", + WorkspaceID: uuid.NullUUID{UUID: workspaceID, Valid: true}, + TemplateVersionID: activeVersionID, + CreatedAt: dbtime.Now(), + } + + mDB := expectDB(t, + // Inputs + withTemplate, + withInactiveVersion(nil), + withLastBuildFound, + withTemplateVersionVariables(inactiveVersionID, nil), + withRichParameters(nil), + withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), + withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}), + + // Outputs + expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), + withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), + expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), + withBuild, + withTask(testTask), + 
expectUpsertTaskWorkspaceApp(func(params database.UpsertTaskWorkspaceAppParams) { + asrt.Equal(taskID, params.TaskID) + asrt.Equal(int32(2), params.WorkspaceBuildNumber) + asrt.False(params.WorkspaceAgentID.Valid, "workspace_agent_id should be NULL initially") + asrt.False(params.WorkspaceAppID.Valid, "workspace_app_id should be NULL initially") + }), + expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) {}), + ) + fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) + + ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID} + uut := wsbuilder.New(ws, database.WorkspaceTransitionStart, wsbuilder.NoopUsageChecker{}) + // nolint: dogsled + _, _, _, err := uut.Build(ctx, mDB, fc, nil, audit.WorkspaceBuildBaggage{}) + req.NoError(err) +} + func TestWsbuildError(t *testing.T) { t.Parallel() @@ -1514,3 +1583,39 @@ type fakeUsageChecker struct { func (f *fakeUsageChecker) CheckBuildUsage(ctx context.Context, store database.Store, templateVersion *database.TemplateVersion) (wsbuilder.UsageCheckResponse, error) { return f.checkBuildUsageFunc(ctx, store, templateVersion) } + +func withNoTask(mTx *dbmock.MockStore) { + mTx.EXPECT().GetTaskByWorkspaceID(gomock.Any(), gomock.Any()).Times(1). + DoAndReturn(func(ctx context.Context, id uuid.UUID) (database.Task, error) { + return database.Task{}, sql.ErrNoRows + }) +} + +func withTask(task database.Task) func(mTx *dbmock.MockStore) { + return func(mTx *dbmock.MockStore) { + mTx.EXPECT().GetTaskByWorkspaceID(gomock.Any(), gomock.Any()).Times(1). + DoAndReturn(func(ctx context.Context, id uuid.UUID) (database.Task, error) { + return task, nil + }) + } +} + +func expectUpsertTaskWorkspaceApp( + assertions func(database.UpsertTaskWorkspaceAppParams), +) func(mTx *dbmock.MockStore) { + return func(mTx *dbmock.MockStore) { + mTx.EXPECT().UpsertTaskWorkspaceApp(gomock.Any(), gomock.Any()). + Times(1). 
+ DoAndReturn( + func(ctx context.Context, params database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) { + assertions(params) + return database.TaskWorkspaceApp{ + TaskID: params.TaskID, + WorkspaceBuildNumber: params.WorkspaceBuildNumber, + WorkspaceAgentID: params.WorkspaceAgentID, + WorkspaceAppID: params.WorkspaceAppID, + }, nil + }, + ) + } +} diff --git a/codersdk/agentsdk/agentsdk.go b/codersdk/agentsdk/agentsdk.go index a6c8e5b1ea620..b668ab4a36569 100644 --- a/codersdk/agentsdk/agentsdk.go +++ b/codersdk/agentsdk/agentsdk.go @@ -391,7 +391,7 @@ func (i *InstanceIdentitySessionTokenProvider) GetSessionToken() string { defer cancel() resp, err := i.TokenExchanger.exchange(ctx) if err != nil { - i.logger.Error(ctx, "failed to exchange session token: %v", err) + i.logger.Error(ctx, "failed to exchange session token", slog.Error(err)) return "" } i.sessionToken = resp.SessionToken diff --git a/codersdk/agentsdk/agentsdk_test.go b/codersdk/agentsdk/agentsdk_test.go index 4f3d7d838b524..b6646662a4536 100644 --- a/codersdk/agentsdk/agentsdk_test.go +++ b/codersdk/agentsdk/agentsdk_test.go @@ -42,7 +42,8 @@ func TestStreamAgentReinitEvents(t *testing.T) { requestCtx := testutil.Context(t, testutil.WaitShort) req, err := http.NewRequestWithContext(requestCtx, "GET", srv.URL, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) require.NoError(t, err) defer resp.Body.Close() @@ -77,7 +78,8 @@ func TestStreamAgentReinitEvents(t *testing.T) { requestCtx := testutil.Context(t, testutil.WaitShort) req, err := http.NewRequestWithContext(requestCtx, "GET", srv.URL, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) require.NoError(t, err) defer resp.Body.Close() @@ -110,7 +112,8 @@ func TestStreamAgentReinitEvents(t *testing.T) { requestCtx := testutil.Context(t, testutil.WaitShort) req, err := 
http.NewRequestWithContext(requestCtx, "GET", srv.URL, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + client := &http.Client{} + resp, err := client.Do(req) require.NoError(t, err) defer resp.Body.Close() diff --git a/codersdk/aibridge.go b/codersdk/aibridge.go index 3101dab383ad1..a322187bb23c3 100644 --- a/codersdk/aibridge.go +++ b/codersdk/aibridge.go @@ -13,11 +13,12 @@ import ( type AIBridgeInterception struct { ID uuid.UUID `json:"id" format:"uuid"` - InitiatorID uuid.UUID `json:"initiator_id" format:"uuid"` + Initiator MinimalUser `json:"initiator"` Provider string `json:"provider"` Model string `json:"model"` Metadata map[string]any `json:"metadata"` StartedAt time.Time `json:"started_at" format:"date-time"` + EndedAt *time.Time `json:"ended_at" format:"date-time"` TokenUsages []AIBridgeTokenUsage `json:"token_usages"` UserPrompts []AIBridgeUserPrompt `json:"user_prompts"` ToolUsages []AIBridgeToolUsage `json:"tool_usages"` @@ -56,6 +57,7 @@ type AIBridgeToolUsage struct { } type AIBridgeListInterceptionsResponse struct { + Count int64 `json:"count"` Results []AIBridgeInterception `json:"results"` } @@ -111,8 +113,8 @@ func (f AIBridgeListInterceptionsFilter) asRequestOption() RequestOption { // AIBridgeListInterceptions returns AIBridge interceptions with the given // filter. 
-func (c *ExperimentalClient) AIBridgeListInterceptions(ctx context.Context, filter AIBridgeListInterceptionsFilter) (AIBridgeListInterceptionsResponse, error) { - res, err := c.Request(ctx, http.MethodGet, "/api/experimental/aibridge/interceptions", nil, filter.asRequestOption(), filter.Pagination.asRequestOption(), filter.Pagination.asRequestOption()) +func (c *Client) AIBridgeListInterceptions(ctx context.Context, filter AIBridgeListInterceptionsFilter) (AIBridgeListInterceptionsResponse, error) { + res, err := c.Request(ctx, http.MethodGet, "/api/v2/aibridge/interceptions", nil, filter.asRequestOption(), filter.Pagination.asRequestOption(), filter.Pagination.asRequestOption()) if err != nil { return AIBridgeListInterceptionsResponse{}, err } diff --git a/codersdk/aitasks.go b/codersdk/aitasks.go index 21efb15b5313e..9f390202e4fd2 100644 --- a/codersdk/aitasks.go +++ b/codersdk/aitasks.go @@ -89,6 +89,44 @@ func (c *ExperimentalClient) CreateTask(ctx context.Context, user string, reques return task, nil } +// TaskStatus represents the status of a task. +// +// Experimental: This type is experimental and may change in the future. +type TaskStatus string + +const ( + // TaskStatusPending indicates the task has been created but no workspace + // has been provisioned yet, or the workspace build job status is unknown. + TaskStatusPending TaskStatus = "pending" + // TaskStatusInitializing indicates the workspace build is pending/running, + // the agent is connecting, or apps are initializing. + TaskStatusInitializing TaskStatus = "initializing" + // TaskStatusActive indicates the task's workspace is running with a + // successful start transition, the agent is connected, and all workspace + // apps are either healthy or disabled. + TaskStatusActive TaskStatus = "active" + // TaskStatusPaused indicates the task's workspace has been stopped or + // deleted (stop/delete transition with successful job status). 
+ TaskStatusPaused TaskStatus = "paused" + // TaskStatusUnknown indicates the task's status cannot be determined + // based on the workspace build, agent lifecycle, or app health states. + TaskStatusUnknown TaskStatus = "unknown" + // TaskStatusError indicates the task's workspace build job has failed, + // or the workspace apps are reporting unhealthy status. + TaskStatusError TaskStatus = "error" +) + +func AllTaskStatuses() []TaskStatus { + return []TaskStatus{ + TaskStatusPending, + TaskStatusInitializing, + TaskStatusActive, + TaskStatusPaused, + TaskStatusError, + TaskStatusUnknown, + } +} + // TaskState represents the high-level lifecycle of a task. // // Experimental: This type is experimental and may change in the future. @@ -96,10 +134,18 @@ type TaskState string // TaskState enums. const ( - TaskStateWorking TaskState = "working" - TaskStateIdle TaskState = "idle" + // TaskStateWorking indicates the AI agent is actively processing work. + // Reported when the agent is performing actions or the screen is changing. + TaskStateWorking TaskState = "working" + // TaskStateIdle indicates the AI agent's screen is stable and no work + // is being performed. Reported automatically by the screen watcher. + TaskStateIdle TaskState = "idle" + // TaskStateComplete indicates the AI agent has successfully completed + // the task. Reported via the workspace app status. TaskStateComplete TaskState = "complete" - TaskStateFailed TaskState = "failed" + // TaskStateFailed indicates the AI agent reported a failure state. + // Reported via the workspace app status. + TaskStateFailed TaskState = "failed" ) // Task represents a task. 
@@ -110,18 +156,24 @@ type Task struct { OrganizationID uuid.UUID `json:"organization_id" format:"uuid" table:"organization id"` OwnerID uuid.UUID `json:"owner_id" format:"uuid" table:"owner id"` OwnerName string `json:"owner_name" table:"owner name"` + OwnerAvatarURL string `json:"owner_avatar_url,omitempty" table:"owner avatar url"` Name string `json:"name" table:"name,default_sort"` TemplateID uuid.UUID `json:"template_id" format:"uuid" table:"template id"` + TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid" table:"template version id"` TemplateName string `json:"template_name" table:"template name"` TemplateDisplayName string `json:"template_display_name" table:"template display name"` TemplateIcon string `json:"template_icon" table:"template icon"` WorkspaceID uuid.NullUUID `json:"workspace_id" format:"uuid" table:"workspace id"` + WorkspaceName string `json:"workspace_name" table:"workspace name"` + WorkspaceStatus WorkspaceStatus `json:"workspace_status,omitempty" enums:"pending,starting,running,stopping,stopped,failed,canceling,canceled,deleting,deleted" table:"workspace status"` + WorkspaceBuildNumber int32 `json:"workspace_build_number,omitempty" table:"workspace build number"` WorkspaceAgentID uuid.NullUUID `json:"workspace_agent_id" format:"uuid" table:"workspace agent id"` WorkspaceAgentLifecycle *WorkspaceAgentLifecycle `json:"workspace_agent_lifecycle" table:"workspace agent lifecycle"` WorkspaceAgentHealth *WorkspaceAgentHealth `json:"workspace_agent_health" table:"workspace agent health"` + WorkspaceAppID uuid.NullUUID `json:"workspace_app_id" format:"uuid" table:"workspace app id"` InitialPrompt string `json:"initial_prompt" table:"initial prompt"` - Status WorkspaceStatus `json:"status" enums:"pending,starting,running,stopping,stopped,failed,canceling,canceled,deleting,deleted" table:"status"` - CurrentState *TaskStateEntry `json:"current_state" table:"cs,recursive_inline"` + Status TaskStatus `json:"status" 
enums:"pending,initializing,active,paused,unknown,error" table:"status"` + CurrentState *TaskStateEntry `json:"current_state" table:"cs,recursive_inline,empty_nil"` CreatedAt time.Time `json:"created_at" format:"date-time" table:"created at"` UpdatedAt time.Time `json:"updated_at" format:"date-time" table:"updated at"` } @@ -142,12 +194,45 @@ type TaskStateEntry struct { type TasksFilter struct { // Owner can be a username, UUID, or "me". Owner string `json:"owner,omitempty"` - // Status is a task status. - Status string `json:"status,omitempty" typescript:"-"` - // Offset is the number of tasks to skip before returning results. - Offset int `json:"offset,omitempty" typescript:"-"` - // Limit is a limit on the number of tasks returned. - Limit int `json:"limit,omitempty" typescript:"-"` + // Organization can be an organization name or UUID. + Organization string `json:"organization,omitempty"` + // Status filters the tasks by their task status. + Status TaskStatus `json:"status,omitempty"` + // FilterQuery allows specifying a raw filter query. + FilterQuery string `json:"filter_query,omitempty"` +} + +// TaskListResponse is the response shape for tasks list. +// +// Experimental response shape for tasks list (server returns []Task). +type TasksListResponse struct { + Tasks []Task `json:"tasks"` + Count int `json:"count"` +} + +func (f TasksFilter) asRequestOption() RequestOption { + return func(r *http.Request) { + var params []string + // Make sure all user input is quoted to ensure it's parsed as a single + // string. + if f.Owner != "" { + params = append(params, fmt.Sprintf("owner:%q", f.Owner)) + } + if f.Organization != "" { + params = append(params, fmt.Sprintf("organization:%q", f.Organization)) + } + if f.Status != "" { + params = append(params, fmt.Sprintf("status:%q", string(f.Status))) + } + if f.FilterQuery != "" { + // If custom stuff is added, just add it on here. 
+ params = append(params, f.FilterQuery) + } + + q := r.URL.Query() + q.Set("q", strings.Join(params, " ")) + r.URL.RawQuery = q.Encode() + } } // Tasks lists all tasks belonging to the user or specified owner. @@ -158,15 +243,7 @@ func (c *ExperimentalClient) Tasks(ctx context.Context, filter *TasksFilter) ([] filter = &TasksFilter{} } - var wsFilter WorkspaceFilter - wsFilter.Owner = filter.Owner - wsFilter.Status = filter.Status - page := Pagination{ - Offset: filter.Offset, - Limit: filter.Limit, - } - - res, err := c.Request(ctx, http.MethodGet, "/api/experimental/tasks", nil, wsFilter.asRequestOption(), page.asRequestOption()) + res, err := c.Request(ctx, http.MethodGet, "/api/experimental/tasks", nil, filter.asRequestOption()) if err != nil { return nil, err } @@ -175,12 +252,7 @@ func (c *ExperimentalClient) Tasks(ctx context.Context, filter *TasksFilter) ([] return nil, ReadBodyAsError(res) } - // Experimental response shape for tasks list (server returns []Task). - type tasksListResponse struct { - Tasks []Task `json:"tasks"` - Count int `json:"count"` - } - var tres tasksListResponse + var tres TasksListResponse if err := json.NewDecoder(res.Body).Decode(&tres); err != nil { return nil, err } @@ -209,6 +281,72 @@ func (c *ExperimentalClient) TaskByID(ctx context.Context, id uuid.UUID) (Task, return task, nil } +func splitTaskIdentifier(identifier string) (owner string, taskName string, err error) { + parts := strings.Split(identifier, "/") + + switch len(parts) { + case 1: + owner = Me + taskName = parts[0] + case 2: + owner = parts[0] + taskName = parts[1] + default: + return "", "", xerrors.Errorf("invalid task identifier: %q", identifier) + } + return owner, taskName, nil +} + +// TaskByIdentifier fetches and returns a task by an identifier, which may be +// either a UUID, a name (for a task owned by the current user), or a +// "user/task" combination, where user is either a username or UUID. 
+// +// Since there is no TaskByOwnerAndName endpoint yet, this function uses the +// list endpoint with filtering when a name is provided. +func (c *ExperimentalClient) TaskByIdentifier(ctx context.Context, identifier string) (Task, error) { + identifier = strings.TrimSpace(identifier) + + // Try parsing as UUID first. + if taskID, err := uuid.Parse(identifier); err == nil { + return c.TaskByID(ctx, taskID) + } + + // Not a UUID, treat as identifier. + owner, taskName, err := splitTaskIdentifier(identifier) + if err != nil { + return Task{}, err + } + + tasks, err := c.Tasks(ctx, &TasksFilter{ + Owner: owner, + }) + if err != nil { + return Task{}, xerrors.Errorf("list tasks for owner %q: %w", owner, err) + } + + if taskID, err := uuid.Parse(taskName); err == nil { + // Find task by ID. + for _, task := range tasks { + if task.ID == taskID { + return task, nil + } + } + } else { + // Find task by name. + for _, task := range tasks { + if task.Name == taskName { + return task, nil + } + } + } + + // Mimic resource not found from API. + var notFoundErr error = &Error{ + Response: Response{Message: "Resource not found or you do not have access to this resource"}, + } + return Task{}, xerrors.Errorf("task %q not found for owner %q: %w", taskName, owner, notFoundErr) +} + // DeleteTask deletes a task by its ID. // // Experimental: This method is experimental and may change in the future. 
diff --git a/codersdk/aitasks_internal_test.go b/codersdk/aitasks_internal_test.go new file mode 100644 index 0000000000000..b10a8659a64e2 --- /dev/null +++ b/codersdk/aitasks_internal_test.go @@ -0,0 +1,75 @@ +package codersdk + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_splitTaskIdentifier(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + identifier string + expectedOwner string + expectedTask string + expectErr bool + }{ + { + name: "bare task name", + identifier: "mytask", + expectedOwner: Me, + expectedTask: "mytask", + expectErr: false, + }, + { + name: "owner/task format", + identifier: "alice/her-task", + expectedOwner: "alice", + expectedTask: "her-task", + expectErr: false, + }, + { + name: "uuid/task format", + identifier: "550e8400-e29b-41d4-a716-446655440000/task1", + expectedOwner: "550e8400-e29b-41d4-a716-446655440000", + expectedTask: "task1", + expectErr: false, + }, + { + name: "owner/uuid format", + identifier: "alice/3abe1dcf-cd87-4078-8b54-c0e2058ad2e2", + expectedOwner: "alice", + expectedTask: "3abe1dcf-cd87-4078-8b54-c0e2058ad2e2", + expectErr: false, + }, + { + name: "too many slashes", + identifier: "owner/task/extra", + expectErr: true, + }, + { + name: "empty parts acceptable", + identifier: "/task", + expectedOwner: "", + expectedTask: "task", + expectErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + owner, taskName, err := splitTaskIdentifier(tt.identifier) + if tt.expectErr { + require.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, tt.expectedOwner, owner) + assert.Equal(t, tt.expectedTask, taskName) + } + }) + } +} diff --git a/codersdk/allowlist.go b/codersdk/allowlist.go new file mode 100644 index 0000000000000..48f8214537619 --- /dev/null +++ b/codersdk/allowlist.go @@ -0,0 +1,80 @@ +package codersdk + +import ( + "encoding/json" + "strings" + + 
"github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac/policy" +) + +// APIAllowListTarget represents a single allow-list entry using the canonical +// string form ":". The wildcard symbol "*" is treated as a +// permissive match for either side. +type APIAllowListTarget struct { + Type RBACResource `json:"type"` + ID string `json:"id"` +} + +func AllowAllTarget() APIAllowListTarget { + return APIAllowListTarget{Type: ResourceWildcard, ID: policy.WildcardSymbol} +} + +func AllowTypeTarget(r RBACResource) APIAllowListTarget { + return APIAllowListTarget{Type: r, ID: policy.WildcardSymbol} +} + +func AllowResourceTarget(r RBACResource, id uuid.UUID) APIAllowListTarget { + return APIAllowListTarget{Type: r, ID: id.String()} +} + +// String returns the canonical string representation ":" with "*" wildcards. +func (t APIAllowListTarget) String() string { + return string(t.Type) + ":" + t.ID +} + +// MarshalJSON encodes as a JSON string: ":". +func (t APIAllowListTarget) MarshalJSON() ([]byte, error) { + return json.Marshal(t.String()) +} + +// UnmarshalJSON decodes from a JSON string: ":". 
+func (t *APIAllowListTarget) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + parts := strings.SplitN(strings.TrimSpace(s), ":", 2) + if len(parts) != 2 || parts[0] == "" || parts[1] == "" { + return xerrors.Errorf("invalid allow_list entry %q: want <type>:<id>", s) + } + + resource, id := RBACResource(parts[0]), parts[1] + + // Type + if resource != ResourceWildcard { + if _, ok := policy.RBACPermissions[string(resource)]; !ok { + return xerrors.Errorf("unknown resource type %q", resource) + } + } + t.Type = resource + + // ID + if id != policy.WildcardSymbol { + if _, err := uuid.Parse(id); err != nil { + return xerrors.Errorf("invalid %s ID (must be UUID): %q", resource, id) + } + } + t.ID = id + return nil +} + +// Implement encoding.TextMarshaler/Unmarshaler for broader compatibility + +func (t APIAllowListTarget) MarshalText() ([]byte, error) { return []byte(t.String()), nil } + +func (t *APIAllowListTarget) UnmarshalText(b []byte) error { + return t.UnmarshalJSON([]byte("\"" + string(b) + "\"")) +} diff --git a/codersdk/allowlist_test.go b/codersdk/allowlist_test.go new file mode 100644 index 0000000000000..46eec4549ec77 --- /dev/null +++ b/codersdk/allowlist_test.go @@ -0,0 +1,40 @@ +package codersdk_test + +import ( + "encoding/json" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/codersdk" +) + +func TestAPIAllowListTarget_JSONRoundTrip(t *testing.T) { + t.Parallel() + + all := codersdk.AllowAllTarget() + b, err := json.Marshal(all) + require.NoError(t, err) + require.JSONEq(t, `"*:*"`, string(b)) + var rt codersdk.APIAllowListTarget + require.NoError(t, json.Unmarshal(b, &rt)) + require.Equal(t, codersdk.ResourceWildcard, rt.Type) + require.Equal(t, policy.WildcardSymbol, rt.ID) + + ty := codersdk.AllowTypeTarget(codersdk.ResourceWorkspace) + b, err = json.Marshal(ty) + require.NoError(t,
err) + require.JSONEq(t, `"workspace:*"`, string(b)) + require.NoError(t, json.Unmarshal(b, &rt)) + require.Equal(t, codersdk.ResourceWorkspace, rt.Type) + require.Equal(t, policy.WildcardSymbol, rt.ID) + + id := uuid.New() + res := codersdk.AllowResourceTarget(codersdk.ResourceTemplate, id) + b, err = json.Marshal(res) + require.NoError(t, err) + exp := `"template:` + id.String() + `"` + require.JSONEq(t, exp, string(b)) +} diff --git a/codersdk/apikey.go b/codersdk/apikey.go index 82828fcd7d77a..a5b622c73afe4 100644 --- a/codersdk/apikey.go +++ b/codersdk/apikey.go @@ -12,17 +12,18 @@ import ( // APIKey: do not ever return the HashedSecret type APIKey struct { - ID string `json:"id" validate:"required"` - UserID uuid.UUID `json:"user_id" validate:"required" format:"uuid"` - LastUsed time.Time `json:"last_used" validate:"required" format:"date-time"` - ExpiresAt time.Time `json:"expires_at" validate:"required" format:"date-time"` - CreatedAt time.Time `json:"created_at" validate:"required" format:"date-time"` - UpdatedAt time.Time `json:"updated_at" validate:"required" format:"date-time"` - LoginType LoginType `json:"login_type" validate:"required" enums:"password,github,oidc,token"` - Scope APIKeyScope `json:"scope" enums:"all,application_connect"` // Deprecated: use Scopes instead. 
- Scopes []APIKeyScope `json:"scopes"` - TokenName string `json:"token_name" validate:"required"` - LifetimeSeconds int64 `json:"lifetime_seconds" validate:"required"` + ID string `json:"id" validate:"required"` + UserID uuid.UUID `json:"user_id" validate:"required" format:"uuid"` + LastUsed time.Time `json:"last_used" validate:"required" format:"date-time"` + ExpiresAt time.Time `json:"expires_at" validate:"required" format:"date-time"` + CreatedAt time.Time `json:"created_at" validate:"required" format:"date-time"` + UpdatedAt time.Time `json:"updated_at" validate:"required" format:"date-time"` + LoginType LoginType `json:"login_type" validate:"required" enums:"password,github,oidc,token"` + Scope APIKeyScope `json:"scope" enums:"all,application_connect"` // Deprecated: use Scopes instead. + Scopes []APIKeyScope `json:"scopes"` + TokenName string `json:"token_name" validate:"required"` + LifetimeSeconds int64 `json:"lifetime_seconds" validate:"required"` + AllowList []APIAllowListTarget `json:"allow_list"` } // LoginType is the type of login used to create the API key. @@ -44,10 +45,11 @@ const ( type APIKeyScope string type CreateTokenRequest struct { - Lifetime time.Duration `json:"lifetime"` - Scope APIKeyScope `json:"scope,omitempty"` // Deprecated: use Scopes instead. - Scopes []APIKeyScope `json:"scopes,omitempty"` - TokenName string `json:"token_name"` + Lifetime time.Duration `json:"lifetime"` + Scope APIKeyScope `json:"scope,omitempty"` // Deprecated: use Scopes instead. + Scopes []APIKeyScope `json:"scopes,omitempty"` + TokenName string `json:"token_name"` + AllowList []APIAllowListTarget `json:"allow_list,omitempty"` } // GenerateAPIKeyResponse contains an API key for a user. diff --git a/codersdk/apikey_scopes_gen.go b/codersdk/apikey_scopes_gen.go index a399a014b42d5..df7fe96c4585e 100644 --- a/codersdk/apikey_scopes_gen.go +++ b/codersdk/apikey_scopes_gen.go @@ -5,46 +5,201 @@ const ( // Deprecated: use codersdk.APIKeyScopeCoderAll instead. 
APIKeyScopeAll APIKeyScope = "all" // Deprecated: use codersdk.APIKeyScopeCoderApplicationConnect instead. - APIKeyScopeApplicationConnect APIKeyScope = "application_connect" - APIKeyScopeApiKeyAll APIKeyScope = "api_key:*" - APIKeyScopeApiKeyCreate APIKeyScope = "api_key:create" - APIKeyScopeApiKeyDelete APIKeyScope = "api_key:delete" - APIKeyScopeApiKeyRead APIKeyScope = "api_key:read" - APIKeyScopeApiKeyUpdate APIKeyScope = "api_key:update" - APIKeyScopeCoderAll APIKeyScope = "coder:all" - APIKeyScopeCoderApikeysManageSelf APIKeyScope = "coder:apikeys.manage_self" - APIKeyScopeCoderApplicationConnect APIKeyScope = "coder:application_connect" - APIKeyScopeCoderTemplatesAuthor APIKeyScope = "coder:templates.author" - APIKeyScopeCoderTemplatesBuild APIKeyScope = "coder:templates.build" - APIKeyScopeCoderWorkspacesAccess APIKeyScope = "coder:workspaces.access" - APIKeyScopeCoderWorkspacesCreate APIKeyScope = "coder:workspaces.create" - APIKeyScopeCoderWorkspacesDelete APIKeyScope = "coder:workspaces.delete" - APIKeyScopeCoderWorkspacesOperate APIKeyScope = "coder:workspaces.operate" - APIKeyScopeFileAll APIKeyScope = "file:*" - APIKeyScopeFileCreate APIKeyScope = "file:create" - APIKeyScopeFileRead APIKeyScope = "file:read" - APIKeyScopeTemplateAll APIKeyScope = "template:*" - APIKeyScopeTemplateCreate APIKeyScope = "template:create" - APIKeyScopeTemplateDelete APIKeyScope = "template:delete" - APIKeyScopeTemplateRead APIKeyScope = "template:read" - APIKeyScopeTemplateUpdate APIKeyScope = "template:update" - APIKeyScopeTemplateUse APIKeyScope = "template:use" - APIKeyScopeUserReadPersonal APIKeyScope = "user:read_personal" - APIKeyScopeUserUpdatePersonal APIKeyScope = "user:update_personal" - APIKeyScopeUserSecretAll APIKeyScope = "user_secret:*" - APIKeyScopeUserSecretCreate APIKeyScope = "user_secret:create" - APIKeyScopeUserSecretDelete APIKeyScope = "user_secret:delete" - APIKeyScopeUserSecretRead APIKeyScope = "user_secret:read" - APIKeyScopeUserSecretUpdate 
APIKeyScope = "user_secret:update" - APIKeyScopeWorkspaceAll APIKeyScope = "workspace:*" - APIKeyScopeWorkspaceApplicationConnect APIKeyScope = "workspace:application_connect" - APIKeyScopeWorkspaceCreate APIKeyScope = "workspace:create" - APIKeyScopeWorkspaceDelete APIKeyScope = "workspace:delete" - APIKeyScopeWorkspaceRead APIKeyScope = "workspace:read" - APIKeyScopeWorkspaceSsh APIKeyScope = "workspace:ssh" - APIKeyScopeWorkspaceStart APIKeyScope = "workspace:start" - APIKeyScopeWorkspaceStop APIKeyScope = "workspace:stop" - APIKeyScopeWorkspaceUpdate APIKeyScope = "workspace:update" + APIKeyScopeApplicationConnect APIKeyScope = "application_connect" + APIKeyScopeAibridgeInterceptionAll APIKeyScope = "aibridge_interception:*" + APIKeyScopeAibridgeInterceptionCreate APIKeyScope = "aibridge_interception:create" + APIKeyScopeAibridgeInterceptionRead APIKeyScope = "aibridge_interception:read" + APIKeyScopeAibridgeInterceptionUpdate APIKeyScope = "aibridge_interception:update" + APIKeyScopeApiKeyAll APIKeyScope = "api_key:*" + APIKeyScopeApiKeyCreate APIKeyScope = "api_key:create" + APIKeyScopeApiKeyDelete APIKeyScope = "api_key:delete" + APIKeyScopeApiKeyRead APIKeyScope = "api_key:read" + APIKeyScopeApiKeyUpdate APIKeyScope = "api_key:update" + APIKeyScopeAssignOrgRoleAll APIKeyScope = "assign_org_role:*" + APIKeyScopeAssignOrgRoleAssign APIKeyScope = "assign_org_role:assign" + APIKeyScopeAssignOrgRoleCreate APIKeyScope = "assign_org_role:create" + APIKeyScopeAssignOrgRoleDelete APIKeyScope = "assign_org_role:delete" + APIKeyScopeAssignOrgRoleRead APIKeyScope = "assign_org_role:read" + APIKeyScopeAssignOrgRoleUnassign APIKeyScope = "assign_org_role:unassign" + APIKeyScopeAssignOrgRoleUpdate APIKeyScope = "assign_org_role:update" + APIKeyScopeAssignRoleAll APIKeyScope = "assign_role:*" + APIKeyScopeAssignRoleAssign APIKeyScope = "assign_role:assign" + APIKeyScopeAssignRoleRead APIKeyScope = "assign_role:read" + APIKeyScopeAssignRoleUnassign APIKeyScope = 
"assign_role:unassign" + APIKeyScopeAuditLogAll APIKeyScope = "audit_log:*" + APIKeyScopeAuditLogCreate APIKeyScope = "audit_log:create" + APIKeyScopeAuditLogRead APIKeyScope = "audit_log:read" + APIKeyScopeCoderAll APIKeyScope = "coder:all" + APIKeyScopeCoderApikeysManageSelf APIKeyScope = "coder:apikeys.manage_self" + APIKeyScopeCoderApplicationConnect APIKeyScope = "coder:application_connect" + APIKeyScopeCoderTemplatesAuthor APIKeyScope = "coder:templates.author" + APIKeyScopeCoderTemplatesBuild APIKeyScope = "coder:templates.build" + APIKeyScopeCoderWorkspacesAccess APIKeyScope = "coder:workspaces.access" + APIKeyScopeCoderWorkspacesCreate APIKeyScope = "coder:workspaces.create" + APIKeyScopeCoderWorkspacesDelete APIKeyScope = "coder:workspaces.delete" + APIKeyScopeCoderWorkspacesOperate APIKeyScope = "coder:workspaces.operate" + APIKeyScopeConnectionLogAll APIKeyScope = "connection_log:*" + APIKeyScopeConnectionLogRead APIKeyScope = "connection_log:read" + APIKeyScopeConnectionLogUpdate APIKeyScope = "connection_log:update" + APIKeyScopeCryptoKeyAll APIKeyScope = "crypto_key:*" + APIKeyScopeCryptoKeyCreate APIKeyScope = "crypto_key:create" + APIKeyScopeCryptoKeyDelete APIKeyScope = "crypto_key:delete" + APIKeyScopeCryptoKeyRead APIKeyScope = "crypto_key:read" + APIKeyScopeCryptoKeyUpdate APIKeyScope = "crypto_key:update" + APIKeyScopeDebugInfoAll APIKeyScope = "debug_info:*" + APIKeyScopeDebugInfoRead APIKeyScope = "debug_info:read" + APIKeyScopeDeploymentConfigAll APIKeyScope = "deployment_config:*" + APIKeyScopeDeploymentConfigRead APIKeyScope = "deployment_config:read" + APIKeyScopeDeploymentConfigUpdate APIKeyScope = "deployment_config:update" + APIKeyScopeDeploymentStatsAll APIKeyScope = "deployment_stats:*" + APIKeyScopeDeploymentStatsRead APIKeyScope = "deployment_stats:read" + APIKeyScopeFileAll APIKeyScope = "file:*" + APIKeyScopeFileCreate APIKeyScope = "file:create" + APIKeyScopeFileRead APIKeyScope = "file:read" + APIKeyScopeGroupAll APIKeyScope = 
"group:*" + APIKeyScopeGroupCreate APIKeyScope = "group:create" + APIKeyScopeGroupDelete APIKeyScope = "group:delete" + APIKeyScopeGroupRead APIKeyScope = "group:read" + APIKeyScopeGroupUpdate APIKeyScope = "group:update" + APIKeyScopeGroupMemberAll APIKeyScope = "group_member:*" + APIKeyScopeGroupMemberRead APIKeyScope = "group_member:read" + APIKeyScopeIdpsyncSettingsAll APIKeyScope = "idpsync_settings:*" + APIKeyScopeIdpsyncSettingsRead APIKeyScope = "idpsync_settings:read" + APIKeyScopeIdpsyncSettingsUpdate APIKeyScope = "idpsync_settings:update" + APIKeyScopeInboxNotificationAll APIKeyScope = "inbox_notification:*" + APIKeyScopeInboxNotificationCreate APIKeyScope = "inbox_notification:create" + APIKeyScopeInboxNotificationRead APIKeyScope = "inbox_notification:read" + APIKeyScopeInboxNotificationUpdate APIKeyScope = "inbox_notification:update" + APIKeyScopeLicenseAll APIKeyScope = "license:*" + APIKeyScopeLicenseCreate APIKeyScope = "license:create" + APIKeyScopeLicenseDelete APIKeyScope = "license:delete" + APIKeyScopeLicenseRead APIKeyScope = "license:read" + APIKeyScopeNotificationMessageAll APIKeyScope = "notification_message:*" + APIKeyScopeNotificationMessageCreate APIKeyScope = "notification_message:create" + APIKeyScopeNotificationMessageDelete APIKeyScope = "notification_message:delete" + APIKeyScopeNotificationMessageRead APIKeyScope = "notification_message:read" + APIKeyScopeNotificationMessageUpdate APIKeyScope = "notification_message:update" + APIKeyScopeNotificationPreferenceAll APIKeyScope = "notification_preference:*" + APIKeyScopeNotificationPreferenceRead APIKeyScope = "notification_preference:read" + APIKeyScopeNotificationPreferenceUpdate APIKeyScope = "notification_preference:update" + APIKeyScopeNotificationTemplateAll APIKeyScope = "notification_template:*" + APIKeyScopeNotificationTemplateRead APIKeyScope = "notification_template:read" + APIKeyScopeNotificationTemplateUpdate APIKeyScope = "notification_template:update" + 
APIKeyScopeOauth2AppAll APIKeyScope = "oauth2_app:*" + APIKeyScopeOauth2AppCreate APIKeyScope = "oauth2_app:create" + APIKeyScopeOauth2AppDelete APIKeyScope = "oauth2_app:delete" + APIKeyScopeOauth2AppRead APIKeyScope = "oauth2_app:read" + APIKeyScopeOauth2AppUpdate APIKeyScope = "oauth2_app:update" + APIKeyScopeOauth2AppCodeTokenAll APIKeyScope = "oauth2_app_code_token:*" + APIKeyScopeOauth2AppCodeTokenCreate APIKeyScope = "oauth2_app_code_token:create" + APIKeyScopeOauth2AppCodeTokenDelete APIKeyScope = "oauth2_app_code_token:delete" + APIKeyScopeOauth2AppCodeTokenRead APIKeyScope = "oauth2_app_code_token:read" + APIKeyScopeOauth2AppSecretAll APIKeyScope = "oauth2_app_secret:*" + APIKeyScopeOauth2AppSecretCreate APIKeyScope = "oauth2_app_secret:create" + APIKeyScopeOauth2AppSecretDelete APIKeyScope = "oauth2_app_secret:delete" + APIKeyScopeOauth2AppSecretRead APIKeyScope = "oauth2_app_secret:read" + APIKeyScopeOauth2AppSecretUpdate APIKeyScope = "oauth2_app_secret:update" + APIKeyScopeOrganizationAll APIKeyScope = "organization:*" + APIKeyScopeOrganizationCreate APIKeyScope = "organization:create" + APIKeyScopeOrganizationDelete APIKeyScope = "organization:delete" + APIKeyScopeOrganizationRead APIKeyScope = "organization:read" + APIKeyScopeOrganizationUpdate APIKeyScope = "organization:update" + APIKeyScopeOrganizationMemberAll APIKeyScope = "organization_member:*" + APIKeyScopeOrganizationMemberCreate APIKeyScope = "organization_member:create" + APIKeyScopeOrganizationMemberDelete APIKeyScope = "organization_member:delete" + APIKeyScopeOrganizationMemberRead APIKeyScope = "organization_member:read" + APIKeyScopeOrganizationMemberUpdate APIKeyScope = "organization_member:update" + APIKeyScopePrebuiltWorkspaceAll APIKeyScope = "prebuilt_workspace:*" + APIKeyScopePrebuiltWorkspaceDelete APIKeyScope = "prebuilt_workspace:delete" + APIKeyScopePrebuiltWorkspaceUpdate APIKeyScope = "prebuilt_workspace:update" + APIKeyScopeProvisionerDaemonAll APIKeyScope = 
"provisioner_daemon:*" + APIKeyScopeProvisionerDaemonCreate APIKeyScope = "provisioner_daemon:create" + APIKeyScopeProvisionerDaemonDelete APIKeyScope = "provisioner_daemon:delete" + APIKeyScopeProvisionerDaemonRead APIKeyScope = "provisioner_daemon:read" + APIKeyScopeProvisionerDaemonUpdate APIKeyScope = "provisioner_daemon:update" + APIKeyScopeProvisionerJobsAll APIKeyScope = "provisioner_jobs:*" + APIKeyScopeProvisionerJobsCreate APIKeyScope = "provisioner_jobs:create" + APIKeyScopeProvisionerJobsRead APIKeyScope = "provisioner_jobs:read" + APIKeyScopeProvisionerJobsUpdate APIKeyScope = "provisioner_jobs:update" + APIKeyScopeReplicasAll APIKeyScope = "replicas:*" + APIKeyScopeReplicasRead APIKeyScope = "replicas:read" + APIKeyScopeSystemAll APIKeyScope = "system:*" + APIKeyScopeSystemCreate APIKeyScope = "system:create" + APIKeyScopeSystemDelete APIKeyScope = "system:delete" + APIKeyScopeSystemRead APIKeyScope = "system:read" + APIKeyScopeSystemUpdate APIKeyScope = "system:update" + APIKeyScopeTailnetCoordinatorAll APIKeyScope = "tailnet_coordinator:*" + APIKeyScopeTailnetCoordinatorCreate APIKeyScope = "tailnet_coordinator:create" + APIKeyScopeTailnetCoordinatorDelete APIKeyScope = "tailnet_coordinator:delete" + APIKeyScopeTailnetCoordinatorRead APIKeyScope = "tailnet_coordinator:read" + APIKeyScopeTailnetCoordinatorUpdate APIKeyScope = "tailnet_coordinator:update" + APIKeyScopeTaskAll APIKeyScope = "task:*" + APIKeyScopeTaskCreate APIKeyScope = "task:create" + APIKeyScopeTaskDelete APIKeyScope = "task:delete" + APIKeyScopeTaskRead APIKeyScope = "task:read" + APIKeyScopeTaskUpdate APIKeyScope = "task:update" + APIKeyScopeTemplateAll APIKeyScope = "template:*" + APIKeyScopeTemplateCreate APIKeyScope = "template:create" + APIKeyScopeTemplateDelete APIKeyScope = "template:delete" + APIKeyScopeTemplateRead APIKeyScope = "template:read" + APIKeyScopeTemplateUpdate APIKeyScope = "template:update" + APIKeyScopeTemplateUse APIKeyScope = "template:use" + 
APIKeyScopeTemplateViewInsights APIKeyScope = "template:view_insights" + APIKeyScopeUsageEventAll APIKeyScope = "usage_event:*" + APIKeyScopeUsageEventCreate APIKeyScope = "usage_event:create" + APIKeyScopeUsageEventRead APIKeyScope = "usage_event:read" + APIKeyScopeUsageEventUpdate APIKeyScope = "usage_event:update" + APIKeyScopeUserAll APIKeyScope = "user:*" + APIKeyScopeUserCreate APIKeyScope = "user:create" + APIKeyScopeUserDelete APIKeyScope = "user:delete" + APIKeyScopeUserRead APIKeyScope = "user:read" + APIKeyScopeUserReadPersonal APIKeyScope = "user:read_personal" + APIKeyScopeUserUpdate APIKeyScope = "user:update" + APIKeyScopeUserUpdatePersonal APIKeyScope = "user:update_personal" + APIKeyScopeUserSecretAll APIKeyScope = "user_secret:*" + APIKeyScopeUserSecretCreate APIKeyScope = "user_secret:create" + APIKeyScopeUserSecretDelete APIKeyScope = "user_secret:delete" + APIKeyScopeUserSecretRead APIKeyScope = "user_secret:read" + APIKeyScopeUserSecretUpdate APIKeyScope = "user_secret:update" + APIKeyScopeWebpushSubscriptionAll APIKeyScope = "webpush_subscription:*" + APIKeyScopeWebpushSubscriptionCreate APIKeyScope = "webpush_subscription:create" + APIKeyScopeWebpushSubscriptionDelete APIKeyScope = "webpush_subscription:delete" + APIKeyScopeWebpushSubscriptionRead APIKeyScope = "webpush_subscription:read" + APIKeyScopeWorkspaceAll APIKeyScope = "workspace:*" + APIKeyScopeWorkspaceApplicationConnect APIKeyScope = "workspace:application_connect" + APIKeyScopeWorkspaceCreate APIKeyScope = "workspace:create" + APIKeyScopeWorkspaceCreateAgent APIKeyScope = "workspace:create_agent" + APIKeyScopeWorkspaceDelete APIKeyScope = "workspace:delete" + APIKeyScopeWorkspaceDeleteAgent APIKeyScope = "workspace:delete_agent" + APIKeyScopeWorkspaceRead APIKeyScope = "workspace:read" + APIKeyScopeWorkspaceShare APIKeyScope = "workspace:share" + APIKeyScopeWorkspaceSsh APIKeyScope = "workspace:ssh" + APIKeyScopeWorkspaceStart APIKeyScope = "workspace:start" + 
APIKeyScopeWorkspaceStop APIKeyScope = "workspace:stop" + APIKeyScopeWorkspaceUpdate APIKeyScope = "workspace:update" + APIKeyScopeWorkspaceAgentDevcontainersAll APIKeyScope = "workspace_agent_devcontainers:*" + APIKeyScopeWorkspaceAgentDevcontainersCreate APIKeyScope = "workspace_agent_devcontainers:create" + APIKeyScopeWorkspaceAgentResourceMonitorAll APIKeyScope = "workspace_agent_resource_monitor:*" + APIKeyScopeWorkspaceAgentResourceMonitorCreate APIKeyScope = "workspace_agent_resource_monitor:create" + APIKeyScopeWorkspaceAgentResourceMonitorRead APIKeyScope = "workspace_agent_resource_monitor:read" + APIKeyScopeWorkspaceAgentResourceMonitorUpdate APIKeyScope = "workspace_agent_resource_monitor:update" + APIKeyScopeWorkspaceDormantAll APIKeyScope = "workspace_dormant:*" + APIKeyScopeWorkspaceDormantApplicationConnect APIKeyScope = "workspace_dormant:application_connect" + APIKeyScopeWorkspaceDormantCreate APIKeyScope = "workspace_dormant:create" + APIKeyScopeWorkspaceDormantCreateAgent APIKeyScope = "workspace_dormant:create_agent" + APIKeyScopeWorkspaceDormantDelete APIKeyScope = "workspace_dormant:delete" + APIKeyScopeWorkspaceDormantDeleteAgent APIKeyScope = "workspace_dormant:delete_agent" + APIKeyScopeWorkspaceDormantRead APIKeyScope = "workspace_dormant:read" + APIKeyScopeWorkspaceDormantShare APIKeyScope = "workspace_dormant:share" + APIKeyScopeWorkspaceDormantSsh APIKeyScope = "workspace_dormant:ssh" + APIKeyScopeWorkspaceDormantStart APIKeyScope = "workspace_dormant:start" + APIKeyScopeWorkspaceDormantStop APIKeyScope = "workspace_dormant:stop" + APIKeyScopeWorkspaceDormantUpdate APIKeyScope = "workspace_dormant:update" + APIKeyScopeWorkspaceProxyAll APIKeyScope = "workspace_proxy:*" + APIKeyScopeWorkspaceProxyCreate APIKeyScope = "workspace_proxy:create" + APIKeyScopeWorkspaceProxyDelete APIKeyScope = "workspace_proxy:delete" + APIKeyScopeWorkspaceProxyRead APIKeyScope = "workspace_proxy:read" + APIKeyScopeWorkspaceProxyUpdate APIKeyScope = 
"workspace_proxy:update" ) // PublicAPIKeyScopes lists all public low-level API key scopes. @@ -66,6 +221,11 @@ var PublicAPIKeyScopes = []APIKeyScope{ APIKeyScopeFileAll, APIKeyScopeFileCreate, APIKeyScopeFileRead, + APIKeyScopeTaskAll, + APIKeyScopeTaskCreate, + APIKeyScopeTaskDelete, + APIKeyScopeTaskRead, + APIKeyScopeTaskUpdate, APIKeyScopeTemplateAll, APIKeyScopeTemplateCreate, APIKeyScopeTemplateDelete, diff --git a/codersdk/audit.go b/codersdk/audit.go index 1e529202b5285..0b2eca7d79d92 100644 --- a/codersdk/audit.go +++ b/codersdk/audit.go @@ -44,6 +44,7 @@ const ( // Deprecated: Workspace App connections are now included in the // connection log. ResourceTypeWorkspaceApp ResourceType = "workspace_app" + ResourceTypeTask ResourceType = "task" ) func (r ResourceType) FriendlyString() string { @@ -100,6 +101,8 @@ func (r ResourceType) FriendlyString() string { return "workspace agent" case ResourceTypeWorkspaceApp: return "workspace app" + case ResourceTypeTask: + return "task" default: return "unknown" } diff --git a/codersdk/client.go b/codersdk/client.go index e71703c751963..42ad51286f181 100644 --- a/codersdk/client.go +++ b/codersdk/client.go @@ -519,6 +519,16 @@ func (e *Error) Error() string { return builder.String() } +// NewTestError is a helper function to create a Error, setting the internal fields. It's generally only useful for +// testing. 
+func NewTestError(statusCode int, method string, u string) *Error { + return &Error{ + statusCode: statusCode, + method: method, + url: u, + } +} + type closeFunc func() error func (c closeFunc) Close() error { diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 9549b0b98eb2f..9425a3740f089 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -487,6 +487,7 @@ type DeploymentValues struct { Sessions SessionLifetime `json:"session_lifetime,omitempty" typescript:",notnull"` DisablePasswordAuth serpent.Bool `json:"disable_password_auth,omitempty" typescript:",notnull"` Support SupportConfig `json:"support,omitempty" typescript:",notnull"` + EnableAuthzRecording serpent.Bool `json:"enable_authz_recording,omitempty" typescript:",notnull"` ExternalAuthConfigs serpent.Struct[[]ExternalAuthConfig] `json:"external_auth,omitempty" typescript:",notnull"` SSHConfig SSHConfig `json:"config_ssh,omitempty" typescript:",notnull"` WgtunnelHost serpent.String `json:"wgtunnel_host,omitempty" typescript:",notnull"` @@ -984,7 +985,7 @@ func DefaultSupportLinks(docsURL string) []LinkConfig { }, { Name: "Join the Coder Discord", - Target: "https://coder.com/chat?utm_source=coder&utm_medium=coder&utm_campaign=server-footer", + Target: "https://discord.gg/coder", Icon: "chat", }, { @@ -3240,14 +3241,13 @@ Write out the current server config as YAML to stdout.`, // AIBridge Options { Name: "AIBridge Enabled", - Description: fmt.Sprintf("Whether to start an in-memory aibridged instance (%q experiment must be enabled, too).", ExperimentAIBridge), + Description: "Whether to start an in-memory aibridged instance.", Flag: "aibridge-enabled", Env: "CODER_AIBRIDGE_ENABLED", Value: &c.AI.BridgeConfig.Enabled, Default: "false", Group: &deploymentGroupAIBridge, YAML: "enabled", - Hidden: true, }, { Name: "AIBridge OpenAI Base URL", @@ -3258,7 +3258,6 @@ Write out the current server config as YAML to stdout.`, Default: "https://api.openai.com/v1/", Group: 
&deploymentGroupAIBridge, YAML: "openai_base_url", - Hidden: true, }, { Name: "AIBridge OpenAI Key", @@ -3269,7 +3268,6 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "openai_key", - Hidden: true, }, { Name: "AIBridge Anthropic Base URL", @@ -3279,19 +3277,80 @@ Write out the current server config as YAML to stdout.`, Value: &c.AI.BridgeConfig.Anthropic.BaseURL, Default: "https://api.anthropic.com/", Group: &deploymentGroupAIBridge, - YAML: "base_url", - Hidden: true, + YAML: "anthropic_base_url", }, { - Name: "AIBridge Anthropic KEY", + Name: "AIBridge Anthropic Key", Description: "The key to authenticate against the Anthropic API.", Flag: "aibridge-anthropic-key", Env: "CODER_AIBRIDGE_ANTHROPIC_KEY", Value: &c.AI.BridgeConfig.Anthropic.Key, Default: "", Group: &deploymentGroupAIBridge, - YAML: "key", - Hidden: true, + YAML: "anthropic_key", + }, + { + Name: "AIBridge Bedrock Region", + Description: "The AWS Bedrock API region.", + Flag: "aibridge-bedrock-region", + Env: "CODER_AIBRIDGE_BEDROCK_REGION", + Value: &c.AI.BridgeConfig.Bedrock.Region, + Default: "", + Group: &deploymentGroupAIBridge, + YAML: "bedrock_region", + }, + { + Name: "AIBridge Bedrock Access Key", + Description: "The access key to authenticate against the AWS Bedrock API.", + Flag: "aibridge-bedrock-access-key", + Env: "CODER_AIBRIDGE_BEDROCK_ACCESS_KEY", + Value: &c.AI.BridgeConfig.Bedrock.AccessKey, + Default: "", + Group: &deploymentGroupAIBridge, + YAML: "bedrock_access_key", + }, + { + Name: "AIBridge Bedrock Access Key Secret", + Description: "The access key secret to use with the access key to authenticate against the AWS Bedrock API.", + Flag: "aibridge-bedrock-access-key-secret", + Env: "CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET", + Value: &c.AI.BridgeConfig.Bedrock.AccessKeySecret, + Default: "", + Group: &deploymentGroupAIBridge, + YAML: "bedrock_access_key_secret", + }, + { + Name: "AIBridge Bedrock Model", + 
Description: "The model to use when making requests to the AWS Bedrock API.", + Flag: "aibridge-bedrock-model", + Env: "CODER_AIBRIDGE_BEDROCK_MODEL", + Value: &c.AI.BridgeConfig.Bedrock.Model, + Default: "global.anthropic.claude-sonnet-4-5-20250929-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock. + Group: &deploymentGroupAIBridge, + YAML: "bedrock_model", + }, + { + Name: "AIBridge Bedrock Small Fast Model", + Description: "The small fast model to use when making requests to the AWS Bedrock API. Claude Code uses Haiku-class models to perform background tasks. See https://docs.claude.com/en/docs/claude-code/settings#environment-variables.", + Flag: "aibridge-bedrock-small-fastmodel", + Env: "CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL", + Value: &c.AI.BridgeConfig.Bedrock.SmallFastModel, + Default: "global.anthropic.claude-haiku-4-5-20251001-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock. + Group: &deploymentGroupAIBridge, + YAML: "bedrock_small_fast_model", + }, + { + Name: "Enable Authorization Recordings", + Description: "All api requests will have a header including all authorization calls made during the request. " + + "This is used for debugging purposes and only available for dev builds.", + Required: false, + Flag: "enable-authz-recordings", + Env: "CODER_ENABLE_AUTHZ_RECORDINGS", + Default: "false", + Value: &c.EnableAuthzRecording, + // Do not show this option ever. It is a developer tool only, and not to be + // used externally. 
+ Hidden: true, }, } @@ -3302,6 +3361,7 @@ type AIBridgeConfig struct { Enabled serpent.Bool `json:"enabled" typescript:",notnull"` OpenAI AIBridgeOpenAIConfig `json:"openai" typescript:",notnull"` Anthropic AIBridgeAnthropicConfig `json:"anthropic" typescript:",notnull"` + Bedrock AIBridgeBedrockConfig `json:"bedrock" typescript:",notnull"` } type AIBridgeOpenAIConfig struct { @@ -3314,6 +3374,14 @@ type AIBridgeAnthropicConfig struct { Key serpent.String `json:"key" typescript:",notnull"` } +type AIBridgeBedrockConfig struct { + Region serpent.String `json:"region" typescript:",notnull"` + AccessKey serpent.String `json:"access_key" typescript:",notnull"` + AccessKeySecret serpent.String `json:"access_key_secret" typescript:",notnull"` + Model serpent.String `json:"model" typescript:",notnull"` + SmallFastModel serpent.String `json:"small_fast_model" typescript:",notnull"` +} + type AIConfig struct { BridgeConfig AIBridgeConfig `json:"bridge,omitempty"` } @@ -3325,7 +3393,9 @@ type SupportConfig struct { type LinkConfig struct { Name string `json:"name" yaml:"name"` Target string `json:"target" yaml:"target"` - Icon string `json:"icon" yaml:"icon" enums:"bug,chat,docs"` + Icon string `json:"icon" yaml:"icon" enums:"bug,chat,docs,star"` + + Location string `json:"location,omitempty" yaml:"location,omitempty" enums:"navbar,dropdown"` } // Validate checks cross-field constraints for deployment values. @@ -3565,7 +3635,6 @@ const ( ExperimentOAuth2 Experiment = "oauth2" // Enables OAuth2 provider functionality. ExperimentMCPServerHTTP Experiment = "mcp-server-http" // Enables the MCP HTTP server functionality. ExperimentWorkspaceSharing Experiment = "workspace-sharing" // Enables updating workspace ACLs for sharing with users and groups. - ExperimentAIBridge Experiment = "aibridge" // Enables AI Bridge functionality. 
) func (e Experiment) DisplayName() string { @@ -3586,8 +3655,6 @@ func (e Experiment) DisplayName() string { return "MCP HTTP Server Functionality" case ExperimentWorkspaceSharing: return "Workspace Sharing" - case ExperimentAIBridge: - return "AI Bridge" default: // Split on hyphen and convert to title case // e.g. "web-push" -> "Web Push", "mcp-server-http" -> "Mcp Server Http" @@ -3606,7 +3673,6 @@ var ExperimentsKnown = Experiments{ ExperimentOAuth2, ExperimentMCPServerHTTP, ExperimentWorkspaceSharing, - ExperimentAIBridge, } // ExperimentsSafe should include all experiments that are safe for diff --git a/codersdk/oauth2.go b/codersdk/oauth2.go index c2c59ed599190..79b2186480b9c 100644 --- a/codersdk/oauth2.go +++ b/codersdk/oauth2.go @@ -7,6 +7,7 @@ import ( "fmt" "net/http" "net/url" + "strings" "github.com/google/uuid" ) @@ -26,6 +27,7 @@ type OAuth2ProviderApp struct { type OAuth2AppEndpoints struct { Authorization string `json:"authorization"` Token string `json:"token"` + TokenRevoke string `json:"token_revoke"` // DeviceAuth is optional. DeviceAuth string `json:"device_authorization"` } @@ -212,6 +214,26 @@ func (e OAuth2ProviderResponseType) Valid() bool { return false } +// RevokeOAuth2Token revokes a specific OAuth2 token using RFC 7009 token revocation. 
+func (c *Client) RevokeOAuth2Token(ctx context.Context, clientID uuid.UUID, token string) error { + form := url.Values{} + form.Set("token", token) + // Client authentication is handled via the client_id in the app middleware + form.Set("client_id", clientID.String()) + + res, err := c.Request(ctx, http.MethodPost, "/oauth2/revoke", strings.NewReader(form.Encode()), func(r *http.Request) { + r.Header.Set("Content-Type", "application/x-www-form-urlencoded") + }) + if err != nil { + return err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return ReadBodyAsError(res) + } + return nil +} + // RevokeOAuth2ProviderApp completely revokes an app's access for the // authenticated user. func (c *Client) RevokeOAuth2ProviderApp(ctx context.Context, appID uuid.UUID) error { @@ -464,6 +486,6 @@ type OAuth2ClientConfiguration struct { TokenEndpointAuthMethod string `json:"token_endpoint_auth_method"` Scope string `json:"scope,omitempty"` Contacts []string `json:"contacts,omitempty"` - RegistrationAccessToken string `json:"registration_access_token"` + RegistrationAccessToken []byte `json:"registration_access_token"` RegistrationClientURI string `json:"registration_client_uri"` } diff --git a/codersdk/organizations.go b/codersdk/organizations.go index bca87c7bd4591..823169d385b22 100644 --- a/codersdk/organizations.go +++ b/codersdk/organizations.go @@ -397,10 +397,11 @@ func (c *Client) OrganizationProvisionerDaemons(ctx context.Context, organizatio } type OrganizationProvisionerJobsOptions struct { - Limit int - IDs []uuid.UUID - Status []ProvisionerJobStatus - Tags map[string]string + Limit int + IDs []uuid.UUID + Status []ProvisionerJobStatus + Tags map[string]string + Initiator string } func (c *Client) OrganizationProvisionerJobs(ctx context.Context, organizationID uuid.UUID, opts *OrganizationProvisionerJobsOptions) ([]ProvisionerJob, error) { @@ -422,6 +423,9 @@ func (c *Client) OrganizationProvisionerJobs(ctx context.Context, organizationID } 
qp.Add("tags", string(tagsRaw)) } + if opts.Initiator != "" { + qp.Add("initiator", opts.Initiator) + } } res, err := c.Request(ctx, http.MethodGet, diff --git a/codersdk/provisionerdaemons.go b/codersdk/provisionerdaemons.go index 4bff7d7827aa1..19f8cae546118 100644 --- a/codersdk/provisionerdaemons.go +++ b/codersdk/provisionerdaemons.go @@ -175,6 +175,12 @@ func JobIsMissingParameterErrorCode(code JobErrorCode) bool { return string(code) == runner.MissingParameterErrorCode } +// JobIsMissingRequiredTemplateVariableErrorCode returns whether the error is a missing a required template +// variable error. This can indicate to consumers that they need to provide required template variables. +func JobIsMissingRequiredTemplateVariableErrorCode(code JobErrorCode) bool { + return string(code) == runner.RequiredTemplateVariablesErrorCode +} + // ProvisionerJob describes the job executed by the provisioning daemon. type ProvisionerJob struct { ID uuid.UUID `json:"id" format:"uuid" table:"id"` @@ -192,6 +198,7 @@ type ProvisionerJob struct { QueuePosition int `json:"queue_position" table:"queue position"` QueueSize int `json:"queue_size" table:"queue size"` OrganizationID uuid.UUID `json:"organization_id" format:"uuid" table:"organization id"` + InitiatorID uuid.UUID `json:"initiator_id" format:"uuid" table:"initiator id"` Input ProvisionerJobInput `json:"input" table:"input,recursive_inline"` Type ProvisionerJobType `json:"type" table:"type"` AvailableWorkers []uuid.UUID `json:"available_workers,omitempty" format:"uuid" table:"available workers"` diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go index 6a8185e6eb62a..b6f8e778ee760 100644 --- a/codersdk/rbacresources_gen.go +++ b/codersdk/rbacresources_gen.go @@ -35,6 +35,7 @@ const ( ResourceReplicas RBACResource = "replicas" ResourceSystem RBACResource = "system" ResourceTailnetCoordinator RBACResource = "tailnet_coordinator" + ResourceTask RBACResource = "task" ResourceTemplate RBACResource = 
"template" ResourceUsageEvent RBACResource = "usage_event" ResourceUser RBACResource = "user" @@ -59,6 +60,7 @@ const ( ActionRead RBACAction = "read" ActionReadPersonal RBACAction = "read_personal" ActionSSH RBACAction = "ssh" + ActionShare RBACAction = "share" ActionUnassign RBACAction = "unassign" ActionUpdate RBACAction = "update" ActionUpdatePersonal RBACAction = "update_personal" @@ -102,14 +104,15 @@ var RBACResourceActions = map[RBACResource][]RBACAction{ ResourceReplicas: {ActionRead}, ResourceSystem: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceTailnetCoordinator: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, + ResourceTask: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceTemplate: {ActionCreate, ActionDelete, ActionRead, ActionUpdate, ActionUse, ActionViewInsights}, ResourceUsageEvent: {ActionCreate, ActionRead, ActionUpdate}, ResourceUser: {ActionCreate, ActionDelete, ActionRead, ActionReadPersonal, ActionUpdate, ActionUpdatePersonal}, ResourceUserSecret: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceWebpushSubscription: {ActionCreate, ActionDelete, ActionRead}, - ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, + ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionShare, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, ResourceWorkspaceAgentDevcontainers: {ActionCreate}, ResourceWorkspaceAgentResourceMonitor: {ActionCreate, ActionRead, ActionUpdate}, - ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, + ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, 
ActionDeleteAgent, ActionRead, ActionShare, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, ResourceWorkspaceProxy: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, } diff --git a/codersdk/roles.go b/codersdk/roles.go index f248c38798d19..70162f8f09ba4 100644 --- a/codersdk/roles.go +++ b/codersdk/roles.go @@ -56,9 +56,11 @@ type Role struct { OrganizationID string `json:"organization_id,omitempty" table:"organization id" format:"uuid"` DisplayName string `json:"display_name" table:"display name"` SitePermissions []Permission `json:"site_permissions" table:"site permissions"` + UserPermissions []Permission `json:"user_permissions" table:"user permissions"` // OrganizationPermissions are specific for the organization in the field 'OrganizationID' above. OrganizationPermissions []Permission `json:"organization_permissions" table:"organization permissions"` - UserPermissions []Permission `json:"user_permissions" table:"user permissions"` + // OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above. + OrganizationMemberPermissions []Permission `json:"organization_member_permissions" table:"organization member permissions"` } // CustomRoleRequest is used to edit custom roles. @@ -66,9 +68,11 @@ type CustomRoleRequest struct { Name string `json:"name" table:"name,default_sort" validate:"username"` DisplayName string `json:"display_name" table:"display name"` SitePermissions []Permission `json:"site_permissions" table:"site permissions"` + UserPermissions []Permission `json:"user_permissions" table:"user permissions"` // OrganizationPermissions are specific to the organization the role belongs to. OrganizationPermissions []Permission `json:"organization_permissions" table:"organization permissions"` - UserPermissions []Permission `json:"user_permissions" table:"user permissions"` + // OrganizationMemberPermissions are specific to the organization the role belongs to. 
+ OrganizationMemberPermissions []Permission `json:"organization_member_permissions" table:"organization member permissions"` } // FullName returns the role name scoped to the organization ID. This is useful if @@ -85,11 +89,12 @@ func (r Role) FullName() string { // CreateOrganizationRole will create a custom organization role func (c *Client) CreateOrganizationRole(ctx context.Context, role Role) (Role, error) { req := CustomRoleRequest{ - Name: role.Name, - DisplayName: role.DisplayName, - SitePermissions: role.SitePermissions, - OrganizationPermissions: role.OrganizationPermissions, - UserPermissions: role.UserPermissions, + Name: role.Name, + DisplayName: role.DisplayName, + SitePermissions: role.SitePermissions, + UserPermissions: role.UserPermissions, + OrganizationPermissions: role.OrganizationPermissions, + OrganizationMemberPermissions: role.OrganizationMemberPermissions, } res, err := c.Request(ctx, http.MethodPost, @@ -108,11 +113,12 @@ func (c *Client) CreateOrganizationRole(ctx context.Context, role Role) (Role, e // UpdateOrganizationRole will update an existing custom organization role func (c *Client) UpdateOrganizationRole(ctx context.Context, role Role) (Role, error) { req := CustomRoleRequest{ - Name: role.Name, - DisplayName: role.DisplayName, - SitePermissions: role.SitePermissions, - OrganizationPermissions: role.OrganizationPermissions, - UserPermissions: role.UserPermissions, + Name: role.Name, + DisplayName: role.DisplayName, + SitePermissions: role.SitePermissions, + UserPermissions: role.UserPermissions, + OrganizationPermissions: role.OrganizationPermissions, + OrganizationMemberPermissions: role.OrganizationMemberPermissions, } res, err := c.Request(ctx, http.MethodPut, diff --git a/codersdk/templatevariables.go b/codersdk/templatevariables.go index 3e02f6910642f..19c614e796e1e 100644 --- a/codersdk/templatevariables.go +++ b/codersdk/templatevariables.go @@ -68,7 +68,7 @@ func ParseUserVariableValues(varsFiles []string, variablesFile 
string, commandLi return nil, err } - return combineVariableValues(fromVars, fromFile, fromCommandLine), nil + return CombineVariableValues(fromVars, fromFile, fromCommandLine), nil } func parseVariableValuesFromVarsFiles(varsFiles []string) ([]VariableValue, error) { @@ -252,7 +252,7 @@ func parseVariableValuesFromCommandLine(variables []string) ([]VariableValue, er return values, nil } -func combineVariableValues(valuesSets ...[]VariableValue) []VariableValue { +func CombineVariableValues(valuesSets ...[]VariableValue) []VariableValue { combinedValues := make(map[string]string) for _, values := range valuesSets { diff --git a/codersdk/toolsdk/bash.go b/codersdk/toolsdk/bash.go index 7497363c2a54e..8d72f090d7ef0 100644 --- a/codersdk/toolsdk/bash.go +++ b/codersdk/toolsdk/bash.go @@ -53,8 +53,16 @@ If the command times out, all output captured up to that point is returned with For background commands (background: true), output is captured until the timeout is reached, then the command continues running in the background. The captured output is returned as the result. +For file operations (list, write, edit), always prefer the dedicated file tools. +Do not use bash commands (ls, cat, echo, heredoc, etc.) to list, write, or read +files when the file tools are available. 
The bash tool should be used for: + + - Running commands and scripts + - Installing packages + - Starting services + - Executing programs + Examples: -- workspace: "my-workspace", command: "ls -la" - workspace: "john/dev-env", command: "git status", timeout_ms: 30000 - workspace: "my-workspace", command: "npm run dev", background: true, timeout_ms: 10000 - workspace: "my-workspace.main", command: "docker ps"`, @@ -266,20 +274,25 @@ func getWorkspaceAgent(workspace codersdk.Workspace, agentName string) (codersdk return codersdk.WorkspaceAgent{}, xerrors.Errorf("multiple agents found, please specify the agent name, available agents: %v", availableNames) } -// namedWorkspace gets a workspace by owner/name or just name -func namedWorkspace(ctx context.Context, client *codersdk.Client, identifier string) (codersdk.Workspace, error) { - // Parse owner and workspace name +func splitNameAndOwner(identifier string) (name string, owner string) { + // Parse owner and name (workspace, task). parts := strings.SplitN(identifier, "/", 2) - var owner, workspaceName string if len(parts) == 2 { owner = parts[0] - workspaceName = parts[1] + name = parts[1] } else { owner = "me" - workspaceName = identifier + name = identifier } + return name, owner +} + +// namedWorkspace gets a workspace by owner/name or just name +func namedWorkspace(ctx context.Context, client *codersdk.Client, identifier string) (codersdk.Workspace, error) { + workspaceName, owner := splitNameAndOwner(identifier) + // Handle -- separator format (convert to / format) if strings.Contains(identifier, "--") && !strings.Contains(identifier, "/") { dashParts := strings.SplitN(identifier, "--", 2) diff --git a/codersdk/toolsdk/toolsdk.go b/codersdk/toolsdk/toolsdk.go index acc7a9b10292a..802b319a5a6b1 100644 --- a/codersdk/toolsdk/toolsdk.go +++ b/codersdk/toolsdk/toolsdk.go @@ -50,6 +50,13 @@ const ( ToolNameWorkspaceEditFile = "coder_workspace_edit_file" ToolNameWorkspaceEditFiles = "coder_workspace_edit_files" 
ToolNameWorkspacePortForward = "coder_workspace_port_forward" + ToolNameWorkspaceListApps = "coder_workspace_list_apps" + ToolNameCreateTask = "coder_create_task" + ToolNameDeleteTask = "coder_delete_task" + ToolNameListTasks = "coder_list_tasks" + ToolNameGetTaskStatus = "coder_get_task_status" + ToolNameSendTaskInput = "coder_send_task_input" + ToolNameGetTaskLogs = "coder_get_task_logs" ) func NewDeps(client *codersdk.Client, opts ...func(*Deps)) (Deps, error) { @@ -223,6 +230,13 @@ var All = []GenericTool{ WorkspaceEditFile.Generic(), WorkspaceEditFiles.Generic(), WorkspacePortForward.Generic(), + WorkspaceListApps.Generic(), + CreateTask.Generic(), + DeleteTask.Generic(), + ListTasks.Generic(), + GetTaskStatus.Generic(), + SendTaskInput.Generic(), + GetTaskLogs.Generic(), } type ReportTaskArgs struct { @@ -339,12 +353,24 @@ var CreateWorkspace = Tool[CreateWorkspaceArgs, codersdk.Workspace]{ If a user is asking to "test a template", they are typically referring to creating a workspace from a template to ensure the infrastructure is provisioned correctly and the agent can connect to the control plane. + +Before creating a workspace, always confirm the template choice with the user by: + + 1. Listing the available templates that match their request. + 2. Recommending the most relevant option. + 2. Asking the user to confirm which template to use. + +It is important to not create a workspace without confirming the template +choice with the user. + +After creating a workspace, watch the build logs and wait for the workspace to +be ready before trying to use or connect to the workspace. `, Schema: aisdk.Schema{ Properties: map[string]any{ "user": map[string]any{ "type": "string", - "description": "Username or ID of the user to create the workspace for. 
Use the `me` keyword to create a workspace for the authenticated user.", + "description": userDescription("create a workspace"), }, "template_version_id": map[string]any{ "type": "string", @@ -516,8 +542,13 @@ type CreateWorkspaceBuildArgs struct { var CreateWorkspaceBuild = Tool[CreateWorkspaceBuildArgs, codersdk.WorkspaceBuild]{ Tool: aisdk.Tool{ - Name: ToolNameCreateWorkspaceBuild, - Description: "Create a new workspace build for an existing workspace. Use this to start, stop, or delete.", + Name: ToolNameCreateWorkspaceBuild, + Description: `Create a new workspace build for an existing workspace. Use this to start, stop, or delete. + +After creating a workspace build, watch the build logs and wait for the +workspace build to complete before trying to start another build or use or +connect to the workspace. +`, Schema: aisdk.Schema{ Properties: map[string]any{ "workspace_id": map[string]any{ @@ -1393,8 +1424,6 @@ type WorkspaceLSResponse struct { Contents []WorkspaceLSFile `json:"contents"` } -const workspaceDescription = "The workspace name in the format [owner/]workspace[.agent]. If an owner is not specified, the authenticated user is used." - var WorkspaceLS = Tool[WorkspaceLSArgs, WorkspaceLSResponse]{ Tool: aisdk.Tool{ Name: ToolNameWorkspaceLS, @@ -1519,8 +1548,20 @@ type WorkspaceWriteFileArgs struct { var WorkspaceWriteFile = Tool[WorkspaceWriteFileArgs, codersdk.Response]{ Tool: aisdk.Tool{ - Name: ToolNameWorkspaceWriteFile, - Description: `Write a file in a workspace.`, + Name: ToolNameWorkspaceWriteFile, + Description: `Write a file in a workspace. + +If a file write fails due to syntax errors or encoding issues, do NOT switch +to using bash commands as a workaround. Instead: + + 1. Read the error message carefully to identify the issue + 2. Fix the content encoding/syntax + 3. Retry with this tool + +The content parameter expects base64-encoded bytes. Ensure your source content +is correct before encoding it. 
If you encounter errors, decode and verify the +content you are trying to write, then re-encode it properly. +`, Schema: aisdk.Schema{ Properties: map[string]any{ "workspace": map[string]any{ @@ -1750,6 +1791,353 @@ var WorkspacePortForward = Tool[WorkspacePortForwardArgs, WorkspacePortForwardRe }, } +type WorkspaceListAppsArgs struct { + Workspace string `json:"workspace"` +} + +type WorkspaceListApp struct { + Name string `json:"name"` + URL string `json:"url"` +} + +type WorkspaceListAppsResponse struct { + Apps []WorkspaceListApp `json:"apps"` +} + +var WorkspaceListApps = Tool[WorkspaceListAppsArgs, WorkspaceListAppsResponse]{ + Tool: aisdk.Tool{ + Name: ToolNameWorkspaceListApps, + Description: `List the URLs of Coder apps running in a workspace for a single agent.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "workspace": map[string]any{ + "type": "string", + "description": workspaceDescription, + }, + }, + Required: []string{"workspace"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args WorkspaceListAppsArgs) (WorkspaceListAppsResponse, error) { + workspaceName := NormalizeWorkspaceInput(args.Workspace) + _, workspaceAgent, err := findWorkspaceAndAgent(ctx, deps.coderClient, workspaceName) + if err != nil { + return WorkspaceListAppsResponse{}, xerrors.Errorf("failed to find workspace: %w", err) + } + + var res WorkspaceListAppsResponse + for _, app := range workspaceAgent.Apps { + name := app.DisplayName + if name == "" { + name = app.Slug + } + res.Apps = append(res.Apps, WorkspaceListApp{ + Name: name, + URL: app.URL, + }) + } + + return res, nil + }, +} + +type CreateTaskArgs struct { + Input string `json:"input"` + TemplateVersionID string `json:"template_version_id"` + TemplateVersionPresetID string `json:"template_version_preset_id"` + User string `json:"user"` +} + +var CreateTask = Tool[CreateTaskArgs, codersdk.Task]{ + Tool: aisdk.Tool{ + Name: ToolNameCreateTask, + Description: `Create a 
task.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "input": map[string]any{ + "type": "string", + "description": "Input/prompt for the task.", + }, + "template_version_id": map[string]any{ + "type": "string", + "description": "ID of the template version to create the task from.", + }, + "template_version_preset_id": map[string]any{ + "type": "string", + "description": "Optional ID of the template version preset to create the task from.", + }, + "user": map[string]any{ + "type": "string", + "description": userDescription("create a task"), + }, + }, + Required: []string{"input", "template_version_id"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args CreateTaskArgs) (codersdk.Task, error) { + if args.Input == "" { + return codersdk.Task{}, xerrors.New("input is required") + } + + tvID, err := uuid.Parse(args.TemplateVersionID) + if err != nil { + return codersdk.Task{}, xerrors.New("template_version_id must be a valid UUID") + } + + var tvPresetID uuid.UUID + if args.TemplateVersionPresetID != "" { + tvPresetID, err = uuid.Parse(args.TemplateVersionPresetID) + if err != nil { + return codersdk.Task{}, xerrors.New("template_version_preset_id must be a valid UUID") + } + } + + if args.User == "" { + args.User = codersdk.Me + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + task, err := expClient.CreateTask(ctx, args.User, codersdk.CreateTaskRequest{ + Input: args.Input, + TemplateVersionID: tvID, + TemplateVersionPresetID: tvPresetID, + }) + if err != nil { + return codersdk.Task{}, xerrors.Errorf("create task: %w", err) + } + + return task, nil + }, +} + +type DeleteTaskArgs struct { + TaskID string `json:"task_id"` +} + +var DeleteTask = Tool[DeleteTaskArgs, codersdk.Response]{ + Tool: aisdk.Tool{ + Name: ToolNameDeleteTask, + Description: `Delete a task.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "task_id": map[string]any{ + "type": "string", + "description": 
taskIDDescription("delete"), + }, + }, + Required: []string{"task_id"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args DeleteTaskArgs) (codersdk.Response, error) { + if args.TaskID == "" { + return codersdk.Response{}, xerrors.New("task_id is required") + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + + task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + if err != nil { + return codersdk.Response{}, xerrors.Errorf("resolve task: %w", err) + } + + err = expClient.DeleteTask(ctx, task.OwnerName, task.ID) + if err != nil { + return codersdk.Response{}, xerrors.Errorf("delete task: %w", err) + } + + return codersdk.Response{ + Message: "Task deleted successfully", + }, nil + }, +} + +type ListTasksArgs struct { + Status codersdk.TaskStatus `json:"status"` + User string `json:"user"` +} + +type ListTasksResponse struct { + Tasks []codersdk.Task `json:"tasks"` +} + +var ListTasks = Tool[ListTasksArgs, ListTasksResponse]{ + Tool: aisdk.Tool{ + Name: ToolNameListTasks, + Description: `List tasks.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "status": map[string]any{ + "type": "string", + "description": "Optional filter by task status.", + }, + "user": map[string]any{ + "type": "string", + "description": userDescription("list tasks"), + }, + }, + Required: []string{}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args ListTasksArgs) (ListTasksResponse, error) { + if args.User == "" { + args.User = codersdk.Me + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + tasks, err := expClient.Tasks(ctx, &codersdk.TasksFilter{ + Owner: args.User, + Status: args.Status, + }) + if err != nil { + return ListTasksResponse{}, xerrors.Errorf("list tasks: %w", err) + } + + return ListTasksResponse{ + Tasks: tasks, + }, nil + }, +} + +type GetTaskStatusArgs struct { + TaskID string `json:"task_id"` +} + +type GetTaskStatusResponse struct { + 
Status codersdk.TaskStatus `json:"status"` + State *codersdk.TaskStateEntry `json:"state"` +} + +var GetTaskStatus = Tool[GetTaskStatusArgs, GetTaskStatusResponse]{ + Tool: aisdk.Tool{ + Name: ToolNameGetTaskStatus, + Description: `Get the status of a task.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "task_id": map[string]any{ + "type": "string", + "description": taskIDDescription("get"), + }, + }, + Required: []string{"task_id"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args GetTaskStatusArgs) (GetTaskStatusResponse, error) { + if args.TaskID == "" { + return GetTaskStatusResponse{}, xerrors.New("task_id is required") + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + + task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + if err != nil { + return GetTaskStatusResponse{}, xerrors.Errorf("resolve task %q: %w", args.TaskID, err) + } + + return GetTaskStatusResponse{ + Status: task.Status, + State: task.CurrentState, + }, nil + }, +} + +type SendTaskInputArgs struct { + TaskID string `json:"task_id"` + Input string `json:"input"` +} + +var SendTaskInput = Tool[SendTaskInputArgs, codersdk.Response]{ + Tool: aisdk.Tool{ + Name: ToolNameSendTaskInput, + Description: `Send input to a running task.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "task_id": map[string]any{ + "type": "string", + "description": taskIDDescription("prompt"), + }, + "input": map[string]any{ + "type": "string", + "description": "The input to send to the task.", + }, + }, + Required: []string{"task_id", "input"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args SendTaskInputArgs) (codersdk.Response, error) { + if args.TaskID == "" { + return codersdk.Response{}, xerrors.New("task_id is required") + } + + if args.Input == "" { + return codersdk.Response{}, xerrors.New("input is required") + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + + 
task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + if err != nil { + return codersdk.Response{}, xerrors.Errorf("resolve task %q: %w", args.TaskID, err) + } + + err = expClient.TaskSend(ctx, task.OwnerName, task.ID, codersdk.TaskSendRequest{ + Input: args.Input, + }) + if err != nil { + return codersdk.Response{}, xerrors.Errorf("send task input %q: %w", args.TaskID, err) + } + + return codersdk.Response{ + Message: "Input sent to task successfully.", + }, nil + }, +} + +type GetTaskLogsArgs struct { + TaskID string `json:"task_id"` +} + +var GetTaskLogs = Tool[GetTaskLogsArgs, codersdk.TaskLogsResponse]{ + Tool: aisdk.Tool{ + Name: ToolNameGetTaskLogs, + Description: `Get the logs of a task.`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "task_id": map[string]any{ + "type": "string", + "description": taskIDDescription("query"), + }, + }, + Required: []string{"task_id"}, + }, + }, + UserClientOptional: true, + Handler: func(ctx context.Context, deps Deps, args GetTaskLogsArgs) (codersdk.TaskLogsResponse, error) { + if args.TaskID == "" { + return codersdk.TaskLogsResponse{}, xerrors.New("task_id is required") + } + + expClient := codersdk.NewExperimentalClient(deps.coderClient) + + task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + if err != nil { + return codersdk.TaskLogsResponse{}, err + } + + logs, err := expClient.TaskLogs(ctx, task.OwnerName, task.ID) + if err != nil { + return codersdk.TaskLogsResponse{}, xerrors.Errorf("get task logs %q: %w", args.TaskID, err) + } + + return logs, nil + }, +} + // NormalizeWorkspaceInput converts workspace name input to standard format. // Handles the following input formats: // - workspace β†’ workspace @@ -1810,3 +2198,13 @@ func newAgentConn(ctx context.Context, client *codersdk.Client, workspace string } return conn, nil } + +const workspaceDescription = "The workspace name in the format [owner/]workspace[.agent]. If an owner is not specified, the authenticated user is used." 
+ +func taskIDDescription(action string) string { + return fmt.Sprintf("ID or workspace identifier in the format [owner/]workspace[.agent] for the task to %s. If an owner is not specified, the authenticated user is used.", action) +} + +func userDescription(action string) string { + return fmt.Sprintf("Username or ID of the user for which to %s. Omit or use the `me` keyword to %s for the authenticated user.", action, action) +} diff --git a/codersdk/toolsdk/toolsdk_test.go b/codersdk/toolsdk/toolsdk_test.go index f89f22e0089d7..beb3cf4339178 100644 --- a/codersdk/toolsdk/toolsdk_test.go +++ b/codersdk/toolsdk/toolsdk_test.go @@ -2,8 +2,11 @@ package toolsdk_test import ( "context" + "database/sql" "encoding/json" "fmt" + "net/http" + "net/http/httptest" "os" "path/filepath" "runtime" @@ -20,16 +23,19 @@ import ( "github.com/coder/aisdk-go" + agentapi "github.com/coder/agentapi-sdk-go" "github.com/coder/coder/v2/agent" "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/toolsdk" "github.com/coder/coder/v2/codersdk/workspacesdk" + "github.com/coder/coder/v2/provisioner/echo" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" ) @@ -791,6 +797,792 @@ func TestTools(t *testing.T) { }) } }) + + t.Run("CreateTask", func(t *testing.T) { + t.Parallel() + + presetID := uuid.New() + // nolint:gocritic // This is in a test package and does not end up in the build + aiTV := dbfake.TemplateVersion(t, store).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: member.ID, + HasAITask: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Preset(database.TemplateVersionPreset{ 
+ ID: presetID, + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + + tests := []struct { + name string + args toolsdk.CreateTaskArgs + error string + }{ + { + name: "OK", + args: toolsdk.CreateTaskArgs{ + TemplateVersionID: aiTV.TemplateVersion.ID.String(), + Input: "do a barrel roll", + User: "me", + }, + }, + { + name: "NoUser", + args: toolsdk.CreateTaskArgs{ + TemplateVersionID: aiTV.TemplateVersion.ID.String(), + Input: "do another barrel roll", + }, + }, + { + name: "NoInput", + args: toolsdk.CreateTaskArgs{ + TemplateVersionID: aiTV.TemplateVersion.ID.String(), + }, + error: "input is required", + }, + { + name: "NotTaskTemplate", + args: toolsdk.CreateTaskArgs{ + TemplateVersionID: r.TemplateVersion.ID.String(), + Input: "do yet another barrel roll", + }, + error: "Template does not have a valid \"coder_ai_task\" resource.", + }, + { + name: "WithPreset", + args: toolsdk.CreateTaskArgs{ + TemplateVersionID: r.TemplateVersion.ID.String(), + TemplateVersionPresetID: presetID.String(), + Input: "not enough barrel rolls", + }, + error: "Template does not have a valid \"coder_ai_task\" resource.", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + _, err = testTool(t, toolsdk.CreateTask, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + } + }) + } + }) + + t.Run("DeleteTask", func(t *testing.T) { + t.Parallel() + + // nolint:gocritic // This is in a test package and does not end up in the build + aiTV := dbfake.TemplateVersion(t, store).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: member.ID, + HasAITask: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Do() + + build1 := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "delete-task-workspace-1", + OrganizationID: 
owner.OrganizationID, + OwnerID: member.ID, + TemplateID: aiTV.Template.ID, + }).WithTask(database.TaskTable{ + Name: "delete-task-1", + Prompt: "delete task 1", + }, nil).Do() + task1 := build1.Task + + build2 := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "delete-task-workspace-2", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + TemplateID: aiTV.Template.ID, + }).WithTask(database.TaskTable{ + Name: "delete-task-2", + Prompt: "delete task 2", + }, nil).Do() + task2 := build2.Task + + tests := []struct { + name string + args toolsdk.DeleteTaskArgs + error string + }{ + { + name: "ByUUID", + args: toolsdk.DeleteTaskArgs{ + TaskID: task1.ID.String(), + }, + }, + { + name: "ByIdentifier", + args: toolsdk.DeleteTaskArgs{ + TaskID: task2.Name, + }, + }, + { + name: "NoID", + args: toolsdk.DeleteTaskArgs{}, + error: "task_id is required", + }, + { + name: "NoTaskByID", + args: toolsdk.DeleteTaskArgs{ + TaskID: uuid.New().String(), + }, + error: "Resource not found", + }, + { + name: "NoTaskByWorkspaceIdentifier", + args: toolsdk.DeleteTaskArgs{ + TaskID: "non-existent", + }, + error: "Resource not found", + }, + { + name: "ExistsButNotATask", + args: toolsdk.DeleteTaskArgs{ + TaskID: r.Workspace.ID.String(), + }, + error: "Resource not found", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + _, err = testTool(t, toolsdk.DeleteTask, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + } + }) + } + }) + + t.Run("ListTasks", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + _, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + taskClient, _ := 
coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Create a template with AI task support using the proper flow. + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: "AI Prompt", Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + taskExpClient := codersdk.NewExperimentalClient(taskClient) + + // This task should not show up since listing is user-scoped. + _, err := expClient.CreateTask(ctx, member.Username, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "task for member", + Name: "list-task-workspace-member", + }) + require.NoError(t, err) + + // Create tasks for taskUser. These should show up in the list. + for i := range 5 { + taskName := fmt.Sprintf("list-task-workspace-%d", i) + task, err := taskExpClient.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: fmt.Sprintf("task %d", i), + Name: taskName, + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have workspace ID") + + // For the first task, stop the workspace to make it paused. + if i == 0 { + ws, err := taskClient.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, taskClient, ws.LatestBuild.ID) + + // Stop the workspace to set task status to paused. 
+ build, err := taskClient.CreateWorkspaceBuild(ctx, task.WorkspaceID.UUID, codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionStop, + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, taskClient, build.ID) + } + } + + tests := []struct { + name string + args toolsdk.ListTasksArgs + expected []string + error string + }{ + { + name: "ListAllOwned", + args: toolsdk.ListTasksArgs{}, + expected: []string{ + "list-task-workspace-0", + "list-task-workspace-1", + "list-task-workspace-2", + "list-task-workspace-3", + "list-task-workspace-4", + }, + }, + { + name: "ListFiltered", + args: toolsdk.ListTasksArgs{ + Status: codersdk.TaskStatusPaused, + }, + expected: []string{ + "list-task-workspace-0", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(taskClient) + require.NoError(t, err) + + res, err := testTool(t, toolsdk.ListTasks, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + require.Len(t, res.Tasks, len(tt.expected)) + for _, task := range res.Tasks { + require.Contains(t, tt.expected, task.Name) + } + } + }) + } + }) + + t.Run("GetTask", func(t *testing.T) { + t.Parallel() + + // nolint:gocritic // This is in a test package and does not end up in the build + aiTV := dbfake.TemplateVersion(t, store).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: member.ID, + HasAITask: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Do() + + build := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "get-task-workspace-1", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + TemplateID: aiTV.Template.ID, + }).WithTask(database.TaskTable{ + Name: "get-task-1", + Prompt: "get task", + }, nil).Do() + task := build.Task + + tests := []struct { + name string + args toolsdk.GetTaskStatusArgs + expected 
codersdk.TaskStatus + error string + }{ + { + name: "ByUUID", + args: toolsdk.GetTaskStatusArgs{ + TaskID: task.ID.String(), + }, + expected: codersdk.TaskStatusInitializing, + }, + { + name: "ByIdentifier", + args: toolsdk.GetTaskStatusArgs{ + TaskID: task.Name, + }, + expected: codersdk.TaskStatusInitializing, + }, + { + name: "NoID", + args: toolsdk.GetTaskStatusArgs{}, + error: "task_id is required", + }, + { + name: "NoTaskByID", + args: toolsdk.GetTaskStatusArgs{ + TaskID: uuid.New().String(), + }, + error: "Resource not found", + }, + { + name: "NoTaskByWorkspaceIdentifier", + args: toolsdk.GetTaskStatusArgs{ + TaskID: "non-existent", + }, + error: "Resource not found", + }, + { + name: "ExistsButNotATask", + args: toolsdk.GetTaskStatusArgs{ + TaskID: r.Workspace.ID.String(), + }, + error: "Resource not found", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + res, err := testTool(t, toolsdk.GetTaskStatus, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + require.Equal(t, tt.expected, res.Status) + } + }) + } + }) + + t.Run("WorkspaceListApps", func(t *testing.T) { + t.Parallel() + + // nolint:gocritic // This is in a test package and does not end up in the build + _ = dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "list-app-workspace-one-agent", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + agents[0].Apps = []*proto.App{ + { + Slug: "zero", + Url: "http://zero.dev.coder.com", + }, + } + return agents + }).Do() + + // nolint:gocritic // This is in a test package and does not end up in the build + _ = dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "list-app-workspace-multi-agent", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + 
}).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + agents[0].Apps = []*proto.App{ + { + Slug: "one", + Url: "http://one.dev.coder.com", + }, + { + Slug: "two", + Url: "http://two.dev.coder.com", + }, + { + Slug: "three", + Url: "http://three.dev.coder.com", + }, + } + agents = append(agents, &proto.Agent{ + Id: uuid.NewString(), + Name: "dev2", + Auth: &proto.Agent_Token{ + Token: uuid.NewString(), + }, + Env: map[string]string{}, + Apps: []*proto.App{ + { + Slug: "four", + Url: "http://four.dev.coder.com", + }, + }, + }) + return agents + }).Do() + + tests := []struct { + name string + args toolsdk.WorkspaceListAppsArgs + expected []toolsdk.WorkspaceListApp + error string + }{ + { + name: "NonExistentWorkspace", + args: toolsdk.WorkspaceListAppsArgs{ + Workspace: "list-appp-workspace-does-not-exist", + }, + error: "failed to find workspace", + }, + { + name: "OneAgentOneApp", + args: toolsdk.WorkspaceListAppsArgs{ + Workspace: "list-app-workspace-one-agent", + }, + expected: []toolsdk.WorkspaceListApp{ + { + Name: "zero", + URL: "http://zero.dev.coder.com", + }, + }, + }, + { + name: "MultiAgent", + args: toolsdk.WorkspaceListAppsArgs{ + Workspace: "list-app-workspace-multi-agent", + }, + error: "multiple agents found, please specify the agent name", + }, + { + name: "MultiAgentOneApp", + args: toolsdk.WorkspaceListAppsArgs{ + Workspace: "list-app-workspace-multi-agent.dev2", + }, + expected: []toolsdk.WorkspaceListApp{ + { + Name: "four", + URL: "http://four.dev.coder.com", + }, + }, + }, + { + name: "MultiAgentMultiApp", + args: toolsdk.WorkspaceListAppsArgs{ + Workspace: "list-app-workspace-multi-agent.dev", + }, + expected: []toolsdk.WorkspaceListApp{ + { + Name: "one", + URL: "http://one.dev.coder.com", + }, + { + Name: "three", + URL: "http://three.dev.coder.com", + }, + { + Name: "two", + URL: "http://two.dev.coder.com", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := 
toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + res, err := testTool(t, toolsdk.WorkspaceListApps, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + require.Equal(t, tt.expected, res.Apps) + } + }) + } + }) + + t.Run("SendTaskInput", func(t *testing.T) { + t.Parallel() + + // Start a fake AgentAPI that accepts GET /status and POST /message. + srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/status" { + httpapi.Write(r.Context(), rw, http.StatusOK, agentapi.GetStatusResponse{ + Status: agentapi.StatusStable, + }) + return + } + if r.Method == http.MethodPost && r.URL.Path == "/message" { + rw.Header().Set("Content-Type", "application/json") + + var req agentapi.PostMessageParams + ok := httpapi.Read(r.Context(), rw, r, &req) + assert.True(t, ok, "failed to read request") + + assert.Equal(t, req.Content, "frob the baz") + assert.Equal(t, req.Type, agentapi.MessageTypeUser) + + httpapi.Write(r.Context(), rw, http.StatusOK, agentapi.PostMessageResponse{ + Ok: true, + }) + return + } + rw.WriteHeader(http.StatusInternalServerError) + })) + t.Cleanup(srv.Close) + + // nolint:gocritic // This is in a test package and does not end up in the build + aiTV := dbfake.TemplateVersion(t, store).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: member.ID, + HasAITask: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Do() + + ws := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "send-task-input-ws", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + TemplateID: aiTV.Template.ID, + }).WithTask(database.TaskTable{ + Name: "send-task-input", + Prompt: "send task input", + }, &proto.App{Url: srv.URL}).Do() + task := ws.Task + + _ = agenttest.New(t, client.URL, ws.AgentToken) + coderdtest.NewWorkspaceAgentWaiter(t, client, 
ws.Workspace.ID).Wait() + + tests := []struct { + name string + args toolsdk.SendTaskInputArgs + error string + }{ + { + name: "ByUUID", + args: toolsdk.SendTaskInputArgs{ + TaskID: task.ID.String(), + Input: "frob the baz", + }, + }, + { + name: "ByIdentifier", + args: toolsdk.SendTaskInputArgs{ + TaskID: task.Name, + Input: "frob the baz", + }, + }, + { + name: "NoID", + args: toolsdk.SendTaskInputArgs{}, + error: "task_id is required", + }, + { + name: "NoInput", + args: toolsdk.SendTaskInputArgs{ + TaskID: "send-task-input", + }, + error: "input is required", + }, + { + name: "NoTaskByID", + args: toolsdk.SendTaskInputArgs{ + TaskID: uuid.New().String(), + Input: "this is ignored", + }, + error: "Resource not found", + }, + { + name: "NoTaskByWorkspaceIdentifier", + args: toolsdk.SendTaskInputArgs{ + TaskID: "non-existent", + Input: "this is ignored", + }, + error: "Resource not found", + }, + { + name: "ExistsButNotATask", + args: toolsdk.SendTaskInputArgs{ + TaskID: r.Workspace.ID.String(), + Input: "this is ignored", + }, + error: "Resource not found", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + _, err = testTool(t, toolsdk.SendTaskInput, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + } + }) + } + }) + + t.Run("GetTaskLogs", func(t *testing.T) { + t.Parallel() + + messages := []agentapi.Message{ + { + Id: 0, + Content: "welcome", + Role: agentapi.RoleAgent, + }, + { + Id: 1, + Content: "frob the dazzle", + Role: agentapi.RoleUser, + }, + { + Id: 2, + Content: "frob dazzled", + Role: agentapi.RoleAgent, + }, + } + + // Start a fake AgentAPI that returns some messages. 
+ srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/messages" { + httpapi.Write(r.Context(), rw, http.StatusOK, agentapi.GetMessagesResponse{ + Messages: messages, + }) + return + } + rw.WriteHeader(http.StatusInternalServerError) + })) + t.Cleanup(srv.Close) + + // nolint:gocritic // This is in a test package and does not end up in the build + aiTV := dbfake.TemplateVersion(t, store).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: member.ID, + HasAITask: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Do() + + ws := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "get-task-logs-ws", + OrganizationID: owner.OrganizationID, + OwnerID: member.ID, + TemplateID: aiTV.Template.ID, + }).WithTask(database.TaskTable{ + Name: "get-task-logs", + Prompt: "get task logs", + }, &proto.App{Url: srv.URL}).Do() + task := ws.Task + + _ = agenttest.New(t, client.URL, ws.AgentToken) + coderdtest.NewWorkspaceAgentWaiter(t, client, ws.Workspace.ID).Wait() + + tests := []struct { + name string + args toolsdk.GetTaskLogsArgs + expected []agentapi.Message + error string + }{ + { + name: "ByUUID", + args: toolsdk.GetTaskLogsArgs{ + TaskID: task.ID.String(), + }, + expected: messages, + }, + { + name: "ByIdentifier", + args: toolsdk.GetTaskLogsArgs{ + TaskID: task.Name, + }, + expected: messages, + }, + { + name: "NoID", + args: toolsdk.GetTaskLogsArgs{}, + error: "task_id is required", + }, + { + name: "NoTaskByID", + args: toolsdk.GetTaskLogsArgs{ + TaskID: uuid.New().String(), + }, + error: "Resource not found", + }, + { + name: "NoTaskByWorkspaceIdentifier", + args: toolsdk.GetTaskLogsArgs{ + TaskID: "non-existent", + }, + error: "Resource not found", + }, + { + name: "ExistsButNotATask", + args: toolsdk.GetTaskLogsArgs{ + TaskID: r.Workspace.ID.String(), + }, + error: "Resource not found", + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tb, err := toolsdk.NewDeps(memberClient) + require.NoError(t, err) + + res, err := testTool(t, toolsdk.GetTaskLogs, tb, tt.args) + if tt.error != "" { + require.Error(t, err) + require.ErrorContains(t, err, tt.error) + } else { + require.NoError(t, err) + require.Len(t, res.Logs, len(tt.expected)) + for i, msg := range tt.expected { + require.Equal(t, msg.Id, int64(res.Logs[i].ID)) + require.Equal(t, msg.Content, res.Logs[i].Content) + if msg.Role == agentapi.RoleUser { + require.Equal(t, codersdk.TaskLogTypeInput, res.Logs[i].Type) + } else { + require.Equal(t, codersdk.TaskLogTypeOutput, res.Logs[i].Type) + } + require.Equal(t, msg.Time, res.Logs[i].Time) + } + } + }) + } + }) } // TestedTools keeps track of which tools have been tested. diff --git a/codersdk/users.go b/codersdk/users.go index 266ef8c59873e..44464f9476ddd 100644 --- a/codersdk/users.go +++ b/codersdk/users.go @@ -41,6 +41,7 @@ type UsersRequest struct { type MinimalUser struct { ID uuid.UUID `json:"id" validate:"required" table:"id" format:"uuid"` Username string `json:"username" validate:"required" table:"username,default_sort"` + Name string `json:"name,omitempty" table:"name"` AvatarURL string `json:"avatar_url,omitempty" format:"uri"` } @@ -50,7 +51,6 @@ type MinimalUser struct { // required by the frontend. 
type ReducedUser struct { MinimalUser `table:"m,recursive_inline"` - Name string `json:"name,omitempty"` Email string `json:"email" validate:"required" table:"email" format:"email"` CreatedAt time.Time `json:"created_at" validate:"required" table:"created at" format:"date-time"` UpdatedAt time.Time `json:"updated_at" table:"updated at" format:"date-time"` diff --git a/codersdk/workspacebuilds.go b/codersdk/workspacebuilds.go index bb9511178c7f4..8c38bd5c6469b 100644 --- a/codersdk/workspacebuilds.go +++ b/codersdk/workspacebuilds.go @@ -89,8 +89,9 @@ type WorkspaceBuild struct { MatchedProvisioners *MatchedProvisioners `json:"matched_provisioners,omitempty"` TemplateVersionPresetID *uuid.UUID `json:"template_version_preset_id" format:"uuid"` HasAITask *bool `json:"has_ai_task,omitempty"` - AITaskSidebarAppID *uuid.UUID `json:"ai_task_sidebar_app_id,omitempty" format:"uuid"` - HasExternalAgent *bool `json:"has_external_agent,omitempty"` + // Deprecated: This field has been replaced with `Task.WorkspaceAppID` + AITaskSidebarAppID *uuid.UUID `json:"ai_task_sidebar_app_id,omitempty" format:"uuid"` + HasExternalAgent *bool `json:"has_external_agent,omitempty"` } // WorkspaceResource describes resources used to create a workspace, for instance: diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index f190d58be6bfb..709c9257c8350 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -72,6 +72,8 @@ type Workspace struct { // Once a prebuilt workspace is claimed by a user, it transitions to a regular workspace, // and IsPrebuild returns false. IsPrebuild bool `json:"is_prebuild"` + // TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task. 
+ TaskID uuid.NullUUID `json:"task_id,omitempty"` } func (w Workspace) FullName() string { diff --git a/compose.yaml b/compose.yaml index 409ecda158c1b..6bb78b6123a4a 100644 --- a/compose.yaml +++ b/compose.yaml @@ -2,7 +2,7 @@ services: coder: # This MUST be stable for our documentation and # other automations. - image: ghcr.io/coder/coder:${CODER_VERSION:-latest} + image: ${CODER_REPO:-ghcr.io/coder/coder}:${CODER_VERSION:-latest} ports: - "7080:7080" environment: diff --git a/docs/admin/infrastructure/validated-architectures/10k-users.md b/docs/admin/infrastructure/validated-architectures/10k-users.md new file mode 100644 index 0000000000000..486ac8192c991 --- /dev/null +++ b/docs/admin/infrastructure/validated-architectures/10k-users.md @@ -0,0 +1,124 @@ +# Reference Architecture: up to 10,000 users + +> [!CAUTION] +> This page is a work in progress. +> +> We are actively testing different load profiles for this user target and will be updating +> recommendations. Use these recommendations as a starting point, but monitor your cluster resource +> utilization and adjust. + +The 10,000 users architecture targets large-scale enterprises with development +teams in multiple geographic regions. + +**Geographic Distribution**: For these tests we deploy on 3 cloud-managed Kubernetes clusters in +the following regions: + +1. USA - Primary - Coderd collocated with the PostgreSQL database deployment. +2. Europe - Workspace Proxies +3. Asia - Workspace Proxies + +**High Availability**: Typically, such scale requires a fully-managed HA +PostgreSQL service, and all Coder observability features enabled for operational +purposes. + +**Observability**: Deploy monitoring solutions to gather Prometheus metrics and +visualize them with Grafana to gain detailed insights into infrastructure and +application behavior. This allows operators to respond quickly to incidents and +continuously improve the reliability and performance of the platform. 
+ +## Testing Methodology + +### Workspace Network Traffic + +6000 concurrent workspaces (2000 per region), each sending 10 kB/s application traffic. + +Test procedure: + +1. Create workspaces. This happens simultaneously in each region with 200 provisioners (and thus 600 concurrent builds). +2. Wait 5 minutes to establish baselines for metrics. +3. Generate 10 kB/s traffic to each workspace (originating within the same region & cluster). + +After, we examine the Coderd, Workspace Proxy, and Database metrics to look for issues. + +### Dynamic Parameters + +1000 connections simulating changing parameters while configuring a new workspace. + +Test procedure: + +1. Create a template with complex parameter logic and multiple template versions. +1. Partition the connections among the template versions (forces Coder to process multiple template files) +1. Simultaneously connect to the dynamic-parameters API websocket endpoint for the template version +1. Wait for the initial parameter update. +1. Send a new parameter value that has cascading effects among other parameters. +1. Wait for the next update. + +After, we examine the latency in the initial connection and update, as well as Coderd and Database metrics to look for +issues. + +### API Request Traffic + +To be determined. + +## Hardware recommendations + +### Coderd + +These are deployed in the Primary region only. + +| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | +|----------------|--------------|----------|----------------------------| +| 4 vCPU (4000m) | 12 GiB | 10 | `c2d-standard-16` | + +### Provisioners + +These are deployed in each of the 3 regions. 
+ +| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | +|-----------------|--------------|----------|----------------------------| +| 0.1 vCPU (100m) | 1 GiB | 200 | `c2d-standard-16` | + +**Footnotes**: + +- Each provisioner handles a single concurrent build, so this configuration implies 200 concurrent + workspace builds per region. +- Provisioners are run as a separate Kubernetes Deployment from Coderd, although they may + share the same node pool. +- Separate provisioners into different namespaces in favor of zero-trust or + multi-cloud deployments. + +### Workspace Proxies + +These are deployed in the non-Primary regions only. + +| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | +|----------------|--------------|----------|----------------------------| +| 4 vCPU (4000m) | 12 GiB | 10 | `c2d-standard-16` | + +**Footnotes**: + +- Our testing implies this is somewhat overspecced for the loads we have tried. We are in process of revising these numbers. + +### Workspaces + +These numbers are for each of the 3 regions. We recommend that you use a separate node pool for user Workspaces. + +| Users | Node capacity | Replicas | GCP | AWS | Azure | +|-------------|----------------------|-------------------------------|------------------|--------------|-------------------| +| Up to 3,000 | 8 vCPU, 32 GB memory | 256 nodes, 12 workspaces each | `t2d-standard-8` | `m5.2xlarge` | `Standard_D8s_v3` | + +**Footnotes**: + +- Assumed that a workspace user needs 2 GB memory to perform +- Maximum number of Kubernetes workspace pods per node: 256 +- As workspace nodes can be distributed between regions, on-premises networks + and cloud areas, consider different namespaces in favor of zero-trust or + multi-cloud deployments. + +### Database nodes + +We conducted our test using the `db-custom-16-61440` tier on Google Cloud SQL. + +**Footnotes**: + +- This database tier was only just able to keep up with 600 concurrent builds in our tests. 
diff --git a/docs/admin/infrastructure/validated-architectures/index.md b/docs/admin/infrastructure/validated-architectures/index.md index 6bd18f7f3c132..59602f22bc47a 100644 --- a/docs/admin/infrastructure/validated-architectures/index.md +++ b/docs/admin/infrastructure/validated-architectures/index.md @@ -220,6 +220,8 @@ For sizing recommendations, see the below reference architectures: - [Up to 3,000 users](3k-users.md) +- DRAFT: [Up to 10,000 users](10k-users.md) + ### AWS Instance Types For production AWS deployments, we recommend using non-burstable instance types, diff --git a/docs/admin/integrations/prometheus.md b/docs/admin/integrations/prometheus.md index 47fbc575c7c2e..f3820bdd298dd 100644 --- a/docs/admin/integrations/prometheus.md +++ b/docs/admin/integrations/prometheus.md @@ -146,6 +146,8 @@ deployment. They will always be available from the agent. | `coderd_prebuilt_workspace_claim_duration_seconds` | histogram | Time to claim a prebuilt workspace by organization, template, and preset. | `organization_name` `preset_name` `template_name` | | `coderd_provisionerd_job_timings_seconds` | histogram | The provisioner job time duration in seconds. | `provisioner` `status` | | `coderd_provisionerd_jobs_current` | gauge | The number of currently running provisioner jobs. | `provisioner` | +| `coderd_provisionerd_num_daemons` | gauge | The number of provisioner daemons. | | +| `coderd_provisionerd_workspace_build_timings_seconds` | histogram | The time taken for a workspace to build. | `status` `template_name` `template_version` `workspace_transition` | | `coderd_workspace_builds_total` | counter | The number of workspaces started, updated, or deleted. | `action` `owner_email` `status` `template_name` `template_version` `workspace_name` | | `coderd_workspace_creation_duration_seconds` | histogram | Time to create a workspace by organization, template, preset, and type (regular or prebuild). 
| `organization_name` `preset_name` `template_name` `type` | | `coderd_workspace_creation_total` | counter | Total regular (non-prebuilt) workspace creations by organization, template, and preset. | `organization_name` `preset_name` `template_name` | diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md index e87a4dd1ac27d..b1461cfec58a6 100644 --- a/docs/admin/monitoring/notifications/index.md +++ b/docs/admin/monitoring/notifications/index.md @@ -14,6 +14,13 @@ user(s) of the event. Coder supports the following list of events: +### Task Events + +These notifications are sent to the owner of the workspace where the task is running: + +- Task Idle +- Task Working + ### Template Events These notifications are sent to users with **template admin** roles: diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 37ed2a1c00694..387bdd9836a19 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -32,6 +32,7 @@ We track the following resources: | OrganizationSyncSettings
| |
FieldTracked
assign_defaulttrue
fieldtrue
mappingtrue
| | PrebuildsSettings
| |
FieldTracked
idfalse
reconciliation_pausedtrue
| | RoleSyncSettings
| |
FieldTracked
fieldtrue
mappingtrue
| +| TaskTable
| |
FieldTracked
created_atfalse
deleted_atfalse
idtrue
nametrue
organization_idfalse
owner_idtrue
prompttrue
template_parameterstrue
template_version_idtrue
workspace_idtrue
| | Template
write, delete | |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
cors_behaviortrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
use_classic_parameter_flowtrue
user_acltrue
| | TemplateVersion
create, write | |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
external_auth_providersfalse
has_ai_taskfalse
has_external_agentfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
source_example_idfalse
template_idtrue
updated_atfalse
| | User
create, write, delete | |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
is_systemtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
updated_atfalse
usernametrue
| @@ -125,6 +126,56 @@ log entry: 2023-06-13 03:43:29.233 [info] coderd: audit_log ID=95f7c392-da3e-480c-a579-8909f145fbe2 Time="2023-06-13T03:43:29.230422Z" UserID=6c405053-27e3-484a-9ad7-bcb64e7bfde6 OrganizationID=00000000-0000-0000-0000-000000000000 Ip= UserAgent= ResourceType=workspace_build ResourceID=988ae133-5b73-41e3-a55e-e1e9d3ef0b66 ResourceTarget="" Action=start Diff="{}" StatusCode=200 AdditionalFields="{\"workspace_name\":\"linux-container\",\"build_number\":\"7\",\"build_reason\":\"initiator\",\"workspace_owner\":\"\"}" RequestID=9682b1b5-7b9f-4bf2-9a39-9463f8e41cd6 ResourceIcon="" ``` +## Purging Old Audit Logs + +> [!WARNING] +> Audit Logs provide critical security and compliance information. Purging Audit Logs may impact your organization's ability +> to investigate security incidents or meet compliance requirements. Consult your security and compliance teams before purging any audit data. + +Audit Logs are not automatically purged from the database, though they can account for a large amount of disk usage. +Use the following query to determine the amount of disk space used by the `audit_logs` table. + +```sql +SELECT + relname AS table_name, + pg_size_pretty(pg_total_relation_size(relid)) AS total_size, + pg_size_pretty(pg_relation_size(relid)) AS table_size, + pg_size_pretty(pg_indexes_size(relid)) AS indexes_size, + (SELECT COUNT(*) FROM audit_logs) AS total_records +FROM pg_catalog.pg_statio_user_tables +WHERE relname = 'audit_logs' +ORDER BY pg_total_relation_size(relid) DESC; +``` + +Should you wish to purge these records, it is safe to do so. This can only be done by running SQL queries +directly against the `audit_logs` table in the database. We advise users to only purge old records (>1yr) +and in accordance with your compliance requirements. 
+ +### Backup/Archive + +Consider exporting or archiving these records before deletion: + +```sql +-- Export to CSV +COPY (SELECT * FROM audit_logs WHERE time < CURRENT_TIMESTAMP - INTERVAL '1 year') +TO '/path/to/audit_logs_archive.csv' DELIMITER ',' CSV HEADER; + +-- Copy to archive table +CREATE TABLE audit_logs_archive AS +SELECT * FROM audit_logs WHERE time < CURRENT_TIMESTAMP - INTERVAL '1 year'; +``` + +### Permanent Deletion + +> [!NOTE] +> For large `audit_logs` tables, consider running the `DELETE` operation during maintenance windows as it may impact +> database performance. You can also batch the deletions to reduce lock time. + +```sql +DELETE FROM audit_logs WHERE time < CURRENT_TIMESTAMP - INTERVAL '1 year'; +-- Consider running `VACUUM VERBOSE audit_logs` afterwards for large datasets to reclaim disk space. +``` + ## How to Enable Audit Logs -This feature is only available with a [Premium license](../licensing/index.md). +This feature is only available with a [Premium license](../licensing/index.md), and is automatically enabled. diff --git a/docs/admin/setup/appearance.md b/docs/admin/setup/appearance.md index 38c85a5439d89..66dbc2587e78e 100644 --- a/docs/admin/setup/appearance.md +++ b/docs/admin/setup/appearance.md @@ -57,7 +57,18 @@ server. The link icons are optional, and can be set to any url or [builtin icon](../templates/extending-templates/icons.md#bundled-icons), -additionally `bug`, `chat`, and `docs` are available as three special icons. +additionally `bug`, `chat`, `docs`, and `star` are available as special icons. + +### Location + +The `location` property is optional and determines where the support link will +be displayed: + +- `navbar` - displays the link as a button in the top navigation bar +- `dropdown` - displays the link in the user dropdown menu (default) + +If the `location` property is not specified, the link will be displayed in the +user dropdown menu. 
### Configuration @@ -77,7 +88,7 @@ coder: "https://codercom.slack.com/archives/C014JH42DBJ", "icon": "/icon/slack.svg"}, {"name": "Hello Discord", "target": "https://discord.gg/coder", "icon": - "/icon/discord.svg"}, + "/icon/discord.svg", "location": "navbar"}, {"name": "Hello Foobar", "target": "https://foo.com/bar", "icon": "/emojis/1f3e1.png"}] ``` @@ -88,12 +99,12 @@ if running as a system service, set an environment variable `CODER_SUPPORT_LINKS` in `/etc/coder.d/coder.env` as follows, ```env -CODER_SUPPORT_LINKS='[{"name": "Hello GitHub", "target": "https://github.com/coder/coder", "icon": "bug"}, {"name": "Hello Slack", "target": "https://codercom.slack.com/archives/C014JH42DBJ", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/slack.svg"}, {"name": "Hello Discord", "target": "https://discord.gg/coder", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/discord.svg"}, {"name": "Hello Foobar", "target": "https://discord.gg/coder", "icon": "/emojis/1f3e1.png"}]' +CODER_SUPPORT_LINKS='[{"name": "Hello GitHub", "target": "https://github.com/coder/coder", "icon": "bug"}, {"name": "Hello Slack", "target": "https://codercom.slack.com/archives/C014JH42DBJ", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/slack.svg"}, {"name": "Hello Discord", "target": "https://discord.gg/coder", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/discord.svg", "location": "navbar"}, {"name": "Hello Foobar", "target": "https://discord.gg/coder", "icon": "/emojis/1f3e1.png"}]' ``` For CLI, use, ```shell -export CODER_SUPPORT_LINKS='[{"name": "Hello GitHub", "target": "https://github.com/coder/coder", "icon": "bug"}, {"name": "Hello Slack", "target": "https://codercom.slack.com/archives/C014JH42DBJ", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/slack.svg"}, {"name": "Hello Discord", "target": "https://discord.gg/coder", "icon": 
"https://raw.githubusercontent.com/coder/coder/main/site/static/icon/discord.svg"}, {"name": "Hello Foobar", "target": "https://discord.gg/coder", "icon": "/emojis/1f3e1.png"}]' +export CODER_SUPPORT_LINKS='[{"name": "Hello GitHub", "target": "https://github.com/coder/coder", "icon": "bug"}, {"name": "Hello Slack", "target": "https://codercom.slack.com/archives/C014JH42DBJ", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/slack.svg"}, {"name": "Hello Discord", "target": "https://discord.gg/coder", "icon": "https://raw.githubusercontent.com/coder/coder/main/site/static/icon/discord.svg", "location": "navbar"}, {"name": "Hello Foobar", "target": "https://discord.gg/coder", "icon": "/emojis/1f3e1.png"}]' coder-server ``` diff --git a/docs/admin/templates/extending-templates/prebuilt-workspaces.md b/docs/admin/templates/extending-templates/prebuilt-workspaces.md index 61734679d4c7d..748fc40c98d9c 100644 --- a/docs/admin/templates/extending-templates/prebuilt-workspaces.md +++ b/docs/admin/templates/extending-templates/prebuilt-workspaces.md @@ -247,6 +247,82 @@ When prebuilt workspaces are configured for an organization, Coder creates a "pr If a quota is exceeded, the prebuilt workspace will fail provisioning the same way other workspaces do. +### Managing prebuild provisioning queues + +Prebuilt workspaces can overwhelm a Coder deployment, causing significant delays when users and template administrators create new workspaces or manage their templates. Fundamentally, this happens when provisioners are not able to meet the demand for provisioner jobs. Prebuilds contribute to provisioner demand by scheduling many jobs in bursts whenever templates are updated. The solution is to either increase the number of provisioners or decrease the number of requested prebuilt workspaces across the entire system. 
+ +To identify if prebuilt workspaces have overwhelmed the available provisioners in your Coder deployment, look for: + +- Large or growing queue of prebuild-related jobs +- User workspace creation is slow +- Publishing a new template version is not reflected in the UI because the associated template import job has not yet finished + +The troubleshooting steps below will help you assess and resolve this situation: + +1) Pause prebuilt workspace reconciliation to stop the problem from getting worse +2) Check how many prebuild jobs are clogging your provisioner queue +3) Cancel excess prebuild jobs to free up provisioners for human users +4) Fix any problematic templates that are causing the issue +5) Resume prebuilt reconciliation once everything is back to normal + +#### Pause prebuilds to limit potential impact + +Run: + +```bash +coder prebuilds pause +``` + +This prevents further pollution of your provisioner queues by stopping the prebuilt workspaces feature from scheduling new creation jobs. While the pause is in effect, no new prebuilt workspaces will be scheduled for any templates in any organizations across the entire Coder deployment. Therefore, the command must be executed by a user with Owner level access. Existing prebuilt workspaces will remain in place. + +**Important**: Remember to run `coder prebuilds resume` once all impact has been mitigated (see the last step in this section). + +#### Assess prebuild queue impact + +Next, run: + +```bash +coder provisioner jobs list --status=pending --initiator=prebuilds +``` + +This will show a list of all pending jobs that have been enqueued by the prebuilt workspace system. The length of this list indicates whether prebuilt workspaces have overwhelmed your Coder deployment. + +Human-initiated jobs have priority over pending prebuild jobs, but running prebuild jobs cannot be preempted. 
A long list of pending prebuild jobs increases the likelihood that all provisioners are already occupied when a user wants to create a workspace or import a new template version. This increases the likelihood that users will experience delays waiting for the next available provisioner. + +#### Cancel pending prebuild jobs + +Human-initiated jobs are prioritized above prebuild jobs in the provisioner queue. However, if no human-initiated jobs are queued when a provisioner becomes available, a prebuild job will occupy the provisioner. This can delay human-initiated jobs that arrive later, forcing them to wait for the next available provisioner. + +To expedite fixing a broken template by ensuring maximum provisioner availability, cancel all pending prebuild jobs: + +```bash +coder provisioner jobs list --status=pending --initiator=prebuilds | jq -r '.[].id' | xargs -n1 -P2 -I{} coder provisioner jobs cancel {} +``` + +This will clear the provisioner queue of all jobs that were not initiated by a human being, which increases the probability that a provisioner will be available when the next human operator needs it. It does not cancel running provisioner jobs, so there may still be some delay in processing new provisioner jobs until a provisioner completes its current job. + +At this stage, most prebuild related impact will have been mitigated. There may still be a bugged template version, but it will no longer pollute provisioner queues with prebuilt workspace jobs. If the latest version of a template is also broken for reasons unrelated to prebuilds, then users are able to create workspaces using a previous template version. Some running jobs may have been initiated by the prebuild system, but these cannot be cancelled without potentially orphaning resources that have already been deployed by Terraform. Depending on your deployment and template provisioning times, it might be best to upload a new template version and wait for it to be processed organically. 
+ +#### Cancel running prebuild provisioning jobs (Optional) + +If you need to expedite the processing of human-related jobs at the cost of some infrastructure housekeeping, you can run: + +```bash +coder provisioner jobs list --status=running --initiator=prebuilds | jq -r '.[].id' | xargs -n1 -P2 -I{} coder provisioner jobs cancel {} +``` + +This should be done as a last resort. It will cancel running prebuild jobs (orphaning any resources that have already been deployed) and immediately make room for human-initiated jobs. Orphaned infrastructure will need to be manually cleaned up by a human operator. The process to identify and clear these orphaned resources will likely require administrative access to the infrastructure that hosts Coder workspaces. Furthermore, the ability to identify such orphaned resources will depend on metadata that should be included in the workspace template. + +Once the provisioner queue has been cleared and all templates have been fixed, resume prebuild reconciliation by running: + +#### Resume prebuild reconciliation + +```bash +coder prebuilds resume +``` + +This re-enables the prebuilt workspaces feature and allows the reconciliation loop to resume normal operation. The system will begin creating new prebuilt workspaces according to your template configurations. + ### Template configuration best practices #### Preventing resource replacement @@ -280,7 +356,7 @@ resource "docker_container" "workspace" { Limit the scope of `ignore_changes` to include only the fields specified in the notification. If you include too many fields, Terraform might ignore changes that wouldn't otherwise cause drift. -Learn more about `ignore_changes` in the [Terraform documentation](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes). +Learn more about `ignore_changes` in the [Terraform documentation](https://developer.hashicorp.com/terraform/language/meta-arguments#lifecycle). 
_A note on "immutable" attributes: Terraform providers may specify `ForceNew` on their resources' attributes. Any change to these attributes require the replacement (destruction and recreation) of the managed resource instance, rather than an in-place update. @@ -288,6 +364,67 @@ For example, the [`ami`](https://registry.terraform.io/providers/hashicorp/aws/l has [`ForceNew`](https://github.com/hashicorp/terraform-provider-aws/blob/main/internal/service/ec2/ec2_instance.go#L75-L81) set, since the AMI cannot be changed in-place._ +### Preventing prebuild queue contention (recommended) + +The section [Managing prebuild provisioning queues](#managing-prebuild-provisioning-queues) covers how to recover when prebuilds have already overwhelmed the provisioner queue. +This section outlines a **best-practice configuration** to prevent that situation by isolating prebuild jobs to a dedicated provisioner pool. +This setup is optional and requires minor template changes. + +Coder supports [external provisioners and provisioner tags](../../provisioners/index.md), which allows you to route jobs to provisioners with matching tags. +By creating external provisioners with a special tag (e.g., `is_prebuild=true`) and updating the template to conditionally add that tag for prebuild jobs, +all prebuild work is handled by the prebuild pool. +This keeps other provisioners available to handle user-initiated jobs. + +#### Setup + +1. Create a provisioner key with a prebuild tag (e.g., `is_prebuild=true`). + Provisioner keys are org-scoped and their tags are inferred automatically by provisioner daemons that use the key. + **Note:** `coder_workspace_tags` are cumulative, so if your template already defines provisioner tags, you will need to create the provisioner key with the same tags plus the `is_prebuild=true` tag so that prebuild jobs correctly match the dedicated prebuild pool. 
+ See [Scoped Key](../../provisioners/index.md#scoped-key-recommended) for instructions on how to create a provisioner key. + +1. Deploy a separate provisioner pool using that key (for example, via the [Helm coder-provisioner chart](https://github.com/coder/coder/pkgs/container/chart%2Fcoder-provisioner)). + Daemons in this pool will only execute jobs that include all of the tags specified in their provisioner key. + See [External provisioners](../../provisioners/index.md) for environment-specific deployment examples. + +1. Update the template to conditionally add the prebuild tag for prebuild jobs. + + ```hcl + data "coder_workspace_tags" "prebuilds" { + count = data.coder_workspace_owner.me.name == "prebuilds" ? 1 : 0 + tags = { + "is_prebuild" = "true" + } + } + ``` + +Prebuild workspaces are a special type of workspace owned by the system user `prebuilds`. +The value `data.coder_workspace_owner.me.name` returns the name of the workspace owner, for prebuild workspaces, this value is `"prebuilds"`. +Because the condition evaluates based on the workspace owner, provisioning or deprovisioning prebuilds automatically applies the prebuild tag, whereas regular jobs (like workspace creation or template import) do not. + +> [!NOTE] +> The prebuild provisioner pool can still accept non-prebuild jobs. +> To achieve a fully isolated setup, add an additional tag (`is_prebuild=false`) to your standard provisioners, ensuring a clean separation between prebuild and non-prebuild workloads. +> See [Provisioner Tags](../../provisioners/index.md#provisioner-tags) for further details. + +#### Validation + +To confirm that prebuild jobs are correctly routed to the new provisioner pool, use the Provisioner Jobs dashboard or the [`coder provisioner jobs list`](../../../reference/cli/provisioner_jobs_list.md) CLI command to inspect job metadata and tags. +Follow these steps: + +1. Publish the new template version. + +1. Validate the status of the prebuild provisioners. 
+ Check the Provisioners page in the Coder dashboard or run the [`coder provisioner list`](../../../reference/cli/provisioner_list.md) CLI command to ensure all prebuild provisioners are up to date and the tags are properly set. + +1. Wait for the prebuilds reconciliation loop to run. + The loop frequency is controlled by the configuration value [`CODER_WORKSPACE_PREBUILDS_RECONCILIATION_INTERVAL`](../../../reference/cli/server.md#--workspace-prebuilds-reconciliation-interval). + When the loop runs, it will provision prebuilds for the new template version and deprovision prebuilds for the previous version. + Both provisioning and deprovisioning jobs for prebuilds should display the tag `is_prebuild=true`. + +1. Create a new workspace from a preset. + Whether the preset uses a prebuild pool or not, the resulting job should not include the `is_prebuild=true` tag. + This confirms that only prebuild-related jobs are routed to the dedicated prebuild provisioner pool. + ### Monitoring and observability #### Available metrics diff --git a/docs/admin/templates/extending-templates/workspace-tags.md b/docs/admin/templates/extending-templates/workspace-tags.md index 7a5aca5179d01..279d01adcf84f 100644 --- a/docs/admin/templates/extending-templates/workspace-tags.md +++ b/docs/admin/templates/extending-templates/workspace-tags.md @@ -74,6 +74,9 @@ that every tag set is associated with at least one healthy provisioner. > [!NOTE] > It may be useful to run at least one provisioner with no additional > tag restrictions that is able to take on any job. +> +> `coder_workspace_tags` are cumulative. +> Jobs will only match provisioners that have all tags defined in both your template configuration and `coder_workspace_tags`. 
 ### Parameters types
diff --git a/docs/ai-coder/agent-boundary.md b/docs/ai-coder/agent-boundary.md
new file mode 100644
index 0000000000000..36e36a08b6d2f
--- /dev/null
+++ b/docs/ai-coder/agent-boundary.md
@@ -0,0 +1,50 @@
+# Agent Boundary
+
+Agent Boundaries are process-level firewalls that restrict and audit what autonomous programs, such as AI agents, can access and use.
+
+![Screenshot of Agent Boundaries blocking a process](../images/guides/ai-agents/boundary.png)Example of Agent Boundaries blocking a process.
+
+## Supported Agents
+
+Agent Boundaries support the securing of any terminal-based agent, including your own custom agents.
+
+## Features
+
+Agent Boundaries offer network policy enforcement, which blocks domains and HTTP verbs to prevent exfiltration, and writes logs to the workspace.
+
+## Getting Started with Boundary
+
+The easiest way to use Agent Boundaries is through existing Coder modules, such as the [Claude Code module](https://registry.coder.com/modules/coder/claude-code). It can also be run directly in the terminal by installing the [CLI](https://github.com/coder/boundary).
+
+Below is an example of how to configure Agent Boundaries for use in your workspace.
+
+```tf
+module "claude-code" {
+  source = "dev.registry.coder.com/coder/claude-code/coder"
+  enable_boundary = true
+  boundary_version = "main"
+  boundary_log_dir = "/tmp/boundary_logs"
+  boundary_log_level = "WARN"
+  boundary_additional_allowed_urls = ["GET *google.com"]
+  boundary_proxy_port = "8087"
+  version = "3.2.1"
+}
+```
+
+- `boundary_version` defines what version of Boundary is being applied. This is set to `main`, which points to the main branch of `coder/boundary`.
+- `boundary_log_dir` is the directory where log files are written to when the workspace spins up.
+- `boundary_log_level` defines the verbosity at which requests are logged.
Boundary uses the following verbosity levels: + - `WARN`: logs only requests that have been blocked by Boundary + - `INFO`: logs all requests at a high level + - `DEBUG`: logs all requests in detail +- `boundary_additional_allowed_urls`: defines the URLs that the agent can access, in addition to the default URLs required for the agent to work + - `github.com` means only the specific domain is allowed + - `*.github.com` means only the subdomains are allowed - the specific domain is excluded + - `*github.com` means both the specific domain and all subdomains are allowed + - You can also filter on methods, hostnames, and paths - for example, `GET,HEAD *github.com/coder`. + +You can also run Agent Boundaries directly in your workspace and configure it per template. You can do so by installing the [binary](https://github.com/coder/boundary) into the workspace image or at start-up, using the following command: + +```bash +curl -fsSL https://raw.githubusercontent.com/coder/boundary/main/install.sh | bash + ``` diff --git a/docs/ai-coder/ai-bridge.md b/docs/ai-coder/ai-bridge.md new file mode 100644 index 0000000000000..a993cee71319c --- /dev/null +++ b/docs/ai-coder/ai-bridge.md @@ -0,0 +1,300 @@ +# AI Bridge + +![AI bridge diagram](../images/aibridge/aibridge_diagram.png) + +Bridge is a smart proxy for AI. It acts as a man-in-the-middle between your users' coding agents / IDEs +and providers like OpenAI and Anthropic. By intercepting all the AI traffic between these clients and +the upstream APIs, Bridge can record user prompts, token usage, and tool invocations. + +Bridge solves 3 key problems: + +1. **Centralized authn/z management**: no more issuing & managing API tokens for OpenAI/Anthropic usage. + Users use their Coder session or API tokens to authenticate with `coderd` (Coder control plane), and + `coderd` securely communicates with the upstream APIs on their behalf. Use a single key for all users. +2. 
**Auditing and attribution**: all interactions with AI services, whether autonomous or human-initiated, + will be audited and attributed back to a user. +3. **Centralized MCP administration**: define a set of approved MCP servers and tools which your users may + use, and prevent users from using their own. + +## When to use AI Bridge + +As the library of LLMs and their associated tools grow, administrators are pressured to provide auditing, measure adoption, provide tools through MCP, and track token spend. Disparate SAAS platforms provide _some_ of these for _some_ tools, but there is no centralized, secure solution for these challenges. + +If you are an administrator or devops leader looking to: + +- Measure AI tooling adoption across teams or projects +- Provide an LLM audit trail to security administrators +- Manage token spend in a central dashboard +- Investigate opportunities for AI automation +- Uncover the high-leverage use cases from experienced engineers + +We advise trying Bridge as self-hosted proxy to monitor LLM usage agnostically across AI powered IDEs like Cursor and headless agents like Claude Code. + +## Setup + +Bridge runs inside the Coder control plane, requiring no separate compute to deploy or scale. Once enabled, `coderd` hosts the bridge in-memory and brokers traffic to your configured AI providers on behalf of authenticated users. + +**Required**: + +1. A **premium** licensed Coder deployment +1. Feature must be [enabled](#activation) using the server flag +1. One or more [provider](#providers) API keys must be configured + +### Activation + +You will need to enable AI Bridge explicitly: + +```sh +CODER_AIBRIDGE_ENABLED=true coder server +# or +coder server --aibridge-enabled=true +``` + +### Providers + +Bridge currently supports OpenAI and Anthropic APIs. + +**API Key**: + +The single key used to authenticate all requests from Bridge to OpenAI/Anthropic APIs. 
+ +- `CODER_AIBRIDGE_OPENAI_KEY` or `--aibridge-openai-key` +- `CODER_AIBRIDGE_ANTHROPIC_KEY` or `--aibridge-anthropic-key` + +**Base URL**: + +The API to which Bridge will relay requests. + +- `CODER_AIBRIDGE_OPENAI_BASE_URL` or `--aibridge-openai-base-url`, defaults to `https://api.openai.com/v1/` +- `CODER_AIBRIDGE_ANTHROPIC_BASE_URL` or `--aibridge-anthropic-base-url`, defaults to `https://api.anthropic.com/` + +Bridge is compatible with _[Google Vertex AI](https://cloud.google.com/vertex-ai?hl=en)_, _[AWS Bedrock](https://aws.amazon.com/bedrock/)_, and other LLM brokers. You may specify the base URL(s) above to the appropriate API endpoint for your provider. + +--- + +> [!NOTE] +> See [Supported APIs](#supported-apis) section below for a comprehensive list. + +## Client Configuration + +Once AI Bridge is enabled on the server, your users need to configure their AI coding tools to use it. This section explains how users should configure their clients to connect to AI Bridge. + +### Setting Base URLs + +The exact configuration method varies by client β€” some use environment variables, others use configuration files or UI settings: + +- **OpenAI-compatible clients**: Set the base URL (commonly via the `OPENAI_BASE_URL` environment variable) to `https://coder.example.com/api/v2/aibridge/openai/v1` +- **Anthropic-compatible clients**: Set the base URL (commonly via the `ANTHROPIC_BASE_URL` environment variable) to `https://coder.example.com/api/v2/aibridge/anthropic` + +Replace `coder.example.com` with your actual Coder deployment URL. 
+ +### Authentication + +Instead of distributing provider-specific API keys (OpenAI/Anthropic keys) to users, they authenticate to AI Bridge using their **Coder session token** or **API key**: + +- **OpenAI clients**: Users set `OPENAI_API_KEY` to their Coder session token or API key +- **Anthropic clients**: Users set `ANTHROPIC_API_KEY` to their Coder session token or API key + +Users can generate a Coder API key using: + +```sh +coder tokens create +``` + +Template admins can pre-configure authentication in templates using [`data.coder_workspace_owner.me.session_token`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_owner#session_token-1) to automatically configure the workspace owner's credentials. + +#### Compatibility Notes + +Most AI coding assistants that support custom base URLs can work with AI Bridge. However, client-specific configuration requirements vary: + +- Some clients require specific URL formats (e.g. try removing the `/v1` suffix) +- Some clients may proxy requests through their own servers, limiting compatibility (e.g. Cursor) +- Some clients may not support custom base URLs at all (e.g. Copilot CLI, Sourcegraph Amp) + +Consult your specific AI client's documentation for details on configuring custom API endpoints. + +## Collected Data + +Bridge collects: + +- The last `user` prompt of each request +- All token usage (associated with each prompt) +- Every tool invocation + +All of these records are associated to an "interception" record, which maps 1:1 with requests received from clients but may involve several interactions with upstream providers. Interceptions are associated with a Coder identity, allowing you to map consumption and cost with teams or individuals in your organization: + +![User Prompt logging](../images/aibridge/grafana_user_prompts_logging.png) + +These logs can be used to determine usage patterns, track costs, and evaluate tooling adoption. 
+ +This data is currently accessible through the API and CLI, which we advise administrators export to their observability platform of choice. We've configured a Grafana dashboard to display Claude Code usage internally which can be imported as a starting point for your tooling adoption metrics. + +![User Leaderboard](../images/aibridge/grafana_user_leaderboard.png) + +We provide an example Grafana dashboard that you can import as a starting point for your tooling adoption metrics. See [here](https://github.com/coder/coder/blob/main/examples/monitoring/dashboards/grafana/aibridge/README.md). + +## Implementation Details + +`coderd` runs an in-memory instance of `aibridged`, whose logic is mostly contained in https://github.com/coder/aibridge. In future releases we will support running external instances for higher throughput and complete memory isolation from `coderd`. + +
+See a diagram of how Bridge interception works + +```mermaid + +sequenceDiagram + actor User + participant Client + participant Bridge + + User->>Client: Issues prompt + activate Client + + Note over User, Client: Coder session key used
as AI token + Client-->>Bridge: Sends request + + activate Bridge + Note over Client, Bridge: Coder session key
passed along + + Note over Bridge: Authenticate + Note over Bridge: Parse request + + alt Rejected + Bridge-->>Client: Send response + Client->>User: Display response + end + + Note over Bridge: If first request, establish
connection(s) with MCP server(s)
and list tools + + Note over Bridge: Inject MCP tools + + Bridge-->>AIProvider: Send modified request + + activate AIProvider + + AIProvider-->>Bridge: Send response + + Note over Client: Client is unaware of injected
tools and invocations,
just receives one long response + + alt Has injected tool calls + loop + Note over Bridge: Invoke injected tool + Bridge-->>AIProvider: Send tool result + AIProvider-->>Bridge: Send response + end + end + + deactivate AIProvider + + Bridge-->>Client: Relay response + deactivate Bridge + + Client->>User: Display response + deactivate Client +``` + +
+ +## MCP + +[Model Context Protocol (MCP)](https://modelcontextprotocol.io/docs/getting-started/intro) is a mechanism for connecting AI applications to external systems. + +Bridge can connect to MCP servers and inject tools automatically, enabling you to centrally manage the list of tools you wish to grant your users. + +> [!NOTE] +> Only MCP servers which support OAuth2 Authorization are supported currently. In future releases we will support [optional authorization](https://modelcontextprotocol.io/specification/2025-06-18/basic/authorization#protocol-requirements). +> +> [_Streamable HTTP_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http) is the only supported transport currently. In future releases we will support the (now deprecated) [_Server-Sent Events_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#backwards-compatibility) transport. + +Bridge makes use of [External Auth](../admin/external-auth/index.md) applications, as they define OAuth2 connections to upstream services. If your External Auth application hosts a remote MCP server, you can configure Bridge to connect to it, retrieve its tools and inject them into requests automatically - all while using each individual user's access token. + +For example, GitHub has a [remote MCP server](https://github.com/github/github-mcp-server?tab=readme-ov-file#remote-github-mcp-server) and we can use it as follows. + +```bash +CODER_EXTERNAL_AUTH_0_TYPE=github +CODER_EXTERNAL_AUTH_0_CLIENT_ID=... +CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=... +# Tell Bridge where it can find this service's remote MCP server. +CODER_EXTERNAL_AUTH_0_MCP_URL=https://api.githubcopilot.com/mcp/ +``` + +See the diagram in [Implementation Details](#implementation-details) for more information. 
 + +You can also control which tools are injected by using an allow and/or a deny regular expression on the tool names: + +```bash +CODER_EXTERNAL_AUTH_0_MCP_TOOL_ALLOW_REGEX=(.+_gist.*) +CODER_EXTERNAL_AUTH_0_MCP_TOOL_DENY_REGEX=(create_gist) +``` + +In the above example, all tools containing `_gist` in their name will be allowed, but `create_gist` is denied. + +The logic works as follows: + +- If neither the allow nor the deny pattern is defined, all tools will be injected. +- The deny pattern takes precedence. +- If only a deny pattern is defined, all tools are injected except those explicitly denied. + +In the above example, if you prompted your AI model with "list your available github tools by name", it would reply something like: + +> Certainly! Here are the GitHub-related tools that I have available: +> +> 1. `bmcp_github_update_gist` +> 2. `bmcp_github_list_gists` + +Bridge marks automatically injected tools with a prefix `bmcp_` ("bridged MCP"). It also namespaces all tool names by the ID of their associated External Auth application (in this case `github`). + +## Tool Injection + +If a model decides to invoke a tool and it has a `bmcp_` prefix and Bridge has a connection with the related MCP server, it will invoke the tool. The tool result will be passed back to the upstream AI provider, and this will loop until the model has all of its required data. These inner loops are not relayed back to the client; all it sees is the result of this loop. See [Implementation Details](#implementation-details). + +In contrast, tools which are defined by the client (e.g. the [`Bash` tool](https://docs.claude.com/en/docs/claude-code/settings#tools-available-to-claude) defined by _Claude Code_) cannot be invoked by Bridge, and the tool call from the model will be relayed to the client, after which it will invoke the tool. + +If you have the `oauth2` and `mcp-server-http` experiments enabled, Coder's own [internal MCP tools](mcp-server.md) will be injected automatically. 
+ +### Troubleshooting + +- **Too many tools**: should you receive an error like `Invalid 'tools': array too long. Expected an array with maximum length 128, but got an array with length 132 instead`, you can reduce the number by filtering out tools using the allow/deny patterns documented in the [MCP](#mcp) section. + +- **Coder MCP tools not being injected**: in order for Coder MCP tools to be injected, the internal MCP server needs to be active. Follow the instructions in the [MCP Server](mcp-server.md) page to enable it. + +- **External Auth tools not being injected**: this is generally due to the requesting user not being authenticated against the External Auth app; when this is the case, no attempt is made to connect to the MCP server. + +## Known Issues / Limitations + +- Codex CLI currently does not work with Bridge due to a JSON marshaling issue: https://github.com/coder/aibridge/issues/19 +- Claude Code web searches do not report correctly: https://github.com/coder/aibridge/issues/11 + +## Supported APIs + +API support is broken down into two categories: + +- **Intercepted**: requests are intercepted, audited, and augmented - full Bridge functionality +- **Passthrough**: requests are proxied directly to the upstream, no auditing or augmentation takes place + +Where relevant, both streaming and non-streaming requests are supported. 
+ +### OpenAI + +**Intercepted**: + +- [`/v1/chat/completions`](https://platform.openai.com/docs/api-reference/chat/create) + +**Passthrough**: + +- [`/v1/models(/*)`](https://platform.openai.com/docs/api-reference/models/list) +- [`/v1/responses`](https://platform.openai.com/docs/api-reference/responses/create) _(Interception support coming in **Beta**)_ + +### Anthropic + +**Intercepted**: + +- [`/v1/messages`](https://docs.claude.com/en/api/messages) + +**Passthrough**: + +- [`/v1/models(/*)`](https://docs.claude.com/en/api/models-list) + +## Troubleshooting + +To report a bug, file a feature request, or view a list of known issues, please visit our [GitHub repository for Bridge](https://github.com/coder/aibridge). If you encounter issues with Bridge during early access, please reach out to us via [Discord](https://discord.gg/coder). diff --git a/docs/ai-coder/cli.md b/docs/ai-coder/cli.md new file mode 100644 index 0000000000000..6d337b458d6a7 --- /dev/null +++ b/docs/ai-coder/cli.md @@ -0,0 +1,230 @@ +# Tasks CLI + +The Coder CLI provides experimental commands for managing tasks programmatically. These are available under `coder exp task`: + +```console +USAGE: + coder exp task + + Experimental task commands. + + Aliases: tasks + +SUBCOMMANDS: + create Create an experimental task + delete Delete experimental tasks + list List experimental tasks + logs Show a task's logs + send Send input to a task + status Show the status of a task. 
+``` + +## Creating tasks + +```console +USAGE: + coder exp task create [flags] [input] + + Create an experimental task + + - Create a task with direct input: + + $ coder exp task create "Add authentication to the user service" + + - Create a task with stdin input: + + $ echo "Add authentication to the user service" | coder exp task create + + - Create a task with a specific name: + + $ coder exp task create --name task1 "Add authentication to the user service" + + - Create a task from a specific template / preset: + + $ coder exp task create --template backend-dev --preset "My Preset" "Add authentication to the user service" + + - Create a task for another user (requires appropriate permissions): + + $ coder exp task create --owner user@example.com "Add authentication to the user service" + +OPTIONS: + -O, --org string, $CODER_ORGANIZATION + Select which organization (uuid or name) to use. + + --name string + Specify the name of the task. If you do not specify one, a name will be generated for you. + + --owner string (default: me) + Specify the owner of the task. Defaults to the current user. + + --preset string, $CODER_TASK_PRESET_NAME (default: none) + -q, --quiet bool + Only display the created task's ID. + + --stdin bool + Reads from stdin for the task input. + + --template string, $CODER_TASK_TEMPLATE_NAME + --template-version string, $CODER_TASK_TEMPLATE_VERSION +``` + +## Deleting Tasks + +```console +USAGE: + coder exp task delete [flags] [ ...] + + Delete experimental tasks + + Aliases: rm + + - Delete a single task.: + + $ $ coder exp task delete task1 + + - Delete multiple tasks.: + + $ $ coder exp task delete task1 task2 task3 + + - Delete a task without confirmation.: + + $ $ coder exp task delete task4 --yes + +OPTIONS: + -y, --yes bool + Bypass prompts. 
+``` + +## Listing tasks + +```console +USAGE: + coder exp task list [flags] + + List experimental tasks + + Aliases: ls + + - List tasks for the current user.: + + $ coder exp task list + + - List tasks for a specific user.: + + $ coder exp task list --user someone-else + + - List all tasks you can view.: + + $ coder exp task list --all + + - List all your running tasks.: + + $ coder exp task list --status running + + - As above, but only show IDs.: + + $ coder exp task list --status running --quiet + +OPTIONS: + -a, --all bool (default: false) + List tasks for all users you can view. + + -c, --column [id|organization id|owner id|owner name|name|template id|template name|template display name|template icon|workspace id|workspace agent id|workspace agent lifecycle|workspace agent health|initial prompt|status|state|message|created at|updated at|state changed] (default: name,status,state,state changed,message) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + + -q, --quiet bool (default: false) + Only display task IDs. + + --status string + Filter by task status (e.g. running, failed, etc). + + --user string + List tasks for the specified user (username, "me"). +``` + +## Viewing Task Logs + +```console +USAGE: + coder exp task logs [flags] + + Show a task's logs + + - Show logs for a given task.: + + $ coder exp task logs task1 + +OPTIONS: + -c, --column [id|content|type|time] (default: type,content) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. +``` + +## Sending input to a task + +```console +USAGE: + coder exp task send [flags] [ | --stdin] + + Send input to a task + + - Send direct input to a task.: + + $ coder exp task send task1 "Please also add unit tests" + + - Send input from stdin to a task.: + + $ echo "Please also add unit tests" | coder exp task send task1 --stdin + +OPTIONS: + --stdin bool + Reads the input from stdin. 
+``` + +## Viewing Task Status + +```console +USAGE: + coder exp task status [flags] + + Show the status of a task. + + Aliases: stat + + - Show the status of a given task.: + + $ coder exp task status task1 + + - Watch the status of a given task until it completes (idle or stopped).: + + $ coder exp task status task1 --watch + +OPTIONS: + -c, --column [state changed|status|healthy|state|message] (default: state changed,status,healthy,state,message) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + + --watch bool (default: false) + Watch the task status output. This will stream updates to the terminal until the underlying workspace is stopped. +``` + +> **Note**: The `--watch` flag will automatically exit when the task reaches a terminal state. Watch mode ends when: +> +> - The workspace is stopped +> - The workspace agent becomes unhealthy or is shutting down +> - The task completes (reaches a non-working state like completed, failed, or canceled) + +## Identifying Tasks + +Tasks can be identified in CLI commands using either: + +- **Task Name**: The human-readable name (e.g., `my-task-name`) + > Note: Tasks owned by other users can be identified by their owner and name (e.g., `alice/her-task`). +- **Task ID**: The UUID identifier (e.g., `550e8400-e29b-41d4-a716-446655440000`) diff --git a/docs/ai-coder/index.md b/docs/ai-coder/index.md index d14caa35c33ab..eb1fe33d7f24d 100644 --- a/docs/ai-coder/index.md +++ b/docs/ai-coder/index.md @@ -16,4 +16,12 @@ In cases where the IDE is secondary, such as prototyping or long-running backgro ![Coder Tasks UI](../images/guides/ai-agents/tasks-ui.png) -[Learn more about Coder Tasks](./tasks.md) to how to get started and best practices. +[Learn more about Coder Tasks](./tasks.md) for best practices and how to get started. 
+ +## Secure Your Workflows with Agent Boundaries (Beta) + +AI agents can be powerful teammates, but must be treated as untrusted and unpredictable interns as opposed to tools. Without the right controls, they can go rogue. + +[Agent Boundaries](./agent-boundary.md) is a new tool that offers process-level safeguards that detect and prevent destructive actions. Unlike traditional mitigation methods like firewalls, service meshes, and RBAC systems, Agent Boundaries is an agent-aware, centralized control point that can either be embedded in the same secure Coder Workspaces that enterprises already trust, or used through an open source CLI. + +To learn more about features, implementation details, and how to get started, check out the [Agent Boundary documentation](./agent-boundary.md). diff --git a/docs/ai-coder/security.md b/docs/ai-coder/security.md index 8d1e07ae1d329..86a252b8c4f2e 100644 --- a/docs/ai-coder/security.md +++ b/docs/ai-coder/security.md @@ -19,16 +19,10 @@ not access or upload sensitive information. Many agents require API keys to access external services. It is recommended to create a separate API key for your agent with the minimum permissions required. -This will likely involve editing your template for Agents to set different scopes or tokens -from the standard one. +This will likely involve editing your template for Agents to set different scopes or tokens from the standard one. Additional guidance and tooling is coming in future releases of Coder. -## Set Up Agent Boundaries (Premium) +## Set Up Agent Boundaries -Agent Boundaries add an additional layer and isolation of security between the -agent and the rest of the environment inside of your Coder workspace, allowing -humans to have more privileges and access compared to agents inside the same -workspace. 
 - -- [Contact us for more information](https://coder.com/contact) and for early access to agent boundaries +Agent Boundaries are process-level "agent firewalls" that let you restrict and audit what AI agents can access within Coder workspaces. To learn more about this feature, see [Agent Boundary](./agent-boundary.md). diff --git a/docs/ai-coder/tasks.md b/docs/ai-coder/tasks.md index 78b9a9adbcbf0..878c542350472 100644 --- a/docs/ai-coder/tasks.md +++ b/docs/ai-coder/tasks.md @@ -63,7 +63,7 @@ data "coder_parameter" "setup_script" { # The Claude Code module does the automatic task reporting # Other agent modules: https://registry.coder.com/modules?search=agent -# Or use a custom agent: +# Or use a custom agent: module "claude-code" { source = "registry.coder.com/coder/claude-code/coder" version = "3.0.1" @@ -128,6 +128,10 @@ Coder can automatically generate a name your tasks if you set the `ANTHROPIC_API If you tried Tasks and decided you don't want to use it, you can hide the Tasks tab by starting `coder server` with the `CODER_HIDE_AI_TASKS=true` environment variable or the `--hide-ai-tasks` flag. +## Command Line Interface + +See [Tasks CLI](./cli.md). 
+ ## Next Steps diff --git a/docs/images/aibridge/aibridge_diagram.png b/docs/images/aibridge/aibridge_diagram.png new file mode 100644 index 0000000000000..fe9d39b766d1f Binary files /dev/null and b/docs/images/aibridge/aibridge_diagram.png differ diff --git a/docs/images/aibridge/grafana_user_leaderboard.png b/docs/images/aibridge/grafana_user_leaderboard.png new file mode 100644 index 0000000000000..a336aa262968e Binary files /dev/null and b/docs/images/aibridge/grafana_user_leaderboard.png differ diff --git a/docs/images/aibridge/grafana_user_prompts_logging.png b/docs/images/aibridge/grafana_user_prompts_logging.png new file mode 100644 index 0000000000000..6ac48d189fac4 Binary files /dev/null and b/docs/images/aibridge/grafana_user_prompts_logging.png differ diff --git a/docs/images/guides/ai-agents/boundary.png b/docs/images/guides/ai-agents/boundary.png new file mode 100644 index 0000000000000..34f8d14a6b642 Binary files /dev/null and b/docs/images/guides/ai-agents/boundary.png differ diff --git a/docs/install/cli.md b/docs/install/cli.md index bb70d89c6a724..38e7d2ede9f93 100644 --- a/docs/install/cli.md +++ b/docs/install/cli.md @@ -64,7 +64,7 @@ Every Coder server hosts CLI binaries for all supported platforms. You can run a script to download the appropriate CLI for your machine from your Coder deployment. -![Install Coder binary from your deplyment](../images/install/install_from_deployment.png) +![Install Coder binary from your deployment](../images/install/install_from_deployment.png) This script works within air-gapped deployments and ensures that the version of the CLI you have installed on your machine matches the version of the server. 
diff --git a/docs/install/index.md b/docs/install/index.md index 2fc04c186a128..b7ba22da090ff 100644 --- a/docs/install/index.md +++ b/docs/install/index.md @@ -27,6 +27,23 @@ curl -L https://coder.com/install.sh | sh Refer to [GitHub releases](https://github.com/coder/coder/releases) for alternate installation methods (e.g. standalone binaries, system packages). +> [!WARNING] +> If you're using an Apple Silicon Mac with ARM64 architecture (M1/M2/M3/M4), you'll need to use an external PostgreSQL database, set up with the following commands: + +``` bash +# Install PostgreSQL +brew install postgresql@16 + +# Start PostgreSQL +brew services start postgresql@16 + +# Create database +createdb coder + +# Run Coder with external database +coder server --postgres-url="postgres://$(whoami)@localhost/coder?sslmode=disable" +``` + ## Windows If you plan to use the built-in PostgreSQL database, ensure that the diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index 5cab1a2350e2d..3af2d917b431d 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -136,7 +136,7 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.26.0 + --version 2.27.2 ``` - **OCI Registry** @@ -147,7 +147,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.26.0 + --version 2.27.2 ``` - **Stable** Coder release: @@ -160,7 +160,7 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.25.2 + --version 2.26.3 ``` - **OCI Registry** @@ -171,7 +171,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.25.2 + --version 2.26.3 
``` You can watch Coder start up by running `kubectl get pods -n coder`. Once Coder diff --git a/docs/install/rancher.md b/docs/install/rancher.md index 7a5c95f017a94..aaf9c947dff37 100644 --- a/docs/install/rancher.md +++ b/docs/install/rancher.md @@ -134,8 +134,8 @@ kubectl create secret generic coder-db-url -n coder \ 1. Select a Coder version: - - **Mainline**: `2.20.x` - - **Stable**: `2.19.x` + - **Mainline**: `2.27.2` + - **Stable**: `2.26.3` Learn more about release channels in the [Releases documentation](./releases/index.md). diff --git a/docs/install/releases/index.md b/docs/install/releases/index.md index 2a8ef1d704273..afe0cf4e1656c 100644 --- a/docs/install/releases/index.md +++ b/docs/install/releases/index.md @@ -57,13 +57,13 @@ pages. | Release name | Release Date | Status | Latest Release | |------------------------------------------------|--------------------|------------------|----------------------------------------------------------------| -| [2.21](https://coder.com/changelog/coder-2-21) | April 02, 2025 | Not Supported | [v2.21.3](https://github.com/coder/coder/releases/tag/v2.21.3) | | [2.22](https://coder.com/changelog/coder-2-22) | May 16, 2025 | Not Supported | [v2.22.1](https://github.com/coder/coder/releases/tag/v2.22.1) | -| [2.23](https://coder.com/changelog/coder-2-23) | June 03, 2025 | Security Support | [v2.23.2](https://github.com/coder/coder/releases/tag/v2.23.4) | -| [2.24](https://coder.com/changelog/coder-2-24) | August 07, 2025 | Security Support | [v2.24.3](https://github.com/coder/coder/releases/tag/v2.24.3) | -| [2.25](https://coder.com/changelog/coder-2-25) | September 04, 2025 | Stable | [v2.25.2](https://github.com/coder/coder/releases/tag/v2.25.2) | -| [2.26](https://coder.com/changelog/coder-2-26) | September 02, 2025 | Mainline | [v2.26.0](https://github.com/coder/coder/releases/tag/v2.26.0) | -| 2.27 | October 07, 2025 | Not Released | N/A | +| [2.23](https://coder.com/changelog/coder-2-23) | June 03, 2025 | Not 
Supported | [v2.23.5](https://github.com/coder/coder/releases/tag/v2.23.5) | +| [2.24](https://coder.com/changelog/coder-2-24) | July 01, 2025 | Not Supported | [v2.24.4](https://github.com/coder/coder/releases/tag/v2.24.4) | +| [2.25](https://coder.com/changelog/coder-2-25) | August 05, 2025 | Security Support | [v2.25.3](https://github.com/coder/coder/releases/tag/v2.25.3) | +| [2.26](https://coder.com/changelog/coder-2-26) | September 03, 2025 | Stable | [v2.26.3](https://github.com/coder/coder/releases/tag/v2.26.3) | +| [2.27](https://coder.com/changelog/coder-2-27) | October 02, 2025 | Mainline | [v2.27.2](https://github.com/coder/coder/releases/tag/v2.27.2) | +| 2.28 | | Not Released | N/A | > [!TIP] diff --git a/docs/manifest.json b/docs/manifest.json index 342326c99a760..57711406c87d7 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -209,6 +209,11 @@ "description": "Use VSCode with Coder in the desktop or browser", "path": "./user-guides/workspace-access/vscode.md" }, + { + "title": "Web Terminal", + "description": "Use the browser-based terminal to access your workspace", + "path": "./user-guides/workspace-access/web-terminal.md" + }, { "title": "JetBrains IDEs", "description": "Use JetBrains IDEs with Coder", @@ -391,6 +396,11 @@ "title": "Up to 3,000 Users", "description": "Enterprise-scale architecture recommendations for Coder deployments that support up to 3,000 users", "path": "./admin/infrastructure/validated-architectures/3k-users.md" + }, + { + "title": "Up to 10,000 Users", + "description": "Enterprise-scale architecture recommendations for Coder deployments that support up to 10,000 users", + "path": "./admin/infrastructure/validated-architectures/10k-users.md" } ] }, @@ -907,6 +917,26 @@ "description": "Connect to agents Coder with a MCP server", "path": "./ai-coder/mcp-server.md", "state": ["beta"] + }, + { + "title": "Agent Boundaries", + "description": "Understanding Agent Boundaries in Coder Tasks", + "path": 
"./ai-coder/agent-boundary.md", + "state": ["early access"] + }, + { + "title": "AI Bridge", + "description": "Centralized LLM and MCP proxy for platform teams", + "path": "./ai-coder/ai-bridge.md", + "icon_path": "./images/icons/api.svg", + "state": ["premium", "early access"] + }, + { + "title": "Tasks CLI", + "description": "Coder CLI for managing tasks programmatically", + "path": "./ai-coder/cli.md", + "icon_path": "./images/icons/api.svg", + "state": ["beta"] } ] }, @@ -1150,6 +1180,21 @@ "path": "./reference/cli/index.md", "icon_path": "./images/icons/terminal.svg", "children": [ + { + "title": "aibridge", + "description": "Manage AIBridge.", + "path": "reference/cli/aibridge.md" + }, + { + "title": "aibridge interceptions", + "description": "Manage AIBridge interceptions.", + "path": "reference/cli/aibridge_interceptions.md" + }, + { + "title": "aibridge interceptions list", + "description": "List AIBridge interceptions as JSON.", + "path": "reference/cli/aibridge_interceptions_list.md" + }, { "title": "autoupdate", "description": "Toggle auto-update policy for a workspace", @@ -1768,6 +1813,11 @@ "description": "Delete a token", "path": "reference/cli/tokens_remove.md" }, + { + "title": "tokens view", + "description": "Display detailed information about a token", + "path": "reference/cli/tokens_view.md" + }, { "title": "unfavorite", "description": "Remove a workspace from your favorites", diff --git a/docs/reference/api/aibridge.md b/docs/reference/api/aibridge.md index 6c929a3fa9383..7e3a23fc5ec21 100644 --- a/docs/reference/api/aibridge.md +++ b/docs/reference/api/aibridge.md @@ -6,12 +6,12 @@ ```shell # Example request using curl -curl -X GET http://coder-server:8080/api/v2/api/experimental/aibridge/interceptions \ +curl -X GET http://coder-server:8080/api/v2/aibridge/interceptions \ -H 'Accept: application/json' \ -H 'Coder-Session-Token: API_KEY' ``` -`GET /api/experimental/aibridge/interceptions` +`GET /aibridge/interceptions` ### Parameters @@ -19,7 
+19,8 @@ curl -X GET http://coder-server:8080/api/v2/api/experimental/aibridge/intercepti |------------|-------|---------|----------|------------------------------------------------------------------------------------------------------------------------| | `q` | query | string | false | Search query in the format `key:value`. Available keys are: initiator, provider, model, started_after, started_before. | | `limit` | query | integer | false | Page limit | -| `after_id` | query | string | false | Cursor pagination after ID | +| `after_id` | query | string | false | Cursor pagination after ID (cannot be used with offset) | +| `offset` | query | integer | false | Offset pagination (cannot be used with after_id) | ### Example responses @@ -27,10 +28,17 @@ curl -X GET http://coder-server:8080/api/v2/api/experimental/aibridge/intercepti ```json { + "count": 0, "results": [ { + "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", - "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", + "initiator": { + "avatar_url": "http://example.com", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", + "username": "string" + }, "metadata": { "property1": null, "property2": null diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md index 232509052b7b0..82b7cb8365a3e 100644 --- a/docs/reference/api/builds.md +++ b/docs/reference/api/builds.md @@ -48,6 +48,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -288,6 +289,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", 
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1019,6 +1021,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1332,6 +1335,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1532,7 +1536,7 @@ Status Code **200** | Name | Type | Required | Restrictions | Description | |----------------------------------|--------------------------------------------------------------------------------------------------------|----------|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `[array item]` | array | false | | | -| `Β» ai_task_sidebar_app_id` | string(uuid) | false | | | +| `Β» ai_task_sidebar_app_id` | string(uuid) | false | | Deprecated: This field has been replaced with `Task.WorkspaceAppID` | | `Β» build_number` | integer | false | | | | `Β» created_at` | string(date-time) | false | | | | `Β» daily_cost` | integer | false | | | @@ -1551,6 +1555,7 @@ Status Code **200** | `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | | | `»» 
file_id` | string(uuid) | false | | | | `»» id` | string(uuid) | false | | | +| `»» initiator_id` | string(uuid) | false | | | | `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | | | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | @@ -1829,6 +1834,7 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index b6043544d4766..131223e38e5f4 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -120,6 +120,7 @@ curl -X GET http://coder-server:8080/api/v2/appearance \ "support_links": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -808,7 +809,8 @@ curl -X GET http://coder-server:8080/api/v2/oauth2-provider/apps \ "endpoints": { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + "token_revoke": "string" }, "icon": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", @@ -835,6 +837,7 @@ Status Code **200** | `»» authorization` | string | false | | | | `»» device_authorization` | string | false | | Device authorization is optional. 
| | `»» token` | string | false | | | +| `»» token_revoke` | string | false | | | | `Β» icon` | string | false | | | | `Β» id` | string(uuid) | false | | | | `Β» name` | string | false | | | @@ -881,7 +884,8 @@ curl -X POST http://coder-server:8080/api/v2/oauth2-provider/apps \ "endpoints": { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + "token_revoke": "string" }, "icon": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", @@ -926,7 +930,8 @@ curl -X GET http://coder-server:8080/api/v2/oauth2-provider/apps/{app} \ "endpoints": { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + "token_revoke": "string" }, "icon": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", @@ -983,7 +988,8 @@ curl -X PUT http://coder-server:8080/api/v2/oauth2-provider/apps/{app} \ "endpoints": { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + "token_revoke": "string" }, "icon": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", @@ -1268,7 +1274,9 @@ curl -X GET http://coder-server:8080/api/v2/oauth2/clients/{client_id} \ "redirect_uris": [ "string" ], - "registration_access_token": "string", + "registration_access_token": [ + 0 + ], "registration_client_uri": "string", "response_types": [ "string" @@ -1362,7 +1370,9 @@ curl -X PUT http://coder-server:8080/api/v2/oauth2/clients/{client_id} \ "redirect_uris": [ "string" ], - "registration_access_token": "string", + "registration_access_token": [ + 0 + ], "registration_client_uri": "string", "response_types": [ "string" @@ -1499,6 +1509,42 @@ curl -X POST http://coder-server:8080/api/v2/oauth2/register \ |--------|--------------------------------------------------------------|-------------|--------------------------------------------------------------------------------------------------| | 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created 
| [codersdk.OAuth2ClientRegistrationResponse](schemas.md#codersdkoauth2clientregistrationresponse) | +## Revoke OAuth2 tokens (RFC 7009) + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/oauth2/revoke \ + +``` + +`POST /oauth2/revoke` + +> Body parameter + +```yaml +client_id: string +token: string +token_type_hint: string + +``` + +### Parameters + +| Name | In | Type | Required | Description | +|---------------------|------|--------|----------|-------------------------------------------------------| +| `body` | body | object | true | | +| `Β» client_id` | body | string | true | Client ID for authentication | +| `Β» token` | body | string | true | The token to revoke | +| `Β» token_type_hint` | body | string | false | Hint about token type (access_token or refresh_token) | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|----------------------------|--------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | Token successfully revoked | | + ## OAuth2 token exchange ### Code samples diff --git a/docs/reference/api/experimental.md b/docs/reference/api/experimental.md new file mode 100644 index 0000000000000..34ad224bd3538 --- /dev/null +++ b/docs/reference/api/experimental.md @@ -0,0 +1,204 @@ +# Experimental + +## List AI tasks + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks \ + -H 'Accept: */*' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /api/experimental/tasks` + +### Parameters + +| Name | In | Type | Required | Description | +|------|-------|--------|----------|---------------------------------------------------------------------------------------------------------------------| +| `q` | query | string | false | Search query for filtering tasks. 
Supports: owner:, organization:, status: | + +### Example responses + +> 200 Response + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|--------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TasksListResponse](schemas.md#codersdktaskslistresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Create a new AI task + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/api/experimental/tasks/{user} \ + -H 'Content-Type: application/json' \ + -H 'Accept: */*' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /api/experimental/tasks/{user}` + +> Body parameter + +```json +{ + "input": "string", + "name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------------------------------------------------------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `body` | body | [codersdk.CreateTaskRequest](schemas.md#codersdkcreatetaskrequest) | true | Create task request | + +### Example responses + +> 201 Response + +### Responses + +| Status | Meaning | Description | Schema | +|--------|--------------------------------------------------------------|-------------|------------------------------------------| +| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.Task](schemas.md#codersdktask) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Get AI task by ID + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task} \ + -H 'Accept: */*' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /api/experimental/tasks/{user}/{task}` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string(uuid) | true | Task ID | + +### Example responses + +> 200 Response + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Task](schemas.md#codersdktask) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Delete AI task by ID + +### Code samples + +```shell +# Example request using curl +curl -X DELETE http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task} \ + -H 'Coder-Session-Token: API_KEY' +``` + +`DELETE /api/experimental/tasks/{user}/{task}` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string(uuid) | true | Task ID | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------------|-------------------------|--------| +| 202 | [Accepted](https://tools.ietf.org/html/rfc7231#section-6.3.3) | Task deletion initiated | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Get AI task logs + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task}/logs \ + -H 'Accept: */*' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /api/experimental/tasks/{user}/{task}/logs` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string(uuid) | true | Task ID | + +### Example responses + +> 200 Response + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TaskLogsResponse](schemas.md#codersdktasklogsresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Send input to AI task + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task}/send \ + -H 'Content-Type: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /api/experimental/tasks/{user}/{task}/send` + +> Body parameter + +```json +{ + "input": "string" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|----------------------------------------------------------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string(uuid) | true | Task ID | +| `body` | body | [codersdk.TaskSendRequest](schemas.md#codersdktasksendrequest) | true | Task input request | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|-----------------------------------------------------------------|-------------------------|--------| +| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | Input sent successfully | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
diff --git a/docs/reference/api/general.md b/docs/reference/api/general.md index a89376db38dba..5718979ae86c2 100644 --- a/docs/reference/api/general.md +++ b/docs/reference/api/general.md @@ -167,6 +167,13 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \ "base_url": "string", "key": "string" }, + "bedrock": { + "access_key": "string", + "access_key_secret": "string", + "model": "string", + "region": "string", + "small_fast_model": "string" + }, "enabled": true, "openai": { "base_url": "string", @@ -237,6 +244,7 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \ "scheme": "string", "user": {} }, + "enable_authz_recording": true, "enable_terraform_debug_mode": true, "ephemeral_deployment": true, "experiments": [ @@ -472,6 +480,7 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \ "value": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } diff --git a/docs/reference/api/members.md b/docs/reference/api/members.md index 6e02b90bc6b9d..a2251a59ba099 100644 --- a/docs/reference/api/members.md +++ b/docs/reference/api/members.md @@ -112,6 +112,13 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -147,20 +154,21 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members Status Code **200** -| Name | Type | Required | Restrictions | Description | -|------------------------------|----------------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `[array item]` | array | false | | | -| `Β» assignable` | boolean | false | | | -| 
`Β» built_in` | boolean | false | | Built in roles are immutable | -| `Β» display_name` | string | false | | | -| `Β» name` | string | false | | | -| `Β» organization_id` | string(uuid) | false | | | -| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. | -| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»» negate` | boolean | false | | Negate makes this a negative permission | -| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `Β» site_permissions` | array | false | | | -| `Β» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|----------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `[array item]` | array | false | | | +| `Β» assignable` | boolean | false | | | +| `Β» built_in` | boolean | false | | Built in roles are immutable | +| `Β» display_name` | string | false | | | +| `Β» name` | string | false | | | +| `Β» organization_id` | string(uuid) | false | | | +| `Β» organization_member_permissions` | array | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `Β» site_permissions` | array | false | | | +| `Β» user_permissions` | array | false | | | #### Enumerated Values @@ -175,6 +183,7 @@ Status Code **200** | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | +| `action` | `share` | | `action` | `unassign` | | `action` | `update` | | `action` | `update_personal` | @@ -213,6 +222,7 @@ Status Code **200** | `resource_type` | `replicas` | | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | +| `resource_type` | `task` | | `resource_type` | `template` | | `resource_type` | `usage_event` | | `resource_type` | `user` | @@ -246,6 +256,13 @@ curl -X PUT http://coder-server:8080/api/v2/organizations/{organization}/members { "display_name": "string", "name": "string", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -287,6 +304,13 @@ curl -X PUT http://coder-server:8080/api/v2/organizations/{organization}/members "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -322,18 +346,19 @@ curl -X PUT http://coder-server:8080/api/v2/organizations/{organization}/members Status Code **200** -| Name | Type | Required | Restrictions | Description | -|------------------------------|----------------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `[array item]` | array | false | | | -| `Β» display_name` | string | false | | | -| `Β» name` | string | false | | | -| `Β» organization_id` | string(uuid) | false | | | -| `Β» organization_permissions` | array | false | | Organization 
permissions are specific for the organization in the field 'OrganizationID' above. | -| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»» negate` | boolean | false | | Negate makes this a negative permission | -| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `Β» site_permissions` | array | false | | | -| `Β» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|----------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `[array item]` | array | false | | | +| `Β» display_name` | string | false | | | +| `Β» name` | string | false | | | +| `Β» organization_id` | string(uuid) | false | | | +| `Β» organization_member_permissions` | array | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `Β» site_permissions` | array | false | | | +| `Β» user_permissions` | array | false | | | #### Enumerated Values @@ -348,6 +373,7 @@ Status Code **200** | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | +| `action` | `share` | | `action` | `unassign` | | `action` | `update` | | `action` | `update_personal` | @@ -386,6 +412,7 @@ Status Code **200** | `resource_type` | `replicas` | | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | +| `resource_type` | `task` | | `resource_type` | `template` | | `resource_type` | `usage_event` | | `resource_type` | `user` | @@ -419,6 +446,13 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/member { "display_name": "string", "name": "string", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -460,6 +494,13 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/member "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -495,18 +536,19 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/member Status Code **200** -| Name | Type | Required | Restrictions | Description | -|------------------------------|----------------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `[array item]` | array | false | | | -| `Β» display_name` | string | false | | | -| `Β» name` | string | false | | | -| `Β» organization_id` | string(uuid) | false | | | -| `Β» organization_permissions` | array | false | | Organization 
permissions are specific for the organization in the field 'OrganizationID' above. | -| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»» negate` | boolean | false | | Negate makes this a negative permission | -| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `Β» site_permissions` | array | false | | | -| `Β» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|----------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `[array item]` | array | false | | | +| `Β» display_name` | string | false | | | +| `Β» name` | string | false | | | +| `Β» organization_id` | string(uuid) | false | | | +| `Β» organization_member_permissions` | array | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `Β» site_permissions` | array | false | | | +| `Β» user_permissions` | array | false | | | #### Enumerated Values @@ -521,6 +563,7 @@ Status Code **200** | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | +| `action` | `share` | | `action` | `unassign` | | `action` | `update` | | `action` | `update_personal` | @@ -559,6 +602,7 @@ Status Code **200** | `resource_type` | `replicas` | | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | +| `resource_type` | `task` | | `resource_type` | `template` | | `resource_type` | `usage_event` | | `resource_type` | `user` | @@ -602,6 +646,13 @@ curl -X DELETE http://coder-server:8080/api/v2/organizations/{organization}/memb "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -637,18 +688,19 @@ curl -X DELETE http://coder-server:8080/api/v2/organizations/{organization}/memb Status Code **200** -| Name | Type | Required | Restrictions | Description | -|------------------------------|----------------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `[array item]` | array | false | | | -| `Β» display_name` | string | false | | | -| `Β» name` | string | false | | | -| `Β» organization_id` | string(uuid) | false | | | -| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| -| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»» negate` | boolean | false | | Negate makes this a negative permission | -| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `Β» site_permissions` | array | false | | | -| `Β» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|----------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `[array item]` | array | false | | | +| `Β» display_name` | string | false | | | +| `Β» name` | string | false | | | +| `Β» organization_id` | string(uuid) | false | | | +| `Β» organization_member_permissions` | array | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `Β» site_permissions` | array | false | | | +| `Β» user_permissions` | array | false | | | #### Enumerated Values @@ -663,6 +715,7 @@ Status Code **200** | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | +| `action` | `share` | | `action` | `unassign` | | `action` | `update` | | `action` | `update_personal` | @@ -701,6 +754,7 @@ Status Code **200** | `resource_type` | `replicas` | | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | +| `resource_type` | `task` | | `resource_type` | `template` | | `resource_type` | `usage_event` | | `resource_type` | `user` | @@ -964,6 +1018,13 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -999,20 +1060,21 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ Status Code **200** -| Name | Type | Required | Restrictions | Description | -|------------------------------|----------------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `[array item]` | array | false | | | -| `Β» assignable` | boolean | false | | | -| `Β» built_in` | boolean | false | | Built in roles are immutable | -| `Β» display_name` | string | false | | | -| `Β» name` | string | false | | | -| `Β» organization_id` | string(uuid) | false | | | -| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| -| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»» negate` | boolean | false | | Negate makes this a negative permission | -| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `Β» site_permissions` | array | false | | | -| `Β» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|----------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `[array item]` | array | false | | | +| `Β» assignable` | boolean | false | | | +| `Β» built_in` | boolean | false | | Built in roles are immutable | +| `Β» display_name` | string | false | | | +| `Β» name` | string | false | | | +| `Β» organization_id` | string(uuid) | false | | | +| `Β» organization_member_permissions` | array | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `Β» site_permissions` | array | false | | | +| `Β» user_permissions` | array | false | | | #### Enumerated Values @@ -1027,6 +1089,7 @@ Status Code **200** | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | +| `action` | `share` | | `action` | `unassign` | | `action` | `update` | | `action` | `update_personal` | @@ -1065,6 +1128,7 @@ Status Code **200** | `resource_type` | `replicas` | | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | +| `resource_type` | `task` | | `resource_type` | `template` | | `resource_type` | `usage_event` | | `resource_type` | `user` | diff --git a/docs/reference/api/organizations.md b/docs/reference/api/organizations.md index d418a1fcba106..ffd6f78405fb1 100644 --- a/docs/reference/api/organizations.md +++ b/docs/reference/api/organizations.md @@ -366,6 +366,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi | `ids` | query | array(uuid) | false | Filter results by job IDs | | `status` | query | string | false | Filter results by status | | `tags` | query | object | false | Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'}) | +| `initiator` | query | string(uuid) | false | Filter results by initiator | #### Enumerated Values @@ -402,6 +403,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -454,6 +456,7 @@ Status Code **200** | `Β» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | | | `Β» file_id` | string(uuid) | false | | | | `Β» id` | string(uuid) | false | | | +| `Β» initiator_id` | string(uuid) | false | | | | `Β» input` | 
[codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | | | `»» error` | string | false | | | | `»» template_version_id` | string(uuid) | false | | | @@ -531,6 +534,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 98324941a1c70..5d6a66efafee1 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -351,6 +351,28 @@ | `base_url` | string | false | | | | `key` | string | false | | | +## codersdk.AIBridgeBedrockConfig + +```json +{ + "access_key": "string", + "access_key_secret": "string", + "model": "string", + "region": "string", + "small_fast_model": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------------------|--------|----------|--------------|-------------| +| `access_key` | string | false | | | +| `access_key_secret` | string | false | | | +| `model` | string | false | | | +| `region` | string | false | | | +| `small_fast_model` | string | false | | | + ## codersdk.AIBridgeConfig ```json @@ -359,6 +381,13 @@ "base_url": "string", "key": "string" }, + "bedrock": { + "access_key": "string", + "access_key_secret": "string", + "model": "string", + "region": "string", + "small_fast_model": "string" + }, "enabled": true, "openai": { "base_url": "string", @@ -372,6 +401,7 @@ | Name | Type | Required | Restrictions | Description | |-------------|----------------------------------------------------------------------|----------|--------------|-------------| | `anthropic` | [codersdk.AIBridgeAnthropicConfig](#codersdkaibridgeanthropicconfig) | false | | | +| 
`bedrock` | [codersdk.AIBridgeBedrockConfig](#codersdkaibridgebedrockconfig) | false | | | | `enabled` | boolean | false | | | | `openai` | [codersdk.AIBridgeOpenAIConfig](#codersdkaibridgeopenaiconfig) | false | | | @@ -379,8 +409,14 @@ ```json { + "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", - "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", + "initiator": { + "avatar_url": "http://example.com", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", + "username": "string" + }, "metadata": { "property1": null, "property2": null @@ -439,8 +475,9 @@ | Name | Type | Required | Restrictions | Description | |--------------------|---------------------------------------------------------------------|----------|--------------|-------------| +| `ended_at` | string | false | | | | `id` | string | false | | | -| `initiator_id` | string | false | | | +| `initiator` | [codersdk.MinimalUser](#codersdkminimaluser) | false | | | | `metadata` | object | false | | | | Β» `[any property]` | any | false | | | | `model` | string | false | | | @@ -454,10 +491,17 @@ ```json { + "count": 0, "results": [ { + "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", - "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", + "initiator": { + "avatar_url": "http://example.com", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", + "username": "string" + }, "metadata": { "property1": null, "property2": null @@ -518,6 +562,7 @@ | Name | Type | Required | Restrictions | Description | |-----------|-------------------------------------------------------------------------|----------|--------------|-------------| +| `count` | integer | false | | | | `results` | array of [codersdk.AIBridgeInterception](#codersdkaibridgeinterception) | false | | | ## codersdk.AIBridgeOpenAIConfig @@ -639,6 +684,13 @@ "base_url": "string", "key": "string" }, + "bedrock": { + "access_key": "string", + "access_key_secret": 
"string", + "model": "string", + "region": "string", + "small_fast_model": "string" + }, "enabled": true, "openai": { "base_url": "string", @@ -654,10 +706,32 @@ |----------|----------------------------------------------------|----------|--------------|-------------| | `bridge` | [codersdk.AIBridgeConfig](#codersdkaibridgeconfig) | false | | | +## codersdk.APIAllowListTarget + +```json +{ + "id": "string", + "type": "*" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|--------|------------------------------------------------|----------|--------------|-------------| +| `id` | string | false | | | +| `type` | [codersdk.RBACResource](#codersdkrbacresource) | false | | | + ## codersdk.APIKey ```json { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "created_at": "2019-08-24T14:15:22Z", "expires_at": "2019-08-24T14:15:22Z", "id": "string", @@ -676,19 +750,20 @@ ### Properties -| Name | Type | Required | Restrictions | Description | -|--------------------|-------------------------------------------------------|----------|--------------|---------------------------------| -| `created_at` | string | true | | | -| `expires_at` | string | true | | | -| `id` | string | true | | | -| `last_used` | string | true | | | -| `lifetime_seconds` | integer | true | | | -| `login_type` | [codersdk.LoginType](#codersdklogintype) | true | | | -| `scope` | [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. 
| -| `scopes` | array of [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | | -| `token_name` | string | true | | | -| `updated_at` | string | true | | | -| `user_id` | string | true | | | +| Name | Type | Required | Restrictions | Description | +|--------------------|---------------------------------------------------------------------|----------|--------------|---------------------------------| +| `allow_list` | array of [codersdk.APIAllowListTarget](#codersdkapiallowlisttarget) | false | | | +| `created_at` | string | true | | | +| `expires_at` | string | true | | | +| `id` | string | true | | | +| `last_used` | string | true | | | +| `lifetime_seconds` | integer | true | | | +| `login_type` | [codersdk.LoginType](#codersdklogintype) | true | | | +| `scope` | [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. | +| `scopes` | array of [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | | +| `token_name` | string | true | | | +| `updated_at` | string | true | | | +| `user_id` | string | true | | | #### Enumerated Values @@ -711,49 +786,204 @@ #### Enumerated Values -| Value | -|---------------------------------| -| `all` | -| `application_connect` | -| `api_key:*` | -| `api_key:create` | -| `api_key:delete` | -| `api_key:read` | -| `api_key:update` | -| `coder:all` | -| `coder:apikeys.manage_self` | -| `coder:application_connect` | -| `coder:templates.author` | -| `coder:templates.build` | -| `coder:workspaces.access` | -| `coder:workspaces.create` | -| `coder:workspaces.delete` | -| `coder:workspaces.operate` | -| `file:*` | -| `file:create` | -| `file:read` | -| `template:*` | -| `template:create` | -| `template:delete` | -| `template:read` | -| `template:update` | -| `template:use` | -| `user:read_personal` | -| `user:update_personal` | -| `user_secret:*` | -| `user_secret:create` | -| `user_secret:delete` | -| `user_secret:read` | -| `user_secret:update` | -| `workspace:*` | -| `workspace:application_connect` | 
-| `workspace:create` | -| `workspace:delete` | -| `workspace:read` | -| `workspace:ssh` | -| `workspace:start` | -| `workspace:stop` | -| `workspace:update` | +| Value | +|-------------------------------------------| +| `all` | +| `application_connect` | +| `aibridge_interception:*` | +| `aibridge_interception:create` | +| `aibridge_interception:read` | +| `aibridge_interception:update` | +| `api_key:*` | +| `api_key:create` | +| `api_key:delete` | +| `api_key:read` | +| `api_key:update` | +| `assign_org_role:*` | +| `assign_org_role:assign` | +| `assign_org_role:create` | +| `assign_org_role:delete` | +| `assign_org_role:read` | +| `assign_org_role:unassign` | +| `assign_org_role:update` | +| `assign_role:*` | +| `assign_role:assign` | +| `assign_role:read` | +| `assign_role:unassign` | +| `audit_log:*` | +| `audit_log:create` | +| `audit_log:read` | +| `coder:all` | +| `coder:apikeys.manage_self` | +| `coder:application_connect` | +| `coder:templates.author` | +| `coder:templates.build` | +| `coder:workspaces.access` | +| `coder:workspaces.create` | +| `coder:workspaces.delete` | +| `coder:workspaces.operate` | +| `connection_log:*` | +| `connection_log:read` | +| `connection_log:update` | +| `crypto_key:*` | +| `crypto_key:create` | +| `crypto_key:delete` | +| `crypto_key:read` | +| `crypto_key:update` | +| `debug_info:*` | +| `debug_info:read` | +| `deployment_config:*` | +| `deployment_config:read` | +| `deployment_config:update` | +| `deployment_stats:*` | +| `deployment_stats:read` | +| `file:*` | +| `file:create` | +| `file:read` | +| `group:*` | +| `group:create` | +| `group:delete` | +| `group:read` | +| `group:update` | +| `group_member:*` | +| `group_member:read` | +| `idpsync_settings:*` | +| `idpsync_settings:read` | +| `idpsync_settings:update` | +| `inbox_notification:*` | +| `inbox_notification:create` | +| `inbox_notification:read` | +| `inbox_notification:update` | +| `license:*` | +| `license:create` | +| `license:delete` | +| `license:read` | 
+| `notification_message:*` | +| `notification_message:create` | +| `notification_message:delete` | +| `notification_message:read` | +| `notification_message:update` | +| `notification_preference:*` | +| `notification_preference:read` | +| `notification_preference:update` | +| `notification_template:*` | +| `notification_template:read` | +| `notification_template:update` | +| `oauth2_app:*` | +| `oauth2_app:create` | +| `oauth2_app:delete` | +| `oauth2_app:read` | +| `oauth2_app:update` | +| `oauth2_app_code_token:*` | +| `oauth2_app_code_token:create` | +| `oauth2_app_code_token:delete` | +| `oauth2_app_code_token:read` | +| `oauth2_app_secret:*` | +| `oauth2_app_secret:create` | +| `oauth2_app_secret:delete` | +| `oauth2_app_secret:read` | +| `oauth2_app_secret:update` | +| `organization:*` | +| `organization:create` | +| `organization:delete` | +| `organization:read` | +| `organization:update` | +| `organization_member:*` | +| `organization_member:create` | +| `organization_member:delete` | +| `organization_member:read` | +| `organization_member:update` | +| `prebuilt_workspace:*` | +| `prebuilt_workspace:delete` | +| `prebuilt_workspace:update` | +| `provisioner_daemon:*` | +| `provisioner_daemon:create` | +| `provisioner_daemon:delete` | +| `provisioner_daemon:read` | +| `provisioner_daemon:update` | +| `provisioner_jobs:*` | +| `provisioner_jobs:create` | +| `provisioner_jobs:read` | +| `provisioner_jobs:update` | +| `replicas:*` | +| `replicas:read` | +| `system:*` | +| `system:create` | +| `system:delete` | +| `system:read` | +| `system:update` | +| `tailnet_coordinator:*` | +| `tailnet_coordinator:create` | +| `tailnet_coordinator:delete` | +| `tailnet_coordinator:read` | +| `tailnet_coordinator:update` | +| `task:*` | +| `task:create` | +| `task:delete` | +| `task:read` | +| `task:update` | +| `template:*` | +| `template:create` | +| `template:delete` | +| `template:read` | +| `template:update` | +| `template:use` | +| `template:view_insights` | +| 
`usage_event:*` | +| `usage_event:create` | +| `usage_event:read` | +| `usage_event:update` | +| `user:*` | +| `user:create` | +| `user:delete` | +| `user:read` | +| `user:read_personal` | +| `user:update` | +| `user:update_personal` | +| `user_secret:*` | +| `user_secret:create` | +| `user_secret:delete` | +| `user_secret:read` | +| `user_secret:update` | +| `webpush_subscription:*` | +| `webpush_subscription:create` | +| `webpush_subscription:delete` | +| `webpush_subscription:read` | +| `workspace:*` | +| `workspace:application_connect` | +| `workspace:create` | +| `workspace:create_agent` | +| `workspace:delete` | +| `workspace:delete_agent` | +| `workspace:read` | +| `workspace:share` | +| `workspace:ssh` | +| `workspace:start` | +| `workspace:stop` | +| `workspace:update` | +| `workspace_agent_devcontainers:*` | +| `workspace_agent_devcontainers:create` | +| `workspace_agent_resource_monitor:*` | +| `workspace_agent_resource_monitor:create` | +| `workspace_agent_resource_monitor:read` | +| `workspace_agent_resource_monitor:update` | +| `workspace_dormant:*` | +| `workspace_dormant:application_connect` | +| `workspace_dormant:create` | +| `workspace_dormant:create_agent` | +| `workspace_dormant:delete` | +| `workspace_dormant:delete_agent` | +| `workspace_dormant:read` | +| `workspace_dormant:share` | +| `workspace_dormant:ssh` | +| `workspace_dormant:start` | +| `workspace_dormant:stop` | +| `workspace_dormant:update` | +| `workspace_proxy:*` | +| `workspace_proxy:create` | +| `workspace_proxy:delete` | +| `workspace_proxy:read` | +| `workspace_proxy:update` | ## codersdk.AddLicenseRequest @@ -871,6 +1101,7 @@ "support_links": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -912,6 +1143,13 @@ "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], 
"organization_permissions": [ { "action": "application_connect", @@ -938,16 +1176,17 @@ ### Properties -| Name | Type | Required | Restrictions | Description | -|----------------------------|-----------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `assignable` | boolean | false | | | -| `built_in` | boolean | false | | Built in roles are immutable | -| `display_name` | string | false | | | -| `name` | string | false | | | -| `organization_id` | string | false | | | -| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. | -| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| Name | Type | Required | Restrictions | Description | +|-----------------------------------|-----------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `assignable` | boolean | false | | | +| `built_in` | boolean | false | | Built in roles are immutable | +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_id` | string | false | | | +| `organization_member_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.AuditAction @@ -1833,6 +2072,26 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in |-------|--------|----------|--------------|-------------| | `key` | string | false | | | +## codersdk.CreateTaskRequest + +```json +{ + "input": "string", + "name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|------------------------------|--------|----------|--------------|-------------| +| `input` | string | false | | | +| `name` | string | false | | | +| `template_version_id` | string | false | | | +| `template_version_preset_id` | string | false | | | + ## codersdk.CreateTemplateRequest ```json @@ -2024,6 +2283,12 @@ This is required on creation to enable a user-flow of validating a template work ```json { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "lifetime": 0, "scope": "all", "scopes": [ @@ -2035,12 +2300,13 @@ This is required on creation to enable a user-flow of validating a template work ### Properties -| Name | Type | Required | Restrictions | Description | -|--------------|-------------------------------------------------------|----------|--------------|---------------------------------| -| `lifetime` | integer | false | | | -| `scope` | [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. 
| -| `scopes` | array of [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | | -| `token_name` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|--------------|---------------------------------------------------------------------|----------|--------------|---------------------------------| +| `allow_list` | array of [codersdk.APIAllowListTarget](#codersdkapiallowlisttarget) | false | | | +| `lifetime` | integer | false | | | +| `scope` | [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. | +| `scopes` | array of [codersdk.APIKeyScope](#codersdkapikeyscope) | false | | | +| `token_name` | string | false | | | ## codersdk.CreateUserRequestWithOrgs @@ -2270,6 +2536,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o { "display_name": "string", "name": "string", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -2296,13 +2569,14 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ### Properties -| Name | Type | Required | Restrictions | Description | -|----------------------------|-----------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------| -| `display_name` | string | false | | | -| `name` | string | false | | | -| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific to the organization the role belongs to. 
| -| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| Name | Type | Required | Restrictions | Description | +|-----------------------------------|-----------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------| +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_member_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization member permissions are specific to the organization the role belongs to. | +| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific to the organization the role belongs to. | +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.DAUEntry @@ -2566,6 +2840,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "base_url": "string", "key": "string" }, + "bedrock": { + "access_key": "string", + "access_key_secret": "string", + "model": "string", + "region": "string", + "small_fast_model": "string" + }, "enabled": true, "openai": { "base_url": "string", @@ -2636,6 +2917,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "scheme": "string", "user": {} }, + "enable_authz_recording": true, "enable_terraform_debug_mode": true, "ephemeral_deployment": true, "experiments": [ @@ -2871,6 +3153,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "value": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -3071,6 +3354,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o "base_url": "string", "key": "string" }, + "bedrock": { + "access_key": "string", + "access_key_secret": "string", + "model": "string", + "region": "string", + "small_fast_model": "string" + }, "enabled": true, "openai": { "base_url": "string", @@ -3141,6 +3431,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "scheme": "string", "user": {} }, + "enable_authz_recording": true, "enable_terraform_debug_mode": true, "ephemeral_deployment": true, "experiments": [ @@ -3376,6 +3667,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "value": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -3475,6 +3767,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `disable_password_auth` | boolean | false | | | | `disable_path_apps` | boolean | false | | | | `docs_url` | [serpent.URL](#serpenturl) | false | | | +| `enable_authz_recording` | boolean | false | | | | `enable_terraform_debug_mode` | boolean | false | | | | `ephemeral_deployment` | boolean | false | | | | `experiments` | array of string | false | | | @@ -3766,7 +4059,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o | `oauth2` | | `mcp-server-http` | | `workspace-sharing` | -| `aibridge` | ## codersdk.ExternalAPIKeyScopes @@ -4483,6 +4775,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| ```json { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -4490,19 +4783,23 @@ Only certain features set these fields: - FeatureManagedAgentLimit| ### Properties -| Name | Type | Required | Restrictions | Description | -|----------|--------|----------|--------------|-------------| -| `icon` | string | false | | | -| `name` | string | false | | | -| `target` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|------------|--------|----------|--------------|-------------| +| `icon` | string | false | | | +| `location` | string | false | | | +| `name` | string | false | | | +| `target` | string | false | | | #### Enumerated Values -| Property | Value | -|----------|--------| -| `icon` | `bug` | -| `icon` | `chat` | -| `icon` | `docs` | +| Property | Value | +|------------|------------| +| `icon` | `bug` | +| `icon` | `chat` | +| `icon` | `docs` | +| `icon` | `star` | +| `location` | `navbar` | +| `location` | `dropdown` | ## codersdk.ListInboxNotificationsResponse @@ -4688,6 +4985,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" } ``` @@ -4698,6 +4996,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| |--------------|--------|----------|--------------|-------------| | `avatar_url` | string | false | | | | `id` | string | true | | | +| `name` | string | false | | | | `username` | string | true | | | ## codersdk.NotificationMethodsResponse @@ -4993,7 +5292,8 @@ Only certain features set these fields: - FeatureManagedAgentLimit| { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + 
"token_revoke": "string" } ``` @@ -5004,6 +5304,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `authorization` | string | false | | | | `device_authorization` | string | false | | Device authorization is optional. | | `token` | string | false | | | +| `token_revoke` | string | false | | | ## codersdk.OAuth2AuthorizationServerMetadata @@ -5067,7 +5368,9 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "redirect_uris": [ "string" ], - "registration_access_token": "string", + "registration_access_token": [ + 0 + ], "registration_client_uri": "string", "response_types": [ "string" @@ -5082,28 +5385,28 @@ Only certain features set these fields: - FeatureManagedAgentLimit| ### Properties -| Name | Type | Required | Restrictions | Description | -|------------------------------|-----------------|----------|--------------|-------------| -| `client_id` | string | false | | | -| `client_id_issued_at` | integer | false | | | -| `client_name` | string | false | | | -| `client_secret_expires_at` | integer | false | | | -| `client_uri` | string | false | | | -| `contacts` | array of string | false | | | -| `grant_types` | array of string | false | | | -| `jwks` | object | false | | | -| `jwks_uri` | string | false | | | -| `logo_uri` | string | false | | | -| `policy_uri` | string | false | | | -| `redirect_uris` | array of string | false | | | -| `registration_access_token` | string | false | | | -| `registration_client_uri` | string | false | | | -| `response_types` | array of string | false | | | -| `scope` | string | false | | | -| `software_id` | string | false | | | -| `software_version` | string | false | | | -| `token_endpoint_auth_method` | string | false | | | -| `tos_uri` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|------------------------------|------------------|----------|--------------|-------------| +| `client_id` | string | false | | | +| `client_id_issued_at` | integer | 
false | | | +| `client_name` | string | false | | | +| `client_secret_expires_at` | integer | false | | | +| `client_uri` | string | false | | | +| `contacts` | array of string | false | | | +| `grant_types` | array of string | false | | | +| `jwks` | object | false | | | +| `jwks_uri` | string | false | | | +| `logo_uri` | string | false | | | +| `policy_uri` | string | false | | | +| `redirect_uris` | array of string | false | | | +| `registration_access_token` | array of integer | false | | | +| `registration_client_uri` | string | false | | | +| `response_types` | array of string | false | | | +| `scope` | string | false | | | +| `software_id` | string | false | | | +| `software_version` | string | false | | | +| `token_endpoint_auth_method` | string | false | | | +| `tos_uri` | string | false | | | ## codersdk.OAuth2ClientRegistrationRequest @@ -5315,7 +5618,8 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "endpoints": { "authorization": "string", "device_authorization": "string", - "token": "string" + "token": "string", + "token_revoke": "string" }, "icon": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", @@ -6390,6 +6694,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -6432,6 +6737,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `error_code` | [codersdk.JobErrorCode](#codersdkjoberrorcode) | false | | | | `file_id` | string | false | | | | `id` | string | false | | | +| `initiator_id` | string | false | | | | `input` | [codersdk.ProvisionerJobInput](#codersdkprovisionerjobinput) | false | | | | `logs_overflowed` | boolean | false | | | | `metadata` | 
[codersdk.ProvisionerJobMetadata](#codersdkprovisionerjobmetadata) | false | | | @@ -6815,6 +7121,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `read` | | `read_personal` | | `ssh` | +| `share` | | `unassign` | | `update` | | `update_personal` | @@ -6866,6 +7173,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `replicas` | | `system` | | `tailnet_coordinator` | +| `task` | | `template` | | `usage_event` | | `user` | @@ -7117,6 +7425,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `idp_sync_settings_role` | | `workspace_agent` | | `workspace_app` | +| `task` | ## codersdk.Response @@ -7148,6 +7457,13 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "organization_member_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "organization_permissions": [ { "action": "application_connect", @@ -7174,14 +7490,15 @@ Only certain features set these fields: - FeatureManagedAgentLimit| ### Properties -| Name | Type | Required | Restrictions | Description | -|----------------------------|-----------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------| -| `display_name` | string | false | | | -| `name` | string | false | | | -| `organization_id` | string | false | | | -| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| -| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| Name | Type | Required | Restrictions | Description | +|-----------------------------------|-----------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------| +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_id` | string | false | | | +| `organization_member_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization member permissions are specific for the organization in the field 'OrganizationID' above. | +| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. | +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.RoleSyncSettings @@ -7349,6 +7666,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "value": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } @@ -7422,6 +7740,290 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `redirect_http` | boolean | false | | | | `supported_ciphers` | array of string | false | | | +## codersdk.Task + +```json +{ + "created_at": "2019-08-24T14:15:22Z", + "current_state": { + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" + }, + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initial_prompt": "string", + "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "owner_avatar_url": "string", + "owner_id": 
"8826ee2e-7933-4665-aef2-2393f84a0d05", + "owner_name": "string", + "status": "pending", + "template_display_name": "string", + "template_icon": "string", + "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", + "template_name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "updated_at": "2019-08-24T14:15:22Z", + "workspace_agent_health": { + "healthy": false, + "reason": "agent has lost connection" + }, + "workspace_agent_id": { + "uuid": "string", + "valid": true + }, + "workspace_agent_lifecycle": "created", + "workspace_app_id": { + "uuid": "string", + "valid": true + }, + "workspace_build_number": 0, + "workspace_id": { + "uuid": "string", + "valid": true + }, + "workspace_name": "string", + "workspace_status": "pending" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-----------------------------|----------------------------------------------------------------------|----------|--------------|-------------| +| `created_at` | string | false | | | +| `current_state` | [codersdk.TaskStateEntry](#codersdktaskstateentry) | false | | | +| `id` | string | false | | | +| `initial_prompt` | string | false | | | +| `name` | string | false | | | +| `organization_id` | string | false | | | +| `owner_avatar_url` | string | false | | | +| `owner_id` | string | false | | | +| `owner_name` | string | false | | | +| `status` | [codersdk.TaskStatus](#codersdktaskstatus) | false | | | +| `template_display_name` | string | false | | | +| `template_icon` | string | false | | | +| `template_id` | string | false | | | +| `template_name` | string | false | | | +| `template_version_id` | string | false | | | +| `updated_at` | string | false | | | +| `workspace_agent_health` | [codersdk.WorkspaceAgentHealth](#codersdkworkspaceagenthealth) | false | | | +| `workspace_agent_id` | [uuid.NullUUID](#uuidnulluuid) | false | | | +| `workspace_agent_lifecycle` | 
[codersdk.WorkspaceAgentLifecycle](#codersdkworkspaceagentlifecycle) | false | | | +| `workspace_app_id` | [uuid.NullUUID](#uuidnulluuid) | false | | | +| `workspace_build_number` | integer | false | | | +| `workspace_id` | [uuid.NullUUID](#uuidnulluuid) | false | | | +| `workspace_name` | string | false | | | +| `workspace_status` | [codersdk.WorkspaceStatus](#codersdkworkspacestatus) | false | | | + +#### Enumerated Values + +| Property | Value | +|--------------------|----------------| +| `status` | `pending` | +| `status` | `initializing` | +| `status` | `active` | +| `status` | `paused` | +| `status` | `unknown` | +| `status` | `error` | +| `workspace_status` | `pending` | +| `workspace_status` | `starting` | +| `workspace_status` | `running` | +| `workspace_status` | `stopping` | +| `workspace_status` | `stopped` | +| `workspace_status` | `failed` | +| `workspace_status` | `canceling` | +| `workspace_status` | `canceled` | +| `workspace_status` | `deleting` | +| `workspace_status` | `deleted` | + +## codersdk.TaskLogEntry + +```json +{ + "content": "string", + "id": 0, + "time": "2019-08-24T14:15:22Z", + "type": "input" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-----------|----------------------------------------------|----------|--------------|-------------| +| `content` | string | false | | | +| `id` | integer | false | | | +| `time` | string | false | | | +| `type` | [codersdk.TaskLogType](#codersdktasklogtype) | false | | | + +## codersdk.TaskLogType + +```json +"input" +``` + +### Properties + +#### Enumerated Values + +| Value | +|----------| +| `input` | +| `output` | + +## codersdk.TaskLogsResponse + +```json +{ + "logs": [ + { + "content": "string", + "id": 0, + "time": "2019-08-24T14:15:22Z", + "type": "input" + } + ] +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | 
+|--------|---------------------------------------------------------|----------|--------------|-------------| +| `logs` | array of [codersdk.TaskLogEntry](#codersdktasklogentry) | false | | | + +## codersdk.TaskSendRequest + +```json +{ + "input": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------|--------|----------|--------------|-------------| +| `input` | string | false | | | + +## codersdk.TaskState + +```json +"working" +``` + +### Properties + +#### Enumerated Values + +| Value | +|------------| +| `working` | +| `idle` | +| `complete` | +| `failed` | + +## codersdk.TaskStateEntry + +```json +{ + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-------------|------------------------------------------|----------|--------------|-------------| +| `message` | string | false | | | +| `state` | [codersdk.TaskState](#codersdktaskstate) | false | | | +| `timestamp` | string | false | | | +| `uri` | string | false | | | + +## codersdk.TaskStatus + +```json +"pending" +``` + +### Properties + +#### Enumerated Values + +| Value | +|----------------| +| `pending` | +| `initializing` | +| `active` | +| `paused` | +| `unknown` | +| `error` | + +## codersdk.TasksListResponse + +```json +{ + "count": 0, + "tasks": [ + { + "created_at": "2019-08-24T14:15:22Z", + "current_state": { + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" + }, + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initial_prompt": "string", + "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "owner_avatar_url": "string", + "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", + "owner_name": "string", + "status": "pending", + "template_display_name": "string", + "template_icon": "string", + "template_id": 
"c6d67e98-83ea-49f0-8812-e4abae2b68bc", + "template_name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "updated_at": "2019-08-24T14:15:22Z", + "workspace_agent_health": { + "healthy": false, + "reason": "agent has lost connection" + }, + "workspace_agent_id": { + "uuid": "string", + "valid": true + }, + "workspace_agent_lifecycle": "created", + "workspace_app_id": { + "uuid": "string", + "valid": true + }, + "workspace_build_number": 0, + "workspace_id": { + "uuid": "string", + "valid": true + }, + "workspace_name": "string", + "workspace_status": "pending" + } + ] +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------|-----------------------------------------|----------|--------------|-------------| +| `count` | integer | false | | | +| `tasks` | array of [codersdk.Task](#codersdktask) | false | | | + ## codersdk.TelemetryConfig ```json @@ -8103,6 +8705,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -8118,6 +8721,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -9386,6 +9990,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ 
-9578,6 +10183,10 @@ If the schedule is empty, the user will be updated to use the default schedule.| "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -9616,6 +10225,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `owner_avatar_url` | string | false | | | | `owner_id` | string | false | | | | `owner_name` | string | false | | Owner name is the username of the owner of the workspace. | +| `task_id` | [uuid.NullUUID](#uuidnulluuid) | false | | Task ID if set, indicates that the workspace is relevant to the given codersdk.Task. | | `template_active_version_id` | string | false | | | | `template_allow_user_cancel_workspace_jobs` | boolean | false | | | | `template_display_name` | string | false | | | @@ -9672,6 +10282,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "role": "admin", "username": "string" } @@ -10553,6 +11164,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -10743,7 +11355,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| | Name | Type | Required | Restrictions | Description | 
|------------------------------|-------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------| -| `ai_task_sidebar_app_id` | string | false | | | +| `ai_task_sidebar_app_id` | string | false | | Deprecated: This field has been replaced with `Task.WorkspaceAppID` | | `build_number` | integer | false | | | | `created_at` | string | false | | | | `daily_cost` | integer | false | | | @@ -11311,6 +11923,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "role": "admin", "username": "string" } @@ -11322,6 +11935,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| |--------------|--------------------------------------------------|----------|--------------|-------------| | `avatar_url` | string | false | | | | `id` | string | true | | | +| `name` | string | false | | | | `role` | [codersdk.WorkspaceRole](#codersdkworkspacerole) | false | | | | `username` | string | true | | | @@ -11389,6 +12003,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -11564,6 +12179,10 @@ If the schedule is empty, the user will be updated to use the default schedule.| "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -13210,6 +13829,7 
@@ None "value": [ { "icon": "bug", + "location": "navbar", "name": "string", "target": "string" } diff --git a/docs/reference/api/templates.md b/docs/reference/api/templates.md index efc59cf7b5743..2c516f4788b4d 100644 --- a/docs/reference/api/templates.md +++ b/docs/reference/api/templates.md @@ -460,6 +460,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -475,6 +476,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -560,6 +562,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -575,6 +578,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -684,6 +688,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -699,6 +704,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa 
"error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1291,6 +1297,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \ "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -1306,6 +1313,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1370,6 +1378,7 @@ Status Code **200** | `Β» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | | | `»» avatar_url` | string(uri) | false | | | | `»» id` | string(uuid) | true | | | +| `»» name` | string | false | | | | `»» username` | string | true | | | | `Β» has_external_agent` | boolean | false | | | | `Β» id` | string(uuid) | false | | | @@ -1382,6 +1391,7 @@ Status Code **200** | `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | | | `»» file_id` | string(uuid) | false | | | | `»» id` | string(uuid) | false | | | +| `»» initiator_id` | string(uuid) | false | | | | `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | | | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | @@ -1574,6 +1584,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + 
"name": "string", "username": "string" }, "has_external_agent": true, @@ -1589,6 +1600,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1653,6 +1665,7 @@ Status Code **200** | `Β» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | | | `»» avatar_url` | string(uri) | false | | | | `»» id` | string(uuid) | true | | | +| `»» name` | string | false | | | | `»» username` | string | true | | | | `Β» has_external_agent` | boolean | false | | | | `Β» id` | string(uuid) | false | | | @@ -1665,6 +1678,7 @@ Status Code **200** | `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | | | `»» file_id` | string(uuid) | false | | | | `»» id` | string(uuid) | false | | | +| `»» initiator_id` | string(uuid) | false | | | | `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | | | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | @@ -1747,6 +1761,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion} \ "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -1762,6 +1777,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion} \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1856,6 +1872,7 @@ curl 
-X PATCH http://coder-server:8080/api/v2/templateversions/{templateversion} "created_by": { "avatar_url": "http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "username": "string" }, "has_external_agent": true, @@ -1871,6 +1888,7 @@ curl -X PATCH http://coder-server:8080/api/v2/templateversions/{templateversion} "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -2069,6 +2087,7 @@ curl -X POST http://coder-server:8080/api/v2/templateversions/{templateversion}/ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -2143,6 +2162,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/d "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", diff --git a/docs/reference/api/users.md b/docs/reference/api/users.md index 9815ba5406e33..857d619398ff9 100644 --- a/docs/reference/api/users.md +++ b/docs/reference/api/users.md @@ -757,6 +757,12 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens \ ```json [ { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "created_at": "2019-08-24T14:15:22Z", "expires_at": "2019-08-24T14:15:22Z", "id": "string", @@ -784,31 +790,76 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens \ Status Code **200** -| Name | 
Type | Required | Restrictions | Description | -|----------------------|--------------------------------------------------------|----------|--------------|---------------------------------| -| `[array item]` | array | false | | | -| `Β» created_at` | string(date-time) | true | | | -| `Β» expires_at` | string(date-time) | true | | | -| `Β» id` | string | true | | | -| `Β» last_used` | string(date-time) | true | | | -| `Β» lifetime_seconds` | integer | true | | | -| `Β» login_type` | [codersdk.LoginType](schemas.md#codersdklogintype) | true | | | -| `Β» scope` | [codersdk.APIKeyScope](schemas.md#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. | -| `Β» scopes` | array | false | | | -| `Β» token_name` | string | true | | | -| `Β» updated_at` | string(date-time) | true | | | -| `Β» user_id` | string(uuid) | true | | | +| Name | Type | Required | Restrictions | Description | +|----------------------|----------------------------------------------------------|----------|--------------|---------------------------------| +| `[array item]` | array | false | | | +| `Β» allow_list` | array | false | | | +| `»» id` | string | false | | | +| `»» type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `Β» created_at` | string(date-time) | true | | | +| `Β» expires_at` | string(date-time) | true | | | +| `Β» id` | string | true | | | +| `Β» last_used` | string(date-time) | true | | | +| `Β» lifetime_seconds` | integer | true | | | +| `Β» login_type` | [codersdk.LoginType](schemas.md#codersdklogintype) | true | | | +| `Β» scope` | [codersdk.APIKeyScope](schemas.md#codersdkapikeyscope) | false | | Deprecated: use Scopes instead. 
| +| `Β» scopes` | array | false | | | +| `Β» token_name` | string | true | | | +| `Β» updated_at` | string(date-time) | true | | | +| `Β» user_id` | string(uuid) | true | | | #### Enumerated Values -| Property | Value | -|--------------|-----------------------| -| `login_type` | `password` | -| `login_type` | `github` | -| `login_type` | `oidc` | -| `login_type` | `token` | -| `scope` | `all` | -| `scope` | `application_connect` | +| Property | Value | +|--------------|------------------------------------| +| `type` | `*` | +| `type` | `aibridge_interception` | +| `type` | `api_key` | +| `type` | `assign_org_role` | +| `type` | `assign_role` | +| `type` | `audit_log` | +| `type` | `connection_log` | +| `type` | `crypto_key` | +| `type` | `debug_info` | +| `type` | `deployment_config` | +| `type` | `deployment_stats` | +| `type` | `file` | +| `type` | `group` | +| `type` | `group_member` | +| `type` | `idpsync_settings` | +| `type` | `inbox_notification` | +| `type` | `license` | +| `type` | `notification_message` | +| `type` | `notification_preference` | +| `type` | `notification_template` | +| `type` | `oauth2_app` | +| `type` | `oauth2_app_code_token` | +| `type` | `oauth2_app_secret` | +| `type` | `organization` | +| `type` | `organization_member` | +| `type` | `prebuilt_workspace` | +| `type` | `provisioner_daemon` | +| `type` | `provisioner_jobs` | +| `type` | `replicas` | +| `type` | `system` | +| `type` | `tailnet_coordinator` | +| `type` | `task` | +| `type` | `template` | +| `type` | `usage_event` | +| `type` | `user` | +| `type` | `user_secret` | +| `type` | `webpush_subscription` | +| `type` | `workspace` | +| `type` | `workspace_agent_devcontainers` | +| `type` | `workspace_agent_resource_monitor` | +| `type` | `workspace_dormant` | +| `type` | `workspace_proxy` | +| `login_type` | `password` | +| `login_type` | `github` | +| `login_type` | `oidc` | +| `login_type` | `token` | +| `scope` | `all` | +| `scope` | `application_connect` | To perform this 
operation, you must be authenticated. [Learn more](authentication.md). @@ -830,6 +881,12 @@ curl -X POST http://coder-server:8080/api/v2/users/{user}/keys/tokens \ ```json { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "lifetime": 0, "scope": "all", "scopes": [ @@ -890,6 +947,12 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens/{keyname} \ ```json { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "created_at": "2019-08-24T14:15:22Z", "expires_at": "2019-08-24T14:15:22Z", "id": "string", @@ -940,6 +1003,12 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \ ```json { + "allow_list": [ + { + "id": "string", + "type": "*" + } + ], "created_at": "2019-08-24T14:15:22Z", "expires_at": "2019-08-24T14:15:22Z", "id": "string", diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md index 455fefcb57749..4bd188df3daf5 100644 --- a/docs/reference/api/workspaces.md +++ b/docs/reference/api/workspaces.md @@ -103,6 +103,7 @@ of the template will be used. "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -295,6 +296,10 @@ of the template will be used. 
"owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -393,6 +398,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -585,6 +591,10 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -708,6 +718,7 @@ of the template will be used. "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -900,6 +911,10 @@ of the template will be used. 
"owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -1001,6 +1016,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1176,6 +1192,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -1275,6 +1295,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -1467,6 +1488,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -1583,6 +1608,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/acl \ { "avatar_url": 
"http://example.com", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "name": "string", "role": "admin", "username": "string" } @@ -1824,6 +1850,7 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "error_code": "REQUIRED_TEMPLATE_VARIABLES", "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "input": { "error": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -2016,6 +2043,10 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", diff --git a/docs/reference/cli/aibridge.md b/docs/reference/cli/aibridge.md new file mode 100644 index 0000000000000..7b16c5cdc87a8 --- /dev/null +++ b/docs/reference/cli/aibridge.md @@ -0,0 +1,16 @@ + +# aibridge + +Manage AIBridge. + +## Usage + +```console +coder aibridge +``` + +## Subcommands + +| Name | Purpose | +|-----------------------------------------------------------|--------------------------------| +| [interceptions](./aibridge_interceptions.md) | Manage AIBridge interceptions. | diff --git a/docs/reference/cli/aibridge_interceptions.md b/docs/reference/cli/aibridge_interceptions.md new file mode 100644 index 0000000000000..9cfb3d45a74ea --- /dev/null +++ b/docs/reference/cli/aibridge_interceptions.md @@ -0,0 +1,16 @@ + +# aibridge interceptions + +Manage AIBridge interceptions. 
+ +## Usage + +```console +coder aibridge interceptions +``` + +## Subcommands + +| Name | Purpose | +|-------------------------------------------------------|--------------------------------------| +| [list](./aibridge_interceptions_list.md) | List AIBridge interceptions as JSON. | diff --git a/docs/reference/cli/aibridge_interceptions_list.md b/docs/reference/cli/aibridge_interceptions_list.md new file mode 100644 index 0000000000000..7e86cd4968e33 --- /dev/null +++ b/docs/reference/cli/aibridge_interceptions_list.md @@ -0,0 +1,69 @@ + +# aibridge interceptions list + +List AIBridge interceptions as JSON. + +## Usage + +```console +coder aibridge interceptions list [flags] +``` + +## Options + +### --initiator + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions initiated by this user. Accepts a user ID, username, or "me". + +### --started-before + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions started before this time. Must be after 'started-after' if set. Accepts a time in the RFC 3339 format, e.g. "2006-01-02T15:04:05Z07:00". + +### --started-after + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions started after this time. Must be before 'started-before' if set. Accepts a time in the RFC 3339 format, e.g. "2006-01-02T15:04:05Z07:00". + +### --provider + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions from this provider. + +### --model + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions from this model. + +### --after-id + +| | | +|------|---------------------| +| Type | string | + +The ID of the last result on the previous page to use as a pagination cursor. + +### --limit + +| | | +|---------|------------------| +| Type | int | +| Default | 100 | + +The limit of results to return. Must be between 1 and 1000. 
diff --git a/docs/reference/cli/index.md b/docs/reference/cli/index.md index c298f8bcb61a2..c1410b4599977 100644 --- a/docs/reference/cli/index.md +++ b/docs/reference/cli/index.md @@ -68,6 +68,7 @@ Coder β€” A tool for provisioning self-hosted development environments with Terr | [groups](./groups.md) | Manage groups | | [prebuilds](./prebuilds.md) | Manage Coder prebuilds | | [external-workspaces](./external-workspaces.md) | Create or manage external workspaces | +| [aibridge](./aibridge.md) | Manage AIBridge. | ## Options diff --git a/docs/reference/cli/provisioner_jobs_list.md b/docs/reference/cli/provisioner_jobs_list.md index a0bff8554d610..0167dd467d60a 100644 --- a/docs/reference/cli/provisioner_jobs_list.md +++ b/docs/reference/cli/provisioner_jobs_list.md @@ -34,6 +34,15 @@ Filter by job status. Limit the number of jobs returned. +### -i, --initiator + +| | | +|-------------|----------------------------------------------------| +| Type | string | +| Environment | $CODER_PROVISIONER_JOB_LIST_INITIATOR | + +Filter by initiator (user ID or username). + ### -O, --org | | | @@ -45,10 +54,10 @@ Select which organization (uuid or name) to use. 
### -c, --column -| | | -|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Type | [id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|logs overflowed\|organization\|queue] | -| Default | created at,id,type,template display name,status,queue,tags | +| | | +|---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | [id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|initiator id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|logs overflowed\|organization\|queue] | +| Default | created at,id,type,template display name,status,queue,tags | Columns to display in table output. 
diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md index bdc424bdd7a8b..e689f7fa28336 100644 --- a/docs/reference/cli/server.md +++ b/docs/reference/cli/server.md @@ -1647,3 +1647,108 @@ How often to reconcile workspace prebuilds state. | Default | false | Hide AI tasks from the dashboard. + +### --aibridge-enabled + +| | | +|-------------|--------------------------------------| +| Type | bool | +| Environment | $CODER_AIBRIDGE_ENABLED | +| YAML | aibridge.enabled | +| Default | false | + +Whether to start an in-memory aibridged instance. + +### --aibridge-openai-base-url + +| | | +|-------------|----------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_OPENAI_BASE_URL | +| YAML | aibridge.openai_base_url | +| Default | https://api.openai.com/v1/ | + +The base URL of the OpenAI API. + +### --aibridge-openai-key + +| | | +|-------------|-----------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_OPENAI_KEY | +| YAML | aibridge.openai_key | + +The key to authenticate against the OpenAI API. + +### --aibridge-anthropic-base-url + +| | | +|-------------|-------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_ANTHROPIC_BASE_URL | +| YAML | aibridge.anthropic_base_url | +| Default | https://api.anthropic.com/ | + +The base URL of the Anthropic API. + +### --aibridge-anthropic-key + +| | | +|-------------|--------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_ANTHROPIC_KEY | +| YAML | aibridge.anthropic_key | + +The key to authenticate against the Anthropic API. + +### --aibridge-bedrock-region + +| | | +|-------------|---------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_REGION | +| YAML | aibridge.bedrock_region | + +The AWS Bedrock API region. 
+ +### --aibridge-bedrock-access-key + +| | | +|-------------|-------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY | +| YAML | aibridge.bedrock_access_key | + +The access key to authenticate against the AWS Bedrock API. + +### --aibridge-bedrock-access-key-secret + +| | | +|-------------|--------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET | +| YAML | aibridge.bedrock_access_key_secret | + +The access key secret to use with the access key to authenticate against the AWS Bedrock API. + +### --aibridge-bedrock-model + +| | | +|-------------|---------------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_MODEL | +| YAML | aibridge.bedrock_model | +| Default | global.anthropic.claude-sonnet-4-5-20250929-v1:0 | + +The model to use when making requests to the AWS Bedrock API. + +### --aibridge-bedrock-small-fastmodel + +| | | +|-------------|--------------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL | +| YAML | aibridge.bedrock_small_fast_model | +| Default | global.anthropic.claude-haiku-4-5-20251001-v1:0 | + +The small fast model to use when making requests to the AWS Bedrock API. Claude Code uses Haiku-class models to perform background tasks. See https://docs.claude.com/en/docs/claude-code/settings#environment-variables. 
diff --git a/docs/reference/cli/templates_init.md b/docs/reference/cli/templates_init.md index 7613144e66018..3ac28749ad5e4 100644 --- a/docs/reference/cli/templates_init.md +++ b/docs/reference/cli/templates_init.md @@ -13,8 +13,8 @@ coder templates init [flags] [directory] ### --id -| | | -|------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Type | aws-devcontainer\|aws-linux\|aws-windows\|azure-linux\|digitalocean-linux\|docker\|docker-devcontainer\|docker-envbuilder\|gcp-devcontainer\|gcp-linux\|gcp-vm-container\|gcp-windows\|kubernetes\|kubernetes-devcontainer\|nomad-docker\|scratch | +| | | +|------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | aws-devcontainer\|aws-linux\|aws-windows\|azure-linux\|digitalocean-linux\|docker\|docker-devcontainer\|docker-envbuilder\|gcp-devcontainer\|gcp-linux\|gcp-vm-container\|gcp-windows\|kubernetes\|kubernetes-devcontainer\|nomad-docker\|scratch\|tasks-docker | Specify a given example template by ID. diff --git a/docs/reference/cli/tokens.md b/docs/reference/cli/tokens.md index 36b6575ed323f..fd4369d5e63f0 100644 --- a/docs/reference/cli/tokens.md +++ b/docs/reference/cli/tokens.md @@ -25,6 +25,10 @@ Tokens are used to authenticate automated clients to Coder. $ coder tokens ls + - Create a scoped token: + + $ coder tokens create --scope workspace:read --allow workspace: + - Remove a token by ID: $ coder tokens rm WuoWs4ZsMX @@ -32,8 +36,9 @@ Tokens are used to authenticate automated clients to Coder. 
## Subcommands

-| Name                                      | Purpose        |
-|-------------------------------------------|----------------|
-| [create](./tokens_create.md)              | Create a token |
-| [list](./tokens_list.md)                  | List tokens    |
-| [remove](./tokens_remove.md)              | Delete a token |
+| Name                                      | Purpose                                    |
+|-------------------------------------------|--------------------------------------------|
+| [create](./tokens_create.md)              | Create a token                             |
+| [list](./tokens_list.md)                  | List tokens                                |
+| [view](./tokens_view.md)                  | Display detailed information about a token |
+| [remove](./tokens_remove.md)              | Delete a token                             |
diff --git a/docs/reference/cli/tokens_create.md b/docs/reference/cli/tokens_create.md
index 7ad9699c17c35..b15e58cd1304d 100644
--- a/docs/reference/cli/tokens_create.md
+++ b/docs/reference/cli/tokens_create.md
@@ -18,7 +18,7 @@ coder tokens create [flags]
| Type        | string                          |
| Environment | $CODER_TOKEN_LIFETIME |

-Specify a duration for the lifetime of the token.
+Duration for the token lifetime. Supports standard Go duration units (ns, us, ms, s, m, h) plus d (days) and y (years). Examples: 8h, 30d, 1y, 1d12h30m.

### -n, --name

@@ -37,3 +37,19 @@ Specify a human-readable name.
| Environment | $CODER_TOKEN_USER |

Specify the user to create the token for (Only works if logged in user is admin).
+
+### --scope
+
+| | |
+|------|---------------------------|
+| Type | string-array |
+
+Repeatable scope to attach to the token (e.g. workspace:read).
+
+### --allow
+
+| | |
+|------|-------------------------|
+| Type | allow-list |
+
+Repeatable allow-list entry (`<type>:<id>`, e.g. workspace:1234-...).
diff --git a/docs/reference/cli/tokens_list.md b/docs/reference/cli/tokens_list.md index 150b411855174..53d5e9b7b57c8 100644 --- a/docs/reference/cli/tokens_list.md +++ b/docs/reference/cli/tokens_list.md @@ -25,10 +25,10 @@ Specifies whether all users' tokens will be listed or not (must have Owner role ### -c, --column -| | | -|---------|-------------------------------------------------------------------| -| Type | [id\|name\|last used\|expires at\|created at\|owner] | -| Default | id,name,last used,expires at,created at | +| | | +|---------|---------------------------------------------------------------------------------------| +| Type | [id\|name\|scopes\|allow list\|last used\|expires at\|created at\|owner] | +| Default | id,name,scopes,allow list,last used,expires at,created at | Columns to display in table output. diff --git a/docs/reference/cli/tokens_view.md b/docs/reference/cli/tokens_view.md new file mode 100644 index 0000000000000..f5008f5e41092 --- /dev/null +++ b/docs/reference/cli/tokens_view.md @@ -0,0 +1,30 @@ + +# tokens view + +Display detailed information about a token + +## Usage + +```console +coder tokens view [flags] +``` + +## Options + +### -c, --column + +| | | +|---------|---------------------------------------------------------------------------------------| +| Type | [id\|name\|scopes\|allow list\|last used\|expires at\|created at\|owner] | +| Default | id,name,scopes,allow list,last used,expires at,created at,owner | + +Columns to display in table output. + +### -o, --output + +| | | +|---------|--------------------------| +| Type | table\|json | +| Default | table | + +Output format. diff --git a/docs/reference/cli/users_list.md b/docs/reference/cli/users_list.md index 93122e7741072..7217a8267b760 100644 --- a/docs/reference/cli/users_list.md +++ b/docs/reference/cli/users_list.md @@ -25,10 +25,10 @@ Filter users by their GitHub user ID. 
### -c, --column -| | | -|---------|--------------------------------------------------------------------| -| Type | [id\|username\|email\|created at\|updated at\|status] | -| Default | username,email,created at,status | +| | | +|---------|--------------------------------------------------------------------------| +| Type | [id\|username\|name\|email\|created at\|updated at\|status] | +| Default | username,email,created at,status | Columns to display in table output. diff --git a/docs/user-guides/workspace-access/index.md b/docs/user-guides/workspace-access/index.md index 266e76e94757f..53b1583dac4b2 100644 --- a/docs/user-guides/workspace-access/index.md +++ b/docs/user-guides/workspace-access/index.md @@ -12,13 +12,18 @@ dashboard. ![Workspace View](../../images/user-guides/workspace-view-connection-annotated.png) -## Terminal +## Web Terminal -The terminal is implicitly enabled in Coder and allows you to access your -workspace through the shell environment set by your template. +The Web Terminal is a browser-based terminal that provides instant access to +your workspace's shell environment. It uses [xterm.js](https://xtermjs.org/) +and WebSocket technology for a responsive terminal experience with features +like persistent sessions, Unicode support, and clickable URLs. ![Terminal Access](../../images/user-guides/terminal-access.png) +Read the complete [Web Terminal documentation](./web-terminal.md) for +customization options, keyboard shortcuts, and troubleshooting guides. + ## SSH ### Through with the CLI diff --git a/docs/user-guides/workspace-access/remote-desktops.md b/docs/user-guides/workspace-access/remote-desktops.md index 71d5944020d34..f07589a53993f 100644 --- a/docs/user-guides/workspace-access/remote-desktops.md +++ b/docs/user-guides/workspace-access/remote-desktops.md @@ -64,7 +64,7 @@ coder port-forward --tcp 3399:3389 Then, connect to your workspace via RDP at `localhost:3399`. 
![windows-rdp](../../images/user-guides/remote-desktops/windows_rdp_client.png) -
s + > [!NOTE] > Some versions of Windows, including Windows Server 2022, do not communicate correctly over UDP when using Coder Connect because they do not respect the maximum transmission unit (MTU) of the link. When this happens, the RDP client will appear to connect, but displays a blank screen. diff --git a/docs/user-guides/workspace-access/web-terminal.md b/docs/user-guides/workspace-access/web-terminal.md new file mode 100644 index 0000000000000..93c364c2894d3 --- /dev/null +++ b/docs/user-guides/workspace-access/web-terminal.md @@ -0,0 +1,236 @@ +# Web Terminal + +The Web Terminal is a browser-based terminal interface that provides instant +access to your workspace's shell environment directly from the Coder dashboard. +It's automatically enabled for all workspaces and requires no additional +configuration. + +![Terminal Access](../../images/user-guides/terminal-access.png) + +## Overview + +The Web Terminal leverages [xterm.js](https://xtermjs.org/), an industry-standard +terminal emulator, combined with WebSocket technology to provide a responsive +and feature-rich terminal experience in your browser. + +### Key Features + +- **Instant Access**: Click the terminal icon in your workspace to open a shell + session +- **Persistent Sessions**: Sessions are maintained using reconnection tokens, + allowing you to resume your terminal even after page refreshes or network + interruptions +- **Full Unicode Support**: Displays international characters and emojis + correctly +- **Clickable Links**: Automatically detects and makes URLs clickable +- **Copy/Paste Support**: Select text to automatically copy it to your clipboard +- **Multiple Rendering Options**: Choose between different rendering engines for + optimal performance + +## Accessing the Terminal + +### From the Dashboard + +1. Navigate to your workspace in the Coder dashboard +2. Click the **Terminal** button or icon +3. 
The terminal will open in a new browser tab or window + +The terminal automatically connects to your workspace agent using an optimized +WebSocket connection. + +### Direct URL Access + +You can also bookmark or share direct terminal URLs: + +```text +https://coder.example.com/@username/workspace-name/terminal +``` + +To access a specific agent in a multi-agent workspace: + +```text +https://coder.example.com/@username/workspace-name.agent-name/terminal +``` + +## Architecture + +### How It Works + +The Web Terminal creates a persistent connection between your browser and the +workspace: + +1. **Browser**: Renders the terminal using xterm.js +2. **WebSocket**: Maintains a persistent, low-latency connection +3. **Coder Server**: Routes traffic between browser and workspace +4. **Workspace Agent**: Manages the pseudo-terminal (PTY) session +5. **Shell Process**: Your actual bash/zsh/fish shell + +The connection flow is: Browser ↔ WebSocket ↔ Coder Server ↔ Workspace Agent ↔ Shell Process + +### Reconnection & Persistence + +The terminal uses reconnection tokens to maintain session state: + +- Each terminal session has a unique UUID +- If the connection drops, the same token is used to reconnect +- The workspace agent buffers output during disconnections +- Your shell session continues running even when the browser is closed + +## Customization + +### Font Selection + +You can customize the terminal font through your user settings: + +1. Click your avatar in the top-right corner +2. Select **Settings** β†’ **Appearance** +3. Choose from available fonts: + - **IBM Plex Mono** (default) + - **Fira Code** (with ligatures) + - **JetBrains Mono** + - **Source Code Pro** + +The font change applies immediately to all open terminal sessions. 
+ +### Rendering Engine + +Administrators can configure the terminal renderer for performance optimization: + +```yaml +# In your Coder deployment configuration +webTerminalRenderer: "canvas" # Options: canvas, webgl, dom +``` + +Or via environment variable: + +```bash +CODER_WEB_TERMINAL_RENDERER=canvas +``` + +**Renderer Options:** + +- **`canvas`** (default): Best compatibility, good performance on most systems +- **`webgl`**: Hardware-accelerated, ideal for high-refresh terminals and + complex rendering +- **`dom`**: Fallback option, useful for accessibility tools or older browsers + +> **Note:** The renderer setting is deployment-wide and requires a Coder server +> restart to take effect. + +## Keyboard Shortcuts + +The Web Terminal supports standard terminal keybindings: + +| Shortcut | Action | +|-------------------------------------|---------------------------| +| `Ctrl+Shift+C` (Mac: `Cmd+Shift+C`) | Copy selected text | +| `Ctrl+Shift+V` (Mac: `Cmd+Shift+V`) | Paste from clipboard | +| `Shift+Enter` | Insert literal newline | +| `Ctrl+C` | Send interrupt (SIGINT) | +| `Ctrl+D` | Send EOF / exit shell | +| `Ctrl+Z` | Suspend process (SIGTSTP) | + +### Copy/Paste Behavior + +- **Auto-copy**: Selecting text automatically copies it to your clipboard +- **Paste**: Use the standard paste shortcut or middle-click (on Linux/X11) +- **Browser permissions**: First paste may prompt for clipboard access + +## URL Handling + +The terminal automatically detects URLs and makes them clickable. When you click +a URL: + +- **External URLs** (e.g., `https://example.com`) open in a new tab +- **Localhost URLs** (e.g., `http://localhost:3000`) are automatically + port-forwarded through Coder's [port forwarding](./port-forwarding.md) system +- **Port-forwarded URLs** use your configured workspace proxy + +This makes it seamless to open development servers running in your workspace. 
+ +## Advanced Usage + +### Custom Commands + +You can open a terminal with a specific command by adding a query parameter: + +```text +https://coder.example.com/@user/workspace/terminal?command=htop +``` + +This will execute `htop` immediately when the terminal opens. + +### Container Selection + +For workspaces with multiple Docker containers, specify which container to +connect to: + +```text +https://coder.example.com/@user/workspace/terminal?container=sidecar +``` + +You can also specify the container user: + +```text +https://coder.example.com/@user/workspace/terminal?container=app&container_user=node +``` + +> **Note:** This feature only works with Docker containers. + +### Debug Mode + +Enable debug information to monitor connection latency: + +```text +https://coder.example.com/@user/workspace/terminal?debug +``` + +This displays the current latency to your selected workspace proxy in the +bottom-right corner. + +## Configuration File Support + +The Web Terminal uses xterm.js under the hood, which is configured +programmatically rather than through a configuration file. However, you can +customize various aspects: + +### User-Side Customization + +End-users can customize: + +- **Font family** via Settings β†’ Appearance +- **Shell environment** via dotfiles or shell rc files +- **TERM variable** is automatically set to `xterm-256color` + +### Shell Configuration + +The terminal respects your shell's configuration files: + +```bash +# ~/.bashrc or ~/.zshrc +export PS1="\u@\h:\w\$ " # Custom prompt +alias ll="ls -lah" # Custom aliases + +# Set terminal colors +export CTERM=xterm-256color +``` + +## Troubleshooting + +### Connection Issues + +If the terminal fails to connect: + +1. **Check workspace status**: Ensure your workspace is running +2. **Verify agent health**: Look for agent connection warnings +3. **Network issues**: Check if WebSockets are blocked by your firewall/proxy +4. 
**Browser console**: Open DevTools to see WebSocket error messages
+
+### Display Issues
+
+If characters or colors appear incorrect:
+
+1. **Unicode support**: Ensure your shell locale is set correctly (`locale -a`)
+2. **Terminal type**: The terminal sets `TERM=xterm-256color` automatically
+3. **Color schemes**: Some applications may not render correctly in dark mode
+4. **Font rendering**: Try switching terminal fonts in your appearance settings
diff --git a/docs/user-guides/workspace-scheduling.md b/docs/user-guides/workspace-scheduling.md
index b5c27263a7e2e..151829c27d727 100644
--- a/docs/user-guides/workspace-scheduling.md
+++ b/docs/user-guides/workspace-scheduling.md
@@ -39,6 +39,9 @@ workspace if you're still using it. It will wait for the user to become inactive
before checking connections again (1 hour by default). Template admins can
modify this duration with the **activity bump** template setting.
+> [!NOTE]
+> Autostop must be enabled on the template prior to workspace creation; it is not applied to existing running workspaces.
+ ![Autostop UI](../images/workspaces/autostop.png) ## Activity detection diff --git a/dogfood/coder/Dockerfile b/dogfood/coder/Dockerfile index 90bdcb71aaacb..1214bd17ca44f 100644 --- a/dogfood/coder/Dockerfile +++ b/dogfood/coder/Dockerfile @@ -1,5 +1,5 @@ # 1.86.0 -FROM rust:slim@sha256:3f391b0678a6e0c88fd26f13e399c9c515ac47354e3cadfee7daee3b21651a4f AS rust-utils +FROM rust:slim@sha256:e4ae8ab67883487c5545884d5aa5ebbe86b5f13c6df4a8e3e2f34c89cedb9f54 AS rust-utils # Install rust helper programs ENV CARGO_INSTALL_ROOT=/tmp/ # Use more reliable mirrors for Debian packages @@ -8,11 +8,11 @@ RUN sed -i 's|http://deb.debian.org/debian|http://mirrors.edge.kernel.org/debian RUN apt-get update && apt-get install -y libssl-dev openssl pkg-config build-essential RUN cargo install jj-cli typos-cli watchexec-cli -FROM ubuntu:jammy@sha256:0e5e4a57c2499249aafc3b40fcd541e9a456aab7296681a3994d631587203f97 AS go +FROM ubuntu:jammy@sha256:4e0171b9275e12d375863f2b3ae9ce00a4c53ddda176bd55868df97ac6f21a6e AS go # Install Go manually, so that we can control the version -ARG GO_VERSION=1.24.6 -ARG GO_CHECKSUM="bbca37cc395c974ffa4893ee35819ad23ebb27426df87af92e93a9ec66ef8712" +ARG GO_VERSION=1.24.10 +ARG GO_CHECKSUM="dd52b974e3d9c5a7bbfb222c685806def6be5d6f7efd10f9caa9ca1fa2f47955" # Boring Go is needed to build FIPS-compliant binaries. 
RUN apt-get update && \ @@ -97,7 +97,7 @@ RUN curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/d unzip protoc.zip && \ rm protoc.zip -FROM ubuntu:jammy@sha256:0e5e4a57c2499249aafc3b40fcd541e9a456aab7296681a3994d631587203f97 +FROM ubuntu:jammy@sha256:4e0171b9275e12d375863f2b3ae9ce00a4c53ddda176bd55868df97ac6f21a6e SHELL ["/bin/bash", "-c"] diff --git a/dogfood/coder/files/etc/apt/preferences.d/docker b/dogfood/coder/files/etc/apt/preferences.d/docker index a92c0abb03d7c..91dcb2b37f643 100644 --- a/dogfood/coder/files/etc/apt/preferences.d/docker +++ b/dogfood/coder/files/etc/apt/preferences.d/docker @@ -4,8 +4,12 @@ Pin: origin download.docker.com Pin-Priority: 1 # Docker Community Edition +# We need to pin docker-ce to a specific version because containerd is pinned +# to an older version. Newer major versions of docker-ce require a version of +# containerd.io greater than our pinned version. Package: docker-ce Pin: origin download.docker.com +Pin: version 5:27.* Pin-Priority: 500 # Docker command-line tool diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index 6762f2bcbee27..37a53349bb903 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 2.9" + version = ">= 2.12.0" } docker = { source = "kreuzwerker/docker" @@ -269,6 +269,13 @@ data "coder_workspace_tags" "tags" { } } +data "coder_workspace_tags" "prebuild" { + count = data.coder_workspace_owner.me.name == "prebuilds" ? 
1 : 0 + tags = { + "is_prebuild" = "true" + } +} + data "coder_parameter" "ide_choices" { type = "list(string)" name = "Select IDEs" @@ -364,7 +371,7 @@ module "git-config" { module "git-clone" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/git-clone/coder" - version = "1.1.1" + version = "1.2.0" agent_id = coder_agent.dev.id url = "https://github.com/coder/coder" base_dir = local.repo_base_dir @@ -444,7 +451,7 @@ module "windsurf" { module "zed" { count = contains(jsondecode(data.coder_parameter.ide_choices.value), "zed") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/zed/coder" - version = "1.1.0" + version = "1.1.1" agent_id = coder_agent.dev.id agent_name = "dev" folder = local.repo_dir @@ -472,7 +479,7 @@ resource "coder_agent" "dev" { dir = local.repo_dir env = { OIDC_TOKEN : data.coder_workspace_owner.me.oidc_access_token, - ANTHROPIC_BASE_URL : "https://dev.coder.com/api/experimental/aibridge/anthropic", + ANTHROPIC_BASE_URL : "https://dev.coder.com/api/v2/aibridge/anthropic", ANTHROPIC_AUTH_TOKEN : data.coder_workspace_owner.me.session_token } startup_script_behavior = "blocking" @@ -813,7 +820,6 @@ resource "coder_metadata" "container_info" { locals { claude_system_prompt = <<-EOT - -- Framing -- You are a helpful Coding assistant. Aim to autonomously investigate and solve issues the user gives you and test your work, whenever possible. @@ -822,7 +828,6 @@ locals { but opt for autonomy. -- Tool Selection -- - - coder_report_task: providing status updates or requesting user input. - playwright: previewing your changes after you made them to confirm it worked as expected - desktop-commander - use only for commands that keep running @@ -834,44 +839,24 @@ locals { - Stays running? β†’ desktop-commander - Finishes immediately? β†’ built-in tools - -- Task Reporting -- - Report all tasks to Coder, following these EXACT guidelines: - 1. Be granular. 
If you are investigating with multiple steps, report each step - to coder. - 2. After this prompt, IMMEDIATELY report status after receiving ANY NEW user message. - Do not report any status related with this system prompt. - 3. Use "state": "working" when actively processing WITHOUT needing - additional user input - 4. Use "state": "complete" only when finished with a task - 5. Use "state": "failure" when you need ANY user input, lack sufficient - details, or encounter blockers - - In your summary: - - Be specific about what you're doing - - Clearly indicate what information you need from the user when in - "failure" state - - Keep it under 160 characters - - Make it actionable - -- Context -- There is an existing application in the current directory. Be sure to read CLAUDE.md before making any changes. This is a real-world production application. As such, make sure to think carefully, use TODO lists, and plan carefully before making changes. - EOT - } module "claude-code" { count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/claude-code/coder" - version = "3.0.0" + version = "3.3.2" agent_id = coder_agent.dev.id workdir = local.repo_dir claude_code_version = "latest" order = 999 - claude_api_key = data.coder_workspace_owner.me.session_token + claude_api_key = data.coder_workspace_owner.me.session_token # To Enable AI Bridge integration + agentapi_version = "latest" system_prompt = local.claude_system_prompt ai_prompt = data.coder_parameter.ai_prompt.value diff --git a/enterprise/x/aibridged/aibridged.go b/enterprise/aibridged/aibridged.go similarity index 97% rename from enterprise/x/aibridged/aibridged.go rename to enterprise/aibridged/aibridged.go index a1fa4022ff960..fcec1629b8701 100644 --- a/enterprise/x/aibridged/aibridged.go +++ b/enterprise/aibridged/aibridged.go @@ -19,7 +19,7 @@ var _ io.Closer = &Server{} // Server provides the AI Bridge functionality. 
// It is responsible for: -// - receiving requests on /api/experimental/aibridged/* // TODO: update endpoint once out of experimental +// - receiving requests on /api/v2/aibridged/* // - manipulating the requests // - relaying requests to upstream AI services and relaying responses to caller // diff --git a/enterprise/x/aibridged/aibridged_integration_test.go b/enterprise/aibridged/aibridged_integration_test.go similarity index 98% rename from enterprise/x/aibridged/aibridged_integration_test.go rename to enterprise/aibridged/aibridged_integration_test.go index 69d7627e04c5f..88fa21377f5a2 100644 --- a/enterprise/x/aibridged/aibridged_integration_test.go +++ b/enterprise/aibridged/aibridged_integration_test.go @@ -19,8 +19,8 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/aibridged" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" - "github.com/coder/coder/v2/enterprise/x/aibridged" "github.com/coder/coder/v2/testutil" ) @@ -164,7 +164,7 @@ func TestIntegration(t *testing.T) { require.NoError(t, err) logger := testutil.Logger(t) - providers := []aibridge.Provider{aibridge.NewOpenAIProvider(aibridge.ProviderConfig{BaseURL: mockOpenAI.URL})} + providers := []aibridge.Provider{aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{BaseURL: mockOpenAI.URL})} pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, logger) require.NoError(t, err) diff --git a/enterprise/x/aibridged/aibridged_test.go b/enterprise/aibridged/aibridged_test.go similarity index 95% rename from enterprise/x/aibridged/aibridged_test.go rename to enterprise/aibridged/aibridged_test.go index 22210a00f34f3..5d38b7f54d18c 100644 --- a/enterprise/x/aibridged/aibridged_test.go +++ b/enterprise/aibridged/aibridged_test.go @@ -18,9 +18,9 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/aibridge" 
"github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - mock "github.com/coder/coder/v2/enterprise/x/aibridged/aibridgedmock" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged" + mock "github.com/coder/coder/v2/enterprise/aibridged/aibridgedmock" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/coder/v2/testutil" ) @@ -70,6 +70,7 @@ func TestServeHTTP_FailureModes(t *testing.T) { t.Parallel() defaultHeaders := map[string]string{"Authorization": "Bearer key"} + httpClient := &http.Client{} cases := []struct { name string @@ -155,7 +156,7 @@ func TestServeHTTP_FailureModes(t *testing.T) { req.Header.Set(k, v) } - resp, err := http.DefaultClient.Do(req) + resp, err := httpClient.Do(req) t.Cleanup(func() { if resp == nil || resp.Body == nil { return @@ -287,8 +288,8 @@ func TestRouting(t *testing.T) { client := mock.NewMockDRPCClient(ctrl) providers := []aibridge.Provider{ - aibridge.NewOpenAIProvider(aibridge.ProviderConfig{BaseURL: openaiSrv.URL}), - aibridge.NewAnthropicProvider(aibridge.ProviderConfig{BaseURL: antSrv.URL}), + aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{BaseURL: openaiSrv.URL}), + aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{BaseURL: antSrv.URL}, nil), } pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, logger) require.NoError(t, err) @@ -304,6 +305,7 @@ func TestRouting(t *testing.T) { interceptionID = in.GetId() return &proto.RecordInterceptionResponse{}, nil }) + client.EXPECT().RecordInterceptionEnded(gomock.Any(), gomock.Any()).Times(tc.expectedHits) // Given: aibridged is started. 
srv, err := aibridged.New(t.Context(), pool, func(ctx context.Context) (aibridged.DRPCClient, error) { diff --git a/enterprise/x/aibridged/aibridgedmock/clientmock.go b/enterprise/aibridged/aibridgedmock/clientmock.go similarity index 86% rename from enterprise/x/aibridged/aibridgedmock/clientmock.go rename to enterprise/aibridged/aibridgedmock/clientmock.go index 2210051689402..2bb7083e10924 100644 --- a/enterprise/x/aibridged/aibridgedmock/clientmock.go +++ b/enterprise/aibridged/aibridgedmock/clientmock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/coder/coder/v2/enterprise/x/aibridged (interfaces: DRPCClient) +// Source: github.com/coder/coder/v2/enterprise/aibridged (interfaces: DRPCClient) // // Generated by this command: // -// mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged DRPCClient +// mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged DRPCClient // // Package aibridgedmock is a generated GoMock package. @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - proto "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + proto "github.com/coder/coder/v2/enterprise/aibridged/proto" gomock "go.uber.org/mock/gomock" drpc "storj.io/drpc" ) @@ -116,6 +116,21 @@ func (mr *MockDRPCClientMockRecorder) RecordInterception(ctx, in any) *gomock.Ca return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecordInterception", reflect.TypeOf((*MockDRPCClient)(nil).RecordInterception), ctx, in) } +// RecordInterceptionEnded mocks base method. 
+func (m *MockDRPCClient) RecordInterceptionEnded(ctx context.Context, in *proto.RecordInterceptionEndedRequest) (*proto.RecordInterceptionEndedResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RecordInterceptionEnded", ctx, in) + ret0, _ := ret[0].(*proto.RecordInterceptionEndedResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RecordInterceptionEnded indicates an expected call of RecordInterceptionEnded. +func (mr *MockDRPCClientMockRecorder) RecordInterceptionEnded(ctx, in any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecordInterceptionEnded", reflect.TypeOf((*MockDRPCClient)(nil).RecordInterceptionEnded), ctx, in) +} + // RecordPromptUsage mocks base method. func (m *MockDRPCClient) RecordPromptUsage(ctx context.Context, in *proto.RecordPromptUsageRequest) (*proto.RecordPromptUsageResponse, error) { m.ctrl.T.Helper() diff --git a/enterprise/x/aibridged/aibridgedmock/doc.go b/enterprise/aibridged/aibridgedmock/doc.go similarity index 52% rename from enterprise/x/aibridged/aibridgedmock/doc.go rename to enterprise/aibridged/aibridgedmock/doc.go index 3d3f56c05574d..9c9c644570463 100644 --- a/enterprise/x/aibridged/aibridgedmock/doc.go +++ b/enterprise/aibridged/aibridgedmock/doc.go @@ -1,4 +1,4 @@ package aibridgedmock -//go:generate mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged DRPCClient -//go:generate mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged Pooler +//go:generate mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged DRPCClient +//go:generate mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged Pooler diff --git a/enterprise/x/aibridged/aibridgedmock/poolmock.go b/enterprise/aibridged/aibridgedmock/poolmock.go similarity index 91% rename from 
enterprise/x/aibridged/aibridgedmock/poolmock.go rename to enterprise/aibridged/aibridgedmock/poolmock.go index bf3b39ed2a879..fcd941fc7c989 100644 --- a/enterprise/x/aibridged/aibridgedmock/poolmock.go +++ b/enterprise/aibridged/aibridgedmock/poolmock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/coder/coder/v2/enterprise/x/aibridged (interfaces: Pooler) +// Source: github.com/coder/coder/v2/enterprise/aibridged (interfaces: Pooler) // // Generated by this command: // -// mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged Pooler +// mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged Pooler // // Package aibridgedmock is a generated GoMock package. @@ -14,7 +14,7 @@ import ( http "net/http" reflect "reflect" - aibridged "github.com/coder/coder/v2/enterprise/x/aibridged" + aibridged "github.com/coder/coder/v2/enterprise/aibridged" gomock "go.uber.org/mock/gomock" ) diff --git a/enterprise/x/aibridged/client.go b/enterprise/aibridged/client.go similarity index 90% rename from enterprise/x/aibridged/client.go rename to enterprise/aibridged/client.go index 3004a84df9626..60650bf994f28 100644 --- a/enterprise/x/aibridged/client.go +++ b/enterprise/aibridged/client.go @@ -5,7 +5,7 @@ import ( "storj.io/drpc" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) type Dialer func(ctx context.Context) (DRPCClient, error) diff --git a/enterprise/x/aibridged/http.go b/enterprise/aibridged/http.go similarity index 98% rename from enterprise/x/aibridged/http.go rename to enterprise/aibridged/http.go index 43f4ba7670671..e87238cc7bbc0 100644 --- a/enterprise/x/aibridged/http.go +++ b/enterprise/aibridged/http.go @@ -9,7 +9,7 @@ import ( "cdr.dev/slog" "github.com/coder/aibridge" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + 
"github.com/coder/coder/v2/enterprise/aibridged/proto" ) var _ http.Handler = &Server{} diff --git a/enterprise/x/aibridged/mcp.go b/enterprise/aibridged/mcp.go similarity index 99% rename from enterprise/x/aibridged/mcp.go rename to enterprise/aibridged/mcp.go index 4b42287e02899..ab6d1d0031d37 100644 --- a/enterprise/x/aibridged/mcp.go +++ b/enterprise/aibridged/mcp.go @@ -10,7 +10,7 @@ import ( "cdr.dev/slog" "github.com/coder/aibridge/mcp" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) var ( diff --git a/enterprise/x/aibridged/mcp_internal_test.go b/enterprise/aibridged/mcp_internal_test.go similarity index 95% rename from enterprise/x/aibridged/mcp_internal_test.go rename to enterprise/aibridged/mcp_internal_test.go index 20edf79d06bf5..37fb6fe2c25d2 100644 --- a/enterprise/x/aibridged/mcp_internal_test.go +++ b/enterprise/aibridged/mcp_internal_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/coder/v2/testutil" ) diff --git a/enterprise/x/aibridged/pool.go b/enterprise/aibridged/pool.go similarity index 100% rename from enterprise/x/aibridged/pool.go rename to enterprise/aibridged/pool.go diff --git a/enterprise/x/aibridged/pool_test.go b/enterprise/aibridged/pool_test.go similarity index 96% rename from enterprise/x/aibridged/pool_test.go rename to enterprise/aibridged/pool_test.go index 38cae85da9d92..e3609144f0d59 100644 --- a/enterprise/x/aibridged/pool_test.go +++ b/enterprise/aibridged/pool_test.go @@ -13,8 +13,8 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/aibridge/mcp" "github.com/coder/aibridge/mcpmock" - "github.com/coder/coder/v2/enterprise/x/aibridged" - mock "github.com/coder/coder/v2/enterprise/x/aibridged/aibridgedmock" + "github.com/coder/coder/v2/enterprise/aibridged" + mock 
"github.com/coder/coder/v2/enterprise/aibridged/aibridgedmock" ) // TestPool validates the published behavior of [aibridged.CachedBridgePool]. diff --git a/enterprise/aibridged/proto/aibridged.pb.go b/enterprise/aibridged/proto/aibridged.pb.go new file mode 100644 index 0000000000000..a13a39ed95245 --- /dev/null +++ b/enterprise/aibridged/proto/aibridged.pb.go @@ -0,0 +1,1560 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.30.0 +// protoc v4.23.4 +// source: enterprise/aibridged/proto/aibridged.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + anypb "google.golang.org/protobuf/types/known/anypb" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type RecordInterceptionRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // UUID. + InitiatorId string `protobuf:"bytes,2,opt,name=initiator_id,json=initiatorId,proto3" json:"initiator_id,omitempty"` // UUID. 
+ Provider string `protobuf:"bytes,3,opt,name=provider,proto3" json:"provider,omitempty"` + Model string `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` + Metadata map[string]*anypb.Any `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + StartedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=started_at,json=startedAt,proto3" json:"started_at,omitempty"` +} + +func (x *RecordInterceptionRequest) Reset() { + *x = RecordInterceptionRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordInterceptionRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordInterceptionRequest) ProtoMessage() {} + +func (x *RecordInterceptionRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordInterceptionRequest.ProtoReflect.Descriptor instead. 
+func (*RecordInterceptionRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{0} +} + +func (x *RecordInterceptionRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *RecordInterceptionRequest) GetInitiatorId() string { + if x != nil { + return x.InitiatorId + } + return "" +} + +func (x *RecordInterceptionRequest) GetProvider() string { + if x != nil { + return x.Provider + } + return "" +} + +func (x *RecordInterceptionRequest) GetModel() string { + if x != nil { + return x.Model + } + return "" +} + +func (x *RecordInterceptionRequest) GetMetadata() map[string]*anypb.Any { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *RecordInterceptionRequest) GetStartedAt() *timestamppb.Timestamp { + if x != nil { + return x.StartedAt + } + return nil +} + +type RecordInterceptionResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RecordInterceptionResponse) Reset() { + *x = RecordInterceptionResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordInterceptionResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordInterceptionResponse) ProtoMessage() {} + +func (x *RecordInterceptionResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordInterceptionResponse.ProtoReflect.Descriptor instead. 
+func (*RecordInterceptionResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{1} +} + +type RecordInterceptionEndedRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // UUID. + EndedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=ended_at,json=endedAt,proto3" json:"ended_at,omitempty"` +} + +func (x *RecordInterceptionEndedRequest) Reset() { + *x = RecordInterceptionEndedRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordInterceptionEndedRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordInterceptionEndedRequest) ProtoMessage() {} + +func (x *RecordInterceptionEndedRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordInterceptionEndedRequest.ProtoReflect.Descriptor instead. 
+func (*RecordInterceptionEndedRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{2} +} + +func (x *RecordInterceptionEndedRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *RecordInterceptionEndedRequest) GetEndedAt() *timestamppb.Timestamp { + if x != nil { + return x.EndedAt + } + return nil +} + +type RecordInterceptionEndedResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RecordInterceptionEndedResponse) Reset() { + *x = RecordInterceptionEndedResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordInterceptionEndedResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordInterceptionEndedResponse) ProtoMessage() {} + +func (x *RecordInterceptionEndedResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordInterceptionEndedResponse.ProtoReflect.Descriptor instead. +func (*RecordInterceptionEndedResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{3} +} + +type RecordTokenUsageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. 
+ MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. + InputTokens int64 `protobuf:"varint,3,opt,name=input_tokens,json=inputTokens,proto3" json:"input_tokens,omitempty"` + OutputTokens int64 `protobuf:"varint,4,opt,name=output_tokens,json=outputTokens,proto3" json:"output_tokens,omitempty"` + Metadata map[string]*anypb.Any `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` +} + +func (x *RecordTokenUsageRequest) Reset() { + *x = RecordTokenUsageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordTokenUsageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordTokenUsageRequest) ProtoMessage() {} + +func (x *RecordTokenUsageRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordTokenUsageRequest.ProtoReflect.Descriptor instead. 
+func (*RecordTokenUsageRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{4} +} + +func (x *RecordTokenUsageRequest) GetInterceptionId() string { + if x != nil { + return x.InterceptionId + } + return "" +} + +func (x *RecordTokenUsageRequest) GetMsgId() string { + if x != nil { + return x.MsgId + } + return "" +} + +func (x *RecordTokenUsageRequest) GetInputTokens() int64 { + if x != nil { + return x.InputTokens + } + return 0 +} + +func (x *RecordTokenUsageRequest) GetOutputTokens() int64 { + if x != nil { + return x.OutputTokens + } + return 0 +} + +func (x *RecordTokenUsageRequest) GetMetadata() map[string]*anypb.Any { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *RecordTokenUsageRequest) GetCreatedAt() *timestamppb.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +type RecordTokenUsageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RecordTokenUsageResponse) Reset() { + *x = RecordTokenUsageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordTokenUsageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordTokenUsageResponse) ProtoMessage() {} + +func (x *RecordTokenUsageResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordTokenUsageResponse.ProtoReflect.Descriptor instead. 
+func (*RecordTokenUsageResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{5} +} + +type RecordPromptUsageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. + MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. + Prompt string `protobuf:"bytes,3,opt,name=prompt,proto3" json:"prompt,omitempty"` + Metadata map[string]*anypb.Any `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` +} + +func (x *RecordPromptUsageRequest) Reset() { + *x = RecordPromptUsageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordPromptUsageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordPromptUsageRequest) ProtoMessage() {} + +func (x *RecordPromptUsageRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordPromptUsageRequest.ProtoReflect.Descriptor instead. 
+func (*RecordPromptUsageRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{6} +} + +func (x *RecordPromptUsageRequest) GetInterceptionId() string { + if x != nil { + return x.InterceptionId + } + return "" +} + +func (x *RecordPromptUsageRequest) GetMsgId() string { + if x != nil { + return x.MsgId + } + return "" +} + +func (x *RecordPromptUsageRequest) GetPrompt() string { + if x != nil { + return x.Prompt + } + return "" +} + +func (x *RecordPromptUsageRequest) GetMetadata() map[string]*anypb.Any { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *RecordPromptUsageRequest) GetCreatedAt() *timestamppb.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +type RecordPromptUsageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RecordPromptUsageResponse) Reset() { + *x = RecordPromptUsageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordPromptUsageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordPromptUsageResponse) ProtoMessage() {} + +func (x *RecordPromptUsageResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordPromptUsageResponse.ProtoReflect.Descriptor instead. 
+func (*RecordPromptUsageResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{7} +} + +type RecordToolUsageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. + MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. + ServerUrl *string `protobuf:"bytes,3,opt,name=server_url,json=serverUrl,proto3,oneof" json:"server_url,omitempty"` // The URL of the MCP server. + Tool string `protobuf:"bytes,4,opt,name=tool,proto3" json:"tool,omitempty"` + Input string `protobuf:"bytes,5,opt,name=input,proto3" json:"input,omitempty"` + Injected bool `protobuf:"varint,6,opt,name=injected,proto3" json:"injected,omitempty"` + InvocationError *string `protobuf:"bytes,7,opt,name=invocation_error,json=invocationError,proto3,oneof" json:"invocation_error,omitempty"` // Only injected tools are invoked. 
+ Metadata map[string]*anypb.Any `protobuf:"bytes,8,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,9,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` +} + +func (x *RecordToolUsageRequest) Reset() { + *x = RecordToolUsageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordToolUsageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordToolUsageRequest) ProtoMessage() {} + +func (x *RecordToolUsageRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordToolUsageRequest.ProtoReflect.Descriptor instead. 
+func (*RecordToolUsageRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{8} +} + +func (x *RecordToolUsageRequest) GetInterceptionId() string { + if x != nil { + return x.InterceptionId + } + return "" +} + +func (x *RecordToolUsageRequest) GetMsgId() string { + if x != nil { + return x.MsgId + } + return "" +} + +func (x *RecordToolUsageRequest) GetServerUrl() string { + if x != nil && x.ServerUrl != nil { + return *x.ServerUrl + } + return "" +} + +func (x *RecordToolUsageRequest) GetTool() string { + if x != nil { + return x.Tool + } + return "" +} + +func (x *RecordToolUsageRequest) GetInput() string { + if x != nil { + return x.Input + } + return "" +} + +func (x *RecordToolUsageRequest) GetInjected() bool { + if x != nil { + return x.Injected + } + return false +} + +func (x *RecordToolUsageRequest) GetInvocationError() string { + if x != nil && x.InvocationError != nil { + return *x.InvocationError + } + return "" +} + +func (x *RecordToolUsageRequest) GetMetadata() map[string]*anypb.Any { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *RecordToolUsageRequest) GetCreatedAt() *timestamppb.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +type RecordToolUsageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RecordToolUsageResponse) Reset() { + *x = RecordToolUsageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RecordToolUsageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RecordToolUsageResponse) ProtoMessage() {} + +func (x *RecordToolUsageResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9] + if 
protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RecordToolUsageResponse.ProtoReflect.Descriptor instead. +func (*RecordToolUsageResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{9} +} + +type GetMCPServerConfigsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` // UUID. // Not used yet, will be necessary for later RBAC purposes. +} + +func (x *GetMCPServerConfigsRequest) Reset() { + *x = GetMCPServerConfigsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetMCPServerConfigsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetMCPServerConfigsRequest) ProtoMessage() {} + +func (x *GetMCPServerConfigsRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetMCPServerConfigsRequest.ProtoReflect.Descriptor instead. 
+func (*GetMCPServerConfigsRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{10} +} + +func (x *GetMCPServerConfigsRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +type GetMCPServerConfigsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CoderMcpConfig *MCPServerConfig `protobuf:"bytes,1,opt,name=coder_mcp_config,json=coderMcpConfig,proto3" json:"coder_mcp_config,omitempty"` + ExternalAuthMcpConfigs []*MCPServerConfig `protobuf:"bytes,2,rep,name=external_auth_mcp_configs,json=externalAuthMcpConfigs,proto3" json:"external_auth_mcp_configs,omitempty"` +} + +func (x *GetMCPServerConfigsResponse) Reset() { + *x = GetMCPServerConfigsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetMCPServerConfigsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetMCPServerConfigsResponse) ProtoMessage() {} + +func (x *GetMCPServerConfigsResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetMCPServerConfigsResponse.ProtoReflect.Descriptor instead. 
+func (*GetMCPServerConfigsResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{11} +} + +func (x *GetMCPServerConfigsResponse) GetCoderMcpConfig() *MCPServerConfig { + if x != nil { + return x.CoderMcpConfig + } + return nil +} + +func (x *GetMCPServerConfigsResponse) GetExternalAuthMcpConfigs() []*MCPServerConfig { + if x != nil { + return x.ExternalAuthMcpConfigs + } + return nil +} + +type MCPServerConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Maps to the ID of the External Auth; this ID is unique. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + ToolAllowRegex string `protobuf:"bytes,3,opt,name=tool_allow_regex,json=toolAllowRegex,proto3" json:"tool_allow_regex,omitempty"` + ToolDenyRegex string `protobuf:"bytes,4,opt,name=tool_deny_regex,json=toolDenyRegex,proto3" json:"tool_deny_regex,omitempty"` +} + +func (x *MCPServerConfig) Reset() { + *x = MCPServerConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MCPServerConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MCPServerConfig) ProtoMessage() {} + +func (x *MCPServerConfig) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MCPServerConfig.ProtoReflect.Descriptor instead. 
+func (*MCPServerConfig) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{12} +} + +func (x *MCPServerConfig) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *MCPServerConfig) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *MCPServerConfig) GetToolAllowRegex() string { + if x != nil { + return x.ToolAllowRegex + } + return "" +} + +func (x *MCPServerConfig) GetToolDenyRegex() string { + if x != nil { + return x.ToolDenyRegex + } + return "" +} + +type GetMCPServerAccessTokensBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` // UUID. + McpServerConfigIds []string `protobuf:"bytes,2,rep,name=mcp_server_config_ids,json=mcpServerConfigIds,proto3" json:"mcp_server_config_ids,omitempty"` +} + +func (x *GetMCPServerAccessTokensBatchRequest) Reset() { + *x = GetMCPServerAccessTokensBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetMCPServerAccessTokensBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetMCPServerAccessTokensBatchRequest) ProtoMessage() {} + +func (x *GetMCPServerAccessTokensBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetMCPServerAccessTokensBatchRequest.ProtoReflect.Descriptor instead. 
+func (*GetMCPServerAccessTokensBatchRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{13} +} + +func (x *GetMCPServerAccessTokensBatchRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *GetMCPServerAccessTokensBatchRequest) GetMcpServerConfigIds() []string { + if x != nil { + return x.McpServerConfigIds + } + return nil +} + +// GetMCPServerAccessTokensBatchResponse returns a map for resulting tokens or errors, indexed +// by server ID. +type GetMCPServerAccessTokensBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AccessTokens map[string]string `protobuf:"bytes,1,rep,name=access_tokens,json=accessTokens,proto3" json:"access_tokens,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Errors map[string]string `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *GetMCPServerAccessTokensBatchResponse) Reset() { + *x = GetMCPServerAccessTokensBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetMCPServerAccessTokensBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetMCPServerAccessTokensBatchResponse) ProtoMessage() {} + +func (x *GetMCPServerAccessTokensBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
GetMCPServerAccessTokensBatchResponse.ProtoReflect.Descriptor instead. +func (*GetMCPServerAccessTokensBatchResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{14} +} + +func (x *GetMCPServerAccessTokensBatchResponse) GetAccessTokens() map[string]string { + if x != nil { + return x.AccessTokens + } + return nil +} + +func (x *GetMCPServerAccessTokensBatchResponse) GetErrors() map[string]string { + if x != nil { + return x.Errors + } + return nil +} + +type IsAuthorizedRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` +} + +func (x *IsAuthorizedRequest) Reset() { + *x = IsAuthorizedRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *IsAuthorizedRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IsAuthorizedRequest) ProtoMessage() {} + +func (x *IsAuthorizedRequest) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IsAuthorizedRequest.ProtoReflect.Descriptor instead. 
+func (*IsAuthorizedRequest) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{15} +} + +func (x *IsAuthorizedRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +type IsAuthorizedResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + OwnerId string `protobuf:"bytes,1,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` +} + +func (x *IsAuthorizedResponse) Reset() { + *x = IsAuthorizedResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *IsAuthorizedResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IsAuthorizedResponse) ProtoMessage() {} + +func (x *IsAuthorizedResponse) ProtoReflect() protoreflect.Message { + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IsAuthorizedResponse.ProtoReflect.Descriptor instead. 
+func (*IsAuthorizedResponse) Descriptor() ([]byte, []int) { + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{16} +} + +func (x *IsAuthorizedResponse) GetOwnerId() string { + if x != nil { + return x.OwnerId + } + return "" +} + +var File_enterprise_aibridged_proto_aibridged_proto protoreflect.FileDescriptor + +var file_enterprise_aibridged_proto_aibridged_proto_rawDesc = []byte{ + 0x0a, 0x2a, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x69, 0x73, 0x65, 0x2f, 0x61, 0x69, 0x62, + 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x69, 0x62, + 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, + 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0xda, 0x02, 0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, + 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x21, 0x0a, + 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x64, + 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x12, 0x4a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, + 0x20, 0x03, 0x28, 
0x0b, 0x32, 0x2e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, + 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, + 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1c, 0x0a, 0x1a, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x67, 0x0a, 0x1e, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x35, 0x0a, 0x08, + 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 
0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x65, + 0x64, 0x41, 0x74, 0x22, 0x21, 0x0a, 0x1f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, + 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf9, 0x02, 0x0a, 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, + 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, + 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, + 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, + 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x48, 0x0a, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, + 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, + 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 
0x73, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, + 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, + 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xcb, + 0x02, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x12, 0x49, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, + 0x0a, 0x0a, 0x63, 0x72, 0x65, 
0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, + 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1b, 0x0a, 0x19, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xed, 0x03, 0x0a, 0x16, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, + 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, + 0x73, 0x67, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x75, + 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09, 0x73, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x55, 0x72, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x6f, 0x6f, 0x6c, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x6f, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 
0x52, 0x05, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x12, 0x2e, + 0x0a, 0x10, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x69, 0x6e, 0x76, 0x6f, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x47, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, + 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x5f, 0x75, 0x72, 0x6c, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, + 0x69, 
0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x19, 0x0a, 0x17, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, + 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0xb2, 0x01, 0x0a, 0x1b, + 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, 0x10, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, + 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x51, 0x0a, + 0x19, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x6d, + 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x16, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, + 0x22, 0x85, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 
0x28, 0x0a, 0x10, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x61, + 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0e, 0x74, 0x6f, 0x6f, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x67, 0x65, 0x78, + 0x12, 0x26, 0x0a, 0x0f, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x65, 0x6e, 0x79, 0x5f, 0x72, 0x65, + 0x67, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x6f, 0x6f, 0x6c, 0x44, + 0x65, 0x6e, 0x79, 0x52, 0x65, 0x67, 0x65, 0x78, 0x22, 0x72, 0x0a, 0x24, 0x47, 0x65, 0x74, 0x4d, + 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x31, 0x0a, 0x15, 0x6d, 0x63, 0x70, + 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x69, + 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x6d, 0x63, 0x70, 0x53, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x49, 0x64, 0x73, 0x22, 0xda, 0x02, 0x0a, + 0x25, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, + 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x0d, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x41, 0x63, 0x63, 0x65, + 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 
0x52, 0x0c, 0x61, + 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x50, 0x0a, 0x06, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, 0x3f, 0x0a, + 0x11, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x39, + 0x0a, 0x0b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x27, 0x0a, 0x13, 0x49, 0x73, 0x41, + 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x22, 0x31, 0x0a, 0x14, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x49, 0x64, 0x32, 0xce, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x12, 
0x59, 0x0a, 0x12, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, + 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x68, 0x0a, + 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, + 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x10, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x56, 0x0a, 0x11, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, + 0x65, 0x12, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, + 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 
0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, + 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, + 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xeb, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x13, 0x47, 0x65, + 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x73, 0x12, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, + 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, + 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7a, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4d, + 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 
0x47, + 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x55, 0x0a, 0x0a, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x65, 0x72, 0x12, 0x47, 0x0a, 0x0c, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x65, 0x64, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, + 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, + 0x7a, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2b, 0x5a, 0x29, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, + 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_enterprise_aibridged_proto_aibridged_proto_rawDescOnce sync.Once + file_enterprise_aibridged_proto_aibridged_proto_rawDescData = file_enterprise_aibridged_proto_aibridged_proto_rawDesc +) + +func file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP() []byte { + file_enterprise_aibridged_proto_aibridged_proto_rawDescOnce.Do(func() { + file_enterprise_aibridged_proto_aibridged_proto_rawDescData = protoimpl.X.CompressGZIP(file_enterprise_aibridged_proto_aibridged_proto_rawDescData) + }) + return file_enterprise_aibridged_proto_aibridged_proto_rawDescData +} + +var file_enterprise_aibridged_proto_aibridged_proto_msgTypes = make([]protoimpl.MessageInfo, 23) +var file_enterprise_aibridged_proto_aibridged_proto_goTypes = []interface{}{ + (*RecordInterceptionRequest)(nil), // 0: proto.RecordInterceptionRequest + (*RecordInterceptionResponse)(nil), // 1: 
proto.RecordInterceptionResponse + (*RecordInterceptionEndedRequest)(nil), // 2: proto.RecordInterceptionEndedRequest + (*RecordInterceptionEndedResponse)(nil), // 3: proto.RecordInterceptionEndedResponse + (*RecordTokenUsageRequest)(nil), // 4: proto.RecordTokenUsageRequest + (*RecordTokenUsageResponse)(nil), // 5: proto.RecordTokenUsageResponse + (*RecordPromptUsageRequest)(nil), // 6: proto.RecordPromptUsageRequest + (*RecordPromptUsageResponse)(nil), // 7: proto.RecordPromptUsageResponse + (*RecordToolUsageRequest)(nil), // 8: proto.RecordToolUsageRequest + (*RecordToolUsageResponse)(nil), // 9: proto.RecordToolUsageResponse + (*GetMCPServerConfigsRequest)(nil), // 10: proto.GetMCPServerConfigsRequest + (*GetMCPServerConfigsResponse)(nil), // 11: proto.GetMCPServerConfigsResponse + (*MCPServerConfig)(nil), // 12: proto.MCPServerConfig + (*GetMCPServerAccessTokensBatchRequest)(nil), // 13: proto.GetMCPServerAccessTokensBatchRequest + (*GetMCPServerAccessTokensBatchResponse)(nil), // 14: proto.GetMCPServerAccessTokensBatchResponse + (*IsAuthorizedRequest)(nil), // 15: proto.IsAuthorizedRequest + (*IsAuthorizedResponse)(nil), // 16: proto.IsAuthorizedResponse + nil, // 17: proto.RecordInterceptionRequest.MetadataEntry + nil, // 18: proto.RecordTokenUsageRequest.MetadataEntry + nil, // 19: proto.RecordPromptUsageRequest.MetadataEntry + nil, // 20: proto.RecordToolUsageRequest.MetadataEntry + nil, // 21: proto.GetMCPServerAccessTokensBatchResponse.AccessTokensEntry + nil, // 22: proto.GetMCPServerAccessTokensBatchResponse.ErrorsEntry + (*timestamppb.Timestamp)(nil), // 23: google.protobuf.Timestamp + (*anypb.Any)(nil), // 24: google.protobuf.Any +} +var file_enterprise_aibridged_proto_aibridged_proto_depIdxs = []int32{ + 17, // 0: proto.RecordInterceptionRequest.metadata:type_name -> proto.RecordInterceptionRequest.MetadataEntry + 23, // 1: proto.RecordInterceptionRequest.started_at:type_name -> google.protobuf.Timestamp + 23, // 2: 
proto.RecordInterceptionEndedRequest.ended_at:type_name -> google.protobuf.Timestamp + 18, // 3: proto.RecordTokenUsageRequest.metadata:type_name -> proto.RecordTokenUsageRequest.MetadataEntry + 23, // 4: proto.RecordTokenUsageRequest.created_at:type_name -> google.protobuf.Timestamp + 19, // 5: proto.RecordPromptUsageRequest.metadata:type_name -> proto.RecordPromptUsageRequest.MetadataEntry + 23, // 6: proto.RecordPromptUsageRequest.created_at:type_name -> google.protobuf.Timestamp + 20, // 7: proto.RecordToolUsageRequest.metadata:type_name -> proto.RecordToolUsageRequest.MetadataEntry + 23, // 8: proto.RecordToolUsageRequest.created_at:type_name -> google.protobuf.Timestamp + 12, // 9: proto.GetMCPServerConfigsResponse.coder_mcp_config:type_name -> proto.MCPServerConfig + 12, // 10: proto.GetMCPServerConfigsResponse.external_auth_mcp_configs:type_name -> proto.MCPServerConfig + 21, // 11: proto.GetMCPServerAccessTokensBatchResponse.access_tokens:type_name -> proto.GetMCPServerAccessTokensBatchResponse.AccessTokensEntry + 22, // 12: proto.GetMCPServerAccessTokensBatchResponse.errors:type_name -> proto.GetMCPServerAccessTokensBatchResponse.ErrorsEntry + 24, // 13: proto.RecordInterceptionRequest.MetadataEntry.value:type_name -> google.protobuf.Any + 24, // 14: proto.RecordTokenUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any + 24, // 15: proto.RecordPromptUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any + 24, // 16: proto.RecordToolUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any + 0, // 17: proto.Recorder.RecordInterception:input_type -> proto.RecordInterceptionRequest + 2, // 18: proto.Recorder.RecordInterceptionEnded:input_type -> proto.RecordInterceptionEndedRequest + 4, // 19: proto.Recorder.RecordTokenUsage:input_type -> proto.RecordTokenUsageRequest + 6, // 20: proto.Recorder.RecordPromptUsage:input_type -> proto.RecordPromptUsageRequest + 8, // 21: proto.Recorder.RecordToolUsage:input_type -> 
proto.RecordToolUsageRequest + 10, // 22: proto.MCPConfigurator.GetMCPServerConfigs:input_type -> proto.GetMCPServerConfigsRequest + 13, // 23: proto.MCPConfigurator.GetMCPServerAccessTokensBatch:input_type -> proto.GetMCPServerAccessTokensBatchRequest + 15, // 24: proto.Authorizer.IsAuthorized:input_type -> proto.IsAuthorizedRequest + 1, // 25: proto.Recorder.RecordInterception:output_type -> proto.RecordInterceptionResponse + 3, // 26: proto.Recorder.RecordInterceptionEnded:output_type -> proto.RecordInterceptionEndedResponse + 5, // 27: proto.Recorder.RecordTokenUsage:output_type -> proto.RecordTokenUsageResponse + 7, // 28: proto.Recorder.RecordPromptUsage:output_type -> proto.RecordPromptUsageResponse + 9, // 29: proto.Recorder.RecordToolUsage:output_type -> proto.RecordToolUsageResponse + 11, // 30: proto.MCPConfigurator.GetMCPServerConfigs:output_type -> proto.GetMCPServerConfigsResponse + 14, // 31: proto.MCPConfigurator.GetMCPServerAccessTokensBatch:output_type -> proto.GetMCPServerAccessTokensBatchResponse + 16, // 32: proto.Authorizer.IsAuthorized:output_type -> proto.IsAuthorizedResponse + 25, // [25:33] is the sub-list for method output_type + 17, // [17:25] is the sub-list for method input_type + 17, // [17:17] is the sub-list for extension type_name + 17, // [17:17] is the sub-list for extension extendee + 0, // [0:17] is the sub-list for field type_name +} + +func init() { file_enterprise_aibridged_proto_aibridged_proto_init() } +func file_enterprise_aibridged_proto_aibridged_proto_init() { + if File_enterprise_aibridged_proto_aibridged_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordInterceptionRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordInterceptionResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordInterceptionEndedRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordInterceptionEndedResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordTokenUsageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordTokenUsageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordPromptUsageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordPromptUsageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + 
return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordToolUsageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RecordToolUsageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetMCPServerConfigsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetMCPServerConfigsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MCPServerConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetMCPServerAccessTokensBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetMCPServerAccessTokensBatchResponse); i { + case 0: + 
return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*IsAuthorizedRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*IsAuthorizedResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_enterprise_aibridged_proto_aibridged_proto_rawDesc, + NumEnums: 0, + NumMessages: 23, + NumExtensions: 0, + NumServices: 3, + }, + GoTypes: file_enterprise_aibridged_proto_aibridged_proto_goTypes, + DependencyIndexes: file_enterprise_aibridged_proto_aibridged_proto_depIdxs, + MessageInfos: file_enterprise_aibridged_proto_aibridged_proto_msgTypes, + }.Build() + File_enterprise_aibridged_proto_aibridged_proto = out.File + file_enterprise_aibridged_proto_aibridged_proto_rawDesc = nil + file_enterprise_aibridged_proto_aibridged_proto_goTypes = nil + file_enterprise_aibridged_proto_aibridged_proto_depIdxs = nil +} diff --git a/enterprise/x/aibridged/proto/aibridged.proto b/enterprise/aibridged/proto/aibridged.proto similarity index 93% rename from enterprise/x/aibridged/proto/aibridged.proto rename to enterprise/aibridged/proto/aibridged.proto index 21e09fc8152db..01ab07c8be40d 100644 --- a/enterprise/x/aibridged/proto/aibridged.proto +++ b/enterprise/aibridged/proto/aibridged.proto @@ -11,6 +11,7 @@ service Recorder { // RecordInterception creates 
a new interception record to which all other sub-resources // (token, prompt, tool uses) will be related. rpc RecordInterception(RecordInterceptionRequest) returns (RecordInterceptionResponse); + rpc RecordInterceptionEnded(RecordInterceptionEndedRequest) returns (RecordInterceptionEndedResponse); rpc RecordTokenUsage(RecordTokenUsageRequest) returns (RecordTokenUsageResponse); rpc RecordPromptUsage(RecordPromptUsageRequest) returns (RecordPromptUsageResponse); rpc RecordToolUsage(RecordToolUsageRequest) returns (RecordToolUsageResponse); @@ -45,6 +46,13 @@ message RecordInterceptionRequest { message RecordInterceptionResponse {} +message RecordInterceptionEndedRequest { + string id = 1; // UUID. + google.protobuf.Timestamp ended_at = 2; +} + +message RecordInterceptionEndedResponse {} + message RecordTokenUsageRequest { string interception_id = 1; // UUID. string msg_id = 2; // ID provided by provider. diff --git a/enterprise/x/aibridged/proto/aibridged_drpc.pb.go b/enterprise/aibridged/proto/aibridged_drpc.pb.go similarity index 75% rename from enterprise/x/aibridged/proto/aibridged_drpc.pb.go rename to enterprise/aibridged/proto/aibridged_drpc.pb.go index 37c2cc71ff459..1309957d153d5 100644 --- a/enterprise/x/aibridged/proto/aibridged_drpc.pb.go +++ b/enterprise/aibridged/proto/aibridged_drpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-drpc. DO NOT EDIT. 
// protoc-gen-go-drpc version: v0.0.34 -// source: enterprise/x/aibridged/proto/aibridged.proto +// source: enterprise/aibridged/proto/aibridged.proto package proto @@ -13,25 +13,25 @@ import ( drpcerr "storj.io/drpc/drpcerr" ) -type drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto struct{} +type drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto struct{} -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) Marshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) Marshal(msg drpc.Message) ([]byte, error) { return proto.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) Unmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) Unmarshal(buf []byte, msg drpc.Message) error { return proto.Unmarshal(buf, msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { return protojson.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { return protojson.Unmarshal(buf, msg.(proto.Message)) } @@ -39,6 +39,7 @@ type DRPCRecorderClient interface { DRPCConn() drpc.Conn RecordInterception(ctx context.Context, in 
*RecordInterceptionRequest) (*RecordInterceptionResponse, error) + RecordInterceptionEnded(ctx context.Context, in *RecordInterceptionEndedRequest) (*RecordInterceptionEndedResponse, error) RecordTokenUsage(ctx context.Context, in *RecordTokenUsageRequest) (*RecordTokenUsageResponse, error) RecordPromptUsage(ctx context.Context, in *RecordPromptUsageRequest) (*RecordPromptUsageResponse, error) RecordToolUsage(ctx context.Context, in *RecordToolUsageRequest) (*RecordToolUsageResponse, error) @@ -56,7 +57,16 @@ func (c *drpcRecorderClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcRecorderClient) RecordInterception(ctx context.Context, in *RecordInterceptionRequest) (*RecordInterceptionResponse, error) { out := new(RecordInterceptionResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcRecorderClient) RecordInterceptionEnded(ctx context.Context, in *RecordInterceptionEndedRequest) (*RecordInterceptionEndedResponse, error) { + out := new(RecordInterceptionEndedResponse) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -65,7 +75,7 @@ func (c *drpcRecorderClient) RecordInterception(ctx context.Context, in *RecordI func (c *drpcRecorderClient) RecordTokenUsage(ctx context.Context, in *RecordTokenUsageRequest) (*RecordTokenUsageResponse, error) { out := new(RecordTokenUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordTokenUsage", 
drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -74,7 +84,7 @@ func (c *drpcRecorderClient) RecordTokenUsage(ctx context.Context, in *RecordTok func (c *drpcRecorderClient) RecordPromptUsage(ctx context.Context, in *RecordPromptUsageRequest) (*RecordPromptUsageResponse, error) { out := new(RecordPromptUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -83,7 +93,7 @@ func (c *drpcRecorderClient) RecordPromptUsage(ctx context.Context, in *RecordPr func (c *drpcRecorderClient) RecordToolUsage(ctx context.Context, in *RecordToolUsageRequest) (*RecordToolUsageResponse, error) { out := new(RecordToolUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -92,6 +102,7 @@ func (c *drpcRecorderClient) RecordToolUsage(ctx context.Context, in *RecordTool type DRPCRecorderServer interface { RecordInterception(context.Context, *RecordInterceptionRequest) (*RecordInterceptionResponse, error) + RecordInterceptionEnded(context.Context, *RecordInterceptionEndedRequest) (*RecordInterceptionEndedResponse, error) RecordTokenUsage(context.Context, *RecordTokenUsageRequest) (*RecordTokenUsageResponse, error) RecordPromptUsage(context.Context, *RecordPromptUsageRequest) (*RecordPromptUsageResponse, error) RecordToolUsage(context.Context, *RecordToolUsageRequest) (*RecordToolUsageResponse, error) @@ -103,6 +114,10 @@ func (s *DRPCRecorderUnimplementedServer) RecordInterception(context.Context, *R 
return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } +func (s *DRPCRecorderUnimplementedServer) RecordInterceptionEnded(context.Context, *RecordInterceptionEndedRequest) (*RecordInterceptionEndedResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + func (s *DRPCRecorderUnimplementedServer) RecordTokenUsage(context.Context, *RecordTokenUsageRequest) (*RecordTokenUsageResponse, error) { return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } @@ -117,12 +132,12 @@ func (s *DRPCRecorderUnimplementedServer) RecordToolUsage(context.Context, *Reco type DRPCRecorderDescription struct{} -func (DRPCRecorderDescription) NumMethods() int { return 4 } +func (DRPCRecorderDescription) NumMethods() int { return 5 } func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordInterception( @@ -131,7 +146,16 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv ) }, DRPCRecorderServer.RecordInterception, true case 1: - return "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCRecorderServer). 
+ RecordInterceptionEnded( + ctx, + in1.(*RecordInterceptionEndedRequest), + ) + }, DRPCRecorderServer.RecordInterceptionEnded, true + case 2: + return "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordTokenUsage( @@ -139,8 +163,8 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv in1.(*RecordTokenUsageRequest), ) }, DRPCRecorderServer.RecordTokenUsage, true - case 2: - return "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + case 3: + return "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordPromptUsage( @@ -148,8 +172,8 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv in1.(*RecordPromptUsageRequest), ) }, DRPCRecorderServer.RecordPromptUsage, true - case 3: - return "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + case 4: + return "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). 
RecordToolUsage( @@ -176,7 +200,23 @@ type drpcRecorder_RecordInterceptionStream struct { } func (x *drpcRecorder_RecordInterceptionStream) SendAndClose(m *RecordInterceptionResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCRecorder_RecordInterceptionEndedStream interface { + drpc.Stream + SendAndClose(*RecordInterceptionEndedResponse) error +} + +type drpcRecorder_RecordInterceptionEndedStream struct { + drpc.Stream +} + +func (x *drpcRecorder_RecordInterceptionEndedStream) SendAndClose(m *RecordInterceptionEndedResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -192,7 +232,7 @@ type drpcRecorder_RecordTokenUsageStream struct { } func (x *drpcRecorder_RecordTokenUsageStream) SendAndClose(m *RecordTokenUsageResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -208,7 +248,7 @@ type drpcRecorder_RecordPromptUsageStream struct { } func (x *drpcRecorder_RecordPromptUsageStream) SendAndClose(m *RecordPromptUsageResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -224,7 +264,7 @@ type drpcRecorder_RecordToolUsageStream struct { } func (x *drpcRecorder_RecordToolUsageStream) SendAndClose(m *RecordToolUsageResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + 
if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -249,7 +289,7 @@ func (c *drpcMCPConfiguratorClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcMCPConfiguratorClient) GetMCPServerConfigs(ctx context.Context, in *GetMCPServerConfigsRequest) (*GetMCPServerConfigsResponse, error) { out := new(GetMCPServerConfigsResponse) - err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -258,7 +298,7 @@ func (c *drpcMCPConfiguratorClient) GetMCPServerConfigs(ctx context.Context, in func (c *drpcMCPConfiguratorClient) GetMCPServerAccessTokensBatch(ctx context.Context, in *GetMCPServerAccessTokensBatchRequest) (*GetMCPServerAccessTokensBatchResponse, error) { out := new(GetMCPServerAccessTokensBatchResponse) - err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -287,7 +327,7 @@ func (DRPCMCPConfiguratorDescription) NumMethods() int { return 2 } func (DRPCMCPConfiguratorDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return 
srv.(DRPCMCPConfiguratorServer). GetMCPServerConfigs( @@ -296,7 +336,7 @@ func (DRPCMCPConfiguratorDescription) Method(n int) (string, drpc.Encoding, drpc ) }, DRPCMCPConfiguratorServer.GetMCPServerConfigs, true case 1: - return "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCMCPConfiguratorServer). GetMCPServerAccessTokensBatch( @@ -323,7 +363,7 @@ type drpcMCPConfigurator_GetMCPServerConfigsStream struct { } func (x *drpcMCPConfigurator_GetMCPServerConfigsStream) SendAndClose(m *GetMCPServerConfigsResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -339,7 +379,7 @@ type drpcMCPConfigurator_GetMCPServerAccessTokensBatchStream struct { } func (x *drpcMCPConfigurator_GetMCPServerAccessTokensBatchStream) SendAndClose(m *GetMCPServerAccessTokensBatchResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -363,7 +403,7 @@ func (c *drpcAuthorizerClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcAuthorizerClient) IsAuthorized(ctx context.Context, in *IsAuthorizedRequest) (*IsAuthorizedResponse, error) { out := new(IsAuthorizedResponse) - err := c.cc.Invoke(ctx, "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Authorizer/IsAuthorized", 
drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -387,7 +427,7 @@ func (DRPCAuthorizerDescription) NumMethods() int { return 1 } func (DRPCAuthorizerDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCAuthorizerServer). IsAuthorized( @@ -414,7 +454,7 @@ type drpcAuthorizer_IsAuthorizedStream struct { } func (x *drpcAuthorizer_IsAuthorizedStream) SendAndClose(m *IsAuthorizedResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() diff --git a/enterprise/x/aibridged/request.go b/enterprise/aibridged/request.go similarity index 100% rename from enterprise/x/aibridged/request.go rename to enterprise/aibridged/request.go diff --git a/enterprise/x/aibridged/server.go b/enterprise/aibridged/server.go similarity index 68% rename from enterprise/x/aibridged/server.go rename to enterprise/aibridged/server.go index 713ea2a0cd126..052c94dad4a9e 100644 --- a/enterprise/x/aibridged/server.go +++ b/enterprise/aibridged/server.go @@ -1,6 +1,6 @@ package aibridged -import "github.com/coder/coder/v2/enterprise/x/aibridged/proto" +import "github.com/coder/coder/v2/enterprise/aibridged/proto" type DRPCServer interface { proto.DRPCRecorderServer diff --git a/enterprise/x/aibridged/translator.go b/enterprise/aibridged/translator.go similarity index 91% rename from enterprise/x/aibridged/translator.go rename to enterprise/aibridged/translator.go index 
57d574f1bfd68..f36185715a745 100644 --- a/enterprise/x/aibridged/translator.go +++ b/enterprise/aibridged/translator.go @@ -11,7 +11,7 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" "github.com/coder/coder/v2/coderd/util/ptr" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/aibridge" ) @@ -35,6 +35,14 @@ func (t *recorderTranslation) RecordInterception(ctx context.Context, req *aibri return err } +func (t *recorderTranslation) RecordInterceptionEnded(ctx context.Context, req *aibridge.InterceptionRecordEnded) error { + _, err := t.client.RecordInterceptionEnded(ctx, &proto.RecordInterceptionEndedRequest{ + Id: req.ID, + EndedAt: timestamppb.New(req.EndedAt), + }) + return err +} + func (t *recorderTranslation) RecordPromptUsage(ctx context.Context, req *aibridge.PromptUsageRecord) error { _, err := t.client.RecordPromptUsage(ctx, &proto.RecordPromptUsageRequest{ InterceptionId: req.InterceptionID, diff --git a/enterprise/x/aibridged/utils_test.go b/enterprise/aibridged/utils_test.go similarity index 100% rename from enterprise/x/aibridged/utils_test.go rename to enterprise/aibridged/utils_test.go diff --git a/enterprise/x/aibridgedserver/aibridgedserver.go b/enterprise/aibridgedserver/aibridgedserver.go similarity index 93% rename from enterprise/x/aibridgedserver/aibridgedserver.go rename to enterprise/aibridgedserver/aibridgedserver.go index ee79cd15f07d1..6adf7b793c1d2 100644 --- a/enterprise/x/aibridgedserver/aibridgedserver.go +++ b/enterprise/aibridgedserver/aibridgedserver.go @@ -2,8 +2,6 @@ package aibridgedserver import ( "context" - "crypto/sha256" - "crypto/subtle" "database/sql" "encoding/json" "net/url" @@ -17,6 +15,7 @@ import ( "google.golang.org/protobuf/types/known/structpb" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" @@ -25,8 +24,8 @@ 
import ( "github.com/coder/coder/v2/coderd/httpmw" codermcp "github.com/coder/coder/v2/coderd/mcp" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) var ( @@ -56,6 +55,7 @@ type store interface { InsertAIBridgeTokenUsage(ctx context.Context, arg database.InsertAIBridgeTokenUsageParams) (database.AIBridgeTokenUsage, error) InsertAIBridgeUserPrompt(ctx context.Context, arg database.InsertAIBridgeUserPromptParams) (database.AIBridgeUserPrompt, error) InsertAIBridgeToolUsage(ctx context.Context, arg database.InsertAIBridgeToolUsageParams) (database.AIBridgeToolUsage, error) + UpdateAIBridgeInterceptionEnded(ctx context.Context, intcID database.UpdateAIBridgeInterceptionEndedParams) (database.AIBridgeInterception, error) // MCPConfigurator-related queries. GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) @@ -130,6 +130,26 @@ func (s *Server) RecordInterception(ctx context.Context, in *proto.RecordInterce return &proto.RecordInterceptionResponse{}, nil } +func (s *Server) RecordInterceptionEnded(ctx context.Context, in *proto.RecordInterceptionEndedRequest) (*proto.RecordInterceptionEndedResponse, error) { + //nolint:gocritic // AIBridged has specific authz rules. 
+ ctx = dbauthz.AsAIBridged(ctx) + + intcID, err := uuid.Parse(in.GetId()) + if err != nil { + return nil, xerrors.Errorf("invalid interception ID %q: %w", in.GetId(), err) + } + + _, err = s.store.UpdateAIBridgeInterceptionEnded(ctx, database.UpdateAIBridgeInterceptionEndedParams{ + ID: intcID, + EndedAt: in.EndedAt.AsTime(), + }) + if err != nil { + return nil, xerrors.Errorf("end interception: %w", err) + } + + return &proto.RecordInterceptionEndedResponse{}, nil +} + func (s *Server) RecordTokenUsage(ctx context.Context, in *proto.RecordTokenUsageRequest) (*proto.RecordTokenUsageResponse, error) { //nolint:gocritic // AIBridged has specific authz rules. ctx = dbauthz.AsAIBridged(ctx) @@ -358,8 +378,7 @@ func (s *Server) IsAuthorized(ctx context.Context, in *proto.IsAuthorizedRequest } // Key secret matches. - hashedSecret := sha256.Sum256([]byte(keySecret)) - if subtle.ConstantTimeCompare(key.HashedSecret, hashedSecret[:]) != 1 { + if !apikey.ValidateHash(key.HashedSecret, keySecret) { return nil, ErrInvalidKey } diff --git a/enterprise/x/aibridgedserver/aibridgedserver_internal_test.go b/enterprise/aibridgedserver/aibridgedserver_internal_test.go similarity index 100% rename from enterprise/x/aibridgedserver/aibridgedserver_internal_test.go rename to enterprise/aibridgedserver/aibridgedserver_internal_test.go diff --git a/enterprise/x/aibridgedserver/aibridgedserver_test.go b/enterprise/aibridgedserver/aibridgedserver_test.go similarity index 91% rename from enterprise/x/aibridgedserver/aibridgedserver_test.go rename to enterprise/aibridgedserver/aibridgedserver_test.go index 03fec9398bae4..27598c79857f1 100644 --- a/enterprise/x/aibridgedserver/aibridgedserver_test.go +++ b/enterprise/aibridgedserver/aibridgedserver_test.go @@ -2,7 +2,6 @@ package aibridgedserver_test import ( "context" - "crypto/sha256" "database/sql" "encoding/json" "fmt" @@ -21,6 +20,7 @@ import ( "google.golang.org/protobuf/types/known/structpb" 
"google.golang.org/protobuf/types/known/timestamppb" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbmock" "github.com/coder/coder/v2/coderd/database/dbtime" @@ -28,9 +28,9 @@ import ( codermcp "github.com/coder/coder/v2/coderd/mcp" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" - "github.com/coder/coder/v2/enterprise/x/aibridged" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" - "github.com/coder/coder/v2/enterprise/x/aibridgedserver" + "github.com/coder/coder/v2/enterprise/aibridged" + "github.com/coder/coder/v2/enterprise/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridgedserver" "github.com/coder/coder/v2/testutil" ) @@ -138,13 +138,12 @@ func TestAuthorization(t *testing.T) { } keyID, _ := cryptorand.String(10) - keySecret, _ := cryptorand.String(22) + keySecret, keySecretHashed, _ := apikey.GenerateSecret(22) token := fmt.Sprintf("%s-%s", keyID, keySecret) - keySecretHashed := sha256.Sum256([]byte(keySecret)) apiKey := database.APIKey{ ID: keyID, LifetimeSeconds: 86400, // default in db - HashedSecret: keySecretHashed[:], + HashedSecret: keySecretHashed, IPAddress: pqtype.Inet{ IPNet: net.IPNet{ IP: net.IPv4(127, 0, 0, 1), @@ -424,6 +423,60 @@ func TestRecordInterception(t *testing.T) { ) } +func TestRecordInterceptionEnded(t *testing.T) { + t.Parallel() + + testRecordMethod(t, + func(srv *aibridgedserver.Server, ctx context.Context, req *proto.RecordInterceptionEndedRequest) (*proto.RecordInterceptionEndedResponse, error) { + return srv.RecordInterceptionEnded(ctx, req) + }, + []testRecordMethodCase[*proto.RecordInterceptionEndedRequest]{ + { + name: "ok", + request: &proto.RecordInterceptionEndedRequest{ + Id: uuid.UUID{1}.String(), + EndedAt: timestamppb.Now(), + }, + setupMocks: func(t *testing.T, db *dbmock.MockStore, req *proto.RecordInterceptionEndedRequest) { + interceptionID, err := uuid.Parse(req.GetId()) + 
assert.NoError(t, err, "parse interception UUID") + + db.EXPECT().UpdateAIBridgeInterceptionEnded(gomock.Any(), database.UpdateAIBridgeInterceptionEndedParams{ + ID: interceptionID, + EndedAt: req.EndedAt.AsTime(), + }).Return(database.AIBridgeInterception{ + ID: interceptionID, + InitiatorID: uuid.UUID{2}, + Provider: "prov", + Model: "mod", + StartedAt: time.Now(), + EndedAt: sql.NullTime{Time: req.EndedAt.AsTime(), Valid: true}, + }, nil) + }, + }, + { + name: "bad_uuid_error", + request: &proto.RecordInterceptionEndedRequest{ + Id: "this-is-not-uuid", + }, + setupMocks: func(t *testing.T, db *dbmock.MockStore, req *proto.RecordInterceptionEndedRequest) {}, + expectedErr: "invalid interception ID", + }, + { + name: "database_error", + request: &proto.RecordInterceptionEndedRequest{ + Id: uuid.UUID{1}.String(), + EndedAt: timestamppb.Now(), + }, + setupMocks: func(t *testing.T, db *dbmock.MockStore, req *proto.RecordInterceptionEndedRequest) { + db.EXPECT().UpdateAIBridgeInterceptionEnded(gomock.Any(), gomock.Any()).Return(database.AIBridgeInterception{}, sql.ErrConnDone) + }, + expectedErr: "end interception: " + sql.ErrConnDone.Error(), + }, + }, + ) +} + func TestRecordTokenUsage(t *testing.T) { t.Parallel() diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 8cba29f2e9196..9b887b30ef517 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -27,6 +27,7 @@ var AuditActionMap = map[string][]codersdk.AuditAction{ "Group": {codersdk.AuditActionCreate, codersdk.AuditActionWrite, codersdk.AuditActionDelete}, "APIKey": {codersdk.AuditActionLogin, codersdk.AuditActionLogout, codersdk.AuditActionRegister, codersdk.AuditActionCreate, codersdk.AuditActionDelete}, "License": {codersdk.AuditActionCreate, codersdk.AuditActionDelete}, + "Task": {codersdk.AuditActionCreate, codersdk.AuditActionWrite, codersdk.AuditActionDelete}, } type Action string @@ -347,6 +348,18 @@ var auditableResourcesTypes = map[any]map[string]Action{ "field": 
ActionTrack, "mapping": ActionTrack, }, + &database.TaskTable{}: { + "id": ActionTrack, + "organization_id": ActionIgnore, // Never changes. + "owner_id": ActionTrack, + "name": ActionTrack, + "workspace_id": ActionTrack, + "template_version_id": ActionTrack, + "template_parameters": ActionTrack, + "prompt": ActionTrack, + "created_at": ActionIgnore, // Never changes. + "deleted_at": ActionIgnore, // Changes, but is implicit when a delete event is fired. + }, } // auditMap converts a map of struct pointers to a map of struct names as diff --git a/enterprise/cli/exp_aibridge.go b/enterprise/cli/aibridge.go similarity index 97% rename from enterprise/cli/exp_aibridge.go rename to enterprise/cli/aibridge.go index 722f7bf239223..90953b6aa2bf2 100644 --- a/enterprise/cli/exp_aibridge.go +++ b/enterprise/cli/aibridge.go @@ -134,8 +134,7 @@ func (r *RootCmd) aibridgeInterceptionsList() *serpent.Command { return xerrors.Errorf("limit value must be between 1 and %d", maxInterceptionsLimit) } - expCli := codersdk.NewExperimentalClient(client) - resp, err := expCli.AIBridgeListInterceptions(inv.Context(), codersdk.AIBridgeListInterceptionsFilter{ + resp, err := client.AIBridgeListInterceptions(inv.Context(), codersdk.AIBridgeListInterceptionsFilter{ Pagination: codersdk.Pagination{ AfterID: afterID, // #nosec G115 - Checked above. 
diff --git a/enterprise/cli/exp_aibridge_test.go b/enterprise/cli/aibridge_test.go similarity index 95% rename from enterprise/cli/exp_aibridge_test.go rename to enterprise/cli/aibridge_test.go index 64b7eef698f27..a5b48a14e1c38 100644 --- a/enterprise/cli/exp_aibridge_test.go +++ b/enterprise/cli/aibridge_test.go @@ -27,7 +27,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -43,19 +42,18 @@ func TestAIBridgeListInterceptions(t *testing.T) { interception1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now.Add(-time.Hour), - }) + }, &now) interception2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now, - }) + }, nil) // Should not be returned because the user can't see it. _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: owner.UserID, StartedAt: now.Add(-2 * time.Hour), - }) + }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", @@ -78,7 +76,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -99,7 +96,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { Provider: "real-provider", Model: "real-model", StartedAt: now, - }) + }, nil) // These interceptions should not be returned since they don't match the // filters. 
@@ -108,36 +105,35 @@ func TestAIBridgeListInterceptions(t *testing.T) { Provider: goodInterception.Provider, Model: goodInterception.Model, StartedAt: goodInterception.StartedAt, - }) + }, nil) _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: goodInterception.InitiatorID, Provider: "bad-provider", Model: goodInterception.Model, StartedAt: goodInterception.StartedAt, - }) + }, nil) _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: goodInterception.InitiatorID, Provider: goodInterception.Provider, Model: "bad-model", StartedAt: goodInterception.StartedAt, - }) + }, nil) _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: goodInterception.InitiatorID, Provider: goodInterception.Provider, Model: goodInterception.Model, // Violates the started after filter. StartedAt: now.Add(-2 * time.Hour), - }) + }, nil) _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: goodInterception.InitiatorID, Provider: goodInterception.Provider, Model: goodInterception.Model, // Violates the started before filter. 
StartedAt: now.Add(2 * time.Hour), - }) + }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", @@ -164,7 +160,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -181,18 +176,17 @@ func TestAIBridgeListInterceptions(t *testing.T) { firstInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now, - }) + }, nil) returnedInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now.Add(-time.Hour), - }) + }, &now) _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now.Add(-2 * time.Hour), - }) + }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", diff --git a/enterprise/cli/aibridged.go b/enterprise/cli/aibridged.go index 9e59327039fc3..b2dc3d7725b93 100644 --- a/enterprise/cli/aibridged.go +++ b/enterprise/cli/aibridged.go @@ -8,8 +8,9 @@ import ( "golang.org/x/xerrors" "github.com/coder/aibridge" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/aibridged" "github.com/coder/coder/v2/enterprise/coderd" - "github.com/coder/coder/v2/enterprise/x/aibridged" ) func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) { @@ -20,14 +21,14 @@ func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) { // Setup supported providers. 
providers := []aibridge.Provider{ - aibridge.NewOpenAIProvider(aibridge.ProviderConfig{ + aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{ BaseURL: coderAPI.DeploymentValues.AI.BridgeConfig.OpenAI.BaseURL.String(), Key: coderAPI.DeploymentValues.AI.BridgeConfig.OpenAI.Key.String(), }), - aibridge.NewAnthropicProvider(aibridge.ProviderConfig{ + aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{ BaseURL: coderAPI.DeploymentValues.AI.BridgeConfig.Anthropic.BaseURL.String(), Key: coderAPI.DeploymentValues.AI.BridgeConfig.Anthropic.Key.String(), - }), + }, getBedrockConfig(coderAPI.DeploymentValues.AI.BridgeConfig.Bedrock)), } // Create pool for reusable stateful [aibridge.RequestBridge] instances (one per user). @@ -45,3 +46,17 @@ func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) { } return srv, nil } + +func getBedrockConfig(cfg codersdk.AIBridgeBedrockConfig) *aibridge.AWSBedrockConfig { + if cfg.Region.String() == "" && cfg.AccessKey.String() == "" && cfg.AccessKeySecret.String() == "" { + return nil + } + + return &aibridge.AWSBedrockConfig{ + Region: cfg.Region.String(), + AccessKey: cfg.AccessKey.String(), + AccessKeySecret: cfg.AccessKeySecret.String(), + Model: cfg.Model.String(), + SmallFastModel: cfg.SmallFastModel.String(), + } +} diff --git a/enterprise/cli/provisionerdaemonstart_test.go b/enterprise/cli/provisionerdaemonstart_test.go index 58603715f8184..884c3e6436e9e 100644 --- a/enterprise/cli/provisionerdaemonstart_test.go +++ b/enterprise/cli/provisionerdaemonstart_test.go @@ -495,6 +495,7 @@ func TestProvisionerDaemon_PrometheusEnabled(t *testing.T) { // Fetch metrics from Prometheus endpoint var req *http.Request var res *http.Response + httpClient := &http.Client{} require.Eventually(t, func() bool { req, err = http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("http://127.0.0.1:%d", prometheusPort), nil) if err != nil { @@ -503,7 +504,7 @@ func TestProvisionerDaemon_PrometheusEnabled(t *testing.T) { } // nolint:bodyclose 
- res, err = http.DefaultClient.Do(req) + res, err = httpClient.Do(req) if err != nil { t.Logf("unable to call Prometheus endpoint: %s", err.Error()) return false diff --git a/enterprise/cli/proxyserver_test.go b/enterprise/cli/proxyserver_test.go index ae01f6ac9dda6..b8df3d2c6a072 100644 --- a/enterprise/cli/proxyserver_test.go +++ b/enterprise/cli/proxyserver_test.go @@ -114,11 +114,12 @@ func TestWorkspaceProxy_Server_PrometheusEnabled(t *testing.T) { // Fetch metrics from Prometheus endpoint var res *http.Response + client := &http.Client{} require.Eventually(t, func() bool { req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("http://127.0.0.1:%d", prometheusPort), nil) assert.NoError(t, err) // nolint:bodyclose - res, err = http.DefaultClient.Do(req) + res, err = client.Do(req) return err == nil }, testutil.WaitShort, testutil.IntervalFast) defer res.Body.Close() diff --git a/enterprise/cli/root.go b/enterprise/cli/root.go index 3cec11970369e..78858ef48da7b 100644 --- a/enterprise/cli/root.go +++ b/enterprise/cli/root.go @@ -25,13 +25,12 @@ func (r *RootCmd) enterpriseOnly() []*serpent.Command { r.prebuilds(), r.provisionerd(), r.externalWorkspaces(), + r.aibridge(), } } -func (r *RootCmd) enterpriseExperimental() []*serpent.Command { - return []*serpent.Command{ - r.aibridge(), - } +func (*RootCmd) enterpriseExperimental() []*serpent.Command { + return []*serpent.Command{} } func (r *RootCmd) EnterpriseSubcommands() []*serpent.Command { diff --git a/enterprise/cli/server.go b/enterprise/cli/server.go index ea9f2d3e93825..bc77bc54ba522 100644 --- a/enterprise/cli/server.go +++ b/enterprise/cli/server.go @@ -7,7 +7,6 @@ import ( "database/sql" "encoding/base64" "errors" - "fmt" "io" "net/url" @@ -16,8 +15,8 @@ import ( "tailscale.com/types/key" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/enterprise/aibridged" 
"github.com/coder/coder/v2/enterprise/audit" "github.com/coder/coder/v2/enterprise/audit/backends" "github.com/coder/coder/v2/enterprise/coderd" @@ -25,7 +24,6 @@ import ( "github.com/coder/coder/v2/enterprise/coderd/usage" "github.com/coder/coder/v2/enterprise/dbcrypt" "github.com/coder/coder/v2/enterprise/trialer" - "github.com/coder/coder/v2/enterprise/x/aibridged" "github.com/coder/coder/v2/tailnet" "github.com/coder/quartz" "github.com/coder/serpent" @@ -146,8 +144,6 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { } closers.Add(publisher) - experiments := agplcoderd.ReadExperiments(options.Logger, options.DeploymentValues.Experiments.Value()) - // In-memory aibridge daemon. // TODO(@deansheather): the lifecycle of the aibridged server is // probably better managed by the enterprise API type itself. Managing @@ -155,26 +151,18 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { // is not entitled to the feature. var aibridgeDaemon *aibridged.Server if options.DeploymentValues.AI.BridgeConfig.Enabled { - if experiments.Enabled(codersdk.ExperimentAIBridge) { - aibridgeDaemon, err = newAIBridgeDaemon(api) - if err != nil { - return nil, nil, xerrors.Errorf("create aibridged: %w", err) - } + aibridgeDaemon, err = newAIBridgeDaemon(api) + if err != nil { + return nil, nil, xerrors.Errorf("create aibridged: %w", err) + } - api.RegisterInMemoryAIBridgedHTTPHandler(aibridgeDaemon) + api.RegisterInMemoryAIBridgedHTTPHandler(aibridgeDaemon) - // When running as an in-memory daemon, the HTTP handler is wired into the - // coderd API and therefore is subject to its context. Calling Close() on - // aibridged will NOT affect in-flight requests but those will be closed once - // the API server is itself shutdown. 
- closers.Add(aibridgeDaemon) - } else { - api.Logger.Warn(ctx, fmt.Sprintf("CODER_AIBRIDGE_ENABLED=true but experiment %q not enabled", codersdk.ExperimentAIBridge)) - } - } else { - if experiments.Enabled(codersdk.ExperimentAIBridge) { - api.Logger.Warn(ctx, "aibridge experiment enabled but CODER_AIBRIDGE_ENABLED=false") - } + // When running as an in-memory daemon, the HTTP handler is wired into the + // coderd API and therefore is subject to its context. Calling Close() on + // aibridged will NOT affect in-flight requests but those will be closed once + // the API server is itself shutdown. + closers.Add(aibridgeDaemon) } return api.AGPL, closers, nil diff --git a/enterprise/cli/server_dbcrypt_test.go b/enterprise/cli/server_dbcrypt_test.go index 06851dd0a2eaf..b50b8c0c504cb 100644 --- a/enterprise/cli/server_dbcrypt_test.go +++ b/enterprise/cli/server_dbcrypt_test.go @@ -24,10 +24,6 @@ import ( // // nolint: paralleltest // use of t.Setenv func TestServerDBCrypt(t *testing.T) { - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires a postgres instance") - } - ctx, cancel := context.WithCancel(context.Background()) t.Cleanup(cancel) diff --git a/enterprise/cli/server_test.go b/enterprise/cli/server_test.go index 7489699a6f3dd..38001b701a9c1 100644 --- a/enterprise/cli/server_test.go +++ b/enterprise/cli/server_test.go @@ -43,13 +43,14 @@ func TestServer_Single(t *testing.T) { ) clitest.Start(t, inv.WithContext(ctx)) accessURL := waitAccessURL(t, cfg) + client := &http.Client{} require.Eventually(t, func() bool { reqCtx := testutil.Context(t, testutil.IntervalMedium) req, err := http.NewRequestWithContext(reqCtx, http.MethodGet, accessURL.String()+"/healthz", nil) if err != nil { panic(err) } - resp, err := http.DefaultClient.Do(req) + resp, err := client.Do(req) if err != nil { t.Log("/healthz not ready yet") return false diff --git a/enterprise/cli/testdata/coder_--help.golden b/enterprise/cli/testdata/coder_--help.golden index 
ddb44f78ae524..8424ccac923a2 100644 --- a/enterprise/cli/testdata/coder_--help.golden +++ b/enterprise/cli/testdata/coder_--help.golden @@ -14,6 +14,7 @@ USAGE: $ coder templates init SUBCOMMANDS: + aibridge Manage AIBridge. external-workspaces Create or manage external workspaces features List Enterprise features groups Manage groups diff --git a/enterprise/cli/testdata/coder_aibridge_--help.golden b/enterprise/cli/testdata/coder_aibridge_--help.golden new file mode 100644 index 0000000000000..d005ae429ad50 --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_--help.golden @@ -0,0 +1,12 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge + + Manage AIBridge. + +SUBCOMMANDS: + interceptions Manage AIBridge interceptions. + +β€”β€”β€” +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden b/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden new file mode 100644 index 0000000000000..1f3b3af5ad3d3 --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden @@ -0,0 +1,12 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge interceptions + + Manage AIBridge interceptions. + +SUBCOMMANDS: + list List AIBridge interceptions as JSON. + +β€”β€”β€” +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden b/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden new file mode 100644 index 0000000000000..c98fd0019a45a --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden @@ -0,0 +1,37 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge interceptions list [flags] + + List AIBridge interceptions as JSON. + +OPTIONS: + --after-id string + The ID of the last result on the previous page to use as a pagination + cursor. + + --initiator string + Only return interceptions initiated by this user. Accepts a user ID, + username, or "me". 
+ + --limit int (default: 100) + The limit of results to return. Must be between 1 and 1000. + + --model string + Only return interceptions from this model. + + --provider string + Only return interceptions from this provider. + + --started-after string + Only return interceptions started after this time. Must be before + 'started-before' if set. Accepts a time in the RFC 3339 format, e.g. + "====[timestamp]=====07:00". + + --started-before string + Only return interceptions started before this time. Must be after + 'started-after' if set. Accepts a time in the RFC 3339 format, e.g. + "====[timestamp]=====07:00". + +β€”β€”β€” +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden b/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden index 8e22f78e978f2..3a581bd880829 100644 --- a/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden +++ b/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden @@ -11,9 +11,12 @@ OPTIONS: -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. 
- -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) + -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) Columns to display in table output. + -i, --initiator string, $CODER_PROVISIONER_JOB_LIST_INITIATOR + Filter by initiator (user ID or username). + -l, --limit int, $CODER_PROVISIONER_JOB_LIST_LIMIT (default: 50) Limit the number of jobs returned. diff --git a/enterprise/cli/testdata/coder_server_--help.golden b/enterprise/cli/testdata/coder_server_--help.golden index 162d4214ccc6a..492306c55882d 100644 --- a/enterprise/cli/testdata/coder_server_--help.golden +++ b/enterprise/cli/testdata/coder_server_--help.golden @@ -81,6 +81,41 @@ OPTIONS: Periodically check for new releases of Coder and inform the owner. The check is performed once per day. +AIBRIDGE OPTIONS: + --aibridge-anthropic-base-url string, $CODER_AIBRIDGE_ANTHROPIC_BASE_URL (default: https://api.anthropic.com/) + The base URL of the Anthropic API. + + --aibridge-anthropic-key string, $CODER_AIBRIDGE_ANTHROPIC_KEY + The key to authenticate against the Anthropic API. + + --aibridge-bedrock-access-key string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY + The access key to authenticate against the AWS Bedrock API. 
+ + --aibridge-bedrock-access-key-secret string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET + The access key secret to use with the access key to authenticate + against the AWS Bedrock API. + + --aibridge-bedrock-model string, $CODER_AIBRIDGE_BEDROCK_MODEL (default: global.anthropic.claude-sonnet-4-5-20250929-v1:0) + The model to use when making requests to the AWS Bedrock API. + + --aibridge-bedrock-region string, $CODER_AIBRIDGE_BEDROCK_REGION + The AWS Bedrock API region. + + --aibridge-bedrock-small-fastmodel string, $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL (default: global.anthropic.claude-haiku-4-5-20251001-v1:0) + The small fast model to use when making requests to the AWS Bedrock + API. Claude Code uses Haiku-class models to perform background tasks. + See + https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + + --aibridge-enabled bool, $CODER_AIBRIDGE_ENABLED (default: false) + Whether to start an in-memory aibridged instance. + + --aibridge-openai-base-url string, $CODER_AIBRIDGE_OPENAI_BASE_URL (default: https://api.openai.com/v1/) + The base URL of the OpenAI API. + + --aibridge-openai-key string, $CODER_AIBRIDGE_OPENAI_KEY + The key to authenticate against the OpenAI API. + CLIENT OPTIONS: These options change the behavior of how clients interact with the Coder. Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI. diff --git a/enterprise/coderd/aibridge.go b/enterprise/coderd/aibridge.go index 2917603c235d6..bdd2a99166910 100644 --- a/enterprise/coderd/aibridge.go +++ b/enterprise/coderd/aibridge.go @@ -33,9 +33,10 @@ const ( // @Tags AIBridge // @Param q query string false "Search query in the format `key:value`. Available keys are: initiator, provider, model, started_after, started_before." 
// @Param limit query int false "Page limit" -// @Param after_id query string false "Cursor pagination after ID" +// @Param after_id query string false "Cursor pagination after ID (cannot be used with offset)" +// @Param offset query int false "Offset pagination (cannot be used with after_id)" // @Success 200 {object} codersdk.AIBridgeListInterceptionsResponse -// @Router /api/experimental/aibridge/interceptions [get] +// @Router /aibridge/interceptions [get] func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) @@ -44,10 +45,10 @@ func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Reques if !ok { return } - if page.Offset != 0 { + if page.AfterID != uuid.Nil && page.Offset != 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Offset pagination is not supported.", - Detail: "Offset pagination is not supported for AIBridge interceptions. Use cursor pagination instead with after_id.", + Message: "Query parameters have invalid values.", + Detail: "Cannot use both after_id and offset pagination in the same request.", }) return } @@ -72,7 +73,10 @@ func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Reques return } - var rows []database.AIBridgeInterception + var ( + count int64 + rows []database.ListAIBridgeInterceptionsRow + ) err := api.Database.InTx(func(db database.Store) error { // Ensure the after_id interception exists and is visible to the user. if page.AfterID != uuid.Nil { @@ -83,6 +87,19 @@ func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Reques } var err error + // Get the full count of authorized interceptions matching the filter + // for pagination purposes. 
+ count, err = db.CountAIBridgeInterceptions(ctx, database.CountAIBridgeInterceptionsParams{ + StartedAfter: filter.StartedAfter, + StartedBefore: filter.StartedBefore, + InitiatorID: filter.InitiatorID, + Provider: filter.Provider, + Model: filter.Model, + }) + if err != nil { + return xerrors.Errorf("count authorized aibridge interceptions: %w", err) + } + // This only returns authorized interceptions (when using dbauthz). rows, err = db.ListAIBridgeInterceptions(ctx, filter) if err != nil { @@ -110,14 +127,15 @@ func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Reques } httpapi.Write(ctx, rw, http.StatusOK, codersdk.AIBridgeListInterceptionsResponse{ + Count: count, Results: items, }) } -func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.Store, dbInterceptions []database.AIBridgeInterception) ([]codersdk.AIBridgeInterception, error) { +func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.Store, dbInterceptions []database.ListAIBridgeInterceptionsRow) ([]codersdk.AIBridgeInterception, error) { ids := make([]uuid.UUID, len(dbInterceptions)) for i, row := range dbInterceptions { - ids[i] = row.ID + ids[i] = row.AIBridgeInterception.ID } //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AIBridge interception subresources use the same authorization call as their parent. 
@@ -152,7 +170,13 @@ func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.S items := make([]codersdk.AIBridgeInterception, len(dbInterceptions)) for i, row := range dbInterceptions { - items[i] = db2sdk.AIBridgeInterception(row, tokenUsagesMap[row.ID], userPromptsMap[row.ID], toolUsagesMap[row.ID]) + items[i] = db2sdk.AIBridgeInterception( + row.AIBridgeInterception, + row.VisibleUser, + tokenUsagesMap[row.AIBridgeInterception.ID], + userPromptsMap[row.AIBridgeInterception.ID], + toolUsagesMap[row.AIBridgeInterception.ID], + ) } return items, nil diff --git a/enterprise/coderd/aibridge_test.go b/enterprise/coderd/aibridge_test.go index 8babf2324deeb..17e5df56fb65d 100644 --- a/enterprise/coderd/aibridge_test.go +++ b/enterprise/coderd/aibridge_test.go @@ -27,7 +27,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -37,10 +36,10 @@ func TestAIBridgeListInterceptions(t *testing.T) { Features: license.Features{}, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) - _, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + //nolint:gocritic // Owner role is irrelevant here. 
+ _, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) var sdkErr *codersdk.Error require.ErrorAs(t, err, &sdkErr) require.Equal(t, http.StatusForbidden, sdkErr.StatusCode()) @@ -50,7 +49,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("EmptyDB", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -61,9 +59,9 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + //nolint:gocritic // Owner role is irrelevant here. + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.Empty(t, res.Results) }) @@ -71,8 +69,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("OK", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} - client, db, _ := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, }, @@ -82,14 +79,31 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) + user1, err := client.User(ctx, codersdk.Me) + require.NoError(t, err) + user1Visible := database.VisibleUser{ + ID: user1.ID, + Username: user1.Username, + Name: user1.Name, + AvatarURL: user1.AvatarURL, + } + + _, user2 := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + user2Visible := 
database.VisibleUser{ + ID: user2.ID, + Username: user2.Username, + Name: user2.Name, + AvatarURL: user2.AvatarURL, + } + // Insert a bunch of test data. now := dbtime.Now() i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ - StartedAt: now.Add(-time.Hour), - }) + InitiatorID: user1.ID, + StartedAt: now.Add(-time.Hour), + }, nil) i1tok1 := dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ InterceptionID: i1.ID, CreatedAt: now, @@ -115,16 +129,17 @@ func TestAIBridgeListInterceptions(t *testing.T) { CreatedAt: now.Add(-time.Minute), }) i2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ - StartedAt: now, - }) + InitiatorID: user2.ID, + StartedAt: now, + }, &now) // Convert to SDK types for response comparison. // You may notice that the ordering of the inner arrays are ASC, this is // intentional. - i1SDK := db2sdk.AIBridgeInterception(i1, []database.AIBridgeTokenUsage{i1tok2, i1tok1}, []database.AIBridgeUserPrompt{i1up2, i1up1}, []database.AIBridgeToolUsage{i1tool2, i1tool1}) - i2SDK := db2sdk.AIBridgeInterception(i2, nil, nil, nil) + i1SDK := db2sdk.AIBridgeInterception(i1, user1Visible, []database.AIBridgeTokenUsage{i1tok2, i1tok1}, []database.AIBridgeUserPrompt{i1up2, i1up1}, []database.AIBridgeToolUsage{i1tool2, i1tool1}) + i2SDK := db2sdk.AIBridgeInterception(i2, user2Visible, nil, nil, nil) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.Len(t, res.Results, 2) require.Equal(t, i2SDK.ID, res.Results[0].ID) @@ -150,6 +165,13 @@ func TestAIBridgeListInterceptions(t *testing.T) { res.Results[1].ToolUsages[0].CreatedAt = i1SDK.ToolUsages[0].CreatedAt res.Results[1].ToolUsages[1].CreatedAt = i1SDK.ToolUsages[1].CreatedAt + // Time comparison + require.Len(t, res.Results, 2) + require.Equal(t, 
res.Results[0].ID, i2SDK.ID)
+	require.NotNil(t, res.Results[0].EndedAt)
+	require.WithinDuration(t, now, *res.Results[0].EndedAt, 5*time.Second)
+	res.Results[0].EndedAt = i2SDK.EndedAt
+
 	require.Equal(t, []codersdk.AIBridgeInterception{i2SDK, i1SDK}, res.Results)
 })
@@ -157,8 +179,7 @@ func TestAIBridgeListInterceptions(t *testing.T) {
 	t.Parallel()

 	dv := coderdtest.DeploymentValues(t)
-	dv.Experiments = []string{string(codersdk.ExperimentAIBridge)}
-	client, db, _ := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
+	client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
 		Options: &coderdtest.Options{
 			DeploymentValues: dv,
 		},
@@ -168,7 +189,6 @@ func TestAIBridgeListInterceptions(t *testing.T) {
 			},
 		},
 	})
-	experimentalClient := codersdk.NewExperimentalClient(client)
 	ctx := testutil.Context(t, testutil.WaitLong)

 	allInterceptionIDs := make([]uuid.UUID, 0, 20)
@@ -178,9 +198,10 @@ func TestAIBridgeListInterceptions(t *testing.T) {
 	now := dbtime.Now()
 	for i := range 10 {
 		interception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{
-			ID:        uuid.UUID{byte(i)},
-			StartedAt: now,
-		})
+			ID:          uuid.UUID{byte(i)},
+			InitiatorID: firstUser.UserID,
+			StartedAt:   now,
+		}, &now)
 		allInterceptionIDs = append(allInterceptionIDs, interception.ID)
 	}

@@ -190,49 +211,15 @@ func TestAIBridgeListInterceptions(t *testing.T) {
 		require.NoError(t, err)
 		randomOffsetDur := time.Duration(randomOffset) * time.Second
 		interception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{
-			ID:        uuid.UUID{byte(i + 10)},
-			StartedAt: now.Add(randomOffsetDur),
-		})
+			ID:          uuid.UUID{byte(i + 10)},
+			InitiatorID: firstUser.UserID,
+			StartedAt:   now.Add(randomOffsetDur),
+		}, nil)
 		allInterceptionIDs = append(allInterceptionIDs, interception.ID)
 	}

-	// Get all interceptions one by one from the API using cursor
-	// pagination.
- getAllInterceptionsOneByOne := func() []uuid.UUID { - interceptionIDs := []uuid.UUID{} - for { - afterID := uuid.Nil - if len(interceptionIDs) > 0 { - afterID = interceptionIDs[len(interceptionIDs)-1] - } - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ - Pagination: codersdk.Pagination{ - AfterID: afterID, - Limit: 1, - }, - }) - require.NoError(t, err) - if len(res.Results) == 0 { - break - } - require.Len(t, res.Results, 1) - interceptionIDs = append(interceptionIDs, res.Results[0].ID) - } - return interceptionIDs - } - - // First attempt: get all interceptions one by one. - gotInterceptionIDs1 := getAllInterceptionsOneByOne() - // We should have all of the interceptions returned: - require.ElementsMatch(t, allInterceptionIDs, gotInterceptionIDs1) - - // Second attempt: get all interceptions one by one again. - gotInterceptionIDs2 := getAllInterceptionsOneByOne() - // They should be returned in the exact same order. - require.Equal(t, gotInterceptionIDs1, gotInterceptionIDs2) - - // Try to get an invalid limit. - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + // Try to fetch with an invalid limit. + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ Pagination: codersdk.Pagination{ Limit: 1001, }, @@ -241,12 +228,71 @@ func TestAIBridgeListInterceptions(t *testing.T) { require.ErrorAs(t, err, &sdkErr) require.Contains(t, sdkErr.Message, "Invalid pagination limit value.") require.Empty(t, res.Results) + + // Try to fetch with both after_id and offset pagination. 
+ res, err = client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + Pagination: codersdk.Pagination{ + AfterID: allInterceptionIDs[0], + Offset: 1, + }, + }) + require.ErrorAs(t, err, &sdkErr) + require.Contains(t, sdkErr.Message, "Query parameters have invalid values") + require.Contains(t, sdkErr.Detail, "Cannot use both after_id and offset pagination in the same request.") + + // Iterate over all interceptions using both cursor and offset + // pagination modes. + for _, paginationMode := range []string{"after_id", "offset"} { + t.Run(paginationMode, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + + // Get all interceptions one by one using the given pagination + // mode. + getAllInterceptionsOneByOne := func() []uuid.UUID { + interceptionIDs := []uuid.UUID{} + for { + pagination := codersdk.Pagination{ + Limit: 1, + } + if paginationMode == "after_id" { + if len(interceptionIDs) > 0 { + pagination.AfterID = interceptionIDs[len(interceptionIDs)-1] + } + } else { + pagination.Offset = len(interceptionIDs) + } + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + Pagination: pagination, + }) + require.NoError(t, err) + if len(res.Results) == 0 { + break + } + require.EqualValues(t, len(allInterceptionIDs), res.Count) + require.Len(t, res.Results, 1) + interceptionIDs = append(interceptionIDs, res.Results[0].ID) + } + return interceptionIDs + } + + // First attempt: get all interceptions one by one. + gotInterceptionIDs1 := getAllInterceptionsOneByOne() + // We should have all of the interceptions returned: + require.ElementsMatch(t, allInterceptionIDs, gotInterceptionIDs1) + + // Second attempt: get all interceptions one by one again. + gotInterceptionIDs2 := getAllInterceptionsOneByOne() + // They should be returned in the exact same order. 
+ require.Equal(t, gotInterceptionIDs1, gotInterceptionIDs2) + }) + } }) t.Run("Authorized", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} adminClient, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -257,32 +303,32 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - adminExperimentalClient := codersdk.NewExperimentalClient(adminClient) ctx := testutil.Context(t, testutil.WaitLong) secondUserClient, secondUser := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) - secondUserExperimentalClient := codersdk.NewExperimentalClient(secondUserClient) now := dbtime.Now() i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: firstUser.UserID, StartedAt: now, - }) + }, nil) i2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: secondUser.ID, StartedAt: now.Add(-time.Hour), - }) + }, &now) // Admin can see all interceptions. - res, err := adminExperimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err := adminClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) + require.EqualValues(t, 2, res.Count) require.Len(t, res.Results, 2) require.Equal(t, i1.ID, res.Results[0].ID) require.Equal(t, i2.ID, res.Results[1].ID) // Second user can only see their own interceptions. 
- res, err = secondUserExperimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err = secondUserClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) + require.EqualValues(t, 1, res.Count) require.Len(t, res.Results, 1) require.Equal(t, i2.ID, res.Results[0].ID) }) @@ -290,7 +336,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("Filter", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -301,38 +346,54 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) - _, secondUser := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + ctx := testutil.Context(t, testutil.WaitLong) + + user1, err := client.User(ctx, codersdk.Me) + require.NoError(t, err) + user1Visible := database.VisibleUser{ + ID: user1.ID, + Username: user1.Username, + Name: user1.Name, + AvatarURL: user1.AvatarURL, + } + + _, user2 := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + user2Visible := database.VisibleUser{ + ID: user2.ID, + Username: user2.Username, + Name: user2.Name, + AvatarURL: user2.AvatarURL, + } // Insert a bunch of test data with varying filterable fields. 
now := dbtime.Now() i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.MustParse("00000000-0000-0000-0000-000000000001"), - InitiatorID: firstUser.UserID, + InitiatorID: user1.ID, Provider: "one", Model: "one", StartedAt: now, - }) + }, nil) i2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.MustParse("00000000-0000-0000-0000-000000000002"), - InitiatorID: firstUser.UserID, + InitiatorID: user1.ID, Provider: "two", Model: "two", StartedAt: now.Add(-time.Hour), - }) + }, &now) i3 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.MustParse("00000000-0000-0000-0000-000000000003"), - InitiatorID: secondUser.ID, + InitiatorID: user2.ID, Provider: "three", Model: "three", StartedAt: now.Add(-2 * time.Hour), - }) + }, &now) // Convert to SDK types for response comparison. We don't care about the // inner arrays for this test. - i1SDK := db2sdk.AIBridgeInterception(i1, nil, nil, nil) - i2SDK := db2sdk.AIBridgeInterception(i2, nil, nil, nil) - i3SDK := db2sdk.AIBridgeInterception(i3, nil, nil, nil) + i1SDK := db2sdk.AIBridgeInterception(i1, user1Visible, nil, nil, nil) + i2SDK := db2sdk.AIBridgeInterception(i2, user1Visible, nil, nil, nil) + i3SDK := db2sdk.AIBridgeInterception(i3, user2Visible, nil, nil, nil) cases := []struct { name string @@ -356,12 +417,12 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, { name: "Initiator/UserID", - filter: codersdk.AIBridgeListInterceptionsFilter{Initiator: secondUser.ID.String()}, + filter: codersdk.AIBridgeListInterceptionsFilter{Initiator: user2.ID.String()}, want: []codersdk.AIBridgeInterception{i3SDK}, }, { name: "Initiator/Username", - filter: codersdk.AIBridgeListInterceptionsFilter{Initiator: secondUser.Username}, + filter: codersdk.AIBridgeListInterceptionsFilter{Initiator: user2.Username}, want: []codersdk.AIBridgeInterception{i3SDK}, }, { @@ -434,8 +495,9 @@ func TestAIBridgeListInterceptions(t 
*testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, tc.filter) + res, err := client.AIBridgeListInterceptions(ctx, tc.filter) require.NoError(t, err) + require.EqualValues(t, len(tc.want), res.Count) // We just compare UUID strings for the sake of this test. wantIDs := make([]string, len(tc.want)) for i, r := range tc.want { @@ -453,7 +515,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("FilterErrors", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -464,7 +525,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) // No need to insert any test data, we're just testing the filter // errors. 
@@ -521,7 +581,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ FilterQuery: tc.q, }) var sdkErr *codersdk.Error diff --git a/enterprise/coderd/aibridged.go b/enterprise/coderd/aibridged.go index bf991103b1f52..285575df33862 100644 --- a/enterprise/coderd/aibridged.go +++ b/enterprise/coderd/aibridged.go @@ -14,9 +14,9 @@ import ( "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/codersdk/drpcsdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - aibridgedproto "github.com/coder/coder/v2/enterprise/x/aibridged/proto" - "github.com/coder/coder/v2/enterprise/x/aibridgedserver" + "github.com/coder/coder/v2/enterprise/aibridged" + aibridgedproto "github.com/coder/coder/v2/enterprise/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridgedserver" ) // RegisterInMemoryAIBridgedHTTPHandler mounts [aibridged.Server]'s HTTP router onto diff --git a/enterprise/coderd/appearance_test.go b/enterprise/coderd/appearance_test.go index 81ba7eddc7354..8255dd4c8aa8c 100644 --- a/enterprise/coderd/appearance_test.go +++ b/enterprise/coderd/appearance_test.go @@ -201,6 +201,17 @@ func TestCustomSupportLinks(t *testing.T) { Target: "http://second-link-2", Icon: "bug", }, + { + Name: "First button", + Target: "http://first-button-1", + Icon: "bug", + Location: "navbar", + }, + { + Name: "Third link", + Target: "http://third-link-3", + Icon: "star", + }, } cfg := coderdtest.DeploymentValues(t) cfg.Support.Links = serpent.Struct[[]codersdk.LinkConfig]{ diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 7666e8f957fc2..a4adb0479b96b 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -226,12 +226,9 @@ func 
New(ctx context.Context, options *Options) (_ *API, err error) { return api.refreshEntitlements(ctx) } - api.AGPL.ExperimentalHandler.Group(func(r chi.Router) { + api.AGPL.APIHandler.Group(func(r chi.Router) { r.Route("/aibridge", func(r chi.Router) { - r.Use( - api.RequireFeatureMW(codersdk.FeatureAIBridge), - httpmw.RequireExperimentWithDevBypass(api.AGPL.Experiments, codersdk.ExperimentAIBridge), - ) + r.Use(api.RequireFeatureMW(codersdk.FeatureAIBridge)) r.Group(func(r chi.Router) { r.Use(apiKeyMiddleware) r.Get("/interceptions", api.aiBridgeListInterceptions) @@ -246,7 +243,7 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { }) return } - http.StripPrefix("/api/experimental/aibridge", api.aibridgedHandler).ServeHTTP(rw, r) + http.StripPrefix("/api/v2/aibridge", api.aibridgedHandler).ServeHTTP(rw, r) }) }) }) diff --git a/enterprise/coderd/coderd_test.go b/enterprise/coderd/coderd_test.go index 302b367c304cd..c3e6e1579fe91 100644 --- a/enterprise/coderd/coderd_test.go +++ b/enterprise/coderd/coderd_test.go @@ -79,10 +79,6 @@ func TestEntitlements(t *testing.T) { require.Equal(t, fmt.Sprintf("%p", api.Entitlements), fmt.Sprintf("%p", api.AGPL.Entitlements)) }) t.Run("FullLicense", func(t *testing.T) { - // PGCoordinator requires a real postgres - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } t.Parallel() adminClient, _ := coderdenttest.New(t, &coderdenttest.Options{ AuditLogging: true, @@ -595,6 +591,7 @@ func TestSCIMDisabled(t *testing.T) { "/scim/v2/random/path/that/is/long.txt", } + client := &http.Client{} for _, p := range checkPaths { t.Run(p, func(t *testing.T) { t.Parallel() @@ -605,7 +602,7 @@ func TestSCIMDisabled(t *testing.T) { req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, u.String(), nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + resp, err := client.Do(req) require.NoError(t, err) defer resp.Body.Close() require.Equal(t, http.StatusNotFound, 
resp.StatusCode) @@ -738,8 +735,8 @@ func testDBAuthzRole(ctx context.Context) context.Context { Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceWildcard.Type: {policy.WildcardSymbol}, }), - Org: map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -881,10 +878,6 @@ func (s *restartableTestServer) startWithFirstUser(t *testing.T) (client *coders func TestConn_CoordinatorRollingRestart(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } - // Although DERP will have connection issues until the connection is // reestablished, any open connections should be maintained. // diff --git a/enterprise/coderd/coderdenttest/coderdenttest.go b/enterprise/coderd/coderdenttest/coderdenttest.go index ce9050992eb92..a31d1d495bb6e 100644 --- a/enterprise/coderd/coderdenttest/coderdenttest.go +++ b/enterprise/coderd/coderdenttest/coderdenttest.go @@ -186,6 +186,8 @@ type LicenseOptions struct { // past. 
IssuedAt time.Time Features license.Features + + AllowEmpty bool } func (opts *LicenseOptions) WithIssuedAt(now time.Time) *LicenseOptions { @@ -276,10 +278,10 @@ func GenerateLicense(t *testing.T, options LicenseOptions) string { issuedAt = time.Now().Add(-time.Minute) } - if options.AccountType == "" { + if !options.AllowEmpty && options.AccountType == "" { options.AccountType = license.AccountTypeSalesforce } - if options.AccountID == "" { + if !options.AllowEmpty && options.AccountID == "" { options.AccountID = "test-account-id" } diff --git a/enterprise/coderd/license/license.go b/enterprise/coderd/license/license.go index 40d14c294cda1..3cf23823d2d5d 100644 --- a/enterprise/coderd/license/license.go +++ b/enterprise/coderd/license/license.go @@ -262,6 +262,36 @@ func LicensesEntitlements( claims.FeatureSet = codersdk.FeatureSetEnterprise } + // Temporary: If the license doesn't have a managed agent limit, we add + // a default of 1000 managed agents per deployment for a 100 + // year license term. + // This only applies to "Premium" licenses. + if claims.FeatureSet == codersdk.FeatureSetPremium { + var ( + // We intentionally use a fixed issue time here, before the + // entitlement was added to any new licenses, so any + // licenses with the corresponding features actually set + // trump this default entitlement, even if they are set to a + // smaller value. 
+			defaultManagedAgentsIssuedAt       = time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC)
+			defaultManagedAgentsStart          = defaultManagedAgentsIssuedAt
+			defaultManagedAgentsEnd            = defaultManagedAgentsStart.AddDate(100, 0, 0)
+			defaultManagedAgentsSoftLimit int64 = 1000
+			defaultManagedAgentsHardLimit int64 = 1000
+		)
+		entitlements.AddFeature(codersdk.FeatureManagedAgentLimit, codersdk.Feature{
+			Enabled:     true,
+			Entitlement: entitlement,
+			SoftLimit:   &defaultManagedAgentsSoftLimit,
+			Limit:       &defaultManagedAgentsHardLimit,
+			UsagePeriod: &codersdk.UsagePeriod{
+				IssuedAt: defaultManagedAgentsIssuedAt,
+				Start:    defaultManagedAgentsStart,
+				End:      defaultManagedAgentsEnd,
+			},
+		})
+	}
+
 	// Add all features from the feature set defined.
 	for _, featureName := range claims.FeatureSet.Features() {
 		if _, ok := licenseForbiddenFeatures[featureName]; ok {
@@ -338,33 +368,6 @@ func LicensesEntitlements(
 				Limit:   &featureValue,
 				Actual:  &featureArguments.ActiveUserCount,
 			})
-
-			// Temporary: If the license doesn't have a managed agent limit,
-			// we add a default of 800 managed agents per user.
-			// This only applies to "Premium" licenses.
-			if claims.FeatureSet == codersdk.FeatureSetPremium {
-				var (
-					// We intentionally use a fixed issue time here, before the
-					// entitlement was added to any new licenses, so any
-					// licenses with the corresponding features actually set
-					// trump this default entitlement, even if they are set to a
-					// smaller value.
-					issueTime             = time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC)
-					defaultSoftAgentLimit = 800 * featureValue
-					defaultHardAgentLimit = 1000 * featureValue
-				)
-				entitlements.AddFeature(codersdk.FeatureManagedAgentLimit, codersdk.Feature{
-					Enabled:     true,
-					Entitlement: entitlement,
-					SoftLimit:   &defaultSoftAgentLimit,
-					Limit:       &defaultHardAgentLimit,
-					UsagePeriod: &codersdk.UsagePeriod{
-						IssuedAt: issueTime,
-						Start:    usagePeriodStart,
-						End:      usagePeriodEnd,
-					},
-				})
-			}
 		default:
 			if featureValue <= 0 {
 				// The feature is disabled.
@@ -490,15 +493,15 @@ func LicensesEntitlements( if featureArguments.ManagedAgentCountFn != nil { managedAgentCount, err = featureArguments.ManagedAgentCountFn(ctx, agentLimit.UsagePeriod.Start, agentLimit.UsagePeriod.End) } - switch { - case xerrors.Is(err, context.Canceled) || xerrors.Is(err, context.DeadlineExceeded): + if xerrors.Is(err, context.Canceled) || xerrors.Is(err, context.DeadlineExceeded) { // If the context is canceled, we want to bail the entire // LicensesEntitlements call. return entitlements, xerrors.Errorf("get managed agent count: %w", err) - case err != nil: - entitlements.Errors = append(entitlements.Errors, - fmt.Sprintf("Error getting managed agent count: %s", err.Error())) - default: + } + if err != nil { + entitlements.Errors = append(entitlements.Errors, fmt.Sprintf("Error getting managed agent count: %s", err.Error())) + // no return + } else { agentLimit.Actual = &managedAgentCount entitlements.AddFeature(codersdk.FeatureManagedAgentLimit, agentLimit) @@ -612,6 +615,8 @@ var ( ErrMissingLicenseExpires = xerrors.New("license has invalid or missing license_expires claim") ErrMissingExp = xerrors.New("license has invalid or missing exp (expires at) claim") ErrMultipleIssues = xerrors.New("license has multiple issues; contact support") + ErrMissingAccountType = xerrors.New("license must contain valid account type") + ErrMissingAccountID = xerrors.New("license must contain valid account ID") ) type Features map[codersdk.FeatureName]int64 @@ -696,12 +701,20 @@ func validateClaims(tok *jwt.Token) (*Claims, error) { if claims.NotBefore == nil { return nil, ErrMissingNotBefore } - if claims.LicenseExpires == nil { + + yearsHardLimit := time.Now().Add(5 /* years */ * 365 * 24 * time.Hour) + if claims.LicenseExpires == nil || claims.LicenseExpires.Time.After(yearsHardLimit) { return nil, ErrMissingLicenseExpires } if claims.ExpiresAt == nil { return nil, ErrMissingExp } + if claims.AccountType == "" { + return nil, ErrMissingAccountType + } + if 
claims.AccountID == "" { + return nil, ErrMissingAccountID + } return claims, nil } return nil, xerrors.New("unable to parse Claims") diff --git a/enterprise/coderd/license/license_test.go b/enterprise/coderd/license/license_test.go index 0e540989b69da..6c53fb3d89f22 100644 --- a/enterprise/coderd/license/license_test.go +++ b/enterprise/coderd/license/license_test.go @@ -520,8 +520,8 @@ func TestEntitlements(t *testing.T) { t.Run("Premium", func(t *testing.T) { t.Parallel() const userLimit = 1 - const expectedAgentSoftLimit = 800 * userLimit - const expectedAgentHardLimit = 1000 * userLimit + const expectedAgentSoftLimit = 1000 + const expectedAgentHardLimit = 1000 db, _ := dbtestutil.NewDB(t) licenseOptions := coderdenttest.LicenseOptions{ @@ -530,9 +530,7 @@ func TestEntitlements(t *testing.T) { ExpiresAt: dbtime.Now().Add(time.Hour * 24 * 2), FeatureSet: codersdk.FeatureSetPremium, Features: license.Features{ - // Temporary: allows the default value for the - // managed_agent_limit feature to be used. - codersdk.FeatureUserLimit: 1, + codersdk.FeatureUserLimit: userLimit, }, } _, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ @@ -557,11 +555,15 @@ func TestEntitlements(t *testing.T) { require.Equal(t, codersdk.EntitlementEntitled, agentEntitlement.Entitlement) require.EqualValues(t, expectedAgentSoftLimit, *agentEntitlement.SoftLimit) require.EqualValues(t, expectedAgentHardLimit, *agentEntitlement.Limit) + // This might be shocking, but there's a sound reason for this. // See license.go for more details. 
- require.Equal(t, time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC), agentEntitlement.UsagePeriod.IssuedAt) - require.WithinDuration(t, licenseOptions.NotBefore, agentEntitlement.UsagePeriod.Start, time.Second) - require.WithinDuration(t, licenseOptions.ExpiresAt, agentEntitlement.UsagePeriod.End, time.Second) + agentUsagePeriodIssuedAt := time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC) + agentUsagePeriodStart := agentUsagePeriodIssuedAt + agentUsagePeriodEnd := agentUsagePeriodStart.AddDate(100, 0, 0) + require.Equal(t, agentUsagePeriodIssuedAt, agentEntitlement.UsagePeriod.IssuedAt) + require.WithinDuration(t, agentUsagePeriodStart, agentEntitlement.UsagePeriod.Start, time.Second) + require.WithinDuration(t, agentUsagePeriodEnd, agentEntitlement.UsagePeriod.End, time.Second) continue } @@ -1496,14 +1498,14 @@ func TestManagedAgentLimitDefault(t *testing.T) { }) // "Premium" licenses should receive a default managed agent limit of: - // soft = 800 * user_limit - // hard = 1000 * user_limit + // soft = 1000 + // hard = 1000 t.Run("Premium", func(t *testing.T) { t.Parallel() - const userLimit = 100 - const softLimit = 800 * userLimit - const hardLimit = 1000 * userLimit + const userLimit = 33 + const softLimit = 1000 + const hardLimit = 1000 lic := database.License{ ID: 1, UploadedAt: time.Now(), diff --git a/enterprise/coderd/licenses_test.go b/enterprise/coderd/licenses_test.go index bbd6ef717fe8e..fbcbbf654ed09 100644 --- a/enterprise/coderd/licenses_test.go +++ b/enterprise/coderd/licenses_test.go @@ -54,6 +54,56 @@ func TestPostLicense(t *testing.T) { require.Contains(t, errResp.Message, "License cannot be used on this deployment!") }) + t.Run("InvalidAccountID", func(t *testing.T) { + t.Parallel() + // The generated deployment will start out with a different deployment ID. 
+ client, _ := coderdenttest.New(t, &coderdenttest.Options{DontAddLicense: true}) + license := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + AllowEmpty: true, + AccountID: "", + }) + _, err := client.AddLicense(context.Background(), codersdk.AddLicenseRequest{ + License: license, + }) + errResp := &codersdk.Error{} + require.ErrorAs(t, err, &errResp) + require.Equal(t, http.StatusBadRequest, errResp.StatusCode()) + require.Contains(t, errResp.Message, "Invalid license") + }) + + t.Run("InvalidAccountType", func(t *testing.T) { + t.Parallel() + // The generated deployment will start out with a different deployment ID. + client, _ := coderdenttest.New(t, &coderdenttest.Options{DontAddLicense: true}) + license := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + AllowEmpty: true, + AccountType: "", + }) + _, err := client.AddLicense(context.Background(), codersdk.AddLicenseRequest{ + License: license, + }) + errResp := &codersdk.Error{} + require.ErrorAs(t, err, &errResp) + require.Equal(t, http.StatusBadRequest, errResp.StatusCode()) + require.Contains(t, errResp.Message, "Invalid license") + }) + + t.Run("InvalidLicenseExpires", func(t *testing.T) { + t.Parallel() + // The generated deployment will start out with a different deployment ID. 
+ client, _ := coderdenttest.New(t, &coderdenttest.Options{DontAddLicense: true}) + license := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + GraceAt: time.Unix(99999999999, 0), + }) + _, err := client.AddLicense(context.Background(), codersdk.AddLicenseRequest{ + License: license, + }) + errResp := &codersdk.Error{} + require.ErrorAs(t, err, &errResp) + require.Equal(t, http.StatusBadRequest, errResp.StatusCode()) + require.Contains(t, errResp.Message, "Invalid license") + }) + t.Run("Unauthorized", func(t *testing.T) { t.Parallel() client, _ := coderdenttest.New(t, &coderdenttest.Options{DontAddLicense: true}) diff --git a/enterprise/coderd/notifications_test.go b/enterprise/coderd/notifications_test.go index 77b057bf41657..571ed4ced00dd 100644 --- a/enterprise/coderd/notifications_test.go +++ b/enterprise/coderd/notifications_test.go @@ -12,7 +12,6 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" @@ -36,10 +35,6 @@ func TestUpdateNotificationTemplateMethod(t *testing.T) { t.Run("Happy path", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on read from and writing to the notification_templates table") - } - ctx := testutil.Context(t, testutil.WaitSuperLong) api, _ := coderdenttest.New(t, createOpts(t)) @@ -67,10 +62,6 @@ func TestUpdateNotificationTemplateMethod(t *testing.T) { t.Run("Insufficient permissions", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on read from and writing to the notification_templates table") - } - ctx := testutil.Context(t, testutil.WaitSuperLong) // Given: the first user which has an "owner" role, and another user which 
does not. @@ -91,10 +82,6 @@ func TestUpdateNotificationTemplateMethod(t *testing.T) { t.Run("Invalid notification method", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on read from and writing to the notification_templates table") - } - ctx := testutil.Context(t, testutil.WaitSuperLong) // Given: the first user which has an "owner" role @@ -120,10 +107,6 @@ func TestUpdateNotificationTemplateMethod(t *testing.T) { t.Run("Not modified", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres; it relies on read from and writing to the notification_templates table") - } - ctx := testutil.Context(t, testutil.WaitSuperLong) api, _ := coderdenttest.New(t, createOpts(t)) diff --git a/enterprise/coderd/prebuilds/claim.go b/enterprise/coderd/prebuilds/claim.go index daea281d38d60..743513cedbc6a 100644 --- a/enterprise/coderd/prebuilds/claim.go +++ b/enterprise/coderd/prebuilds/claim.go @@ -55,8 +55,4 @@ func (c EnterpriseClaimer) Claim( return &result.ID, nil } -func (EnterpriseClaimer) Initiator() uuid.UUID { - return database.PrebuildsSystemUserID -} - var _ prebuilds.Claimer = &EnterpriseClaimer{} diff --git a/enterprise/coderd/prebuilds/claim_test.go b/enterprise/coderd/prebuilds/claim_test.go index 9ed7e9ffd19e0..217a9ff09614a 100644 --- a/enterprise/coderd/prebuilds/claim_test.go +++ b/enterprise/coderd/prebuilds/claim_test.go @@ -86,10 +86,6 @@ func (m *storeSpy) ClaimPrebuiltWorkspace(ctx context.Context, arg database.Clai func TestClaimPrebuild(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("This test requires postgres") - } - const ( desiredInstances = 1 presetCount = 2 @@ -260,13 +256,15 @@ func TestClaimPrebuild(t *testing.T) { switch { case tc.claimingErr != nil && (isNoPrebuiltWorkspaces || isUnsupported): require.NoError(t, err) - build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, 
userWorkspace.LatestBuild.ID) - _ = build + coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, userWorkspace.LatestBuild.ID) // Then: the number of running prebuilds hasn't changed because claiming prebuild is failed and we fallback to creating new workspace. currentPrebuilds, err := spy.GetRunningPrebuiltWorkspaces(ctx) require.NoError(t, err) require.Equal(t, expectedPrebuildsCount, len(currentPrebuilds)) + // If there are no prebuilt workspaces to claim, a new workspace is created from scratch + // and the initiator is set as usual. + require.Equal(t, user.ID, userWorkspace.LatestBuild.Job.InitiatorID) return case tc.claimingErr != nil && errors.Is(tc.claimingErr, unexpectedClaimingError): @@ -278,6 +276,9 @@ func TestClaimPrebuild(t *testing.T) { currentPrebuilds, err := spy.GetRunningPrebuiltWorkspaces(ctx) require.NoError(t, err) require.Equal(t, expectedPrebuildsCount, len(currentPrebuilds)) + // If a prebuilt workspace claim fails for an unanticipated, erroneous reason, + // no workspace is created and therefore the initiator is not set. + require.Equal(t, uuid.Nil, userWorkspace.LatestBuild.Job.InitiatorID) return default: @@ -285,6 +286,8 @@ func TestClaimPrebuild(t *testing.T) { require.NoError(t, err) build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, userWorkspace.LatestBuild.ID) require.Equal(t, build.Job.Status, codersdk.ProvisionerJobSucceeded) + // Prebuild claims are initiated by the user who requested to create a workspace. 
+ require.Equal(t, user.ID, userWorkspace.LatestBuild.Job.InitiatorID) } // at this point we know that tc.claimingErr is nil diff --git a/enterprise/coderd/prebuilds/membership.go b/enterprise/coderd/prebuilds/membership.go index f843d33f7f106..9436f68737d4a 100644 --- a/enterprise/coderd/prebuilds/membership.go +++ b/enterprise/coderd/prebuilds/membership.go @@ -2,12 +2,13 @@ package prebuilds import ( "context" - "database/sql" "errors" "github.com/google/uuid" "golang.org/x/xerrors" + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/quartz" ) @@ -21,114 +22,117 @@ const ( // organizations for which prebuilt workspaces are requested. This is necessary because our data model requires that such // prebuilt workspaces belong to a member of the organization of their eventual claimant. type StoreMembershipReconciler struct { - store database.Store - clock quartz.Clock + store database.Store + clock quartz.Clock + logger slog.Logger } -func NewStoreMembershipReconciler(store database.Store, clock quartz.Clock) StoreMembershipReconciler { +func NewStoreMembershipReconciler(store database.Store, clock quartz.Clock, logger slog.Logger) StoreMembershipReconciler { return StoreMembershipReconciler{ - store: store, - clock: clock, + store: store, + clock: clock, + logger: logger, } } -// ReconcileAll compares the current organization and group memberships of a user to the memberships required -// in order to create prebuilt workspaces. If the user in question is not yet a member of an organization that -// needs prebuilt workspaces, ReconcileAll will create the membership required. +// ReconcileAll ensures the prebuilds system user has the necessary memberships to create prebuilt workspaces. 
+// For each organization with prebuilds configured, it ensures: +// * The user is a member of the organization +// * A group exists with quota 0 +// * The user is a member of that group // -// To facilitate quota management, ReconcileAll will ensure: -// * the existence of a group (defined by PrebuiltWorkspacesGroupName) in each organization that needs prebuilt workspaces -// * that the prebuilds system user belongs to the group in each organization that needs prebuilt workspaces -// * that the group has a quota of 0 by default, which users can adjust based on their needs. +// Unique constraint violations are safely ignored (concurrent creation). // // ReconcileAll does not have an opinion on transaction or lock management. These responsibilities are left to the caller. -func (s StoreMembershipReconciler) ReconcileAll(ctx context.Context, userID uuid.UUID, presets []database.GetTemplatePresetsWithPrebuildsRow) error { - organizationMemberships, err := s.store.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: userID, - Deleted: sql.NullBool{ - Bool: false, - Valid: true, - }, +func (s StoreMembershipReconciler) ReconcileAll(ctx context.Context, userID uuid.UUID, groupName string) error { + orgStatuses, err := s.store.GetOrganizationsWithPrebuildStatus(ctx, database.GetOrganizationsWithPrebuildStatusParams{ + UserID: userID, + GroupName: groupName, }) if err != nil { - return xerrors.Errorf("determine prebuild organization membership: %w", err) - } - - orgMemberships := make(map[uuid.UUID]struct{}, 0) - defaultOrg, err := s.store.GetDefaultOrganization(ctx) - if err != nil { - return xerrors.Errorf("get default organization: %w", err) - } - orgMemberships[defaultOrg.ID] = struct{}{} - for _, o := range organizationMemberships { - orgMemberships[o.ID] = struct{}{} + return xerrors.Errorf("get organizations with prebuild status: %w", err) } var membershipInsertionErrors error - for _, preset := range presets { - _, alreadyOrgMember := 
orgMemberships[preset.OrganizationID] - if !alreadyOrgMember { - // Add the organization to our list of memberships regardless of potential failure below - // to avoid a retry that will probably be doomed anyway. - orgMemberships[preset.OrganizationID] = struct{}{} + for _, orgStatus := range orgStatuses { + s.logger.Debug(ctx, "organization prebuild status", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("has_prebuild_user", orgStatus.HasPrebuildUser), + slog.F("has_prebuild_group", orgStatus.PrebuildsGroupID.Valid), + slog.F("has_prebuild_user_in_group", orgStatus.HasPrebuildUserInGroup)) - // Insert the missing membership + // Add user to org if needed + if !orgStatus.HasPrebuildUser { _, err = s.store.InsertOrganizationMember(ctx, database.InsertOrganizationMemberParams{ - OrganizationID: preset.OrganizationID, + OrganizationID: orgStatus.OrganizationID, UserID: userID, CreatedAt: s.clock.Now(), UpdatedAt: s.clock.Now(), Roles: []string{}, }) - if err != nil { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("insert membership for prebuilt workspaces: %w", err)) + // Unique violation means organization membership was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) continue } - } - - // determine whether the org already has a prebuilds group - prebuildsGroupExists := true - prebuildsGroup, err := s.store.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ - OrganizationID: preset.OrganizationID, - Name: PrebuiltWorkspacesGroupName, - }) - if err != nil { - if !xerrors.Is(err, sql.ErrNoRows) { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("get prebuilds group: %w", err)) - continue + if err == nil { + s.logger.Info(ctx, "added prebuilds user to organization", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_user", userID.String())) } - prebuildsGroupExists = false } - // if the prebuilds group does not exist, create it - if !prebuildsGroupExists { - // create a "prebuilds" group in the organization and add the system user to it - // this group will have a quota of 0 by default, which users can adjust based on their needs - prebuildsGroup, err = s.store.InsertGroup(ctx, database.InsertGroupParams{ + // Create group if it doesn't exist + var groupID uuid.UUID + if !orgStatus.PrebuildsGroupID.Valid { + // Group doesn't exist, create it + group, err := s.store.InsertGroup(ctx, database.InsertGroupParams{ ID: uuid.New(), Name: PrebuiltWorkspacesGroupName, DisplayName: PrebuiltWorkspacesGroupDisplayName, - OrganizationID: preset.OrganizationID, + OrganizationID: orgStatus.OrganizationID, AvatarURL: "", - QuotaAllowance: 0, // Default quota of 0, users should set this based on their needs + QuotaAllowance: 0, }) - if err != nil { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("create prebuilds group: %w", err)) + // Unique violation means group was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) continue } + if err == nil { + s.logger.Info(ctx, "created prebuilds group in organization", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_group", group.ID.String())) + } + groupID = group.ID + } else { + // Group exists + groupID = orgStatus.PrebuildsGroupID.UUID } - // add the system user to the prebuilds group - err = s.store.InsertGroupMember(ctx, database.InsertGroupMemberParams{ - GroupID: prebuildsGroup.ID, - UserID: userID, - }) - if err != nil { - // ignore unique violation errors as the user might already be in the group - if !database.IsUniqueViolation(err) { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("add system user to prebuilds group: %w", err)) + // Add user to group if needed + if !orgStatus.HasPrebuildUserInGroup { + err = s.store.InsertGroupMember(ctx, database.InsertGroupMemberParams{ + GroupID: groupID, + UserID: userID, + }) + // Unique violation means group membership was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) + continue + } + if err == nil { + s.logger.Info(ctx, "added prebuilds user to prebuilds group", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_user", userID.String()), + slog.F("prebuilds_group", groupID.String())) } } } + return membershipInsertionErrors } diff --git a/enterprise/coderd/prebuilds/membership_test.go b/enterprise/coderd/prebuilds/membership_test.go index 80e2f907349ae..fe4ec26259889 100644 --- a/enterprise/coderd/prebuilds/membership_test.go +++ b/enterprise/coderd/prebuilds/membership_test.go @@ -7,17 +7,17 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" - "tailscale.com/types/ptr" - "github.com/coder/quartz" + "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" - "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/enterprise/coderd/prebuilds" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) // TestReconcileAll verifies that StoreMembershipReconciler correctly updates membership @@ -27,177 +27,178 @@ func TestReconcileAll(t *testing.T) { clock := quartz.NewMock(t) - // Helper to build a minimal Preset row belonging to a given org. 
- newPresetRow := func(orgID uuid.UUID) database.GetTemplatePresetsWithPrebuildsRow { - return database.GetTemplatePresetsWithPrebuildsRow{ - ID: uuid.New(), - OrganizationID: orgID, - } - } - tests := []struct { name string - includePreset []bool + includePreset bool preExistingOrgMembership []bool preExistingGroup []bool preExistingGroupMembership []bool // Expected outcomes - expectOrgMembershipExists *bool - expectGroupExists *bool - expectUserInGroup *bool + expectOrgMembershipExists bool + expectGroupExists bool + expectUserInGroup bool }{ { name: "if there are no presets, membership reconciliation is a no-op", - includePreset: []bool{false}, + includePreset: false, preExistingOrgMembership: []bool{true, false}, preExistingGroup: []bool{true, false}, preExistingGroupMembership: []bool{true, false}, - expectOrgMembershipExists: ptr.To(false), - expectGroupExists: ptr.To(false), + expectOrgMembershipExists: false, + expectGroupExists: false, + expectUserInGroup: false, }, { name: "if there is a preset, then we should enforce org and group membership in all cases", - includePreset: []bool{true}, + includePreset: true, preExistingOrgMembership: []bool{true, false}, preExistingGroup: []bool{true, false}, preExistingGroupMembership: []bool{true, false}, - expectOrgMembershipExists: ptr.To(true), - expectGroupExists: ptr.To(true), - expectUserInGroup: ptr.To(true), + expectOrgMembershipExists: true, + expectGroupExists: true, + expectUserInGroup: true, }, } for _, tc := range tests { tc := tc - for _, includePreset := range tc.includePreset { - includePreset := includePreset - for _, preExistingOrgMembership := range tc.preExistingOrgMembership { - preExistingOrgMembership := preExistingOrgMembership - for _, preExistingGroup := range tc.preExistingGroup { - preExistingGroup := preExistingGroup - for _, preExistingGroupMembership := range tc.preExistingGroupMembership { - preExistingGroupMembership := preExistingGroupMembership - t.Run(tc.name, func(t *testing.T) { 
- t.Parallel() - - // nolint:gocritic // Reconciliation happens as prebuilds system user, not a human user. - ctx := dbauthz.AsPrebuildsOrchestrator(testutil.Context(t, testutil.WaitLong)) - _, db := coderdtest.NewWithDatabase(t, nil) - - defaultOrg, err := db.GetDefaultOrganization(ctx) - require.NoError(t, err) - - // introduce an unrelated organization to ensure that the membership reconciler doesn't interfere with it. - unrelatedOrg := dbgen.Organization(t, db, database.Organization{}) - targetOrg := dbgen.Organization(t, db, database.Organization{}) - - if !dbtestutil.WillUsePostgres() { - // dbmem doesn't ensure membership to the default organization - dbgen.OrganizationMember(t, db, database.OrganizationMember{ - OrganizationID: defaultOrg.ID, - UserID: database.PrebuildsSystemUserID, - }) - } - - // Ensure membership to unrelated org. - dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: unrelatedOrg.ID, UserID: database.PrebuildsSystemUserID}) - - if preExistingOrgMembership { - // System user already a member of both orgs. - dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: targetOrg.ID, UserID: database.PrebuildsSystemUserID}) - } + includePreset := tc.includePreset + for _, preExistingOrgMembership := range tc.preExistingOrgMembership { + preExistingOrgMembership := preExistingOrgMembership + for _, preExistingGroup := range tc.preExistingGroup { + preExistingGroup := preExistingGroup + for _, preExistingGroupMembership := range tc.preExistingGroupMembership { + preExistingGroupMembership := preExistingGroupMembership + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + // nolint:gocritic // Reconciliation happens as prebuilds system user, not a human user. 
+ ctx := dbauthz.AsPrebuildsOrchestrator(testutil.Context(t, testutil.WaitLong)) + client, db := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + defaultOrg, err := db.GetDefaultOrganization(ctx) + require.NoError(t, err) + + // Introduce an unrelated organization to ensure that the membership reconciler doesn't interfere with it. + unrelatedOrg := dbgen.Organization(t, db, database.Organization{}) + dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: unrelatedOrg.ID, UserID: database.PrebuildsSystemUserID}) + + // Organization to test + targetOrg := dbgen.Organization(t, db, database.Organization{}) + + // Prebuilds system user is a member of the organization + if preExistingOrgMembership { + dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: targetOrg.ID, UserID: database.PrebuildsSystemUserID}) + } + + // Organization has the prebuilds group + var prebuildsGroup database.Group + if preExistingGroup { + prebuildsGroup = dbgen.Group(t, db, database.Group{ + Name: prebuilds.PrebuiltWorkspacesGroupName, + DisplayName: prebuilds.PrebuiltWorkspacesGroupDisplayName, + OrganizationID: targetOrg.ID, + QuotaAllowance: 0, + }) - // Create pre-existing prebuilds group if required by test case - var prebuildsGroup database.Group - if preExistingGroup { - prebuildsGroup = dbgen.Group(t, db, database.Group{ - Name: prebuilds.PrebuiltWorkspacesGroupName, - DisplayName: prebuilds.PrebuiltWorkspacesGroupDisplayName, - OrganizationID: targetOrg.ID, - QuotaAllowance: 0, + // Add the system user to the group if required by test case + if preExistingGroupMembership { + dbgen.GroupMember(t, db, database.GroupMemberTable{ + GroupID: prebuildsGroup.ID, + UserID: database.PrebuildsSystemUserID, }) - - // Add the system user to the group if preExistingGroupMembership is true - if preExistingGroupMembership { - dbgen.GroupMember(t, db, database.GroupMemberTable{ - GroupID: prebuildsGroup.ID, - UserID: 
database.PrebuildsSystemUserID, - }) - } - } - - presets := []database.GetTemplatePresetsWithPrebuildsRow{newPresetRow(unrelatedOrg.ID)} - if includePreset { - presets = append(presets, newPresetRow(targetOrg.ID)) } - - // Verify memberships before reconciliation. - preReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: database.PrebuildsSystemUserID, - }) - require.NoError(t, err) - expectedMembershipsBefore := []uuid.UUID{defaultOrg.ID, unrelatedOrg.ID} - if preExistingOrgMembership { - expectedMembershipsBefore = append(expectedMembershipsBefore, targetOrg.ID) - } - require.ElementsMatch(t, expectedMembershipsBefore, extractOrgIDs(preReconcileMemberships)) - - // Reconcile - reconciler := prebuilds.NewStoreMembershipReconciler(db, clock) - require.NoError(t, reconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, presets)) - - // Verify memberships after reconciliation. - postReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: database.PrebuildsSystemUserID, - }) + } + + // Setup unrelated org preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: unrelatedOrg.ID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + + // Setup target org preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: targetOrg.ID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 0, + Valid: includePreset, + }, + }).Do() + + // Verify memberships before reconciliation. 
+ preReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ + UserID: database.PrebuildsSystemUserID, + }) + require.NoError(t, err) + expectedMembershipsBefore := []uuid.UUID{defaultOrg.ID, unrelatedOrg.ID} + if preExistingOrgMembership { + expectedMembershipsBefore = append(expectedMembershipsBefore, targetOrg.ID) + } + require.ElementsMatch(t, expectedMembershipsBefore, extractOrgIDs(preReconcileMemberships)) + + // Reconcile + reconciler := prebuilds.NewStoreMembershipReconciler(db, clock, slogtest.Make(t, nil)) + require.NoError(t, reconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, prebuilds.PrebuiltWorkspacesGroupName)) + + // Verify memberships after reconciliation. + postReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ + UserID: database.PrebuildsSystemUserID, + }) + require.NoError(t, err) + expectedMembershipsAfter := expectedMembershipsBefore + if !preExistingOrgMembership && tc.expectOrgMembershipExists { + expectedMembershipsAfter = append(expectedMembershipsAfter, targetOrg.ID) + } + require.ElementsMatch(t, expectedMembershipsAfter, extractOrgIDs(postReconcileMemberships)) + + // Verify prebuilds group behavior based on expected outcomes + prebuildsGroup, err = db.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ + OrganizationID: targetOrg.ID, + Name: prebuilds.PrebuiltWorkspacesGroupName, + }) + if tc.expectGroupExists { require.NoError(t, err) - expectedMembershipsAfter := expectedMembershipsBefore - if !preExistingOrgMembership && tc.expectOrgMembershipExists != nil && *tc.expectOrgMembershipExists { - expectedMembershipsAfter = append(expectedMembershipsAfter, targetOrg.ID) - } - require.ElementsMatch(t, expectedMembershipsAfter, extractOrgIDs(postReconcileMemberships)) - - // Verify prebuilds group behavior based on expected outcomes - prebuildsGroup, err = db.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ 
- OrganizationID: targetOrg.ID, - Name: prebuilds.PrebuiltWorkspacesGroupName, - }) - if tc.expectGroupExists != nil && *tc.expectGroupExists { + require.Equal(t, prebuilds.PrebuiltWorkspacesGroupName, prebuildsGroup.Name) + require.Equal(t, prebuilds.PrebuiltWorkspacesGroupDisplayName, prebuildsGroup.DisplayName) + require.Equal(t, int32(0), prebuildsGroup.QuotaAllowance) // Default quota should be 0 + + if tc.expectUserInGroup { + // Check that the system user is a member of the prebuilds group + groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ + GroupID: prebuildsGroup.ID, + IncludeSystem: true, + }) require.NoError(t, err) - require.Equal(t, prebuilds.PrebuiltWorkspacesGroupName, prebuildsGroup.Name) - require.Equal(t, prebuilds.PrebuiltWorkspacesGroupDisplayName, prebuildsGroup.DisplayName) - require.Equal(t, int32(0), prebuildsGroup.QuotaAllowance) // Default quota should be 0 - - if tc.expectUserInGroup != nil && *tc.expectUserInGroup { - // Check that the system user is a member of the prebuilds group - groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ - GroupID: prebuildsGroup.ID, - IncludeSystem: true, - }) - require.NoError(t, err) - require.Len(t, groupMembers, 1) - require.Equal(t, database.PrebuildsSystemUserID, groupMembers[0].UserID) - } - - // If no preset exists, then we do not enforce group membership: - if tc.expectUserInGroup != nil && !*tc.expectUserInGroup { - // Check that the system user is NOT a member of the prebuilds group - groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ - GroupID: prebuildsGroup.ID, - IncludeSystem: true, - }) - require.NoError(t, err) - require.Len(t, groupMembers, 0) - } + require.Len(t, groupMembers, 1) + require.Equal(t, database.PrebuildsSystemUserID, groupMembers[0].UserID) } - if !preExistingGroup && tc.expectGroupExists != nil && !*tc.expectGroupExists { - // Verify that no 
prebuilds group exists - require.Error(t, err) - require.True(t, errors.Is(err, sql.ErrNoRows)) + // If no preset exists, then we do not enforce group membership: + if !tc.expectUserInGroup { + // Check that the system user is NOT a member of the prebuilds group + groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ + GroupID: prebuildsGroup.ID, + IncludeSystem: true, + }) + require.NoError(t, err) + require.Len(t, groupMembers, 0) } - }) - } + } + + if !preExistingGroup && !tc.expectGroupExists { + // Verify that no prebuilds group exists + require.Error(t, err) + require.True(t, errors.Is(err, sql.ErrNoRows)) + } + }) } } } diff --git a/enterprise/coderd/prebuilds/metricscollector_test.go b/enterprise/coderd/prebuilds/metricscollector_test.go index b852079beb2af..aa9886fb7ad1b 100644 --- a/enterprise/coderd/prebuilds/metricscollector_test.go +++ b/enterprise/coderd/prebuilds/metricscollector_test.go @@ -30,10 +30,6 @@ import ( func TestMetricsCollector(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - type metricCheck struct { name string value *float64 @@ -298,10 +294,6 @@ func TestMetricsCollector(t *testing.T) { func TestMetricsCollector_DuplicateTemplateNames(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - type metricCheck struct { name string value *float64 @@ -478,10 +470,6 @@ func findAllMetricSeries(metricsFamilies []*prometheus_client.MetricFamily, labe func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - t.Run("reconciliation_not_paused", func(t *testing.T) { t.Parallel() @@ -497,7 +485,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = 
reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is not paused @@ -526,7 +514,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is paused @@ -555,7 +543,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is not paused diff --git a/enterprise/coderd/prebuilds/reconcile.go b/enterprise/coderd/prebuilds/reconcile.go index 214d1643bb228..f280436ea98c8 100644 --- a/enterprise/coderd/prebuilds/reconcile.go +++ b/enterprise/coderd/prebuilds/reconcile.go @@ -12,10 +12,14 @@ import ( "sync/atomic" "time" + "github.com/google/uuid" "github.com/hashicorp/go-multierror" "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + "golang.org/x/sync/errgroup" + "golang.org/x/xerrors" - "github.com/coder/quartz" + "cdr.dev/slog" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" @@ -30,12 +34,7 @@ import ( "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/codersdk" sdkproto "github.com/coder/coder/v2/provisionersdk/proto" - - "cdr.dev/slog" - - "github.com/google/uuid" - "golang.org/x/sync/errgroup" - "golang.org/x/xerrors" + "github.com/coder/quartz" ) type StoreReconciler struct { @@ -46,7 +45,6 @@ type StoreReconciler struct { logger slog.Logger clock quartz.Clock registerer prometheus.Registerer - metrics *MetricsCollector notifEnq notifications.Enqueuer buildUsageChecker *atomic.Pointer[wsbuilder.UsageChecker] @@ -55,10 +53,33 @@ type StoreReconciler 
struct { stopped atomic.Bool done chan struct{} provisionNotifyCh chan database.ProvisionerJob + + // Prebuild state metrics + metrics *MetricsCollector + // Operational metrics + reconciliationDuration prometheus.Histogram } var _ prebuilds.ReconciliationOrchestrator = &StoreReconciler{} +type DeprovisionMode int + +const ( + DeprovisionModeNormal DeprovisionMode = iota + DeprovisionModeOrphan +) + +func (d DeprovisionMode) String() string { + switch d { + case DeprovisionModeOrphan: + return "orphan" + case DeprovisionModeNormal: + return "normal" + default: + return "unknown" + } +} + func NewStoreReconciler(store database.Store, ps pubsub.Pubsub, fileCache *files.Cache, @@ -89,6 +110,15 @@ func NewStoreReconciler(store database.Store, // If the registerer fails to register the metrics collector, it's not fatal. logger.Error(context.Background(), "failed to register prometheus metrics", slog.Error(err)) } + + factory := promauto.With(registerer) + reconciler.reconciliationDuration = factory.NewHistogram(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "prebuilds", + Name: "reconciliation_duration_seconds", + Help: "Duration of each prebuilds reconciliation cycle.", + Buckets: prometheus.DefBuckets, + }) } return reconciler @@ -160,10 +190,15 @@ func (c *StoreReconciler) Run(ctx context.Context) { // instead of waiting for the next reconciliation interval case <-ticker.C: // Trigger a new iteration on each tick. 
- err := c.ReconcileAll(ctx) + stats, err := c.ReconcileAll(ctx) if err != nil { c.logger.Error(context.Background(), "reconciliation failed", slog.Error(err)) } + + if c.reconciliationDuration != nil { + c.reconciliationDuration.Observe(stats.Elapsed.Seconds()) + } + c.logger.Debug(ctx, "reconciliation stats", slog.F("elapsed", stats.Elapsed)) case <-ctx.Done(): // nolint:gocritic // it's okay to use slog.F() for an error in this case // because we want to differentiate two different types of errors: ctx.Err() and context.Cause() @@ -247,19 +282,24 @@ func (c *StoreReconciler) Stop(ctx context.Context, cause error) { // be reconciled again, leading to another workspace being provisioned. Two workspace builds will be occurring // simultaneously for the same preset, but once both jobs have completed the reconciliation loop will notice the // extraneous instance and delete it. -func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { +func (c *StoreReconciler) ReconcileAll(ctx context.Context) (stats prebuilds.ReconcileStats, err error) { + start := c.clock.Now() + defer func() { + stats.Elapsed = c.clock.Since(start) + }() + logger := c.logger.With(slog.F("reconcile_context", "all")) select { case <-ctx.Done(): logger.Warn(context.Background(), "reconcile exiting prematurely; context done", slog.Error(ctx.Err())) - return nil + return stats, nil default: } logger.Debug(ctx, "starting reconciliation") - err := c.WithReconciliationLock(ctx, logger, func(ctx context.Context, _ database.Store) error { + err = c.WithReconciliationLock(ctx, logger, func(ctx context.Context, _ database.Store) error { // Check if prebuilds reconciliation is paused settingsJSON, err := c.store.GetPrebuildsSettings(ctx) if err != nil { @@ -282,6 +322,12 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { return nil } + membershipReconciler := NewStoreMembershipReconciler(c.store, c.clock, logger) + err = membershipReconciler.ReconcileAll(ctx, 
database.PrebuildsSystemUserID, PrebuiltWorkspacesGroupName) + if err != nil { + return xerrors.Errorf("reconcile prebuild membership: %w", err) + } + snapshot, err := c.SnapshotState(ctx, c.store) if err != nil { return xerrors.Errorf("determine current snapshot: %w", err) @@ -294,12 +340,6 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { return nil } - membershipReconciler := NewStoreMembershipReconciler(c.store, c.clock) - err = membershipReconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, snapshot.Presets) - if err != nil { - return xerrors.Errorf("reconcile prebuild membership: %w", err) - } - var eg errgroup.Group // Reconcile presets in parallel. Each preset in its own goroutine. for _, preset := range snapshot.Presets { @@ -332,7 +372,7 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { logger.Error(ctx, "failed to reconcile", slog.Error(err)) } - return err + return stats, err } func (c *StoreReconciler) reportHardLimitedPresets(snapshot *prebuilds.GlobalSnapshot) { @@ -412,6 +452,11 @@ func (c *StoreReconciler) SnapshotState(ctx context.Context, store database.Stor return xerrors.Errorf("failed to get prebuilds in progress: %w", err) } + allPendingPrebuilds, err := db.CountPendingNonActivePrebuilds(ctx) + if err != nil { + return xerrors.Errorf("failed to get pending prebuilds: %w", err) + } + presetsBackoff, err := db.GetPresetsBackoff(ctx, c.clock.Now().Add(-c.cfg.ReconciliationBackoffLookback.Value())) if err != nil { return xerrors.Errorf("failed to get backoffs for presets: %w", err) @@ -427,6 +472,7 @@ func (c *StoreReconciler) SnapshotState(ctx context.Context, store database.Stor presetPrebuildSchedules, allRunningPrebuilds, allPrebuildsInProgress, + allPendingPrebuilds, presetsBackoff, hardLimitedPresets, c.clock, @@ -581,6 +627,8 @@ func (c *StoreReconciler) executeReconciliationAction(ctx context.Context, logge levelFn = logger.Info case action.ActionType == prebuilds.ActionTypeDelete && 
len(action.DeleteIDs) > 0: levelFn = logger.Info + case action.ActionType == prebuilds.ActionTypeCancelPending: + levelFn = logger.Info } switch action.ActionType { @@ -635,6 +683,9 @@ func (c *StoreReconciler) executeReconciliationAction(ctx context.Context, logge return multiErr.ErrorOrNil() + case prebuilds.ActionTypeCancelPending: + return c.cancelAndOrphanDeletePendingPrebuilds(ctx, ps.Preset.TemplateID, ps.Preset.TemplateVersionID, ps.Preset.ID) + default: return xerrors.Errorf("unknown action type: %v", action.ActionType) } @@ -681,33 +732,100 @@ func (c *StoreReconciler) createPrebuiltWorkspace(ctx context.Context, prebuiltW c.logger.Info(ctx, "attempting to create prebuild", slog.F("name", name), slog.F("workspace_id", prebuiltWorkspaceID.String()), slog.F("preset_id", presetID.String())) - return c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionStart, workspace) + return c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionStart, workspace, DeprovisionModeNormal) }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, ReadOnly: false, }) } -func (c *StoreReconciler) deletePrebuiltWorkspace(ctx context.Context, prebuiltWorkspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID) error { +// provisionDelete provisions a delete transition for a prebuilt workspace. +// +// If mode is DeprovisionModeOrphan, the builder will not send Terraform state to the provisioner. +// This allows the workspace to be deleted even when no provisioners are available, and is safe +// when no Terraform resources were actually created (e.g., for pending prebuilds that were canceled +// before provisioning started). +// +// IMPORTANT: This function must be called within a database transaction. It does not create its own transaction. +// The caller is responsible for managing the transaction boundary via db.InTx(). 
+func (c *StoreReconciler) provisionDelete(ctx context.Context, db database.Store, workspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID, mode DeprovisionMode) error { + workspace, err := db.GetWorkspaceByID(ctx, workspaceID) + if err != nil { + return xerrors.Errorf("get workspace by ID: %w", err) + } + + template, err := db.GetTemplateByID(ctx, templateID) + if err != nil { + return xerrors.Errorf("failed to get template: %w", err) + } + + if workspace.OwnerID != database.PrebuildsSystemUserID { + return xerrors.Errorf("prebuilt workspace is not owned by prebuild user anymore, probably it was claimed") + } + + c.logger.Info(ctx, "attempting to delete prebuild", slog.F("orphan", mode.String()), + slog.F("name", workspace.Name), slog.F("workspace_id", workspaceID.String()), slog.F("preset_id", presetID.String())) + + return c.provision(ctx, db, workspaceID, template, presetID, + database.WorkspaceTransitionDelete, workspace, mode) +} + +// cancelAndOrphanDeletePendingPrebuilds cancels pending prebuild jobs from inactive template versions +// and orphan-deletes their associated workspaces. +// +// The cancel operation uses a criteria-based update to ensure only jobs that are still pending at +// execution time are canceled, avoiding race conditions where jobs may have transitioned to running. +// +// Since these jobs were never processed by a provisioner, no Terraform resources were created, +// making it safe to orphan-delete the workspaces (skipping Terraform destroy). 
+func (c *StoreReconciler) cancelAndOrphanDeletePendingPrebuilds(ctx context.Context, templateID uuid.UUID, templateVersionID uuid.UUID, presetID uuid.UUID) error { return c.store.InTx(func(db database.Store) error { - workspace, err := db.GetWorkspaceByID(ctx, prebuiltWorkspaceID) + canceledJobs, err := db.UpdatePrebuildProvisionerJobWithCancel( + ctx, + database.UpdatePrebuildProvisionerJobWithCancelParams{ + Now: c.clock.Now(), + PresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }) if err != nil { - return xerrors.Errorf("get workspace by ID: %w", err) + c.logger.Error(ctx, "failed to cancel pending prebuild jobs", + slog.F("template_id", templateID.String()), + slog.F("template_version_id", templateVersionID.String()), + slog.F("preset_id", presetID.String()), + slog.Error(err)) + return err } - template, err := db.GetTemplateByID(ctx, templateID) - if err != nil { - return xerrors.Errorf("failed to get template: %w", err) + if len(canceledJobs) > 0 { + c.logger.Info(ctx, "canceled pending prebuild jobs for inactive version", + slog.F("template_id", templateID.String()), + slog.F("template_version_id", templateVersionID.String()), + slog.F("preset_id", presetID.String()), + slog.F("count", len(canceledJobs))) } - if workspace.OwnerID != database.PrebuildsSystemUserID { - return xerrors.Errorf("prebuilt workspace is not owned by prebuild user anymore, probably it was claimed") + var multiErr multierror.Error + for _, job := range canceledJobs { + err = c.provisionDelete(ctx, db, job.WorkspaceID, job.TemplateID, presetID, DeprovisionModeOrphan) + if err != nil { + c.logger.Error(ctx, "failed to orphan delete canceled prebuild", + slog.F("workspace_id", job.WorkspaceID.String()), slog.Error(err)) + multiErr.Errors = append(multiErr.Errors, err) + } } - c.logger.Info(ctx, "attempting to delete prebuild", - slog.F("workspace_id", prebuiltWorkspaceID.String()), slog.F("preset_id", presetID.String())) + return multiErr.ErrorOrNil() + }, 
&database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + ReadOnly: false, + }) +} - return c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionDelete, workspace) +func (c *StoreReconciler) deletePrebuiltWorkspace(ctx context.Context, prebuiltWorkspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID) error { + return c.store.InTx(func(db database.Store) error { + return c.provisionDelete(ctx, db, prebuiltWorkspaceID, templateID, presetID, DeprovisionModeNormal) }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, ReadOnly: false, @@ -722,6 +840,7 @@ func (c *StoreReconciler) provision( presetID uuid.UUID, transition database.WorkspaceTransition, workspace database.Workspace, + mode DeprovisionMode, ) error { tvp, err := db.GetPresetParametersByTemplateVersionID(ctx, template.ActiveVersionID) if err != nil { @@ -759,6 +878,11 @@ func (c *StoreReconciler) provision( builder = builder.RichParameterValues(params) } + // Use orphan mode for deletes when no Terraform resources exist + if transition == database.WorkspaceTransitionDelete && mode == DeprovisionModeOrphan { + builder = builder.Orphan() + } + _, provisionerJob, _, err := builder.Build( ctx, db, diff --git a/enterprise/coderd/prebuilds/reconcile_test.go b/enterprise/coderd/prebuilds/reconcile_test.go index 413d61ddbbc6a..7548faebd7dab 100644 --- a/enterprise/coderd/prebuilds/reconcile_test.go +++ b/enterprise/coderd/prebuilds/reconcile_test.go @@ -9,36 +9,35 @@ import ( "testing" "time" + "github.com/google/uuid" "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - "golang.org/x/xerrors" - - "github.com/coder/coder/v2/coderd/coderdtest" - "github.com/coder/coder/v2/coderd/database/dbtime" - "github.com/coder/coder/v2/coderd/files" - "github.com/coder/coder/v2/coderd/notifications" - "github.com/coder/coder/v2/coderd/notifications/notificationstest" - "github.com/coder/coder/v2/coderd/util/slice" - 
"github.com/coder/coder/v2/coderd/wsbuilder" - sdkproto "github.com/coder/coder/v2/provisionersdk/proto" - - "github.com/google/uuid" "github.com/stretchr/testify/require" + "golang.org/x/xerrors" "tailscale.com/types/ptr" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" - "github.com/coder/quartz" - - "github.com/coder/serpent" + "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/files" + "github.com/coder/coder/v2/coderd/notifications" + "github.com/coder/coder/v2/coderd/notifications/notificationstest" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/util/slice" + "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/enterprise/coderd/prebuilds" + sdkproto "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" + "github.com/coder/serpent" ) func TestNoReconciliationActionsIfNoPresets(t *testing.T) { @@ -73,7 +72,8 @@ func TestNoReconciliationActionsIfNoPresets(t *testing.T) { require.Equal(t, templateVersion, gotTemplateVersion) // when we trigger the reconciliation loop for all templates - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // then no reconciliation actions are taken // because without presets, there are no prebuilds @@ -127,7 +127,8 @@ func TestNoReconciliationActionsIfNoPrebuilds(t *testing.T) { require.NotEmpty(t, presetParameters) // when we trigger the reconciliation loop for all templates - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // 
then no reconciliation actions are taken // because without prebuilds, there is nothing to reconcile @@ -205,7 +206,10 @@ func TestPrebuildReconciliation(t *testing.T) { templateDeleted: []bool{false}, }, { - name: "never attempt to interfere with active builds", + // TODO(ssncferreira): Investigate why the GetRunningPrebuiltWorkspaces query is returning 0 rows. + // When a template version is inactive (templateVersionActive = false), any prebuilds in the + // database.ProvisionerJobStatusRunning state should be deleted. + name: "never attempt to interfere with prebuilds from an active template version", // The workspace builder does not allow scheduling a new build if there is already a build // pending, running, or canceling. As such, we should never attempt to start, stop or delete // such prebuilds. Rather, we should wait for the existing build to complete and reconcile @@ -216,7 +220,7 @@ func TestPrebuildReconciliation(t *testing.T) { database.ProvisionerJobStatusRunning, database.ProvisionerJobStatusCanceling, }, - templateVersionActive: []bool{true, false}, + templateVersionActive: []bool{true}, shouldDeleteOldPrebuild: ptr.To(false), templateDeleted: []bool{false}, }, @@ -426,7 +430,8 @@ func (tc testCase) run(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) if tc.shouldCreateNewPrebuild != nil { newPrebuildCount := 0 @@ -540,7 +545,8 @@ func TestMultiplePresetsPerTemplateVersion(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := 
controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) newPrebuildCount := 0 workspaces, err := db.GetWorkspacesByTemplateID(ctx, template.ID) @@ -666,7 +672,7 @@ func TestPrebuildScheduling(t *testing.T) { DesiredInstances: 5, }) - err := controller.ReconcileAll(ctx) + _, err := controller.ReconcileAll(ctx) require.NoError(t, err) // get workspace builds @@ -749,7 +755,8 @@ func TestInvalidPreset(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) workspaces, err := db.GetWorkspacesByTemplateID(ctx, template.ID) require.NoError(t, err) @@ -815,7 +822,8 @@ func TestDeletionOfPrebuiltWorkspaceWithInvalidPreset(t *testing.T) { }) // Old prebuilt workspace should be deleted. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) builds, err := db.GetWorkspaceBuildsByWorkspaceID(ctx, database.GetWorkspaceBuildsByWorkspaceIDParams{ WorkspaceID: prebuiltWorkspace.ID, @@ -914,12 +922,15 @@ func TestSkippingHardLimitedPresets(t *testing.T) { // Trigger reconciliation to attempt creating a new prebuild. // The outcome depends on whether the hard limit has been reached. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // These two additional calls to ReconcileAll should not trigger any notifications. // A notification is only sent once. - require.NoError(t, controller.ReconcileAll(ctx)) - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Verify the final state after reconciliation. 
workspaces, err = db.GetWorkspacesByTemplateID(ctx, template.ID) @@ -1091,12 +1102,15 @@ func TestHardLimitedPresetShouldNotBlockDeletion(t *testing.T) { // Trigger reconciliation to attempt creating a new prebuild. // The outcome depends on whether the hard limit has been reached. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // These two additional calls to ReconcileAll should not trigger any notifications. // A notification is only sent once. - require.NoError(t, controller.ReconcileAll(ctx)) - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Verify the final state after reconciliation. // When hard limit is reached, no new workspace should be created. @@ -1139,7 +1153,8 @@ func TestHardLimitedPresetShouldNotBlockDeletion(t *testing.T) { } // Trigger reconciliation to make sure that successful, but outdated prebuilt workspace will be deleted. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) workspaces, err = db.GetWorkspacesByTemplateID(ctx, template.ID) require.NoError(t, err) @@ -1738,7 +1753,8 @@ func TestExpiredPrebuildsMultipleActions(t *testing.T) { } // Trigger reconciliation to process expired prebuilds and enforce desired state. 
- require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Sort non-expired workspaces by CreatedAt in ascending order (oldest first) sort.Slice(nonExpiredWorkspaces, func(i, j int) bool { @@ -1783,6 +1799,637 @@ func TestExpiredPrebuildsMultipleActions(t *testing.T) { } } +func TestCancelPendingPrebuilds(t *testing.T) { + t.Parallel() + + t.Run("CancelPendingPrebuilds", func(t *testing.T) { + t.Parallel() + + for _, tt := range []struct { + name string + setupBuild func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse + activeTemplateVersion bool + previouslyCanceled bool + previouslyCompleted bool + shouldCancel bool + }{ + // Should cancel pending prebuild-related jobs from a non-active template version + { + name: "CancelsPendingPrebuildJobNonActiveVersion", + // Given: a pending prebuild job + setupBuild: func(t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: true, + }, + // Should not cancel pending prebuild-related jobs from an active template version + { + name: "DoesNotCancelPendingPrebuildJobActiveVersion", + // Given: a pending prebuild job + setupBuild: func(t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + 
templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: true, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel pending prebuild-related jobs associated to a second workspace build + { + name: "DoesNotCancelPendingPrebuildJobSecondBuild", + // Given: a pending prebuild job associated to a second workspace build + setupBuild: func(t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + BuildNumber: int32(2), + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel pending prebuild-related jobs of a different template + { + name: "DoesNotCancelPrebuildJobDifferentTemplate", + // Given: a pending prebuild job belonging to a different template + setupBuild: func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + 
OrganizationID: orgID, + TemplateID: uuid.Nil, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel pending user workspace build jobs + { + name: "DoesNotCancelUserWorkspaceJob", + // Given: a pending user workspace build job + setupBuild: func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + _, member := coderdtest.CreateAnotherUser(t, client, orgID, rbac.RoleMember()) + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: member.ID, + OrganizationID: orgID, + TemplateID: uuid.Nil, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: member.ID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel pending prebuild-related jobs with a delete transition + { + name: "DoesNotCancelPrebuildJobDeleteTransition", + // Given: a pending prebuild job with a delete transition + setupBuild: func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Pending().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + Transition: database.WorkspaceTransitionDelete, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + 
}).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel prebuild-related jobs already being processed by a provisioner + { + name: "DoesNotCancelRunningPrebuildJob", + // Given: a running prebuild job + setupBuild: func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Starting().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + previouslyCanceled: false, + previouslyCompleted: false, + shouldCancel: false, + }, + // Should not cancel already canceled prebuild-related jobs + { + name: "DoesNotCancelCanceledPrebuildJob", + // Given: a canceled prebuild job + setupBuild: func( + t *testing.T, + db database.Store, + client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Canceled().Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + shouldCancel: false, + previouslyCanceled: true, + previouslyCompleted: true, + }, + // Should not cancel completed prebuild-related jobs + { + name: "DoesNotCancelCompletedPrebuildJob", + // Given: a completed prebuild job + setupBuild: func( + t *testing.T, + db database.Store, + 
client *codersdk.Client, + orgID uuid.UUID, + templateID uuid.UUID, + templateVersionID uuid.UUID, + presetID uuid.NullUUID, + ) dbfake.WorkspaceResponse { + return dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }).Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: templateVersionID, + TemplateVersionPresetID: presetID, + }).Do() + }, + activeTemplateVersion: false, + shouldCancel: false, + previouslyCanceled: false, + previouslyCompleted: true, + }, + } { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic + clock := quartz.NewMock(t) + clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC)) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + // Setup + db, ps := dbtestutil.NewDB(t) + client, _, _ := coderdtest.NewWithAPI(t, &coderdtest.Options{ + // Explicitly not including provisioner daemons, as we don't want the jobs to be processed + // Jobs operations will be simulated via the database model + IncludeProvisionerDaemon: false, + Database: db, + Pubsub: ps, + Clock: clock, + }) + fakeEnqueuer := newFakeEnqueuer() + registry := prometheus.NewRegistry() + cache := files.New(registry, &coderdtest.FakeAuthorizer{}) + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false}).Leveled(slog.LevelDebug) + reconciler := prebuilds.NewStoreReconciler(db, ps, cache, codersdk.PrebuildsConfig{}, logger, clock, registry, fakeEnqueuer, newNoopUsageCheckerPtr()) + owner := coderdtest.CreateFirstUser(t, client) + + // Given: a template with a version containing a preset with 1 prebuild instance + nonActivePresetID := uuid.NullUUID{ + UUID: uuid.New(), + Valid: true, + } + nonActiveTemplateVersion := dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: 
owner.OrganizationID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + ID: nonActivePresetID.UUID, + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + templateID := nonActiveTemplateVersion.Template.ID + + // Given: a new active template version + activePresetID := uuid.NullUUID{ + UUID: uuid.New(), + Valid: true, + } + activeTemplateVersion := dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + TemplateID: uuid.NullUUID{ + UUID: templateID, + Valid: true, + }, + }).Preset(database.TemplateVersionPreset{ + ID: activePresetID.UUID, + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).SkipCreateTemplate().Do() + + var pendingWorkspace dbfake.WorkspaceResponse + if tt.activeTemplateVersion { + // Given: a prebuilt workspace, workspace build and respective provisioner job from an + // active template version + pendingWorkspace = tt.setupBuild(t, db, client, + owner.OrganizationID, templateID, activeTemplateVersion.TemplateVersion.ID, activePresetID) + } else { + // Given: a prebuilt workspace, workspace build and respective provisioner job from a + // non-active template version + pendingWorkspace = tt.setupBuild(t, db, client, + owner.OrganizationID, templateID, nonActiveTemplateVersion.TemplateVersion.ID, nonActivePresetID) + } + + // Given: the new template version is promoted to active + err := db.UpdateTemplateActiveVersionByID(ctx, database.UpdateTemplateActiveVersionByIDParams{ + ID: templateID, + ActiveVersionID: activeTemplateVersion.TemplateVersion.ID, + }) + require.NoError(t, err) + + // When: the reconciliation loop is triggered + _, err = reconciler.ReconcileAll(ctx) + require.NoError(t, err) + + if tt.shouldCancel { + // Then: the pending prebuild job from non-active version should be canceled + cancelledJob, err := db.GetProvisionerJobByID(ctx, pendingWorkspace.Build.JobID) + require.NoError(t, err) + 
require.Equal(t, clock.Now().UTC(), cancelledJob.CanceledAt.Time.UTC()) + require.Equal(t, clock.Now().UTC(), cancelledJob.CompletedAt.Time.UTC()) + require.Equal(t, database.ProvisionerJobStatusCanceled, cancelledJob.JobStatus) + + // Then: the workspace should be deleted + deletedWorkspace, err := db.GetWorkspaceByID(ctx, pendingWorkspace.Workspace.ID) + require.NoError(t, err) + require.True(t, deletedWorkspace.Deleted) + latestBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, deletedWorkspace.ID) + require.NoError(t, err) + require.Equal(t, database.WorkspaceTransitionDelete, latestBuild.Transition) + deleteJob, err := db.GetProvisionerJobByID(ctx, latestBuild.JobID) + require.NoError(t, err) + require.True(t, deleteJob.CompletedAt.Valid) + require.False(t, deleteJob.WorkerID.Valid) + require.Equal(t, database.ProvisionerJobStatusSucceeded, deleteJob.JobStatus) + } else { + // Then: the pending prebuild job should not be canceled + job, err := db.GetProvisionerJobByID(ctx, pendingWorkspace.Build.JobID) + require.NoError(t, err) + if !tt.previouslyCanceled { + require.Zero(t, job.CanceledAt.Time.UTC()) + require.NotEqual(t, database.ProvisionerJobStatusCanceled, job.JobStatus) + } + if !tt.previouslyCompleted { + require.Zero(t, job.CompletedAt.Time.UTC()) + } + + // Then: the workspace should not be deleted + workspace, err := db.GetWorkspaceByID(ctx, pendingWorkspace.Workspace.ID) + require.NoError(t, err) + require.False(t, workspace.Deleted) + } + }) + } + }) + + t.Run("CancelPendingPrebuildsMultipleTemplates", func(t *testing.T) { + t.Parallel() + + createTemplateVersionWithPreset := func( + t *testing.T, + db database.Store, + orgID uuid.UUID, + userID uuid.UUID, + templateID uuid.UUID, + prebuiltInstances int32, + ) (uuid.UUID, uuid.UUID, uuid.UUID) { + templatePreset := uuid.NullUUID{ + UUID: uuid.New(), + Valid: true, + } + templateVersion := dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: orgID, + CreatedBy: 
userID, + TemplateID: uuid.NullUUID{ + UUID: templateID, + Valid: true, + }, + }).Preset(database.TemplateVersionPreset{ + ID: templatePreset.UUID, + DesiredInstances: sql.NullInt32{ + Int32: prebuiltInstances, + Valid: true, + }, + }).Do() + + return templateVersion.Template.ID, templateVersion.TemplateVersion.ID, templatePreset.UUID + } + + setupPrebuilds := func( + t *testing.T, + db database.Store, + orgID uuid.UUID, + templateID uuid.UUID, + versionID uuid.UUID, + presetID uuid.UUID, + count int, + pending bool, + ) []dbfake.WorkspaceResponse { + prebuilds := make([]dbfake.WorkspaceResponse, count) + for i := range count { + builder := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + OrganizationID: orgID, + TemplateID: templateID, + }) + + if pending { + builder = builder.Pending() + } + + prebuilds[i] = builder.Seed(database.WorkspaceBuild{ + InitiatorID: database.PrebuildsSystemUserID, + TemplateVersionID: versionID, + TemplateVersionPresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }).Do() + } + + return prebuilds + } + + checkIfJobCanceledAndDeleted := func( + t *testing.T, + clock *quartz.Mock, + ctx context.Context, + db database.Store, + shouldBeCanceledAndDeleted bool, + prebuilds []dbfake.WorkspaceResponse, + ) { + for _, prebuild := range prebuilds { + pendingJob, err := db.GetProvisionerJobByID(ctx, prebuild.Build.JobID) + require.NoError(t, err) + + if shouldBeCanceledAndDeleted { + // Pending job should be canceled + require.Equal(t, database.ProvisionerJobStatusCanceled, pendingJob.JobStatus) + require.Equal(t, clock.Now().UTC(), pendingJob.CanceledAt.Time.UTC()) + require.Equal(t, clock.Now().UTC(), pendingJob.CompletedAt.Time.UTC()) + + // Workspace should be deleted + deletedWorkspace, err := db.GetWorkspaceByID(ctx, prebuild.Workspace.ID) + require.NoError(t, err) + require.True(t, deletedWorkspace.Deleted) + latestBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, 
deletedWorkspace.ID) + require.NoError(t, err) + require.Equal(t, database.WorkspaceTransitionDelete, latestBuild.Transition) + deleteJob, err := db.GetProvisionerJobByID(ctx, latestBuild.JobID) + require.NoError(t, err) + require.True(t, deleteJob.CompletedAt.Valid) + require.False(t, deleteJob.WorkerID.Valid) + require.Equal(t, database.ProvisionerJobStatusSucceeded, deleteJob.JobStatus) + } else { + // Pending job should not be canceled + require.NotEqual(t, database.ProvisionerJobStatusCanceled, pendingJob.JobStatus) + require.Zero(t, pendingJob.CanceledAt.Time.UTC()) + + // Workspace should not be deleted + workspace, err := db.GetWorkspaceByID(ctx, prebuild.Workspace.ID) + require.NoError(t, err) + require.False(t, workspace.Deleted) + } + } + } + + // Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic + clock := quartz.NewMock(t) + clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC)) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + // Setup + db, ps := dbtestutil.NewDB(t) + client, _, _ := coderdtest.NewWithAPI(t, &coderdtest.Options{ + // Explicitly not including provisioner daemons, as we don't want the jobs to be processed + // Jobs operations will be simulated via the database model + IncludeProvisionerDaemon: false, + Database: db, + Pubsub: ps, + Clock: clock, + }) + fakeEnqueuer := newFakeEnqueuer() + registry := prometheus.NewRegistry() + cache := files.New(registry, &coderdtest.FakeAuthorizer{}) + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false}).Leveled(slog.LevelDebug) + reconciler := prebuilds.NewStoreReconciler(db, ps, cache, codersdk.PrebuildsConfig{}, logger, clock, registry, fakeEnqueuer, newNoopUsageCheckerPtr()) + owner := coderdtest.CreateFirstUser(t, client) + + // Given: template A with 2 versions + // Given: template A version v1: with a preset with 5 instances (2 running, 3 pending) + templateAID, templateAVersion1ID, 
templateAVersion1PresetID := createTemplateVersionWithPreset(t, db, owner.OrganizationID, owner.UserID, uuid.Nil, 5) + templateAVersion1Running := setupPrebuilds(t, db, owner.OrganizationID, templateAID, templateAVersion1ID, templateAVersion1PresetID, 2, false) + templateAVersion1Pending := setupPrebuilds(t, db, owner.OrganizationID, templateAID, templateAVersion1ID, templateAVersion1PresetID, 3, true) + // Given: template A version v2 (active version): with a preset with 2 instances (1 running, 1 pending) + _, templateAVersion2ID, templateAVersion2PresetID := createTemplateVersionWithPreset(t, db, owner.OrganizationID, owner.UserID, templateAID, 2) + templateAVersion2Running := setupPrebuilds(t, db, owner.OrganizationID, templateAID, templateAVersion2ID, templateAVersion2PresetID, 1, false) + templateAVersion2Pending := setupPrebuilds(t, db, owner.OrganizationID, templateAID, templateAVersion2ID, templateAVersion2PresetID, 1, true) + + // Given: template B with 3 versions + // Given: template B version v1: with a preset with 3 instances (1 running, 2 pending) + templateBID, templateBVersion1ID, templateBVersion1PresetID := createTemplateVersionWithPreset(t, db, owner.OrganizationID, owner.UserID, uuid.Nil, 3) + templateBVersion1Running := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion1ID, templateBVersion1PresetID, 1, false) + templateBVersion1Pending := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion1ID, templateBVersion1PresetID, 2, true) + // Given: template B version v2: with a preset with 2 instances (2 pending) + _, templateBVersion2ID, templateBVersion2PresetID := createTemplateVersionWithPreset(t, db, owner.OrganizationID, owner.UserID, templateBID, 2) + templateBVersion2Pending := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion2ID, templateBVersion2PresetID, 2, true) + // Given: template B version v3 (active version): with a preset with 2 instances (1 running, 1 pending) + _, 
templateBVersion3ID, templateBVersion3PresetID := createTemplateVersionWithPreset(t, db, owner.OrganizationID, owner.UserID, templateBID, 2) + templateBVersion3Running := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion3ID, templateBVersion3PresetID, 1, false) + templateBVersion3Pending := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion3ID, templateBVersion3PresetID, 1, true) + + // When: the reconciliation loop is executed + _, err := reconciler.ReconcileAll(ctx) + require.NoError(t, err) + + // Then: template A version 1 running workspaces should not be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion1Running) + // Then: template A version 1 pending workspaces should be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateAVersion1Pending) + // Then: template A version 2 running and pending workspaces should not be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion2Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion2Pending) + + // Then: template B version 1 running workspaces should not be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion1Running) + // Then: template B version 1 pending workspaces should be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateBVersion1Pending) + // Then: template B version 2 pending workspaces should be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateBVersion2Pending) + // Then: template B version 3 running and pending workspaces should not be canceled + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion3Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion3Pending) + }) +} + +func TestReconciliationStats(t *testing.T) { + t.Parallel() + + // Setup + clock := quartz.NewReal() + db, ps := dbtestutil.NewDB(t) + client, _, _ := 
coderdtest.NewWithAPI(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, + Clock: clock, + }) + fakeEnqueuer := newFakeEnqueuer() + registry := prometheus.NewRegistry() + cache := files.New(registry, &coderdtest.FakeAuthorizer{}) + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false}).Leveled(slog.LevelDebug) + reconciler := prebuilds.NewStoreReconciler(db, ps, cache, codersdk.PrebuildsConfig{}, logger, clock, registry, fakeEnqueuer, newNoopUsageCheckerPtr()) + owner := coderdtest.CreateFirstUser(t, client) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // Create a template version with a preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + + // Verify that ReconcileAll tracks and returns elapsed time + start := time.Now() + stats, err := reconciler.ReconcileAll(ctx) + actualElapsed := time.Since(start) + require.NoError(t, err) + require.Greater(t, stats.Elapsed, time.Duration(0)) + + // Verify stats.Elapsed matches actual execution time + require.InDelta(t, actualElapsed.Milliseconds(), stats.Elapsed.Milliseconds(), 100) + // Verify reconciliation loop is not unexpectedly slow + require.Less(t, stats.Elapsed, 5*time.Second) +} + func newNoopEnqueuer() *notifications.NoopEnqueuer { return notifications.NewNoopEnqueuer() } @@ -2277,7 +2924,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { _ = setupTestDBPreset(t, db, templateVersionID, 2, "test") // Initially, reconciliation should create prebuilds - err := reconciler.ReconcileAll(ctx) + _, err := reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that prebuilds were created @@ -2304,7 +2951,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { require.Len(t, workspaces, 0, "prebuilds should be deleted") 
// Run reconciliation again - it should be paused and not recreate prebuilds - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that no new prebuilds were created because reconciliation is paused @@ -2317,7 +2964,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { require.NoError(t, err) // Run reconciliation again - it should now recreate the prebuilds - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that prebuilds were recreated diff --git a/enterprise/coderd/replicas_test.go b/enterprise/coderd/replicas_test.go index 5a56817b19409..4b16f7bb70b91 100644 --- a/enterprise/coderd/replicas_test.go +++ b/enterprise/coderd/replicas_test.go @@ -20,9 +20,7 @@ import ( func TestReplicas(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("only test with real postgres") - } + t.Run("ErrorWithoutLicense", func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitLong) diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go index 30432af76c7eb..f103a8d1b06a0 100644 --- a/enterprise/coderd/roles.go +++ b/enterprise/coderd/roles.go @@ -311,5 +311,13 @@ func validOrganizationRoleRequest(ctx context.Context, req codersdk.CustomRoleRe return false } + if len(req.OrganizationMemberPermissions) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request, not allowed to assign organization member permissions for an organization role.", + Detail: "organization scoped roles may not contain organization member permissions", + }) + return false + } + return true } diff --git a/enterprise/coderd/workspaceagents_test.go b/enterprise/coderd/workspaceagents_test.go index 917d44dff2d48..2e4690bc961a9 100644 --- a/enterprise/coderd/workspaceagents_test.go +++ b/enterprise/coderd/workspaceagents_test.go @@ -89,10 +89,6 @@ func TestBlockNonBrowser(t *testing.T) { func 
TestReinitializeAgent(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("dbmem cannot currently claim a workspace") - } - if runtime.GOOS == "windows" { t.Skip("test startup script is not supported on windows") } diff --git a/enterprise/coderd/workspaceproxy.go b/enterprise/coderd/workspaceproxy.go index 2ebee9d93f9e8..4f3ce12056617 100644 --- a/enterprise/coderd/workspaceproxy.go +++ b/enterprise/coderd/workspaceproxy.go @@ -2,7 +2,6 @@ package coderd import ( "context" - "crypto/sha256" "database/sql" "fmt" "net/http" @@ -16,6 +15,7 @@ import ( "cdr.dev/slog" agpl "github.com/coder/coder/v2/coderd" + "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" @@ -28,7 +28,6 @@ import ( "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/cryptorand" "github.com/coder/coder/v2/enterprise/coderd/proxyhealth" "github.com/coder/coder/v2/enterprise/replicasync" "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" @@ -934,13 +933,13 @@ func (api *API) reconnectingPTYSignedToken(rw http.ResponseWriter, r *http.Reque } func generateWorkspaceProxyToken(id uuid.UUID) (token string, hashed []byte, err error) { - secret, err := cryptorand.HexString(64) + secret, hashedSecret, err := apikey.GenerateSecret(64) if err != nil { return "", nil, xerrors.Errorf("generate token: %w", err) } - hashedSecret := sha256.Sum256([]byte(secret)) + fullToken := fmt.Sprintf("%s:%s", id, secret) - return fullToken, hashedSecret[:], nil + return fullToken, hashedSecret, nil } func convertProxies(p []database.WorkspaceProxy, statuses map[uuid.UUID]proxyhealth.ProxyStatus) []codersdk.WorkspaceProxy { diff --git a/enterprise/coderd/workspacequota_test.go b/enterprise/coderd/workspacequota_test.go index 186af3a787d94..937aa8d57433a 
100644 --- a/enterprise/coderd/workspacequota_test.go +++ b/enterprise/coderd/workspacequota_test.go @@ -660,10 +660,6 @@ func TestWorkspaceQuota(t *testing.T) { func TestWorkspaceSerialization(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("Serialization errors only occur in postgres") - } - db, _ := dbtestutil.NewDB(t) user := dbgen.User(t, db, database.User{}) diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go index 25fa1527d894e..5201e613f7a1d 100644 --- a/enterprise/coderd/workspaces_test.go +++ b/enterprise/coderd/workspaces_test.go @@ -534,10 +534,6 @@ func TestCreateUserWorkspace(t *testing.T) { t.Run("ClaimPrebuild", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("dbmem cannot currently claim a workspace") - } - client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: coderdtest.DeploymentValues(t), @@ -845,10 +841,6 @@ func TestWorkspaceAutobuild(t *testing.T) { t.Run("NoDeadlock", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skipf("Skipping non-postgres run") - } - var ( ticker = make(chan time.Time) statCh = make(chan autobuild.Stats) @@ -1654,10 +1646,6 @@ func TestWorkspaceAutobuild(t *testing.T) { t.Run("NextStartAtIsNullifiedOnScheduleChange", func(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test uses triggers so does not work with dbmem.go") - } - var ( tickCh = make(chan time.Time) statsCh = make(chan autobuild.Stats) @@ -1781,10 +1769,6 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) { func TestPrebuildsAutobuild(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("this test requires postgres") - } - getRunningPrebuilds := func( t *testing.T, ctx context.Context, diff --git a/enterprise/tailnet/handshaker_test.go b/enterprise/tailnet/handshaker_test.go index 
523f20ea122da..dbb05418e3827 100644 --- a/enterprise/tailnet/handshaker_test.go +++ b/enterprise/tailnet/handshaker_test.go @@ -14,9 +14,7 @@ import ( func TestPGCoordinator_ReadyForHandshake_OK(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -30,9 +28,7 @@ func TestPGCoordinator_ReadyForHandshake_OK(t *testing.T) { func TestPGCoordinator_ReadyForHandshake_NoPermission(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() diff --git a/enterprise/tailnet/multiagent_test.go b/enterprise/tailnet/multiagent_test.go index fe3c3eaee04d3..c79f11153a166 100644 --- a/enterprise/tailnet/multiagent_test.go +++ b/enterprise/tailnet/multiagent_test.go @@ -23,9 +23,6 @@ import ( // +--------+ func TestPGCoordinator_MultiAgent(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -60,9 +57,6 @@ func TestPGCoordinator_MultiAgent(t *testing.T) { func TestPGCoordinator_MultiAgent_CoordClose(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -90,9 +84,6 @@ func TestPGCoordinator_MultiAgent_CoordClose(t *testing.T) { // +--------+ func TestPGCoordinator_MultiAgent_UnsubscribeRace(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: 
true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -135,9 +126,6 @@ func TestPGCoordinator_MultiAgent_UnsubscribeRace(t *testing.T) { // +--------+ func TestPGCoordinator_MultiAgent_Unsubscribe(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -197,9 +185,6 @@ func TestPGCoordinator_MultiAgent_Unsubscribe(t *testing.T) { // +--------+ func TestPGCoordinator_MultiAgent_MultiCoordinator(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -247,9 +232,6 @@ func TestPGCoordinator_MultiAgent_MultiCoordinator(t *testing.T) { // +--------+ func TestPGCoordinator_MultiAgent_MultiCoordinator_UpdateBeforeSubscribe(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) @@ -299,9 +281,6 @@ func TestPGCoordinator_MultiAgent_MultiCoordinator_UpdateBeforeSubscribe(t *test // +--------+ func TestPGCoordinator_MultiAgent_TwoAgents(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) store, ps := dbtestutil.NewDB(t) diff --git a/enterprise/tailnet/pgcoord.go b/enterprise/tailnet/pgcoord.go index 32bd896669c14..54bb87f932d04 100644 --- a/enterprise/tailnet/pgcoord.go +++ b/enterprise/tailnet/pgcoord.go @@ -106,8 +106,8 @@ var pgCoordSubject = rbac.Subject{ Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceTailnetCoordinator.Type: {policy.WildcardSymbol}, }), - Org: 
map[string][]rbac.Permission{}, - User: []rbac.Permission{}, + User: []rbac.Permission{}, + ByOrgID: map[string]rbac.OrgPermissions{}, }, }), Scope: rbac.ScopeAll, @@ -873,9 +873,11 @@ func (q *querier) handleIncoming() { return case c := <-q.newConnections: + q.logger.Debug(q.ctx, "new connection received", slog.F("peer_id", c.UniqueID())) q.newConn(c) case c := <-q.closeConnections: + q.logger.Debug(q.ctx, "connection close request", slog.F("peer_id", c.UniqueID())) q.cleanupConn(c) } } @@ -902,7 +904,8 @@ func (q *querier) newConn(c *connIO) { mk := mKey(c.UniqueID()) dup, ok := q.mappers[mk] if ok { - // duplicate, overwrite and close the old one + q.logger.Debug(q.ctx, "duplicate mapper found; closing old connection", slog.F("peer_id", dup.c.UniqueID())) + // overwrite and close the old one atomic.StoreInt64(&c.overwrites, dup.c.Overwrites()+1) err := dup.c.CoordinatorClose() if err != nil { @@ -913,6 +916,7 @@ func (q *querier) newConn(c *connIO) { q.workQ.enqueue(querierWorkKey{ mappingQuery: mk, }) + q.logger.Debug(q.ctx, "added new mapper", slog.F("peer_id", c.UniqueID())) } func (q *querier) isHealthy() bool { @@ -940,11 +944,12 @@ func (q *querier) cleanupConn(c *connIO) { logger.Error(q.ctx, "failed to close connIO", slog.Error(err)) } delete(q.mappers, mk) - q.logger.Debug(q.ctx, "removed mapper") + q.logger.Debug(q.ctx, "removed mapper", slog.F("peer_id", c.UniqueID())) } func (q *querier) worker() { defer q.wg.Done() + defer q.logger.Debug(q.ctx, "worker exited") eb := backoff.NewExponentialBackOff() eb.MaxElapsedTime = 0 // retry indefinitely eb.MaxInterval = dbMaxBackoff @@ -1019,7 +1024,7 @@ func (q *querier) mappingQuery(peer mKey) error { return nil } logger.Debug(q.ctx, "sending mappings", slog.F("mapping_len", len(mappings))) - return agpl.SendCtx(q.ctx, mpr.mappings, mappings) + return agpl.SendCtx(mpr.ctx, mpr.mappings, mappings) } func (q *querier) bindingsToMappings(bindings []database.GetTailnetTunnelPeerBindingsRow) ([]mapping, error) { 
diff --git a/enterprise/tailnet/pgcoord_internal_test.go b/enterprise/tailnet/pgcoord_internal_test.go index dacf61e42acde..88dbe245f062a 100644 --- a/enterprise/tailnet/pgcoord_internal_test.go +++ b/enterprise/tailnet/pgcoord_internal_test.go @@ -163,9 +163,7 @@ func TestHeartbeats_LostCoordinator_MarkLost(t *testing.T) { // that is, clean up peers and associated tunnels that have been lost for over 24 hours. func TestLostPeerCleanupQueries(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, _, sqlDB := dbtestutil.NewDBWithSQLDB(t, dbtestutil.WithDumpOnFailure()) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() @@ -326,9 +324,7 @@ func TestDebugTemplate(t *testing.T) { func TestGetDebug(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, _ := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() diff --git a/enterprise/tailnet/pgcoord_test.go b/enterprise/tailnet/pgcoord_test.go index 7923ffdb81519..eee64f75f4ea3 100644 --- a/enterprise/tailnet/pgcoord_test.go +++ b/enterprise/tailnet/pgcoord_test.go @@ -36,9 +36,7 @@ func TestMain(m *testing.M) { func TestPGCoordinatorSingle_ClientWithoutAgent(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -71,9 +69,7 @@ func TestPGCoordinatorSingle_ClientWithoutAgent(t *testing.T) { func TestPGCoordinatorSingle_AgentWithoutClients(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -105,9 +101,7 @@ func 
TestPGCoordinatorSingle_AgentWithoutClients(t *testing.T) { func TestPGCoordinatorSingle_AgentInvalidIP(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -132,9 +126,7 @@ func TestPGCoordinatorSingle_AgentInvalidIP(t *testing.T) { func TestPGCoordinatorSingle_AgentInvalidIPBits(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -160,9 +152,7 @@ func TestPGCoordinatorSingle_AgentInvalidIPBits(t *testing.T) { func TestPGCoordinatorSingle_AgentValidIP(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -199,9 +189,7 @@ func TestPGCoordinatorSingle_AgentValidIP(t *testing.T) { func TestPGCoordinatorSingle_AgentWithClient(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -248,9 +236,7 @@ func TestPGCoordinatorSingle_AgentWithClient(t *testing.T) { func TestPGCoordinatorSingle_MissedHeartbeats(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() @@ -333,9 +319,7 @@ func TestPGCoordinatorSingle_MissedHeartbeats(t *testing.T) { func TestPGCoordinatorSingle_MissedHeartbeats_NoDrop(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - 
t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -379,9 +363,7 @@ func TestPGCoordinatorSingle_MissedHeartbeats_NoDrop(t *testing.T) { func TestPGCoordinatorSingle_SendsHeartbeats(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -429,9 +411,7 @@ func TestPGCoordinatorSingle_SendsHeartbeats(t *testing.T) { // +---------+ func TestPGCoordinatorDual_Mainline(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -527,9 +507,7 @@ func TestPGCoordinatorDual_Mainline(t *testing.T) { // +---------+ func TestPGCoordinator_MultiCoordinatorAgent(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -695,9 +673,7 @@ func TestPGCoordinator_Node_Empty(t *testing.T) { // do this now, but it's schematically possible, so we should make sure it doesn't break anything. 
func TestPGCoordinator_BidirectionalTunnels(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -710,9 +686,7 @@ func TestPGCoordinator_BidirectionalTunnels(t *testing.T) { func TestPGCoordinator_GracefulDisconnect(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -725,9 +699,7 @@ func TestPGCoordinator_GracefulDisconnect(t *testing.T) { func TestPGCoordinator_Lost(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -740,9 +712,7 @@ func TestPGCoordinator_Lost(t *testing.T) { func TestPGCoordinator_NoDeleteOnClose(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } + store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -795,10 +765,6 @@ func TestPGCoordinator_NoDeleteOnClose(t *testing.T) { func TestPGCoordinatorDual_FailedHeartbeat(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } - dburl, err := dbtestutil.Open(t) require.NoError(t, err) @@ -861,10 +827,6 @@ func TestPGCoordinatorDual_FailedHeartbeat(t *testing.T) { func TestPGCoordinatorDual_PeerReconnect(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } - store, ps := dbtestutil.NewDB(t) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) defer cancel() @@ -918,10 +880,6 @@ func 
TestPGCoordinatorDual_PeerReconnect(t *testing.T) { func TestPGCoordinatorPropogatedPeerContext(t *testing.T) { t.Parallel() - if !dbtestutil.WillUsePostgres() { - t.Skip("test only with postgres") - } - ctx := testutil.Context(t, testutil.WaitMedium) store, ps := dbtestutil.NewDB(t) logger := testutil.Logger(t) diff --git a/enterprise/wsproxy/wsproxy.go b/enterprise/wsproxy/wsproxy.go index 94b1802959987..734f6b2b594c8 100644 --- a/enterprise/wsproxy/wsproxy.go +++ b/enterprise/wsproxy/wsproxy.go @@ -436,8 +436,8 @@ func New(ctx context.Context, opts *Options) (*Server, error) { return s, nil } -func (s *Server) RegisterNow() error { - _, err := s.registerLoop.RegisterNow() +func (s *Server) RegisterNow(ctx context.Context) error { + _, err := s.registerLoop.RegisterNow(ctx) return err } @@ -521,7 +521,7 @@ func pingSiblingReplicas(ctx context.Context, logger slog.Logger, sf *singleflig errs := make(chan error, len(replicas)) for _, peer := range replicas { go func(peer codersdk.Replica) { - err := replicasync.PingPeerReplica(ctx, client, peer.RelayAddress) + err := pingReplica(ctx, client, peer) if err != nil { errs <- xerrors.Errorf("ping sibling replica %s (%s): %w", peer.Hostname, peer.RelayAddress, err) logger.Warn(ctx, "failed to ping sibling replica, this could happen if the replica has shutdown", @@ -553,6 +553,28 @@ func pingSiblingReplicas(ctx context.Context, logger slog.Logger, sf *singleflig return errStrInterface.(string) } +// pingReplica pings a replica over it's internal relay address to ensure it's +// reachable and alive for health purposes. It will try to ping the replica +// twice if the first ping fails, with a short delay between attempts. 
+func pingReplica(ctx context.Context, client http.Client, replica codersdk.Replica) error { + const attempts = 2 + var err error + for i := 0; i < attempts; i++ { + err = replicasync.PingPeerReplica(ctx, client, replica.RelayAddress) + if err == nil { + return nil + } + if i < attempts-1 { + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(1 * time.Second): + } + } + } + return err +} + func (s *Server) handleRegisterFailure(err error) { if s.ctx.Err() != nil { return diff --git a/enterprise/wsproxy/wsproxy_test.go b/enterprise/wsproxy/wsproxy_test.go index 5d7eaada7f990..c876db113ea60 100644 --- a/enterprise/wsproxy/wsproxy_test.go +++ b/enterprise/wsproxy/wsproxy_test.go @@ -9,6 +9,7 @@ import ( "net/http" "net/http/httptest" "net/url" + "sync" "testing" "time" @@ -35,6 +36,7 @@ import ( "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/workspacesdk" "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/enterprise/coderd" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" "github.com/coder/coder/v2/enterprise/coderd/license" "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" @@ -464,6 +466,7 @@ func TestDERPMesh(t *testing.T) { "*", } + ctx := testutil.Context(t, testutil.WaitLong) client, closer, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -494,35 +497,21 @@ func TestDERPMesh(t *testing.T) { require.NoError(t, err) // Create 3 proxy replicas. 
- const count = 3 - var ( - sessionToken = "" - proxies = [count]coderdenttest.WorkspaceProxy{} - derpURLs = [count]string{} - ) - for i := range proxies { - proxies[i] = coderdenttest.NewWorkspaceProxyReplica(t, api, client, &coderdenttest.ProxyOptions{ - Name: "best-proxy", - Token: sessionToken, - ProxyURL: proxyURL, - }) - if i == 0 { - sessionToken = proxies[i].Options.ProxySessionToken - } - - derpURL := *proxies[i].ServerURL + proxies := createProxyReplicas(ctx, t, &createProxyReplicasOptions{ + API: api, + Client: client, + Name: "best-proxy", + ProxyURL: proxyURL, + ProxyToken: "", // will be generated automatically + Count: 3, + }) + derpURLs := make([]string, len(proxies)) + for i, proxy := range proxies { + derpURL := *proxy.ServerURL derpURL.Path = "/derp" derpURLs[i] = derpURL.String() } - // Force all proxies to re-register immediately. This ensures the DERP mesh - // is up-to-date. In production this will happen automatically after about - // 15 seconds. - for i, proxy := range proxies { - err := proxy.RegisterNow() - require.NoErrorf(t, err, "failed to force proxy %d to re-register", i) - } - // Generate cases. We have a case for: // - Each proxy to itself. // - Each proxy to each other proxy (one way, no duplicates). @@ -533,7 +522,7 @@ func TestDERPMesh(t *testing.T) { cases = append(cases, [2]string{derpURL, derpURLs[j]}) } } - require.Len(t, cases, (count*(count+1))/2) // triangle number + require.Len(t, cases, (len(proxies)*(len(proxies)+1))/2) // triangle number for i, c := range cases { i, c := i, c @@ -598,7 +587,6 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { } t.Run("ProbeOK", func(t *testing.T) { - t.Skip("flaky test: https://github.com/coder/internal/issues/957") t.Parallel() deploymentValues := coderdtest.DeploymentValues(t) @@ -642,59 +630,23 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { require.NoError(t, err) // Create 6 proxy replicas. 
- const count = 6 - var ( - sessionToken = "" - proxies = [count]coderdenttest.WorkspaceProxy{} - replicaPingDone = [count]bool{} - ) - for i := range proxies { - proxies[i] = coderdenttest.NewWorkspaceProxyReplica(t, api, client, &coderdenttest.ProxyOptions{ - Name: "proxy-1", - Token: sessionToken, - ProxyURL: proxyURL, - ReplicaPingCallback: func(replicas []codersdk.Replica, err string) { - if len(replicas) != count-1 { - // Still warming up... - return - } - replicaPingDone[i] = true - assert.Emptyf(t, err, "replica %d ping callback error", i) - }, - }) - if i == 0 { - sessionToken = proxies[i].Options.ProxySessionToken - } - } - - // Force all proxies to re-register immediately. This ensures the DERP - // mesh is up-to-date. In production this will happen automatically - // after about 15 seconds. - for i, proxy := range proxies { - err := proxy.RegisterNow() - require.NoErrorf(t, err, "failed to force proxy %d to re-register", i) - } - - // Ensure that all proxies have pinged. - require.Eventually(t, func() bool { - ok := true - for i := range proxies { - if !replicaPingDone[i] { - t.Logf("replica %d has not pinged yet", i) - ok = false - } - } - return ok - }, testutil.WaitLong, testutil.IntervalSlow) - t.Log("all replicas have pinged") + proxies := createProxyReplicas(ctx, t, &createProxyReplicasOptions{ + API: api, + Client: client, + Name: "proxy-1", + ProxyURL: proxyURL, + ProxyToken: "", // will be generated automatically + Count: 6, + }) // Check they're all healthy according to /healthz-report. 
+ httpClient := &http.Client{} for _, proxy := range proxies { // GET /healthz-report u := proxy.ServerURL.ResolveReference(&url.URL{Path: "/healthz-report"}) req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + resp, err := httpClient.Do(req) require.NoError(t, err) var respJSON codersdk.ProxyHealthReport @@ -770,7 +722,7 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { } // Force the proxy to re-register immediately. - err = proxy.RegisterNow() + err = proxy.RegisterNow(ctx) require.NoError(t, err, "failed to force proxy to re-register") // Wait for the ping to fail. @@ -781,7 +733,8 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { u := proxy.ServerURL.ResolveReference(&url.URL{Path: "/healthz-report"}) req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + httpClient := &http.Client{} + resp, err := httpClient.Do(req) require.NoError(t, err) var respJSON codersdk.ProxyHealthReport @@ -853,7 +806,7 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { // Force the proxy to re-register and wait for the ping to fail. 
for { - err = proxy.RegisterNow() + err = proxy.RegisterNow(ctx) require.NoError(t, err, "failed to force proxy to re-register") pingRes := testutil.TryReceive(ctx, t, replicaPingRes) @@ -869,7 +822,8 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { u := proxy.ServerURL.ResolveReference(&url.URL{Path: "/healthz-report"}) req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + httpClient := &http.Client{} + resp, err := httpClient.Do(req) require.NoError(t, err) var respJSON codersdk.ProxyHealthReport err = json.NewDecoder(resp.Body).Decode(&respJSON) @@ -888,7 +842,7 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { // Force the proxy to re-register and wait for the ping to be skipped // because there are no more siblings. for { - err = proxy.RegisterNow() + err = proxy.RegisterNow(ctx) require.NoError(t, err, "failed to force proxy to re-register") replicaErr := testutil.TryReceive(ctx, t, replicaPingRes) @@ -903,7 +857,7 @@ func TestWorkspaceProxyDERPMeshProbe(t *testing.T) { // GET /healthz-report req, err = http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) - resp, err = http.DefaultClient.Do(req) + resp, err = httpClient.Do(req) require.NoError(t, err) err = json.NewDecoder(resp.Body).Decode(&respJSON) resp.Body.Close() @@ -1167,3 +1121,106 @@ func testDERPSend(t *testing.T, ctx context.Context, dstKey key.NodePublic, dstC require.NoError(t, err, "send message via DERP") } } + +type createProxyReplicasOptions struct { + API *coderd.API + Client *codersdk.Client + + Name string + ProxyURL *url.URL + // If ProxyToken is not provided, a new workspace proxy region will be + // created automatically using the API client. + ProxyToken string + Count int +} + +// createProxyReplicas creates and runs a set of proxy replicas and ensures that +// they are all functioning correctly and aware of each other with no errors. 
+func createProxyReplicas(ctx context.Context, t *testing.T, opts *createProxyReplicasOptions) []coderdenttest.WorkspaceProxy { + t.Helper() + + var ( + proxies = make([]coderdenttest.WorkspaceProxy, opts.Count) + // replicaPingSuccessful tracks whether the replica ping callback + // was called with no errors for each replica. + replicaPingMutex sync.Mutex + replicaPingSuccessful = make([]bool, opts.Count) + ) + for i := range proxies { + proxies[i] = coderdenttest.NewWorkspaceProxyReplica(t, opts.API, opts.Client, &coderdenttest.ProxyOptions{ + Name: opts.Name, + Token: opts.ProxyToken, + ProxyURL: opts.ProxyURL, + ReplicaPingCallback: func(siblings []codersdk.Replica, err string) { + t.Logf("got wsproxy ping callback: i=%d, siblings=%v, err=%s", i, len(siblings), err) + + replicaPingMutex.Lock() + defer replicaPingMutex.Unlock() + // The replica only "successfully" pinged if it has the + // correct number of siblings and no error. + replicaPingSuccessful[i] = len(siblings) == opts.Count-1 && err == "" + }, + }) + if i == 0 { + // The first proxy will have a new token if we just created a new + // proxy region. + opts.ProxyToken = proxies[i].Options.ProxySessionToken + } + } + + // Force all proxies to re-register immediately. This ensures the DERP + // mesh is up-to-date. In production this will happen automatically + // after about 15 seconds. + for i, proxy := range proxies { + err := proxy.RegisterNow(ctx) + require.NoErrorf(t, err, "failed to force proxy %d to re-register", i) + } + + // Ensure that all proxies have pinged successfully. If replicas haven't + // successfully pinged yet, force them to re-register again. We don't + // use require.Eventually here because it runs the condition function in + // a goroutine. 
+ ticker := time.NewTicker(testutil.IntervalSlow) + defer ticker.Stop() + for { + var ( + ok = true + wg sync.WaitGroup + ) + + // Copy the replicaPingSuccessful slice to a local variable so we can + // view the state of all proxies at the same point in time. + replicaPingSuccessfulCopy := make([]bool, len(replicaPingSuccessful)) + replicaPingMutex.Lock() + copy(replicaPingSuccessfulCopy, replicaPingSuccessful) + replicaPingMutex.Unlock() + + for i, proxy := range proxies { + success := replicaPingSuccessfulCopy[i] + if !success { + t.Logf("replica %d has not successfully pinged yet", i) + ok = false + + // Retry registration on this proxy. + wg.Add(1) + go func() { + defer wg.Done() + err := proxy.RegisterNow(ctx) + t.Logf("replica %d re-registered: err=%v", i, err) + }() + } + } + wg.Wait() + if ok { + break + } + select { + case <-ctx.Done(): + t.Fatal("proxies did not ping successfully in time:", ctx.Err()) + case <-ticker.C: + } + } + t.Log("all replicas have pinged successfully") + + return proxies +} diff --git a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go index d768a919f4624..443baa815942b 100644 --- a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go +++ b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go @@ -404,15 +404,19 @@ func (l *RegisterWorkspaceProxyLoop) Start(ctx context.Context) (RegisterWorkspa // RegisterNow asks the registration loop to register immediately. A timeout of // 2x the attempt timeout is used to wait for the response. -func (l *RegisterWorkspaceProxyLoop) RegisterNow() (RegisterWorkspaceProxyResponse, error) { +func (l *RegisterWorkspaceProxyLoop) RegisterNow(ctx context.Context) (RegisterWorkspaceProxyResponse, error) { // The channel is closed by the loop after sending the response. 
respCh := make(chan RegisterWorkspaceProxyResponse, 1) select { + case <-ctx.Done(): + return RegisterWorkspaceProxyResponse{}, ctx.Err() case <-l.done: return RegisterWorkspaceProxyResponse{}, xerrors.New("proxy registration loop closed") case l.runLoopNow <- respCh: } select { + case <-ctx.Done(): + return RegisterWorkspaceProxyResponse{}, ctx.Err() case <-l.done: return RegisterWorkspaceProxyResponse{}, xerrors.New("proxy registration loop closed") case resp := <-respCh: diff --git a/enterprise/x/aibridged/proto/aibridged.pb.go b/enterprise/x/aibridged/proto/aibridged.pb.go deleted file mode 100644 index e88d0d1a9aee9..0000000000000 --- a/enterprise/x/aibridged/proto/aibridged.pb.go +++ /dev/null @@ -1,1423 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.30.0 -// protoc v4.23.4 -// source: enterprise/x/aibridged/proto/aibridged.proto - -package proto - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - anypb "google.golang.org/protobuf/types/known/anypb" - timestamppb "google.golang.org/protobuf/types/known/timestamppb" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type RecordInterceptionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // UUID. - InitiatorId string `protobuf:"bytes,2,opt,name=initiator_id,json=initiatorId,proto3" json:"initiator_id,omitempty"` // UUID. 
- Provider string `protobuf:"bytes,3,opt,name=provider,proto3" json:"provider,omitempty"` - Model string `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` - Metadata map[string]*anypb.Any `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - StartedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=started_at,json=startedAt,proto3" json:"started_at,omitempty"` -} - -func (x *RecordInterceptionRequest) Reset() { - *x = RecordInterceptionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordInterceptionRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordInterceptionRequest) ProtoMessage() {} - -func (x *RecordInterceptionRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordInterceptionRequest.ProtoReflect.Descriptor instead. 
-func (*RecordInterceptionRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{0} -} - -func (x *RecordInterceptionRequest) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *RecordInterceptionRequest) GetInitiatorId() string { - if x != nil { - return x.InitiatorId - } - return "" -} - -func (x *RecordInterceptionRequest) GetProvider() string { - if x != nil { - return x.Provider - } - return "" -} - -func (x *RecordInterceptionRequest) GetModel() string { - if x != nil { - return x.Model - } - return "" -} - -func (x *RecordInterceptionRequest) GetMetadata() map[string]*anypb.Any { - if x != nil { - return x.Metadata - } - return nil -} - -func (x *RecordInterceptionRequest) GetStartedAt() *timestamppb.Timestamp { - if x != nil { - return x.StartedAt - } - return nil -} - -type RecordInterceptionResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *RecordInterceptionResponse) Reset() { - *x = RecordInterceptionResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordInterceptionResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordInterceptionResponse) ProtoMessage() {} - -func (x *RecordInterceptionResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordInterceptionResponse.ProtoReflect.Descriptor instead. 
-func (*RecordInterceptionResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{1} -} - -type RecordTokenUsageRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. - MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. - InputTokens int64 `protobuf:"varint,3,opt,name=input_tokens,json=inputTokens,proto3" json:"input_tokens,omitempty"` - OutputTokens int64 `protobuf:"varint,4,opt,name=output_tokens,json=outputTokens,proto3" json:"output_tokens,omitempty"` - Metadata map[string]*anypb.Any `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - CreatedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` -} - -func (x *RecordTokenUsageRequest) Reset() { - *x = RecordTokenUsageRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordTokenUsageRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordTokenUsageRequest) ProtoMessage() {} - -func (x *RecordTokenUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordTokenUsageRequest.ProtoReflect.Descriptor instead. 
-func (*RecordTokenUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{2} -} - -func (x *RecordTokenUsageRequest) GetInterceptionId() string { - if x != nil { - return x.InterceptionId - } - return "" -} - -func (x *RecordTokenUsageRequest) GetMsgId() string { - if x != nil { - return x.MsgId - } - return "" -} - -func (x *RecordTokenUsageRequest) GetInputTokens() int64 { - if x != nil { - return x.InputTokens - } - return 0 -} - -func (x *RecordTokenUsageRequest) GetOutputTokens() int64 { - if x != nil { - return x.OutputTokens - } - return 0 -} - -func (x *RecordTokenUsageRequest) GetMetadata() map[string]*anypb.Any { - if x != nil { - return x.Metadata - } - return nil -} - -func (x *RecordTokenUsageRequest) GetCreatedAt() *timestamppb.Timestamp { - if x != nil { - return x.CreatedAt - } - return nil -} - -type RecordTokenUsageResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *RecordTokenUsageResponse) Reset() { - *x = RecordTokenUsageResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordTokenUsageResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordTokenUsageResponse) ProtoMessage() {} - -func (x *RecordTokenUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordTokenUsageResponse.ProtoReflect.Descriptor instead. 
-func (*RecordTokenUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{3} -} - -type RecordPromptUsageRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. - MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. - Prompt string `protobuf:"bytes,3,opt,name=prompt,proto3" json:"prompt,omitempty"` - Metadata map[string]*anypb.Any `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - CreatedAt *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` -} - -func (x *RecordPromptUsageRequest) Reset() { - *x = RecordPromptUsageRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordPromptUsageRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordPromptUsageRequest) ProtoMessage() {} - -func (x *RecordPromptUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordPromptUsageRequest.ProtoReflect.Descriptor instead. 
-func (*RecordPromptUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{4} -} - -func (x *RecordPromptUsageRequest) GetInterceptionId() string { - if x != nil { - return x.InterceptionId - } - return "" -} - -func (x *RecordPromptUsageRequest) GetMsgId() string { - if x != nil { - return x.MsgId - } - return "" -} - -func (x *RecordPromptUsageRequest) GetPrompt() string { - if x != nil { - return x.Prompt - } - return "" -} - -func (x *RecordPromptUsageRequest) GetMetadata() map[string]*anypb.Any { - if x != nil { - return x.Metadata - } - return nil -} - -func (x *RecordPromptUsageRequest) GetCreatedAt() *timestamppb.Timestamp { - if x != nil { - return x.CreatedAt - } - return nil -} - -type RecordPromptUsageResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *RecordPromptUsageResponse) Reset() { - *x = RecordPromptUsageResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordPromptUsageResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordPromptUsageResponse) ProtoMessage() {} - -func (x *RecordPromptUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordPromptUsageResponse.ProtoReflect.Descriptor instead. 
-func (*RecordPromptUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{5} -} - -type RecordToolUsageRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - InterceptionId string `protobuf:"bytes,1,opt,name=interception_id,json=interceptionId,proto3" json:"interception_id,omitempty"` // UUID. - MsgId string `protobuf:"bytes,2,opt,name=msg_id,json=msgId,proto3" json:"msg_id,omitempty"` // ID provided by provider. - ServerUrl *string `protobuf:"bytes,3,opt,name=server_url,json=serverUrl,proto3,oneof" json:"server_url,omitempty"` // The URL of the MCP server. - Tool string `protobuf:"bytes,4,opt,name=tool,proto3" json:"tool,omitempty"` - Input string `protobuf:"bytes,5,opt,name=input,proto3" json:"input,omitempty"` - Injected bool `protobuf:"varint,6,opt,name=injected,proto3" json:"injected,omitempty"` - InvocationError *string `protobuf:"bytes,7,opt,name=invocation_error,json=invocationError,proto3,oneof" json:"invocation_error,omitempty"` // Only injected tools are invoked. 
- Metadata map[string]*anypb.Any `protobuf:"bytes,8,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - CreatedAt *timestamppb.Timestamp `protobuf:"bytes,9,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` -} - -func (x *RecordToolUsageRequest) Reset() { - *x = RecordToolUsageRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordToolUsageRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordToolUsageRequest) ProtoMessage() {} - -func (x *RecordToolUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordToolUsageRequest.ProtoReflect.Descriptor instead. 
-func (*RecordToolUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{6} -} - -func (x *RecordToolUsageRequest) GetInterceptionId() string { - if x != nil { - return x.InterceptionId - } - return "" -} - -func (x *RecordToolUsageRequest) GetMsgId() string { - if x != nil { - return x.MsgId - } - return "" -} - -func (x *RecordToolUsageRequest) GetServerUrl() string { - if x != nil && x.ServerUrl != nil { - return *x.ServerUrl - } - return "" -} - -func (x *RecordToolUsageRequest) GetTool() string { - if x != nil { - return x.Tool - } - return "" -} - -func (x *RecordToolUsageRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *RecordToolUsageRequest) GetInjected() bool { - if x != nil { - return x.Injected - } - return false -} - -func (x *RecordToolUsageRequest) GetInvocationError() string { - if x != nil && x.InvocationError != nil { - return *x.InvocationError - } - return "" -} - -func (x *RecordToolUsageRequest) GetMetadata() map[string]*anypb.Any { - if x != nil { - return x.Metadata - } - return nil -} - -func (x *RecordToolUsageRequest) GetCreatedAt() *timestamppb.Timestamp { - if x != nil { - return x.CreatedAt - } - return nil -} - -type RecordToolUsageResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *RecordToolUsageResponse) Reset() { - *x = RecordToolUsageResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RecordToolUsageResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RecordToolUsageResponse) ProtoMessage() {} - -func (x *RecordToolUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7] - if 
protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RecordToolUsageResponse.ProtoReflect.Descriptor instead. -func (*RecordToolUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{7} -} - -type GetMCPServerConfigsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` // UUID. // Not used yet, will be necessary for later RBAC purposes. -} - -func (x *GetMCPServerConfigsRequest) Reset() { - *x = GetMCPServerConfigsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetMCPServerConfigsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetMCPServerConfigsRequest) ProtoMessage() {} - -func (x *GetMCPServerConfigsRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetMCPServerConfigsRequest.ProtoReflect.Descriptor instead. 
-func (*GetMCPServerConfigsRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{8} -} - -func (x *GetMCPServerConfigsRequest) GetUserId() string { - if x != nil { - return x.UserId - } - return "" -} - -type GetMCPServerConfigsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CoderMcpConfig *MCPServerConfig `protobuf:"bytes,1,opt,name=coder_mcp_config,json=coderMcpConfig,proto3" json:"coder_mcp_config,omitempty"` - ExternalAuthMcpConfigs []*MCPServerConfig `protobuf:"bytes,2,rep,name=external_auth_mcp_configs,json=externalAuthMcpConfigs,proto3" json:"external_auth_mcp_configs,omitempty"` -} - -func (x *GetMCPServerConfigsResponse) Reset() { - *x = GetMCPServerConfigsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetMCPServerConfigsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetMCPServerConfigsResponse) ProtoMessage() {} - -func (x *GetMCPServerConfigsResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetMCPServerConfigsResponse.ProtoReflect.Descriptor instead. 
-func (*GetMCPServerConfigsResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{9} -} - -func (x *GetMCPServerConfigsResponse) GetCoderMcpConfig() *MCPServerConfig { - if x != nil { - return x.CoderMcpConfig - } - return nil -} - -func (x *GetMCPServerConfigsResponse) GetExternalAuthMcpConfigs() []*MCPServerConfig { - if x != nil { - return x.ExternalAuthMcpConfigs - } - return nil -} - -type MCPServerConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Maps to the ID of the External Auth; this ID is unique. - Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` - ToolAllowRegex string `protobuf:"bytes,3,opt,name=tool_allow_regex,json=toolAllowRegex,proto3" json:"tool_allow_regex,omitempty"` - ToolDenyRegex string `protobuf:"bytes,4,opt,name=tool_deny_regex,json=toolDenyRegex,proto3" json:"tool_deny_regex,omitempty"` -} - -func (x *MCPServerConfig) Reset() { - *x = MCPServerConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *MCPServerConfig) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*MCPServerConfig) ProtoMessage() {} - -func (x *MCPServerConfig) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use MCPServerConfig.ProtoReflect.Descriptor instead. 
-func (*MCPServerConfig) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{10} -} - -func (x *MCPServerConfig) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *MCPServerConfig) GetUrl() string { - if x != nil { - return x.Url - } - return "" -} - -func (x *MCPServerConfig) GetToolAllowRegex() string { - if x != nil { - return x.ToolAllowRegex - } - return "" -} - -func (x *MCPServerConfig) GetToolDenyRegex() string { - if x != nil { - return x.ToolDenyRegex - } - return "" -} - -type GetMCPServerAccessTokensBatchRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` // UUID. - McpServerConfigIds []string `protobuf:"bytes,2,rep,name=mcp_server_config_ids,json=mcpServerConfigIds,proto3" json:"mcp_server_config_ids,omitempty"` -} - -func (x *GetMCPServerAccessTokensBatchRequest) Reset() { - *x = GetMCPServerAccessTokensBatchRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetMCPServerAccessTokensBatchRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetMCPServerAccessTokensBatchRequest) ProtoMessage() {} - -func (x *GetMCPServerAccessTokensBatchRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetMCPServerAccessTokensBatchRequest.ProtoReflect.Descriptor instead. 
-func (*GetMCPServerAccessTokensBatchRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{11} -} - -func (x *GetMCPServerAccessTokensBatchRequest) GetUserId() string { - if x != nil { - return x.UserId - } - return "" -} - -func (x *GetMCPServerAccessTokensBatchRequest) GetMcpServerConfigIds() []string { - if x != nil { - return x.McpServerConfigIds - } - return nil -} - -// GetMCPServerAccessTokensBatchResponse returns a map for resulting tokens or errors, indexed -// by server ID. -type GetMCPServerAccessTokensBatchResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - AccessTokens map[string]string `protobuf:"bytes,1,rep,name=access_tokens,json=accessTokens,proto3" json:"access_tokens,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Errors map[string]string `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *GetMCPServerAccessTokensBatchResponse) Reset() { - *x = GetMCPServerAccessTokensBatchResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetMCPServerAccessTokensBatchResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetMCPServerAccessTokensBatchResponse) ProtoMessage() {} - -func (x *GetMCPServerAccessTokensBatchResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use 
GetMCPServerAccessTokensBatchResponse.ProtoReflect.Descriptor instead. -func (*GetMCPServerAccessTokensBatchResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{12} -} - -func (x *GetMCPServerAccessTokensBatchResponse) GetAccessTokens() map[string]string { - if x != nil { - return x.AccessTokens - } - return nil -} - -func (x *GetMCPServerAccessTokensBatchResponse) GetErrors() map[string]string { - if x != nil { - return x.Errors - } - return nil -} - -type IsAuthorizedRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` -} - -func (x *IsAuthorizedRequest) Reset() { - *x = IsAuthorizedRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *IsAuthorizedRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*IsAuthorizedRequest) ProtoMessage() {} - -func (x *IsAuthorizedRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use IsAuthorizedRequest.ProtoReflect.Descriptor instead. 
-func (*IsAuthorizedRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{13} -} - -func (x *IsAuthorizedRequest) GetKey() string { - if x != nil { - return x.Key - } - return "" -} - -type IsAuthorizedResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - OwnerId string `protobuf:"bytes,1,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` -} - -func (x *IsAuthorizedResponse) Reset() { - *x = IsAuthorizedResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *IsAuthorizedResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*IsAuthorizedResponse) ProtoMessage() {} - -func (x *IsAuthorizedResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use IsAuthorizedResponse.ProtoReflect.Descriptor instead. 
-func (*IsAuthorizedResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{14} -} - -func (x *IsAuthorizedResponse) GetOwnerId() string { - if x != nil { - return x.OwnerId - } - return "" -} - -var File_enterprise_x_aibridged_proto_aibridged_proto protoreflect.FileDescriptor - -var file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc = []byte{ - 0x0a, 0x2c, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x69, 0x73, 0x65, 0x2f, 0x78, 0x2f, 0x61, - 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, - 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x22, 0xda, 0x02, 0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, - 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, - 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x14, - 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, - 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x4a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x05, 
0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1c, - 0x0a, 0x1a, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf9, 0x02, 0x0a, - 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, - 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 
0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x03, 0x52, 0x0c, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x12, 0x48, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xcb, 0x02, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 
0x64, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, - 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, - 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x12, 0x49, 0x0a, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, - 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x22, 0x1b, 
0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0xed, 0x03, 0x0a, 0x16, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, - 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x09, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x12, - 0x0a, 0x04, 0x74, 0x6f, 0x6f, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x6f, - 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x6a, 0x65, - 0x63, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x6a, 0x65, - 0x63, 0x74, 0x65, 0x64, 0x12, 0x2e, 0x0a, 0x10, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, - 0x52, 0x0f, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, - 0x72, 0x88, 0x01, 0x01, 0x12, 0x47, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 
0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, - 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, - 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x69, - 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, - 0x19, 0x0a, 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, - 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35, 0x0a, 0x1a, 0x47, 0x65, - 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, - 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, - 0x64, 0x22, 0xb2, 0x01, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x40, 0x0a, 0x10, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, - 
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x52, 0x0e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x51, 0x0a, 0x19, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, - 0x61, 0x75, 0x74, 0x68, 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, - 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x16, - 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x4d, 0x63, 0x70, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x22, 0x85, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x53, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, - 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x28, 0x0a, 0x10, - 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x74, 0x6f, 0x6f, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, - 0x77, 0x52, 0x65, 0x67, 0x65, 0x78, 0x12, 0x26, 0x0a, 0x0f, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, - 0x65, 0x6e, 0x79, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0d, 0x74, 0x6f, 0x6f, 0x6c, 0x44, 0x65, 0x6e, 0x79, 0x52, 0x65, 0x67, 0x65, 0x78, 0x22, 0x72, - 0x0a, 0x24, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, - 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, - 0x31, 0x0a, 0x15, 0x6d, 0x63, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, - 0x6d, 0x63, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x49, - 0x64, 0x73, 0x22, 0xda, 0x02, 0x0a, 0x25, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, - 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x0d, - 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, - 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, - 0x73, 0x12, 0x50, 0x0a, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, - 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x73, 0x1a, 0x3f, 0x0a, 0x11, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 
0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x39, 0x0a, 0x0b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x27, 0x0a, 0x13, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x22, 0x31, 0x0a, 0x14, 0x49, 0x73, 0x41, 0x75, - 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x32, 0xe4, 0x02, 0x0a, 0x08, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x59, 0x0a, 0x12, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, - 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, - 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x10, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x52, 0x65, 
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x56, 0x0a, 0x11, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1f, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x50, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, - 0x61, 0x67, 0x65, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x32, 0xeb, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x75, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x21, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7a, 0x0a, 0x1d, 
0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, - 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, - 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x32, 0x55, 0x0a, 0x0a, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x72, 0x12, 0x47, - 0x0a, 0x0c, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x12, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, - 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, - 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescOnce sync.Once - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData = file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc -) - -func file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP() []byte { - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescOnce.Do(func() { - 
file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData = protoimpl.X.CompressGZIP(file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData) - }) - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData -} - -var file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes = make([]protoimpl.MessageInfo, 21) -var file_enterprise_x_aibridged_proto_aibridged_proto_goTypes = []interface{}{ - (*RecordInterceptionRequest)(nil), // 0: proto.RecordInterceptionRequest - (*RecordInterceptionResponse)(nil), // 1: proto.RecordInterceptionResponse - (*RecordTokenUsageRequest)(nil), // 2: proto.RecordTokenUsageRequest - (*RecordTokenUsageResponse)(nil), // 3: proto.RecordTokenUsageResponse - (*RecordPromptUsageRequest)(nil), // 4: proto.RecordPromptUsageRequest - (*RecordPromptUsageResponse)(nil), // 5: proto.RecordPromptUsageResponse - (*RecordToolUsageRequest)(nil), // 6: proto.RecordToolUsageRequest - (*RecordToolUsageResponse)(nil), // 7: proto.RecordToolUsageResponse - (*GetMCPServerConfigsRequest)(nil), // 8: proto.GetMCPServerConfigsRequest - (*GetMCPServerConfigsResponse)(nil), // 9: proto.GetMCPServerConfigsResponse - (*MCPServerConfig)(nil), // 10: proto.MCPServerConfig - (*GetMCPServerAccessTokensBatchRequest)(nil), // 11: proto.GetMCPServerAccessTokensBatchRequest - (*GetMCPServerAccessTokensBatchResponse)(nil), // 12: proto.GetMCPServerAccessTokensBatchResponse - (*IsAuthorizedRequest)(nil), // 13: proto.IsAuthorizedRequest - (*IsAuthorizedResponse)(nil), // 14: proto.IsAuthorizedResponse - nil, // 15: proto.RecordInterceptionRequest.MetadataEntry - nil, // 16: proto.RecordTokenUsageRequest.MetadataEntry - nil, // 17: proto.RecordPromptUsageRequest.MetadataEntry - nil, // 18: proto.RecordToolUsageRequest.MetadataEntry - nil, // 19: proto.GetMCPServerAccessTokensBatchResponse.AccessTokensEntry - nil, // 20: proto.GetMCPServerAccessTokensBatchResponse.ErrorsEntry - (*timestamppb.Timestamp)(nil), // 21: google.protobuf.Timestamp - 
(*anypb.Any)(nil), // 22: google.protobuf.Any -} -var file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs = []int32{ - 15, // 0: proto.RecordInterceptionRequest.metadata:type_name -> proto.RecordInterceptionRequest.MetadataEntry - 21, // 1: proto.RecordInterceptionRequest.started_at:type_name -> google.protobuf.Timestamp - 16, // 2: proto.RecordTokenUsageRequest.metadata:type_name -> proto.RecordTokenUsageRequest.MetadataEntry - 21, // 3: proto.RecordTokenUsageRequest.created_at:type_name -> google.protobuf.Timestamp - 17, // 4: proto.RecordPromptUsageRequest.metadata:type_name -> proto.RecordPromptUsageRequest.MetadataEntry - 21, // 5: proto.RecordPromptUsageRequest.created_at:type_name -> google.protobuf.Timestamp - 18, // 6: proto.RecordToolUsageRequest.metadata:type_name -> proto.RecordToolUsageRequest.MetadataEntry - 21, // 7: proto.RecordToolUsageRequest.created_at:type_name -> google.protobuf.Timestamp - 10, // 8: proto.GetMCPServerConfigsResponse.coder_mcp_config:type_name -> proto.MCPServerConfig - 10, // 9: proto.GetMCPServerConfigsResponse.external_auth_mcp_configs:type_name -> proto.MCPServerConfig - 19, // 10: proto.GetMCPServerAccessTokensBatchResponse.access_tokens:type_name -> proto.GetMCPServerAccessTokensBatchResponse.AccessTokensEntry - 20, // 11: proto.GetMCPServerAccessTokensBatchResponse.errors:type_name -> proto.GetMCPServerAccessTokensBatchResponse.ErrorsEntry - 22, // 12: proto.RecordInterceptionRequest.MetadataEntry.value:type_name -> google.protobuf.Any - 22, // 13: proto.RecordTokenUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any - 22, // 14: proto.RecordPromptUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any - 22, // 15: proto.RecordToolUsageRequest.MetadataEntry.value:type_name -> google.protobuf.Any - 0, // 16: proto.Recorder.RecordInterception:input_type -> proto.RecordInterceptionRequest - 2, // 17: proto.Recorder.RecordTokenUsage:input_type -> proto.RecordTokenUsageRequest - 4, // 18: 
proto.Recorder.RecordPromptUsage:input_type -> proto.RecordPromptUsageRequest - 6, // 19: proto.Recorder.RecordToolUsage:input_type -> proto.RecordToolUsageRequest - 8, // 20: proto.MCPConfigurator.GetMCPServerConfigs:input_type -> proto.GetMCPServerConfigsRequest - 11, // 21: proto.MCPConfigurator.GetMCPServerAccessTokensBatch:input_type -> proto.GetMCPServerAccessTokensBatchRequest - 13, // 22: proto.Authorizer.IsAuthorized:input_type -> proto.IsAuthorizedRequest - 1, // 23: proto.Recorder.RecordInterception:output_type -> proto.RecordInterceptionResponse - 3, // 24: proto.Recorder.RecordTokenUsage:output_type -> proto.RecordTokenUsageResponse - 5, // 25: proto.Recorder.RecordPromptUsage:output_type -> proto.RecordPromptUsageResponse - 7, // 26: proto.Recorder.RecordToolUsage:output_type -> proto.RecordToolUsageResponse - 9, // 27: proto.MCPConfigurator.GetMCPServerConfigs:output_type -> proto.GetMCPServerConfigsResponse - 12, // 28: proto.MCPConfigurator.GetMCPServerAccessTokensBatch:output_type -> proto.GetMCPServerAccessTokensBatchResponse - 14, // 29: proto.Authorizer.IsAuthorized:output_type -> proto.IsAuthorizedResponse - 23, // [23:30] is the sub-list for method output_type - 16, // [16:23] is the sub-list for method input_type - 16, // [16:16] is the sub-list for extension type_name - 16, // [16:16] is the sub-list for extension extendee - 0, // [0:16] is the sub-list for field type_name -} - -func init() { file_enterprise_x_aibridged_proto_aibridged_proto_init() } -func file_enterprise_x_aibridged_proto_aibridged_proto_init() { - if File_enterprise_x_aibridged_proto_aibridged_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordInterceptionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordInterceptionResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordTokenUsageRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordTokenUsageResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordPromptUsageRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordPromptUsageResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordToolUsageRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordToolUsageResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return 
&v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetMCPServerConfigsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetMCPServerConfigsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MCPServerConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetMCPServerAccessTokensBatchRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetMCPServerAccessTokensBatchResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IsAuthorizedRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IsAuthorizedResponse); i { - case 0: 
- return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6].OneofWrappers = []interface{}{} - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc, - NumEnums: 0, - NumMessages: 21, - NumExtensions: 0, - NumServices: 3, - }, - GoTypes: file_enterprise_x_aibridged_proto_aibridged_proto_goTypes, - DependencyIndexes: file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs, - MessageInfos: file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes, - }.Build() - File_enterprise_x_aibridged_proto_aibridged_proto = out.File - file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc = nil - file_enterprise_x_aibridged_proto_aibridged_proto_goTypes = nil - file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs = nil -} diff --git a/examples/examples.gen.json b/examples/examples.gen.json index c891389568a55..432e6d3f51ea6 100644 --- a/examples/examples.gen.json +++ b/examples/examples.gen.json @@ -205,5 +205,19 @@ "icon": "/emojis/1f4e6.png", "tags": [], "markdown": "\n# A minimal Scaffolding for a Coder Template\n\nUse this starter template as a basis to create your own unique template from scratch.\n" + }, + { + "id": "tasks-docker", + "url": "", + "name": "Tasks on Docker", + "description": "Run Coder Tasks on Docker with an example application", + "icon": "/icon/tasks.svg", + "tags": [ + "docker", + "container", + "ai", + "tasks" + ], + "markdown": "\n# Run Coder Tasks on Docker\n\nThis is an example template for running [Coder Tasks](https://coder.com/docs/ai-coder/tasks), Claude Code, along with a [real world application](https://realworld-docs.netlify.app/).\n\n![Tasks](../../.images/tasks-screenshot.png)\n\nThis is a fantastic starting point for working with AI agents with Coder Tasks. 
Try prompts such as:\n\n- \"Make the background color blue\"\n- \"Add a dark mode\"\n- \"Rewrite the entire backend in Go\"\n\n## Included in this template\n\nThis template is designed to be an example and a reference for building other templates with Coder Tasks. You can always run Coder Tasks on different infrastructure (e.g. as on Kubernetes, VMs) and with your own GitHub repositories, MCP servers, images, etc.\n\nAdditionally, this template uses our [Claude Code](https://registry.coder.com/modules/coder/claude-code) module, but [other agents](https://registry.coder.com/modules?search=tag%3Aagent) or even [custom agents](https://coder.com/docs/ai-coder/custom-agents) can be used in its place.\n\nThis template uses a [Workspace Preset](https://coder.com/docs/admin/templates/extending-templates/parameters#workspace-presets) that pre-defines:\n\n- Universal Container Image (e.g. contains Node.js, Java, Python, Ruby, etc)\n- MCP servers (desktop-commander for long-running logs, playwright for previewing changes)\n- System prompt and [repository](https://github.com/coder-contrib/realworld-django-rest-framework-angular) for the AI agent\n- Startup script to initialize the repository and start the development server\n\n## Add this template to your Coder deployment\n\nYou can also add this template to your Coder deployment and begin tinkering right away!\n\n### Prerequisites\n\n- Coder installed (see [our docs](https://coder.com/docs/install)), ideally a Linux VM with Docker\n- Anthropic API Key (or access to Anthropic models via Bedrock or Vertex, see [Claude Code docs](https://docs.anthropic.com/en/docs/claude-code/third-party-integrations))\n- Access to a Docker socket\n - If on the local VM, ensure the `coder` user is added to the Docker group (docs)\n\n ```sh\n # Add coder user to Docker group\n sudo adduser coder docker\n \n # Restart Coder server\n sudo systemctl restart coder\n \n # Test Docker\n sudo -u coder docker ps\n ```\n\n - If on a remote VM, see the 
[Docker Terraform provider documentation](https://registry.terraform.io/providers/kreuzwerker/docker/latest/docs#remote-hosts) to configure a remote host\n\nTo import this template into Coder, first create a template from \"Scratch\" in the template editor.\n\nVisit this URL for your Coder deployment:\n\n```sh\nhttps://coder.example.com/templates/new?exampleId=scratch\n```\n\nAfter creating the template, paste the contents from [main.tf](https://github.com/coder/registry/blob/main/registry/coder-labs/templates/tasks-docker/main.tf) into the template editor and save.\n\nAlternatively, you can use the Coder CLI to [push the template](https://coder.com/docs/reference/cli/templates_push)\n\n```sh\n# Download the CLI\ncurl -L https://coder.com/install.sh | sh\n\n# Log in to your deployment\ncoder login https://coder.example.com\n\n# Clone the registry\ngit clone https://github.com/coder/registry\ncd registry\n\n# Navigate to this template\ncd registry/coder-labs/templates/tasks-docker\n\n# Push the template\ncoder templates push\n```\n" } ] diff --git a/examples/examples.go b/examples/examples.go index 7deff18f5f9ad..8490267b7fe28 100644 --- a/examples/examples.go +++ b/examples/examples.go @@ -40,6 +40,7 @@ var ( //go:embed templates/kubernetes-devcontainer //go:embed templates/nomad-docker //go:embed templates/scratch + //go:embed templates/tasks-docker files embed.FS exampleBasePath = "https://github.com/coder/coder/tree/main/examples/templates/" diff --git a/examples/monitoring/dashboards/grafana/aibridge/README.md b/examples/monitoring/dashboards/grafana/aibridge/README.md new file mode 100644 index 0000000000000..54cca4bed6e54 --- /dev/null +++ b/examples/monitoring/dashboards/grafana/aibridge/README.md @@ -0,0 +1,39 @@ +# AI Bridge Grafana Dashboard + +![AI Bridge example Grafana Dashboard](./grafana_dashboard.png)A sample Grafana dashboard for monitoring AI Bridge token usage, costs, and cache hit rates in Coder. 
+ +The dashboard includes four main sections with multiple visualization panels: + +**Usage Leaderboards** - Track token consumption across your organization: +- Bar chart showing input, output, cache read, and cache write tokens per user +- Total usage statistics with breakdowns by token type + +**Approximate Cost Table** - Estimate AI spending by joining token usage with live pricing data from LiteLLM: +- Per-provider and per-model cost breakdown +- Input, output, cache read, and cache write costs +- Total cost calculations with footer summaries + +**Interceptions** - Monitor AI API calls over time: +- Time-series bar chart of interceptions by user +- Total interception count + +**Prompts & Tool Calls Details** - Inspect actual AI interactions: +- User Prompts table showing all prompts sent to AI models with timestamps +- Tool Calls table displaying MCP tool invocations, inputs, and errors (color-coded for failures) + +All panels support filtering by time range, username, provider (Anthropic, OpenAI, etc.), and model using regex patterns. + +## Setup + +1. **Install the Infinity plugin**: `grafana-cli plugins install yesoreyeram-infinity-datasource` + +2. **Configure data sources**: + - **PostgreSQL datasource** (`coder-observability-ro`): Connect to your Coder database with read access to `aibridge_interceptions`, `aibridge_token_usages`, `aibridge_user_prompts`, `aibridge_tool_usages` and `users` + - **Infinity datasource** (`litellm-pricing-data`): Point to `https://raw.githubusercontent.com/BerriAI/litellm/refs/heads/main/model_prices_and_context_window.json` for model pricing data + +3. **Import**: Download [`dashboard.json`](https://raw.githubusercontent.com/coder/coder/main/examples/monitoring/dashboards/grafana/aibridge/dashboard.json) from this directory, then in Grafana navigate to **Dashboards** → **Import** → **Upload JSON file**. Map the data sources when prompted.
+ +## Features + +- Token usage leaderboards by user, provider, and model +- Filterable by time range, username, provider, and model (regex supported) diff --git a/examples/monitoring/dashboards/grafana/aibridge/dashboard.json b/examples/monitoring/dashboards/grafana/aibridge/dashboard.json new file mode 100644 index 0000000000000..16bb5a201c79a --- /dev/null +++ b/examples/monitoring/dashboards/grafana/aibridge/dashboard.json @@ -0,0 +1,1411 @@ +{ + "__inputs": [ + { + "name": "DS_CODER-OBSERVABILITY-RO", + "label": "coder-observability-ro", + "description": "", + "type": "datasource", + "pluginId": "grafana-postgresql-datasource", + "pluginName": "PostgreSQL" + }, + { + "name": "DS_LITELLM-PRICING-DATA", + "label": "litellm-pricing-data", + "description": "", + "type": "datasource", + "pluginId": "yesoreyeram-infinity-datasource", + "pluginName": "Infinity" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "panel", + "id": "barchart", + "name": "Bar chart", + "version": "" + }, + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "12.1.0" + }, + { + "type": "datasource", + "id": "grafana-postgresql-datasource", + "name": "PostgreSQL", + "version": "12.1.0" + }, + { + "type": "panel", + "id": "stat", + "name": "Stat", + "version": "" + }, + { + "type": "panel", + "id": "table", + "name": "Table", + "version": "" + }, + { + "type": "datasource", + "id": "yesoreyeram-infinity-datasource", + "name": "Infinity", + "version": "3.6.0" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 11, + "panels": [], + "title": 
"Usage leaderboards", + "type": "row" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "output" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "yellow", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Cache Read" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "green", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Input" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "orange", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Cache Write" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "light-red", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 12, + "w": 20, + "x": 0, + "y": 1 + }, + "id": 1, + "options": { + "barRadius": 0, + "barWidth": 0.97, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "hideZeros": false, + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": 
"12.1.0", + "targets": [ + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select u.username, sum(t.input_tokens) as input,\nsum(t.output_tokens) as output,\nsum(\n COALESCE(\n t.metadata->>'cache_read_input', -- Anthropic\n t.metadata->>'prompt_cached' -- OpenAI\n )::int\n) AS cache_read_input,\nsum((t.metadata->>'cache_creation_input')::int) AS cache_creation_input -- Anthropic\nfrom aibridge_token_usages t\njoin aibridge_interceptions i on t.interception_id = i.id\njoin users u on i.initiator_id = u.id\nwhere $__timeFilter(i.started_at)\n AND u.username ~ '${username:regex}'\n AND i.provider ~ '${provider:regex}'\n AND i.model ~ '${model:regex}'\ngroup by u.username\norder by input desc", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } + } + ], + "title": "Leaderboard per user", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "cache_creation_input": "Cache Write", + "cache_read_input": "Cache Read", + "input": "Input", + "output": "Output", + "username": "" + } + } + } + ], + "type": "barchart" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "text", + "value": 0 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 12, + "w": 4, + "x": 20, + "y": 1 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + 
"reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.1.0", + "targets": [ + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select sum(t.input_tokens) as input,\nsum(t.output_tokens) as output,\nsum(\n COALESCE(\n t.metadata->>'cache_read_input', -- Anthropic\n t.metadata->>'prompt_cached' -- OpenAI\n )::int\n) AS cache_read_input,\nsum((t.metadata->>'cache_creation_input')::int) AS cache_creation_input -- Anthropic\nfrom aibridge_token_usages t\njoin aibridge_interceptions i on t.interception_id = i.id\njoin users u on i.initiator_id = u.id\nwhere $__timeFilter(i.started_at)\n AND u.username ~ '${username:regex}'\n AND i.provider ~ '${provider:regex}'\n AND i.model ~ '${model:regex}'\norder by input desc", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } + } + ], + "title": "Total usage for $username", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": { + "cache_creation_input": 3, + "cache_read_input": 2, + "input": 0, + "output": 1 + }, + "renameByName": { + "cache_creation_input": "Cache Write", + "cache_read_input": "Cache Read", + "input": "Input", + "output": "Output" + } + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "datasource", + "uid": "-- Mixed --" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + 
"color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byRegexp", + "options": "/.*Cost.*/" + }, + "properties": [ + { + "id": "unit", + "value": "currencyUSD" + }, + { + "id": "decimals", + "value": 2 + } + ] + } + ] + }, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 13 + }, + "id": 12, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": true + }, + "frameIndex": 0, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Total Cost" + } + ] + }, + "pluginVersion": "12.1.0", + "targets": [ + { + "columns": [], + "computed_columns": [], + "datasource": { + "type": "yesoreyeram-infinity-datasource", + "uid": "${DS_LITELLM-PRICING-DATA}" + }, + "filterExpression": "", + "filters": [], + "format": "table", + "global_query_id": "", + "hide": false, + "pagination_mode": "none", + "parser": "backend", + "refId": "A", + "root_selector": "$ ~> $each(function($v, $k) {\n {\n \"model\": $k,\n \"input_cost_per_token\": $v.input_cost_per_token ? $v.input_cost_per_token : 0,\n \"output_cost_per_token\": $v.output_cost_per_token ? $v.output_cost_per_token : 0,\n \"cache_creation_input_token_cost\": $v.cache_creation_input_token_cost ? $v.cache_creation_input_token_cost : 0,\n \"cache_read_input_token_cost\": $v.cache_read_input_token_cost ? 
$v.cache_read_input_token_cost : 0\n }\n})", + "source": "url", + "type": "json", + "url": "", + "url_options": { + "data": "", + "method": "GET" + } + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "hide": false, + "rawQuery": true, + "rawSql": "select i.provider, i.model,\nsum(t.input_tokens) as input,\nsum(t.output_tokens) as output,\nsum(\n COALESCE(\n t.metadata->>'cache_read_input', -- Anthropic\n t.metadata->>'prompt_cached' -- OpenAI\n )::int\n) AS cache_read_input,\nsum((t.metadata->>'cache_creation_input')::int) AS cache_creation_input -- Anthropic\nfrom aibridge_token_usages t\njoin aibridge_interceptions i on t.interception_id = i.id\njoin users u on i.initiator_id = u.id\nwhere $__timeFilter(i.started_at)\n AND u.username ~ '${username:regex}'\n AND i.provider ~ '${provider:regex}'\n AND i.model ~ '${model:regex}'\ngroup by i.provider, i.model\norder by input desc", + "refId": "B", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } + } + ], + "title": "Approximate Cost", + "transformations": [ + { + "id": "joinByField", + "options": { + "byField": "model", + "mode": "inner" + } + }, + { + "id": "calculateField", + "options": { + "alias": "Input Cost", + "binary": { + "left": { + "matcher": { + "id": "byName", + "options": "input_cost_per_token A" + } + }, + "operator": "*", + "right": { + "matcher": { + "id": "byName", + "options": "input" + } + } + }, + "mode": "binary", + "reduce": { + "include": ["input_cost_per_token A", "input"], + "reducer": "sum" + } + } + }, + { + "id": "calculateField", + "options": { + "alias": "Output Cost", + "binary": { + "left": { + "matcher": { + "id": "byName", + "options": "output_cost_per_token A" + } + }, + "operator": "*", + "right": { + "matcher": { + "id": "byName", + 
"options": "output" + } + } + }, + "mode": "binary", + "reduce": { + "reducer": "sum" + } + } + }, + { + "id": "calculateField", + "options": { + "alias": "Cache Read Cost", + "binary": { + "left": { + "matcher": { + "id": "byName", + "options": "cache_read_input_token_cost A" + } + }, + "operator": "*", + "right": { + "matcher": { + "id": "byName", + "options": "cache_read_input" + } + } + }, + "mode": "binary", + "reduce": { + "reducer": "sum" + } + } + }, + { + "id": "calculateField", + "options": { + "alias": "Cache Write Cost", + "binary": { + "left": { + "matcher": { + "id": "byName", + "options": "cache_creation_input_token_cost A" + } + }, + "operator": "*", + "right": { + "matcher": { + "id": "byName", + "options": "cache_creation_input" + } + } + }, + "mode": "binary", + "reduce": { + "reducer": "sum" + } + } + }, + { + "id": "calculateField", + "options": { + "alias": "Total Cost", + "binary": { + "left": { + "matcher": { + "id": "byType", + "options": "number" + } + }, + "right": { + "fixed": "" + } + }, + "cumulative": { + "field": "Input Cost", + "reducer": "sum" + }, + "mode": "reduceRow", + "reduce": { + "include": [ + "Input Cost", + "Output Cost", + "Cache Read Cost", + "Cache Write Cost" + ], + "reducer": "sum" + } + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "cache_creation_input": false, + "cache_creation_input_token_cost A": true, + "cache_read_input": false, + "cache_read_input_token_cost A": true, + "input": false, + "input_cost_per_token A": true, + "output": false, + "output_cost_per_token A": true + }, + "includeByName": {}, + "indexByName": { + "Cache Read Cost": 12, + "Cache Write Cost": 13, + "Input Cost": 10, + "Output Cost": 11, + "Total Cost": 14, + "cache_creation_input": 9, + "cache_creation_input_token_cost A": 2, + "cache_read_input": 8, + "cache_read_input_token_cost A": 3, + "input": 6, + "input_cost_per_token A": 4, + "model": 1, + "output": 7, + "output_cost_per_token A": 5, + "provider": 0 + }, + 
"renameByName": { + "cache_creation_input": "Cache Write", + "cache_read_input": "Cache Read", + "input": "Input", + "model": "Model", + "output": "Output", + "provider": "Provider" + } + } + } + ], + "type": "table" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 10, + "panels": [], + "title": "Interceptions", + "type": "row" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "fieldMinMax": false, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "output" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "orange", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 12, + "w": 20, + "x": 0, + "y": 23 + }, + "id": 4, + "maxDataPoints": 30, + "options": { + "barRadius": 0, + "barWidth": 0.97, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "normal", + "text": { + "valueSize": 10 + }, + "tooltip": { + "hideZeros": false, + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": -45, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "12.1.0", + "targets": [ + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": 
"${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n$__timeGroupAlias(i.started_at, $__interval, NULL),\ncount(i.id) AS value,\nu.username AS metric\nFROM aibridge_interceptions i\njoin users u ON i.initiator_id = u.id\nWHERE\n$__timeFilter(i.started_at)\nAND u.username ~ '${username:regex}'\nAND i.provider ~ '${provider:regex}'\nAND i.model ~ '${model:regex}'\nGROUP BY u.username, $__timeGroup(i.started_at, $__interval)\nORDER BY $__timeGroup(i.started_at, $__interval)", + "refId": "A", + "sql": { + "columns": [ + { + "name": "COUNT", + "parameters": [ + { + "name": "id", + "type": "functionParameter" + } + ], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "name": "started_at", + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "table": "aibridge_interceptions" + } + ], + "title": "Interceptions over time by user", + "type": "barchart" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "output" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "orange", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 12, + "w": 4, + "x": 20, + "y": 23 + }, + "id": 5, + "interval": "1m", + "maxDataPoints": 500, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.1.0", + "targets": [ + { + 
"datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select count(*) from aibridge_interceptions\nWHERE started_at > $__timeFrom() AND started_at <= $__timeTo()\nAND provider ~ '${provider:regex}'\nAND model ~ '${model:regex}'", + "refId": "A", + "sql": { + "columns": [ + { + "name": "COUNT", + "parameters": [ + { + "name": "id", + "type": "functionParameter" + } + ], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "name": "started_at", + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "table": "aibridge_interceptions" + } + ], + "title": "Total interceptions", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 35 + }, + "id": 9, + "panels": [], + "title": "Prompts & tool calls details", + "type": "row" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto", + "wrapText": false + }, + "inspect": true + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Interception ID" + }, + "properties": [ + { + "id": "custom.width", + "value": 357 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Model" + }, + "properties": [ + { + "id": "custom.width", + "value": 240 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Provider" + }, + "properties": [ + { + "id": "custom.width", + "value": 157 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Username" + }, + "properties": [ + { + "id": "custom.width", + "value": 188 + } + ] + } + ] + }, 
+ "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 36 + }, + "id": 7, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Created At" + } + ] + }, + "pluginVersion": "12.1.0", + "targets": [ + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT i.id,\n u.username,\n i.provider,\n i.model,\n p.prompt,\n p.created_at\nFROM aibridge_user_prompts p\nJOIN aibridge_interceptions i ON p.interception_id = i.id\nJOIN users u ON i.initiator_id = u.id\nWHERE $__timeFilter(i.started_at)\n AND u.username ~ '${username:regex}'\n AND i.provider ~ '${provider:regex}'\n AND i.model ~ '${model:regex}'\nORDER BY p.created_at DESC;", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } + } + ], + "title": "User Prompts", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "created_at": "Created At", + "id": "Interception ID", + "input": "Tool Input", + "invocation_error": "Tool Error", + "model": "Model", + "prompt": "Prompt", + "provider": "Provider", + "server_url": "MCP Server", + "tool": "Tool Name", + "username": "Username" + } + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto", + "wrapText": false + }, + "inspect": true + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + 
"color": "green", + "value": 0 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Tool Name" + }, + "properties": [ + { + "id": "custom.width", + "value": 342 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "invocation_error" + }, + "properties": [ + { + "id": "custom.cellOptions", + "value": { + "applyToRow": true, + "type": "color-background", + "wrapText": false + } + }, + { + "id": "noValue" + }, + { + "id": "mappings", + "value": [ + { + "options": { + "match": "null", + "result": { + "color": "green", + "index": 0 + } + }, + "type": "special" + }, + { + "options": { + "pattern": ".+", + "result": { + "color": "red", + "index": 1 + } + }, + "type": "regex" + } + ] + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Tool Input" + }, + "properties": [ + { + "id": "custom.width", + "value": 309 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Interception ID" + }, + "properties": [ + { + "id": "custom.width", + "value": 357 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Model" + }, + "properties": [ + { + "id": "custom.width", + "value": 240 + } + ] + } + ] + }, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 50 + }, + "id": 6, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Created At" + } + ] + }, + "pluginVersion": "12.1.0", + "targets": [ + { + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select i.id, u.username, i.provider, i.model, t.server_url, t.tool, t.input, t.invocation_error, t.created_at FROM aibridge_tool_usages t\njoin aibridge_interceptions i ON t.interception_id = i.id\njoin users u on i.initiator_id = u.id\nwhere 
$__timeFilter(i.started_at)\nAND u.username ~ '${username:regex}'\nAND i.provider ~ '${provider:regex}'\nAND i.model ~ '${model:regex}'\norder by t.created_at desc", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } + } + ], + "title": "Tool Calls", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "created_at": "Created At", + "id": "Interception ID", + "input": "Tool Input", + "invocation_error": "Tool Error", + "model": "Model", + "provider": "Provider", + "server_url": "MCP Server", + "tool": "Tool Name", + "username": "Username" + } + } + } + ], + "type": "table" + } + ], + "refresh": "1m", + "schemaVersion": 41, + "tags": [], + "templating": { + "list": [ + { + "allValue": ".+", + "current": {}, + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "definition": "select username from users where deleted=false;", + "description": "", + "includeAll": true, + "multi": true, + "name": "username", + "options": [], + "query": "select username from users where deleted=false;", + "refresh": 1, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "allValue": ".+", + "current": {}, + "datasource": { + "type": "grafana-postgresql-datasource", + "uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "definition": "SELECT DISTINCT provider FROM aibridge_interceptions WHERE provider IS NOT NULL ORDER BY 1;", + "description": "", + "includeAll": true, + "multi": true, + "name": "provider", + "options": [], + "query": "SELECT DISTINCT provider FROM aibridge_interceptions WHERE provider IS NOT NULL ORDER BY 1;", + "refresh": 1, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "allValue": ".+", + "current": {}, + "datasource": { + "type": "grafana-postgresql-datasource", + 
"uid": "${DS_CODER-OBSERVABILITY-RO}" + }, + "definition": "SELECT DISTINCT model FROM aibridge_interceptions WHERE model IS NOT NULL AND provider ~ '${provider:regex}' ORDER BY 1;", + "description": "", + "includeAll": true, + "multi": true, + "name": "model", + "options": [], + "query": "SELECT DISTINCT model FROM aibridge_interceptions WHERE model IS NOT NULL AND provider ~ '${provider:regex}' ORDER BY 1;", + "refresh": 1, + "regex": "", + "sort": 1, + "type": "query" + } + ] + }, + "time": { + "from": "now-7d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "aibridge", + "uid": "0c61d33f-c809-4184-9e88-cb27e2d9d224", + "version": 43, + "weekStart": "" +} diff --git a/examples/monitoring/dashboards/grafana/aibridge/grafana_dashboard.png b/examples/monitoring/dashboards/grafana/aibridge/grafana_dashboard.png new file mode 100644 index 0000000000000..c292bb0cf498d Binary files /dev/null and b/examples/monitoring/dashboards/grafana/aibridge/grafana_dashboard.png differ diff --git a/examples/templates/community-templates.md b/examples/templates/community-templates.md index 86bc9bce174ba..23d2f51807a70 100644 --- a/examples/templates/community-templates.md +++ b/examples/templates/community-templates.md @@ -23,9 +23,6 @@ templates. and API examples. - [bpmct/coder-templates](https://github.com/bpmct/coder-templates) - Kubernetes, OpenStack, podman, Docker, VM, AWS, Google Cloud, Azure templates. -- [kozmiknano/vscode-server-template](https://github.com/KozmikNano/vscode-server-template) - - Run the full VS Code server within docker! (Built-in settings sync and - Microsoft Marketplace enabled) - [atnomoverflow/coder-template](https://github.com/atnomoverflow/coder-template) - Kubernetes template that install VS code server Rstudio jupyter and also set ssh access to gitlab (Works also on self managed gitlab). 
diff --git a/examples/templates/tasks-docker/README.md b/examples/templates/tasks-docker/README.md new file mode 100644 index 0000000000000..02262e5d6989c --- /dev/null +++ b/examples/templates/tasks-docker/README.md @@ -0,0 +1,87 @@ +--- +display_name: Tasks on Docker +description: Run Coder Tasks on Docker with an example application +icon: ../../../site/static/icon/tasks.svg +verified: false +tags: [docker, container, ai, tasks] +maintainer_github: coder +--- + +# Run Coder Tasks on Docker + +This is an example template for running [Coder Tasks](https://coder.com/docs/ai-coder/tasks), Claude Code, along with a [real world application](https://realworld-docs.netlify.app/). + +![Tasks](../../.images/tasks-screenshot.png) + +This is a fantastic starting point for working with AI agents with Coder Tasks. Try prompts such as: + +- "Make the background color blue" +- "Add a dark mode" +- "Rewrite the entire backend in Go" + +## Included in this template + +This template is designed to be an example and a reference for building other templates with Coder Tasks. You can always run Coder Tasks on different infrastructure (e.g. as on Kubernetes, VMs) and with your own GitHub repositories, MCP servers, images, etc. + +Additionally, this template uses our [Claude Code](https://registry.coder.com/modules/coder/claude-code) module, but [other agents](https://registry.coder.com/modules?search=tag%3Aagent) or even [custom agents](https://coder.com/docs/ai-coder/custom-agents) can be used in its place. + +This template uses a [Workspace Preset](https://coder.com/docs/admin/templates/extending-templates/parameters#workspace-presets) that pre-defines: + +- Universal Container Image (e.g. 
contains Node.js, Java, Python, Ruby, etc) +- MCP servers (desktop-commander for long-running logs, playwright for previewing changes) +- System prompt and [repository](https://github.com/coder-contrib/realworld-django-rest-framework-angular) for the AI agent +- Startup script to initialize the repository and start the development server + +## Add this template to your Coder deployment + +You can also add this template to your Coder deployment and begin tinkering right away! + +### Prerequisites + +- Coder installed (see [our docs](https://coder.com/docs/install)), ideally a Linux VM with Docker +- Anthropic API Key (or access to Anthropic models via Bedrock or Vertex, see [Claude Code docs](https://docs.anthropic.com/en/docs/claude-code/third-party-integrations)) +- Access to a Docker socket + - If on the local VM, ensure the `coder` user is added to the Docker group (docs) + + ```sh + # Add coder user to Docker group + sudo adduser coder docker + + # Restart Coder server + sudo systemctl restart coder + + # Test Docker + sudo -u coder docker ps + ``` + + - If on a remote VM, see the [Docker Terraform provider documentation](https://registry.terraform.io/providers/kreuzwerker/docker/latest/docs#remote-hosts) to configure a remote host + +To import this template into Coder, first create a template from "Scratch" in the template editor. + +Visit this URL for your Coder deployment: + +```sh +https://coder.example.com/templates/new?exampleId=scratch +``` + +After creating the template, paste the contents from [main.tf](https://github.com/coder/registry/blob/main/registry/coder-labs/templates/tasks-docker/main.tf) into the template editor and save. 
+ +Alternatively, you can use the Coder CLI to [push the template](https://coder.com/docs/reference/cli/templates_push) + +```sh +# Download the CLI +curl -L https://coder.com/install.sh | sh + +# Log in to your deployment +coder login https://coder.example.com + +# Clone the registry +git clone https://github.com/coder/registry +cd registry + +# Navigate to this template +cd registry/coder-labs/templates/tasks-docker + +# Push the template +coder templates push +``` diff --git a/examples/templates/tasks-docker/main.tf b/examples/templates/tasks-docker/main.tf new file mode 100644 index 0000000000000..8a457584a4674 --- /dev/null +++ b/examples/templates/tasks-docker/main.tf @@ -0,0 +1,376 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + } + docker = { + source = "kreuzwerker/docker" + } + } +} + +# This template requires a valid Docker socket +# However, you can reference our Kubernetes/VM +# example templates and adapt the Claude Code module +# +# see: https://registry.coder.com/templates +provider "docker" {} + +# The Claude Code module does the automatic task reporting +# Other agent modules: https://registry.coder.com/modules?search=agent +# Or use a custom agent: +module "claude-code" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/coder/claude-code/coder" + version = "3.3.2" + agent_id = coder_agent.main.id + workdir = "/home/coder/projects" + order = 999 + claude_api_key = "" + ai_prompt = data.coder_parameter.ai_prompt.value + system_prompt = data.coder_parameter.system_prompt.value + model = "sonnet" + permission_mode = "plan" + post_install_script = data.coder_parameter.setup_script.value +} + +# We are using presets to set the prompts, image, and set up instructions +# See https://coder.com/docs/admin/templates/extending-templates/parameters#workspace-presets +data "coder_workspace_preset" "default" { + name = "Real World App: Angular + Django" + default = true + parameters = { + "system_prompt" 
= <<-EOT + -- Framing -- + You are a helpful assistant that can help with code. You are running inside a Coder Workspace and provide status updates to the user via Coder MCP. Stay on track, feel free to debug, but when the original plan fails, do not choose a different route/architecture without checking the user first. + + -- Tool Selection -- + - playwright: previewing your changes after you made them + to confirm it worked as expected + - desktop-commander - use only for commands that keep running + (servers, dev watchers, GUI apps). + - Built-in tools - use for everything else: + (file operations, git commands, builds & installs, one-off shell commands) + + Remember this decision rule: + - Stays running? β†’ desktop-commander + - Finishes immediately? β†’ built-in tools + + -- Context -- + There is an existing app and tmux dev server running on port 8000. Be sure to read it's CLAUDE.md (./realworld-django-rest-framework-angular/CLAUDE.md) to learn more about it. + + Since this app is for demo purposes and the user is previewing the homepage and subsequent pages, aim to make the first visual change/prototype very quickly so the user can preview it, then focus on backend or logic which can be a more involved, long-running architecture plan. + + EOT + + "setup_script" = <<-EOT + # Set up projects dir + mkdir -p /home/coder/projects + cd $HOME/projects + + # Packages: Install additional packages + sudo apt-get update && sudo apt-get install -y tmux + if ! command -v google-chrome >/dev/null 2>&1; then + yes | npx playwright install chrome + fi + + # MCP: Install and configure MCP Servers + npm install -g @wonderwhy-er/desktop-commander + claude mcp add playwright npx -- @playwright/mcp@latest --headless --isolated --no-sandbox + claude mcp add desktop-commander desktop-commander + + # Repo: Clone and pull changes from the git repository + if [ ! 
-d "realworld-django-rest-framework-angular" ]; then + git clone https://github.com/coder-contrib/realworld-django-rest-framework-angular.git + else + cd realworld-django-rest-framework-angular + git fetch + # Check for uncommitted changes + if git diff-index --quiet HEAD -- && \ + [ -z "$(git status --porcelain --untracked-files=no)" ] && \ + [ -z "$(git log --branches --not --remotes)" ]; then + echo "Repo is clean. Pulling latest changes..." + git pull + else + echo "Repo has uncommitted or unpushed changes. Skipping pull." + fi + + cd .. + fi + + # Initialize: Start the development server + cd realworld-django-rest-framework-angular && ./start-dev.sh + EOT + "preview_port" = "4200" + "container_image" = "codercom/example-universal:ubuntu" + } + + # Pre-builds is a Coder Premium + # feature to speed up workspace creation + # + # see https://coder.com/docs/admin/templates/extending-templates/prebuilt-workspaces + # prebuilds { + # instances = 1 + # expiration_policy { + # ttl = 86400 # Time (in seconds) after which unclaimed prebuilds are expired (1 day) + # } + # } +} + +# Advanced parameters (these are all set via preset) +data "coder_parameter" "system_prompt" { + name = "system_prompt" + display_name = "System Prompt" + type = "string" + form_type = "textarea" + description = "System prompt for the agent with generalized instructions" + mutable = false +} +data "coder_parameter" "ai_prompt" { + type = "string" + name = "AI Prompt" + default = "" + description = "Write a prompt for Claude Code" + mutable = true +} +data "coder_parameter" "setup_script" { + name = "setup_script" + display_name = "Setup Script" + type = "string" + form_type = "textarea" + description = "Script to run before running the agent" + mutable = false +} +data "coder_parameter" "container_image" { + name = "container_image" + display_name = "Container Image" + type = "string" + default = "codercom/example-universal:ubuntu" + mutable = false +} +data "coder_parameter" "preview_port" { + 
name = "preview_port" + display_name = "Preview Port" + description = "The port the web app is running to preview in Tasks" + type = "number" + default = "3000" + mutable = false +} + +data "coder_provisioner" "me" {} +data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} + +resource "coder_agent" "main" { + arch = data.coder_provisioner.me.arch + os = "linux" + startup_script = <<-EOT + set -e + # Prepare user home with default files on first start. + if [ ! -f ~/.init_done ]; then + cp -rT /etc/skel ~ + touch ~/.init_done + fi + EOT + + # These environment variables allow you to make Git commits right away after creating a + # workspace. Note that they take precedence over configuration defined in ~/.gitconfig! + # You can remove this block if you'd prefer to configure Git manually or using + # dotfiles. (see docs/dotfiles.md) + env = { + GIT_AUTHOR_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" + } + + # The following metadata blocks are optional. They are used to display + # information about your workspace in the dashboard. You can remove them + # if you don't want to display any information. + # For basic resources, you can use the `coder stat` command. + # If you need more control, you can write your own script. 
+ metadata { + display_name = "CPU Usage" + key = "0_cpu_usage" + script = "coder stat cpu" + interval = 10 + timeout = 1 + } + + metadata { + display_name = "RAM Usage" + key = "1_ram_usage" + script = "coder stat mem" + interval = 10 + timeout = 1 + } + + metadata { + display_name = "Home Disk" + key = "3_home_disk" + script = "coder stat disk --path $${HOME}" + interval = 60 + timeout = 1 + } + + metadata { + display_name = "CPU Usage (Host)" + key = "4_cpu_usage_host" + script = "coder stat cpu --host" + interval = 10 + timeout = 1 + } + + metadata { + display_name = "Memory Usage (Host)" + key = "5_mem_usage_host" + script = "coder stat mem --host" + interval = 10 + timeout = 1 + } + + metadata { + display_name = "Load Average (Host)" + key = "6_load_host" + # get load avg scaled by number of cores + script = < github.com/aslilac/afero v0.0.0-20250403163713 require ( cdr.dev/slog v1.6.2-0.20250703074222-9df5e0a6c145 - cloud.google.com/go/compute/metadata v0.8.4 + cloud.google.com/go/compute/metadata v0.9.0 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d github.com/adrg/xdg v0.5.0 github.com/ammario/tlru v0.4.0 @@ -96,15 +96,15 @@ require ( github.com/chromedp/chromedp v0.14.1 github.com/cli/safeexec v1.0.1 github.com/coder/flog v1.1.0 - github.com/coder/guts v1.5.0 + github.com/coder/guts v1.6.1 github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 github.com/coder/quartz v0.2.1 github.com/coder/retry v1.5.1 - github.com/coder/serpent v0.10.0 - github.com/coder/terraform-provider-coder/v2 v2.11.0 + github.com/coder/serpent v0.11.0 + github.com/coder/terraform-provider-coder/v2 v2.12.0 github.com/coder/websocket v1.8.13 github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 - github.com/coreos/go-oidc/v3 v3.15.0 + github.com/coreos/go-oidc/v3 v3.16.0 github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf github.com/creack/pty v1.1.21 github.com/dave/dst v0.27.2 @@ -123,11 +123,11 @@ require ( github.com/go-chi/chi/v5 
v5.2.2 github.com/go-chi/cors v1.2.1 github.com/go-chi/httprate v0.15.0 - github.com/go-jose/go-jose/v4 v4.1.1 + github.com/go-jose/go-jose/v4 v4.1.3 github.com/go-logr/logr v1.4.3 - github.com/go-playground/validator/v10 v10.27.0 - github.com/gofrs/flock v0.12.1 - github.com/gohugoio/hugo v0.150.0 + github.com/go-playground/validator/v10 v10.28.0 + github.com/gofrs/flock v0.13.0 + github.com/gohugoio/hugo v0.152.2 github.com/golang-jwt/jwt/v4 v4.5.2 github.com/golang-migrate/migrate/v4 v4.19.0 github.com/gomarkdown/markdown v0.0.0-20240930133441-72d49d9543d8 @@ -149,12 +149,12 @@ require ( github.com/justinas/nosurf v1.2.0 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 github.com/kirsle/configdir v0.0.0-20170128060238-e45d2f54772f - github.com/klauspost/compress v1.18.0 + github.com/klauspost/compress v1.18.1 github.com/lib/pq v1.10.9 github.com/mattn/go-isatty v0.0.20 github.com/mitchellh/go-wordwrap v1.0.1 github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c - github.com/moby/moby v28.4.0+incompatible + github.com/moby/moby v28.5.0+incompatible github.com/mocktools/go-smtp-mock/v2 v2.5.0 github.com/muesli/termenv v0.16.0 github.com/natefinch/atomic v1.0.1 @@ -172,8 +172,8 @@ require ( github.com/robfig/cron/v3 v3.0.1 github.com/shirou/gopsutil/v4 v4.25.5 github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 - github.com/spf13/afero v1.14.0 - github.com/spf13/pflag v1.0.7 + github.com/spf13/afero v1.15.0 + github.com/spf13/pflag v1.0.10 github.com/sqlc-dev/pqtype v0.3.0 github.com/stretchr/testify v1.11.1 github.com/swaggo/http-swagger/v2 v2.0.1 @@ -181,7 +181,7 @@ require ( github.com/tidwall/gjson v1.18.0 github.com/u-root/u-root v0.14.0 github.com/unrolled/secure v1.17.0 - github.com/valyala/fasthttp v1.66.0 + github.com/valyala/fasthttp v1.68.0 github.com/wagslane/go-password-validator v0.3.0 github.com/zclconf/go-cty-yaml v1.1.0 go.mozilla.org/pkcs7 v0.9.0 @@ -195,20 +195,20 @@ require ( 
go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29 go.uber.org/mock v0.6.0 go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516 - golang.org/x/crypto v0.42.0 - golang.org/x/exp v0.0.0-20250911091902-df9299821621 - golang.org/x/mod v0.28.0 - golang.org/x/net v0.44.0 - golang.org/x/oauth2 v0.31.0 + golang.org/x/crypto v0.43.0 + golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 + golang.org/x/mod v0.29.0 + golang.org/x/net v0.46.0 + golang.org/x/oauth2 v0.32.0 golang.org/x/sync v0.17.0 - golang.org/x/sys v0.36.0 - golang.org/x/term v0.35.0 - golang.org/x/text v0.29.0 - golang.org/x/tools v0.37.0 + golang.org/x/sys v0.37.0 + golang.org/x/term v0.36.0 + golang.org/x/text v0.30.0 + golang.org/x/tools v0.38.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da - google.golang.org/api v0.250.0 - google.golang.org/grpc v1.75.1 - google.golang.org/protobuf v1.36.9 + google.golang.org/api v0.253.0 + google.golang.org/grpc v1.76.0 + google.golang.org/protobuf v1.36.10 gopkg.in/DataDog/dd-trace-go.v1 v1.74.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v3 v3.0.1 @@ -219,7 +219,7 @@ require ( ) require ( - cloud.google.com/go/auth v0.16.5 // indirect + cloud.google.com/go/auth v0.17.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/logging v1.13.0 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect @@ -255,13 +255,13 @@ require ( github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect github.com/atotto/clipboard v0.1.4 // indirect - github.com/aws/aws-sdk-go-v2 v1.39.0 + github.com/aws/aws-sdk-go-v2 v1.39.2 github.com/aws/aws-sdk-go-v2/config v1.31.3 github.com/aws/aws-sdk-go-v2/credentials v1.18.7 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4 // indirect github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.4 // indirect - 
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.4 // indirect @@ -274,7 +274,7 @@ require ( github.com/beorn7/perks v1.0.1 // indirect github.com/bep/godartsass/v2 v2.5.0 // indirect github.com/bep/golibsass v1.2.0 // indirect - github.com/bmatcuk/doublestar/v4 v4.9.0 // indirect + github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect github.com/charmbracelet/x/ansi v0.8.0 // indirect github.com/charmbracelet/x/term v0.2.1 // indirect github.com/chromedp/sysutil v1.1.0 // indirect @@ -296,7 +296,7 @@ require ( github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fxamacker/cbor/v2 v2.7.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.8 // indirect + github.com/gabriel-vasile/mimetype v1.4.10 // indirect github.com/go-chi/hostrouter v0.3.0 // indirect github.com/go-ini/ini v1.67.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect @@ -315,7 +315,7 @@ require ( github.com/gobwas/ws v1.4.0 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/gohugoio/hashstructure v0.5.0 // indirect + github.com/gohugoio/hashstructure v0.6.0 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/btree v1.1.3 // indirect @@ -338,9 +338,9 @@ require ( github.com/hashicorp/hcl v1.0.1-vault-7 // indirect github.com/hashicorp/hcl/v2 v2.24.0 github.com/hashicorp/logutils v1.0.0 // indirect - github.com/hashicorp/terraform-plugin-go v0.27.0 // indirect + 
github.com/hashicorp/terraform-plugin-go v0.29.0 // indirect github.com/hashicorp/terraform-plugin-log v0.9.0 // indirect - github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 // indirect + github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect github.com/hdevalence/ed25519consensus v0.1.0 // indirect github.com/illarion/gonotify v1.0.1 // indirect github.com/insomniacslk/dhcp v0.0.0-20231206064809-8c70d406f6d2 // indirect @@ -353,7 +353,7 @@ require ( github.com/kr/fs v0.1.0 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect - github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect github.com/mailru/easyjson v0.9.1 // indirect github.com/mattn/go-colorable v0.1.14 // indirect @@ -379,7 +379,6 @@ require ( github.com/muesli/reflow v0.3.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/niklasfasching/go-org v1.9.1 // indirect - github.com/oklog/run v1.1.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect github.com/opencontainers/runc v1.2.3 // indirect @@ -412,7 +411,7 @@ require ( github.com/tailscale/wireguard-go v0.0.0-20231121184858-cc193a0b3272 github.com/tchap/go-patricia/v2 v2.3.2 // indirect github.com/tcnksm/go-httpstat v0.2.0 // indirect - github.com/tdewolff/parse/v2 v2.8.3 // indirect + github.com/tdewolff/parse/v2 v2.8.5-0.20251020133559-0efcf90bef1a // indirect github.com/tidwall/match v1.2.0 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tinylib/msgp v1.2.5 // indirect @@ -447,22 +446,21 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect go4.org/mem v0.0.0-20220726221520-4f986261bf13 // indirect - golang.org/x/time v0.13.0 // indirect + golang.org/x/time v0.14.0 // indirect 
golang.zx2c4.com/wintun v0.0.0-20230126152724-0fa3db229ce2 golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 // indirect golang.zx2c4.com/wireguard/windows v0.5.3 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f // indirect gopkg.in/ini.v1 v1.67.0 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect howett.net/plist v1.0.0 // indirect kernel.org/pub/linux/libs/security/libcap/psx v1.2.73 // indirect sigs.k8s.io/yaml v1.5.0 // indirect ) -require github.com/coder/clistat v1.0.0 +require github.com/coder/clistat v1.1.1 require github.com/SherClockHolmes/webpush-go v1.4.0 @@ -475,10 +473,10 @@ require ( ) require ( - github.com/anthropics/anthropic-sdk-go v1.12.0 - github.com/brianvoe/gofakeit/v7 v7.7.1 + github.com/anthropics/anthropic-sdk-go v1.13.0 + github.com/brianvoe/gofakeit/v7 v7.8.0 github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 - github.com/coder/aibridge v0.1.3 + github.com/coder/aibridge v0.1.7 github.com/coder/aisdk-go v0.0.9 github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945 github.com/coder/preview v1.0.4 @@ -487,6 +485,7 @@ require ( github.com/go-git/go-git/v5 v5.16.2 github.com/icholy/replace v0.6.0 github.com/mark3labs/mcp-go v0.38.0 + gonum.org/v1/gonum v0.16.0 ) require ( @@ -510,6 +509,7 @@ require ( github.com/aquasecurity/trivy v0.61.1-0.20250407075540-f1329c7ea1aa // indirect github.com/aquasecurity/trivy-checks v1.11.3-0.20250604022615-9a7efa7c9169 // indirect github.com/aws/aws-sdk-go v1.55.7 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream 
v1.6.11 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/buger/jsonparser v1.1.1 // indirect @@ -522,6 +522,7 @@ require ( github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect github.com/go-sql-driver/mysql v1.9.3 // indirect + github.com/goccy/go-yaml v1.18.0 // indirect github.com/google/go-containerregistry v0.20.6 // indirect github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // indirect github.com/hashicorp/go-getter v1.7.9 // indirect diff --git a/go.sum b/go.sum index 2842cbcc7baa2..58992e6cebbdb 100644 --- a/go.sum +++ b/go.sum @@ -101,8 +101,8 @@ cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVo cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= -cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI= -cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ= +cloud.google.com/go/auth v0.17.0 h1:74yCm7hCj2rUyyAocqnFzsAYXgJhrG26XCFimrc/Kz4= +cloud.google.com/go/auth v0.17.0/go.mod h1:6wv/t5/6rOPAX4fJiRjKkJCvswLwdet7G8+UGXt7nCQ= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= @@ -184,8 +184,8 @@ cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZ cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod 
h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/compute/metadata v0.8.4 h1:oXMa1VMQBVCyewMIOm3WQsnVd9FbKBtm8reqWRaXnHQ= -cloud.google.com/go/compute/metadata v0.8.4/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= +cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= +cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= @@ -676,6 +676,10 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0/go.mod h1:jUZ5LYlw40WMd07qxcQJD5M40aUxrfwqQX1g7zxYnrQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo= +github.com/JohannesKaufmann/dom v0.2.0 h1:1bragmEb19K8lHAqgFgqCpiPCFEZMTXzOIEjuxkUfLQ= +github.com/JohannesKaufmann/dom v0.2.0/go.mod h1:57iSUl5RKric4bUkgos4zu6Xt5LMHUnw3TF1l5CbGZo= +github.com/JohannesKaufmann/html-to-markdown/v2 v2.4.0 h1:C0/TerKdQX9Y9pbYi1EsLr5LDNANsqunyI/btpyfCg8= +github.com/JohannesKaufmann/html-to-markdown/v2 v2.4.0/go.mod h1:OLaKh+giepO8j7teevrNwiy/fwf8LXgoc9g7rwaE1jk= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= 
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= @@ -722,8 +726,8 @@ github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwTo github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/anthropics/anthropic-sdk-go v1.12.0 h1:xPqlGnq7rWrTiHazIvCiumA0u7mGQnwDQtvA1M82h9U= -github.com/anthropics/anthropic-sdk-go v1.12.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE= +github.com/anthropics/anthropic-sdk-go v1.13.0 h1:Bhbe8sRoDPtipttg8bQYrMCKe2b79+q6rFW1vOKEUKI= +github.com/anthropics/anthropic-sdk-go v1.13.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= @@ -758,8 +762,10 @@ github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aws/aws-sdk-go v1.55.7 h1:UJrkFq7es5CShfBwlWAC8DA077vp8PyVbQd3lqLiztE= github.com/aws/aws-sdk-go v1.55.7/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.39.0 h1:xm5WV/2L4emMRmMjHFykqiA4M/ra0DJVSWUkDyBjbg4= -github.com/aws/aws-sdk-go-v2 v1.39.0/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY= +github.com/aws/aws-sdk-go-v2 v1.39.2 h1:EJLg8IdbzgeD7xgvZ+I8M1e0fL0ptn/M47lianzth0I= +github.com/aws/aws-sdk-go-v2 v1.39.2/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 
h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY= github.com/aws/aws-sdk-go-v2/config v1.31.3 h1:RIb3yr/+PZ18YYNe6MDiG/3jVoJrPmdoCARwNkMGvco= github.com/aws/aws-sdk-go-v2/config v1.31.3/go.mod h1:jjgx1n7x0FAKl6TnakqrpkHWWKcX3xfWtdnIJs5K9CE= github.com/aws/aws-sdk-go-v2/credentials v1.18.7 h1:zqg4OMrKj+t5HlswDApgvAHjxKtlduKS7KicXB+7RLg= @@ -768,10 +774,10 @@ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4 h1:lpdMwTzmuDLkgW7086jE94H github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4/go.mod h1:9xzb8/SV62W6gHQGC/8rrvgNXU6ZoYM3sAIJCIrXJxY= github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 h1:QbFjOdplTkOgviHNKyTW/TZpvIYhD6lqEc3tkIvqMoQ= github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2/go.mod h1:d0pTYUeTv5/tPSlbPZZQSqssM158jZBs02jx2LDslM8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.4 h1:IdCLsiiIj5YJ3AFevsewURCPV+YWUlOW8JiPhoAy8vg= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.4/go.mod h1:l4bdfCD7XyyZA9BolKBo1eLqgaJxl0/x91PL4Yqe0ao= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.4 h1:j7vjtr1YIssWQOMeOWRbh3z8g2oY/xPjnZH2gLY4sGw= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.4/go.mod h1:yDmJgqOiH4EA8Hndnv4KwAo8jCGTSnM5ASG1nBI+toA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 h1:se2vOWGD3dWQUtfn4wEjRQJb1HK1XsNIt825gskZ970= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9/go.mod h1:hijCGH2VfbZQxqCDN7bwz/4dzxV+hkyhjawAtdPWKZA= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 h1:6RBnKZLkJM4hQ+kN6E7yWFveOTg8NLPHAkqrs4ZPlTU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9/go.mod h1:V9rQKRmK7AWuEsOMnHzKj8WyrIir1yUJbZxDuZLFvXI= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= 
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 h1:6+lZi2JeGKtCraAj1rpoZfKqnQ9SptseRZioejfUOLM= @@ -828,16 +834,16 @@ github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI= github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= -github.com/bmatcuk/doublestar/v4 v4.9.0 h1:DBvuZxjdKkRP/dr4GVV4w2fnmrk5Hxc90T51LZjv0JA= -github.com/bmatcuk/doublestar/v4 v4.9.0/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= +github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE= +github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bool64/shared v0.1.5 h1:fp3eUhBsrSjNCQPcSdQqZxxh9bBwrYiZ+zOKFkM0/2E= github.com/bool64/shared v0.1.5/go.mod h1:081yz68YC9jeFB3+Bbmno2RFWvGKv1lPKkMP6MHJlPs= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM= github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= -github.com/brianvoe/gofakeit/v7 v7.7.1 h1:Z74GFLZz57rAUHjpNbaKOr8c7nXdUohsiwF/jhkqE0k= -github.com/brianvoe/gofakeit/v7 v7.7.1/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= +github.com/brianvoe/gofakeit/v7 v7.8.0 h1:FHLerglGVodD2O4pnQPCmFlkmIRXp8MpAflnarW5sQM= +github.com/brianvoe/gofakeit/v7 v7.8.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= 
github.com/bytecodealliance/wasmtime-go/v3 v3.0.2 h1:3uZCA/BLTIu+DqCfguByNMJa2HVHpXvjfy0Dy7g6fuA= @@ -911,16 +917,16 @@ github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 h1:tRIViZ5JRmzdOEo5wUWngaGEFBG8OaE1o2GIHN5ujJ8= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225/go.mod h1:rNLVpYgEVeu1Zk29K64z6Od8RBP9DwqCu9OfCzh8MR4= -github.com/coder/aibridge v0.1.3 h1:7A9RQaHQUjtse47ShF3kBj2hMmT1R7BEFgiyByr8Vvc= -github.com/coder/aibridge v0.1.3/go.mod h1:GWc0Owtlzz5iMHosDm6FhbO+SoG5W+VeOKyP9p9g9ZM= +github.com/coder/aibridge v0.1.7 h1:GTAM8nHawXMeb/pxAIwvzr76dyVGu9hw9qV6Gvpc7nw= +github.com/coder/aibridge v0.1.7/go.mod h1:7GhrLbzf6uM3sCA7OPaDzvq9QNrCjNuzMy+WgipYwfQ= github.com/coder/aisdk-go v0.0.9 h1:Vzo/k2qwVGLTR10ESDeP2Ecek1SdPfZlEjtTfMveiVo= github.com/coder/aisdk-go v0.0.9/go.mod h1:KF6/Vkono0FJJOtWtveh5j7yfNrSctVTpwgweYWSp5M= github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945 h1:hDUf02kTX8EGR3+5B+v5KdYvORs4YNfDPci0zCs+pC0= github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945/go.mod h1:d1AMFw81rUgrGHuZzWdPNhkY0G8w7pvLNLYF0e3ceC4= github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41 h1:SBN/DA63+ZHwuWwPHPYoCZ/KLAjHv5g4h2MS4f2/MTI= github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41/go.mod h1:I9ULxr64UaOSUv7hcb3nX4kowodJCVS7vt7VVJk/kW4= -github.com/coder/clistat v1.0.0 h1:MjiS7qQ1IobuSSgDnxcCSyBPESs44hExnh2TEqMcGnA= -github.com/coder/clistat v1.0.0/go.mod h1:F+gLef+F9chVrleq808RBxdaoq52R4VLopuLdAsh8Y4= +github.com/coder/clistat v1.1.1 h1:T45dlwr7fSmjLPGLk7QRKgynnDeMOPoraHSGtLIHY3s= +github.com/coder/clistat v1.1.1/go.mod h1:F+gLef+F9chVrleq808RBxdaoq52R4VLopuLdAsh8Y4= github.com/coder/flog v1.1.0 h1:kbAes1ai8fIS5OeV+QAnKBQE22ty1jRF/mcAwHpLBa4= github.com/coder/flog v1.1.0/go.mod 
h1:UQlQvrkJBvnRGo69Le8E24Tcl5SJleAAR7gYEHzAmdQ= github.com/coder/glog v1.0.1-0.20220322161911-7365fe7f2cd1/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= @@ -928,8 +934,8 @@ github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322 h1:m0lPZjlQ7vdVp github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322/go.mod h1:rOLFDDVKVFiDqZFXoteXc97YXx7kFi9kYqR+2ETPkLQ= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136 h1:0RgB61LcNs24WOxc3PBvygSNTQurm0PYPujJjLLOzs0= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136/go.mod h1:VkD1P761nykiq75dz+4iFqIQIZka189tx1BQLOp0Skc= -github.com/coder/guts v1.5.0 h1:a94apf7xMf5jDdg1bIHzncbRiTn3+BvBZgrFSDbUnyI= -github.com/coder/guts v1.5.0/go.mod h1:0Sbv5Kp83u1Nl7MIQiV2zmacJ3o02I341bkWkjWXSUQ= +github.com/coder/guts v1.6.1 h1:bMVBtDNP/1gW58NFRBdzStAQzXlveMrLAnORpwE9tYo= +github.com/coder/guts v1.6.1/go.mod h1:FaECwB632JE8nYi7nrKfO0PVjbOl4+hSWupKO2Z99JI= github.com/coder/pq v1.10.5-0.20250807075151-6ad9b0a25151 h1:YAxwg3lraGNRwoQ18H7R7n+wsCqNve7Brdvj0F1rDnU= github.com/coder/pq v1.10.5-0.20250807075151-6ad9b0a25151/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs= @@ -940,16 +946,16 @@ github.com/coder/quartz v0.2.1 h1:QgQ2Vc1+mvzewg2uD/nj8MJ9p9gE+QhGJm+Z+NGnrSE= github.com/coder/quartz v0.2.1/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= github.com/coder/retry v1.5.1/go.mod h1:blHMk9vs6LkoRT9ZHyuZo360cufXEhrxqvEzeMtRGoY= -github.com/coder/serpent v0.10.0 h1:ofVk9FJXSek+SmL3yVE3GoArP83M+1tX+H7S4t8BSuM= -github.com/coder/serpent v0.10.0/go.mod h1:cZFW6/fP+kE9nd/oRkEHJpG6sXCtQ+AX7WMMEHv0Y3Q= +github.com/coder/serpent v0.11.0 h1:VKIIbBg0ManopqqDsutBGf7YYTUXsPQgBx//m1SJQ90= +github.com/coder/serpent v0.11.0/go.mod h1:cZFW6/fP+kE9nd/oRkEHJpG6sXCtQ+AX7WMMEHv0Y3Q= github.com/coder/ssh 
v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= github.com/coder/tailscale v1.1.1-0.20250829055706-6eafe0f9199e h1:9RKGKzGLHtTvVBQublzDGtCtal3cXP13diCHoAIGPeI= github.com/coder/tailscale v1.1.1-0.20250829055706-6eafe0f9199e/go.mod h1:jU9T1vEs+DOs8NtGp1F2PT0/TOGVwtg/JCCKYRgvMOs= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= -github.com/coder/terraform-provider-coder/v2 v2.11.0 h1:E9mjwCDHoKTp9agCmNQ5viky4dKrt0Gx0vDxsLEPRf0= -github.com/coder/terraform-provider-coder/v2 v2.11.0/go.mod h1:q0cAdlM1cafFGQ8Vug5kwU+34S+wtJsqylxTjVXX+rw= +github.com/coder/terraform-provider-coder/v2 v2.12.0 h1:guxDoZdBRfZqAgVlsJ+TLvV2uIBQ4RelsRpSPOT84tk= +github.com/coder/terraform-provider-coder/v2 v2.12.0/go.mod h1:4LVPWatHaTAdQS1v5A0pVn3g8XkNKkQ/xh+U2oXr/o0= github.com/coder/trivy v0.0.0-20250807211036-0bb0acd620a8 h1:VYB/6cIIKsVkwXOAWbqpj4Ux+WwF/XTnRyvHcwfHZ7A= github.com/coder/trivy v0.0.0-20250807211036-0bb0acd620a8/go.mod h1:O73tP+UvJlI2GQZD060Jt0sf+6alKcGAgORh6sgB0+M= github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= @@ -970,8 +976,8 @@ github.com/containerd/platforms v1.0.0-rc.1 h1:83KIq4yy1erSRgOVHNk1HYdPvzdJ5CnsW github.com/containerd/platforms v1.0.0-rc.1/go.mod h1:J71L7B+aiM5SdIEqmd9wp6THLVRzJGXfNuWCZCllLA4= github.com/coreos/go-iptables v0.6.0 h1:is9qnZMPYjLd8LYqmm/qlE+wwEgJIkTYdhV3rfZo4jk= github.com/coreos/go-iptables v0.6.0/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= -github.com/coreos/go-oidc/v3 v3.15.0 h1:R6Oz8Z4bqWR7VFQ+sPSvZPQv4x8M+sJkDO5ojgwlyAg= -github.com/coreos/go-oidc/v3 v3.15.0/go.mod h1:HaZ3szPaZ0e4r6ebqvsLWlk2Tn+aejfmrfah6hnSYEU= +github.com/coreos/go-oidc/v3 v3.16.0 
h1:qRQUCFstKpXwmEjDQTIbyY/5jF00+asXzSkmkoa/mow= +github.com/coreos/go-oidc/v3 v3.16.0/go.mod h1:wqPbKFrVnE90vty060SB40FCJ8fTHTxSwyXJqZH+sI8= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf h1:iW4rZ826su+pqaw19uhpSCzhj44qo35pNgKFGqzDKkU= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= @@ -1064,8 +1070,8 @@ github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6 github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= github.com/esiqveland/notify v0.13.3 h1:QCMw6o1n+6rl+oLUfg8P1IIDSFsDEb2WlXvVvIJbI/o= github.com/esiqveland/notify v0.13.3/go.mod h1:hesw/IRYTO0x99u1JPweAl4+5mwXJibQVUcP0Iu5ORE= -github.com/evanw/esbuild v0.25.9 h1:aU7GVC4lxJGC1AyaPwySWjSIaNLAdVEEuq3chD0Khxs= -github.com/evanw/esbuild v0.25.9/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= +github.com/evanw/esbuild v0.25.11 h1:NGtezc+xk+Mti4fgWaoD3dncZNCzcTA+r0BxMV3Koyw= +github.com/evanw/esbuild v0.25.11/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= @@ -1094,13 +1100,12 @@ github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa h1:RDBNVkRviHZtvD github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= -github.com/gabriel-vasile/mimetype v1.4.8/go.mod 
h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= +github.com/gabriel-vasile/mimetype v1.4.10 h1:zyueNbySn/z8mJZHLt6IPw0KoZsiQNszIpU+bX4+ZK0= +github.com/gabriel-vasile/mimetype v1.4.10/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= github.com/gen2brain/beeep v0.11.1 h1:EbSIhrQZFDj1K2fzlMpAYlFOzV8YuNe721A58XcCTYI= github.com/gen2brain/beeep v0.11.1/go.mod h1:jQVvuwnLuwOcdctHn/uyh8horSBNJ8uGb9Cn2W4tvoc= github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= -github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/github/fakeca v0.1.0 h1:Km/MVOFvclqxPM9dZBC4+QE564nU4gz4iZ0D9pMw28I= github.com/github/fakeca v0.1.0/go.mod h1:+bormgoGMMuamOscx7N91aOuUST7wdaJ2rNjeohylyo= @@ -1130,8 +1135,8 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-jose/go-jose/v4 v4.1.1 h1:JYhSgy4mXXzAdF3nUx3ygx347LRXJRrpgyU3adRmkAI= -github.com/go-jose/go-jose/v4 v4.1.1/go.mod h1:BdsZGqgdO3b6tTc6LSE56wcDbMMLuPsw5d4ZD5f94kA= +github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs= +github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08= github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs= github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod 
h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= @@ -1163,8 +1168,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= -github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688= +github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= @@ -1185,22 +1190,24 @@ github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6Wezm github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs= github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc= github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= +github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod 
h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e h1:QArsSubW7eDh8APMXkByjQWvuljwPGAGQpJEFn0F0wY= -github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e/go.mod h1:3Ltoo9Banwq0gOtcOwxuHG6omk+AwsQPADyw2vQYOJQ= -github.com/gohugoio/hashstructure v0.5.0 h1:G2fjSBU36RdwEJBWJ+919ERvOVqAg9tfcYp47K9swqg= -github.com/gohugoio/hashstructure v0.5.0/go.mod h1:Ser0TniXuu/eauYmrwM4o64EBvySxNzITEOLlm4igec= -github.com/gohugoio/httpcache v0.7.0 h1:ukPnn04Rgvx48JIinZvZetBfHaWE7I01JR2Q2RrQ3Vs= -github.com/gohugoio/httpcache v0.7.0/go.mod h1:fMlPrdY/vVJhAriLZnrF5QpN3BNAcoBClgAyQd+lGFI= -github.com/gohugoio/hugo v0.150.0 h1:0IUwsfgF38m/VXA7e1+aii0cF7XgLAFcWgBKWlShIQY= -github.com/gohugoio/hugo v0.150.0/go.mod h1:+c6VffXGahbNCe/wTQ05FkKMjT/fZqN6Gq3tgBfUhNw= +github.com/gohugoio/go-i18n/v2 v2.1.3-0.20251018145728-cfcc22d823c6 h1:pxlAea9eRwuAnt/zKbGqlFO2ZszpIe24YpOVLf+N+4I= +github.com/gohugoio/go-i18n/v2 v2.1.3-0.20251018145728-cfcc22d823c6/go.mod h1:m5hu1im5Qc7LDycVLvee6MPobJiRLBYHklypFJR0/aE= +github.com/gohugoio/hashstructure v0.6.0 h1:7wMB/2CfXoThFYhdWRGv3u3rUM761Cq29CxUW+NltUg= +github.com/gohugoio/hashstructure v0.6.0/go.mod h1:lapVLk9XidheHG1IQ4ZSbyYrXcaILU1ZEP/+vno5rBQ= +github.com/gohugoio/httpcache v0.8.0 h1:hNdsmGSELztetYCsPVgjA960zSa4dfEqqF/SficorCU= +github.com/gohugoio/httpcache v0.8.0/go.mod h1:fMlPrdY/vVJhAriLZnrF5QpN3BNAcoBClgAyQd+lGFI= +github.com/gohugoio/hugo v0.152.2 
h1:k++AvrUCjFbq8lzzKRG5JizSwsBT/ARg6mMUXFDC5OA= +github.com/gohugoio/hugo v0.152.2/go.mod h1:eGE2cUADtMLFnb66WSlMJSNXXFrU6lLiYgDSP6H/Fm0= github.com/gohugoio/hugo-goldmark-extensions/extras v0.5.0 h1:dco+7YiOryRoPOMXwwaf+kktZSCtlFtreNdiJbETvYE= github.com/gohugoio/hugo-goldmark-extensions/extras v0.5.0/go.mod h1:CRrxQTKeM3imw+UoS4EHKyrqB7Zp6sAJiqHit+aMGTE= github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.3.1 h1:nUzXfRTszLliZuN0JTKeunXTRaiFX6ksaWP0puLLYAY= @@ -1377,8 +1384,8 @@ github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB1 github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= -github.com/hashicorp/go-plugin v1.6.3/go.mod h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0= +github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA= +github.com/hashicorp/go-plugin v1.7.0/go.mod h1:BExt6KEaIYx804z8k4gRzRLEvxKVb+kn0NMcihqOqb8= github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b h1:3GrpnZQBxcMj1gCXQLelfjCT1D5MPGTuGMKHVzSIH6A= github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b/go.mod h1:qIFzeFcJU3OIFk/7JreWXcUjFmcCaeHTH9KoNyHYVCs= github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48= @@ -1404,18 +1411,18 @@ github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQx github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/terraform-exec v0.23.0 
h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= -github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= +github.com/hashicorp/terraform-exec v0.23.1 h1:diK5NSSDXDKqHEOIQefBMu9ny+FhzwlwV0xgUTB7VTo= +github.com/hashicorp/terraform-exec v0.23.1/go.mod h1:e4ZEg9BJDRaSalGm2z8vvrPONt0XWG0/tXpmzYTf+dM= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE= -github.com/hashicorp/terraform-plugin-go v0.27.0 h1:ujykws/fWIdsi6oTUT5Or4ukvEan4aN9lY+LOxVP8EE= -github.com/hashicorp/terraform-plugin-go v0.27.0/go.mod h1:FDa2Bb3uumkTGSkTFpWSOwWJDwA7bf3vdP3ltLDTH6o= +github.com/hashicorp/terraform-plugin-go v0.29.0 h1:1nXKl/nSpaYIUBU1IG/EsDOX0vv+9JxAltQyDMpq5mU= +github.com/hashicorp/terraform-plugin-go v0.29.0/go.mod h1:vYZbIyvxyy0FWSmDHChCqKvI40cFTDGSb3D8D70i9GM= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 h1:NFPMacTrY/IdcIcnUB+7hsore1ZaRWU9cnB6jFoBnIM= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0/go.mod h1:QYmYnLfsosrxjCnGY1p9c7Zj6n9thnEE+7RObeYs3fA= -github.com/hashicorp/terraform-registry-address v0.2.5 h1:2GTftHqmUhVOeuu9CW3kwDkRe4pcBDq0uuK5VJngU1M= -github.com/hashicorp/terraform-registry-address v0.2.5/go.mod h1:PpzXWINwB5kuVS5CA7m1+eO2f1jKb5ZDIxrOPfpnGkg= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 h1:mlAq/OrMlg04IuJT7NpefI1wwtdpWudnEmjuQs04t/4= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1/go.mod h1:GQhpKVvvuwzD79e8/NZ+xzj+ZpWovdPAe8nfV/skwNU= +github.com/hashicorp/terraform-registry-address v0.4.0 h1:S1yCGomj30Sao4l5BMPjTGZmCNzuv7/GDTDX99E9gTk= +github.com/hashicorp/terraform-registry-address v0.4.0/go.mod h1:LRS1Ay0+mAiRkUyltGT+UHWkIqTFvigGn/LbMshfflE= 
github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= @@ -1482,8 +1489,8 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= -github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= -github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co= +github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= @@ -1516,8 +1523,8 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/liamg/memoryfs v1.6.0 h1:jAFec2HI1PgMTem5gR7UT8zi9u4BfG5jorCRlLH06W8= github.com/liamg/memoryfs v1.6.0/go.mod h1:z7mfqXFQS8eSeBBsFjYLlxYRMRyiPktytvYCYTb3BSk= -github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= -github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod 
h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 h1:PpXWgLPs+Fqr325bN2FD2ISlRRztXibcX6e8f5FR5Dc= github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= @@ -1586,8 +1593,8 @@ github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3N github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= -github.com/moby/moby v28.4.0+incompatible h1:Rhu/o+7EaHGx0MV3KOouThtr3hY33m3aKyA6GDR2QmQ= -github.com/moby/moby v28.4.0+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= +github.com/moby/moby v28.5.0+incompatible h1:eN6ksRE7BojoGW18USJGfyqhx/FWJPLs0jqaTNlfSsM= +github.com/moby/moby v28.5.0+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= @@ -1637,8 +1644,8 @@ github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5 github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y= github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI= github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g= -github.com/olekukonko/tablewriter v1.0.9 h1:XGwRsYLC2bY7bNd93Dk51bcPZksWZmLYuaTHR0FqfL8= -github.com/olekukonko/tablewriter v1.0.9/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo= +github.com/olekukonko/tablewriter v1.1.0 
h1:N0LHrshF4T39KvI96fn6GT8HEjXRXYNDrDjKFDB7RIY= +github.com/olekukonko/tablewriter v1.1.0/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo= github.com/open-policy-agent/opa v1.6.0 h1:/S/cnNQJ2MUMNzizHPbisTWBHowmLkPrugY5jjkPlRQ= github.com/open-policy-agent/opa v1.6.0/go.mod h1:zFmw4P+W62+CWGYRDDswfVYSCnPo6oYaktQnfIaRFC4= github.com/open-telemetry/opentelemetry-collector-contrib/pkg/sampling v0.120.1 h1:lK/3zr73guK9apbXTcnDnYrC0YCQ25V3CIULYz3k2xU= @@ -1772,8 +1779,8 @@ github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2 github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M= -github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/sqlc-dev/pqtype v0.3.0 h1:b09TewZ3cSnO5+M1Kqq05y0+OjqIptxELaSayg7bmqk= @@ -1822,10 +1829,10 @@ github.com/tc-hib/winres v0.2.1 h1:YDE0FiP0VmtRaDn7+aaChp1KiF4owBiJa5l964l5ujA= github.com/tc-hib/winres v0.2.1/go.mod h1:C/JaNhH3KBvhNKVbvdlDWkbMDO9H4fKKDaN7/07SSuk= github.com/tchap/go-patricia/v2 v2.3.2 h1:xTHFutuitO2zqKAQ5rCROYgUb7Or/+IC3fts9/Yc7nM= github.com/tchap/go-patricia/v2 v2.3.2/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= -github.com/tdewolff/minify/v2 v2.24.2 h1:vnY3nTulEAbCAAlxTxPPDkzG24rsq31SOzp63yT+7mo= -github.com/tdewolff/minify/v2 v2.24.2/go.mod h1:1JrCtoZXaDbqioQZfk3Jdmr0GPJKiU7c1Apmb+7tCeE= -github.com/tdewolff/parse/v2 v2.8.3 
h1:5VbvtJ83cfb289A1HzRA9sf02iT8YyUwN84ezjkdY1I= -github.com/tdewolff/parse/v2 v2.8.3/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo= +github.com/tdewolff/minify/v2 v2.24.5 h1:ytxthX3xSxrK3Xx5B38flg5moCKs/dB8VwiD/RzJViU= +github.com/tdewolff/minify/v2 v2.24.5/go.mod h1:q09KtNnVai7TyEzGEZeWPAnK+c8Z+NI8prCXZW652bo= +github.com/tdewolff/parse/v2 v2.8.5-0.20251020133559-0efcf90bef1a h1:Rmq+utdraciok/97XHRweYdsAo/M4LOswpCboo3yvN4= +github.com/tdewolff/parse/v2 v2.8.5-0.20251020133559-0efcf90bef1a/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo= github.com/tdewolff/test v1.0.11 h1:FdLbwQVHxqG16SlkGveC0JVyrJN62COWTRyUFzfbtBE= github.com/tdewolff/test v1.0.11/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= github.com/testcontainers/testcontainers-go v0.38.0 h1:d7uEapLcv2P8AvH8ahLqDMMxda2W9gQN1nRbHS28HBw= @@ -1866,8 +1873,8 @@ github.com/unrolled/secure v1.17.0 h1:Io7ifFgo99Bnh0J7+Q+qcMzWM6kaDPCA5FroFZEdbW github.com/unrolled/secure v1.17.0/go.mod h1:BmF5hyM6tXczk3MpQkFf1hpKSRqCyhqcbiQtiAF7+40= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.66.0 h1:M87A0Z7EayeyNaV6pfO3tUTUiYO0dZfEJnRGXTVNuyU= -github.com/valyala/fasthttp v1.66.0/go.mod h1:Y4eC+zwoocmXSVCB1JmhNbYtS7tZPRI2ztPB72EVObs= +github.com/valyala/fasthttp v1.68.0 h1:v12Nx16iepr8r9ySOwqI+5RBJ/DqTxhOy1HrHoDFnok= +github.com/valyala/fasthttp v1.68.0/go.mod h1:5EXiRfYQAoiO/khu4oU9VISC/eVY6JqmSpPJoHCKsz4= github.com/vektah/gqlparser/v2 v2.5.28 h1:bIulcl3LF69ba6EiZVGD88y4MkM+Jxrf3P2MX8xLRkY= github.com/vektah/gqlparser/v2 v2.5.28/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo= github.com/vishvananda/netlink v1.2.1-beta.2 h1:Llsql0lnQEbHj0I1OuKyp8otXp0r3q0mPkuhwHfStVs= @@ -2048,8 +2055,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf golang.org/x/crypto v0.23.0/go.mod 
h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI= -golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -2065,8 +2072,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/exp v0.0.0-20250911091902-df9299821621 h1:2id6c1/gto0kaHYyrixvknJ8tUK/Qs5IsmBtrc+FtgU= -golang.org/x/exp v0.0.0-20250911091902-df9299821621/go.mod h1:TwQYMMnGpvZyc+JpB/UAuTNIsVJifOlSkrZkhcvpVUk= +golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY= +golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod 
h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -2080,8 +2087,8 @@ golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeap golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4= -golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c= +golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ= +golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -2114,8 +2121,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U= -golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -2178,8 +2185,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/net v0.44.0 h1:evd8IRDyfNBMBTTY5XRF1vaZlD+EmWx6x8PkhR04H/I= -golang.org/x/net v0.44.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2209,8 +2216,8 @@ golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= -golang.org/x/oauth2 v0.31.0 h1:8Fq0yVZLh4j4YA47vHKFTa9Ew5XIrCP8LC6UeNZnLxo= -golang.org/x/oauth2 v0.31.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY= +golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -2330,8 +2337,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= -golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -2350,8 +2357,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= -golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ= -golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2374,16 +2381,16 @@ golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk= -golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.13.0 h1:eUlYslOIt32DgYD6utsuUeHs4d7AsEYLuIAdg7FlYgI= -golang.org/x/time v0.13.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -2449,8 +2456,8 @@ golang.org/x/tools v0.6.0/go.mod 
h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE= -golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2534,8 +2541,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/ google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= -google.golang.org/api v0.250.0 h1:qvkwrf/raASj82UegU2RSDGWi/89WkLckn4LuO4lVXM= -google.golang.org/api v0.250.0/go.mod h1:Y9Uup8bDLJJtMzJyQnu+rLRJLA0wn+wTtc6vTlOvfXo= +google.golang.org/api v0.253.0 h1:apU86Eq9Q2eQco3NsUYFpVTfy7DwemojL7LmbAj7g/I= +google.golang.org/api v0.253.0/go.mod h1:PX09ad0r/4du83vZVAaGg7OaeyGnaUmT/CYPNvtLCbw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -2678,10 +2685,10 @@ google.golang.org/genproto 
v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOl google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 h1:Nt6z9UHqSlIdIGJdz6KhTIs2VRx/iOsA5iE8bmQNcxs= google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79/go.mod h1:kTmlBHMPqR5uCZPBvwa2B18mvubkjyY3CRLI0c6fj0s= -google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79 h1:iOye66xuaAK0WnkPuhQPUFy8eJcmwUXqGGP3om6IxX8= -google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79/go.mod h1:HKJDgKsFUnv5VAGeQjz8kxcgDP0HoE0iZNp0OdZNlhE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090 h1:/OQuEa4YWtDt7uQWHd3q3sUMb+QOLQUg1xa8CEsRv5w= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og= +google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b h1:ULiyYQ0FdsJhwwZUwbaXpZF5yUE3h+RA+gxvBu37ucc= +google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:oDOGiMSXHL4sDTJvFvIB9nRQCGdLP1o/iVaqQK8zB+M= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f h1:1FTH6cpXFsENbPR5Bu8NQddPSaUUE6NA2XdZdDSAJK4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -2723,8 +2730,8 @@ google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc 
v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= -google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= +google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -2744,8 +2751,8 @@ google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= -google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/DataDog/dd-trace-go.v1 v1.74.0 h1:wScziU1ff6Bnyr8MEyxATPSLJdnLxKz3p6RsA8FUaek= gopkg.in/DataDog/dd-trace-go.v1 v1.74.0/go.mod h1:ReNBsNfnsjVC7GsCe80zRcykL/n+nxvsNrg3NbjuleM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/offlinedocs/package.json b/offlinedocs/package.json index 15c5ce793b135..26073286ddb65 100644 --- a/offlinedocs/package.json +++ b/offlinedocs/package.json @@ -38,7 +38,7 @@ "eslint": "8.57.1", "eslint-config-next": "14.2.33", 
"prettier": "3.6.2", - "typescript": "5.7.3" + "typescript": "5.9.3" }, "engines": { "npm": ">=9.0.0 <10.0.0", diff --git a/offlinedocs/pnpm-lock.yaml b/offlinedocs/pnpm-lock.yaml index 368b8515c6b54..7c4466814364c 100644 --- a/offlinedocs/pnpm-lock.yaml +++ b/offlinedocs/pnpm-lock.yaml @@ -78,13 +78,13 @@ importers: version: 8.57.1 eslint-config-next: specifier: 14.2.33 - version: 14.2.33(eslint@8.57.1)(typescript@5.7.3) + version: 14.2.33(eslint@8.57.1)(typescript@5.9.3) prettier: specifier: 3.6.2 version: 3.6.2 typescript: - specifier: 5.7.3 - version: 5.7.3 + specifier: 5.9.3 + version: 5.9.3 packages: @@ -2465,8 +2465,8 @@ packages: resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typescript@5.7.3: - resolution: {integrity: sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==} + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} engines: {node: '>=14.17'} hasBin: true @@ -3101,41 +3101,41 @@ snapshots: '@types/unist@3.0.3': {} - '@typescript-eslint/eslint-plugin@8.45.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint@8.57.1)(typescript@5.7.3)': + '@typescript-eslint/eslint-plugin@8.45.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.9.3) '@typescript-eslint/scope-manager': 8.45.0 - '@typescript-eslint/type-utils': 8.45.0(eslint@8.57.1)(typescript@5.7.3) - '@typescript-eslint/utils': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/type-utils': 8.45.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/utils': 8.45.0(eslint@8.57.1)(typescript@5.9.3) 
'@typescript-eslint/visitor-keys': 8.45.0 eslint: 8.57.1 graphemer: 1.4.0 ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.7.3) - typescript: 5.7.3 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3)': + '@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@typescript-eslint/scope-manager': 8.45.0 '@typescript-eslint/types': 8.45.0 - '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.7.3) + '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.9.3) '@typescript-eslint/visitor-keys': 8.45.0 debug: 4.4.3 eslint: 8.57.1 - typescript: 5.7.3 + typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.45.0(typescript@5.7.3)': + '@typescript-eslint/project-service@8.45.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.45.0(typescript@5.7.3) + '@typescript-eslint/tsconfig-utils': 8.45.0(typescript@5.9.3) '@typescript-eslint/types': 8.45.0 debug: 4.4.3 - typescript: 5.7.3 + typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -3144,28 +3144,28 @@ snapshots: '@typescript-eslint/types': 8.45.0 '@typescript-eslint/visitor-keys': 8.45.0 - '@typescript-eslint/tsconfig-utils@8.45.0(typescript@5.7.3)': + '@typescript-eslint/tsconfig-utils@8.45.0(typescript@5.9.3)': dependencies: - typescript: 5.7.3 + typescript: 5.9.3 - '@typescript-eslint/type-utils@8.45.0(eslint@8.57.1)(typescript@5.7.3)': + '@typescript-eslint/type-utils@8.45.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@typescript-eslint/types': 8.45.0 - '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.7.3) - '@typescript-eslint/utils': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.45.0(eslint@8.57.1)(typescript@5.9.3) debug: 4.4.3 eslint: 8.57.1 
- ts-api-utils: 2.1.0(typescript@5.7.3) - typescript: 5.7.3 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 transitivePeerDependencies: - supports-color '@typescript-eslint/types@8.45.0': {} - '@typescript-eslint/typescript-estree@8.45.0(typescript@5.7.3)': + '@typescript-eslint/typescript-estree@8.45.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.45.0(typescript@5.7.3) - '@typescript-eslint/tsconfig-utils': 8.45.0(typescript@5.7.3) + '@typescript-eslint/project-service': 8.45.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.45.0(typescript@5.9.3) '@typescript-eslint/types': 8.45.0 '@typescript-eslint/visitor-keys': 8.45.0 debug: 4.4.3 @@ -3173,19 +3173,19 @@ snapshots: is-glob: 4.0.3 minimatch: 9.0.5 semver: 7.7.2 - ts-api-utils: 2.1.0(typescript@5.7.3) - typescript: 5.7.3 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.45.0(eslint@8.57.1)(typescript@5.7.3)': + '@typescript-eslint/utils@8.45.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) '@typescript-eslint/scope-manager': 8.45.0 '@typescript-eslint/types': 8.45.0 - '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.7.3) + '@typescript-eslint/typescript-estree': 8.45.0(typescript@5.9.3) eslint: 8.57.1 - typescript: 5.7.3 + typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -3732,21 +3732,21 @@ snapshots: escape-string-regexp@5.0.0: {} - eslint-config-next@14.2.33(eslint@8.57.1)(typescript@5.7.3): + eslint-config-next@14.2.33(eslint@8.57.1)(typescript@5.9.3): dependencies: '@next/eslint-plugin-next': 14.2.33 '@rushstack/eslint-patch': 1.12.0 - '@typescript-eslint/eslint-plugin': 8.45.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint@8.57.1)(typescript@5.7.3) - '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/eslint-plugin': 
8.45.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.9.3) eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1) eslint-plugin-react: 7.37.5(eslint@8.57.1) eslint-plugin-react-hooks: 5.0.0-canary-7118f5dd7-20230705(eslint@8.57.1) optionalDependencies: - typescript: 5.7.3 + typescript: 5.9.3 transitivePeerDependencies: - eslint-import-resolver-webpack - eslint-plugin-import-x @@ -3771,22 +3771,22 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.9.3) eslint: 8.57.1 
eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -3797,7 +3797,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.7.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.45.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -3809,7 +3809,7 @@ snapshots: string.prototype.trimend: 1.0.9 tsconfig-paths: 3.15.0 optionalDependencies: - '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.7.3) + '@typescript-eslint/parser': 8.45.0(eslint@8.57.1)(typescript@5.9.3) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack @@ -5506,9 +5506,9 @@ snapshots: trough@2.2.0: {} - ts-api-utils@2.1.0(typescript@5.7.3): + ts-api-utils@2.1.0(typescript@5.9.3): dependencies: - typescript: 5.7.3 + typescript: 5.9.3 tsconfig-paths@3.15.0: dependencies: @@ -5562,7 +5562,7 @@ snapshots: possible-typed-array-names: 1.1.0 reflect.getprototypeof: 1.0.10 - typescript@5.7.3: {} + typescript@5.9.3: {} unbox-primitive@1.1.0: dependencies: diff --git a/provisioner/terraform/executor.go b/provisioner/terraform/executor.go index 8940a1708bf19..5ef7f626f9f58 100644 --- a/provisioner/terraform/executor.go +++ 
b/provisioner/terraform/executor.go @@ -6,6 +6,7 @@ import ( "context" "encoding/json" "fmt" + "hash/crc32" "io" "os" "os/exec" @@ -27,7 +28,10 @@ import ( "github.com/coder/coder/v2/provisionersdk/proto" ) -var version170 = version.Must(version.NewVersion("1.7.0")) +var ( + version170 = version.Must(version.NewVersion("1.7.0")) + version190 = version.Must(version.NewVersion("1.9.0")) +) type executor struct { logger slog.Logger @@ -220,7 +224,11 @@ func (e *executor) init(ctx, killCtx context.Context, logr logSink) error { e.mut.Lock() defer e.mut.Unlock() - outWriter, doneOut := logWriter(logr, proto.LogLevel_DEBUG) + // Record lock file checksum before init + lockFilePath := filepath.Join(e.workdir, ".terraform.lock.hcl") + preInitChecksum := checksumFileCRC32(ctx, e.logger, lockFilePath) + + outWriter, doneOut := e.provisionLogWriter(logr) errWriter, doneErr := logWriter(logr, proto.LogLevel_ERROR) defer func() { _ = outWriter.Close() @@ -239,14 +247,46 @@ func (e *executor) init(ctx, killCtx context.Context, logr logSink) error { "-input=false", } - err := e.execWriteOutput(ctx, killCtx, args, e.basicEnv(), outWriter, errBuf) + ver, err := e.version(ctx) + if err != nil { + return xerrors.Errorf("extract version: %w", err) + } + if ver.GreaterThanOrEqual(version190) { + // Added in v1.9.0: + args = append(args, "-json") + } + + err = e.execWriteOutput(ctx, killCtx, args, e.basicEnv(), outWriter, errBuf) var exitErr *exec.ExitError if xerrors.As(err, &exitErr) { if bytes.Contains(errBuf.b.Bytes(), []byte("text file busy")) { return &textFileBusyError{exitErr: exitErr, stderr: errBuf.b.String()} } } - return err + if err != nil { + return err + } + + // Check if lock file was modified + postInitChecksum := checksumFileCRC32(ctx, e.logger, lockFilePath) + if preInitChecksum != 0 && postInitChecksum != 0 && preInitChecksum != postInitChecksum { + e.logger.Warn(ctx, fmt.Sprintf(".terraform.lock.hcl was modified during init. 
This means provider hashes "+ + "are missing for the current platform (%s_%s). Update the lock file with:\n\n"+ + " terraform providers lock -platform=linux_amd64 -platform=linux_arm64 "+ + "-platform=darwin_amd64 -platform=darwin_arm64 -platform=windows_amd64\n", + runtime.GOOS, runtime.GOARCH), + ) + } + return nil +} + +func checksumFileCRC32(ctx context.Context, logger slog.Logger, path string) uint32 { + content, err := os.ReadFile(path) + if err != nil { + logger.Debug(ctx, "file %s does not exist or can't be read, skip checksum calculation") + return 0 + } + return crc32.ChecksumIEEE(content) } func getPlanFilePath(workdir string) string { @@ -781,6 +821,9 @@ func extractTimingSpan(log *terraformProvisionLog) (time.Time, *timingSpan, erro return time.Time{}, nil, xerrors.Errorf("unexpected timing kind: %q", log.Type) } + // Init logs omit millisecond precision, so using `time.Now` as a fallback + // for these logs is more precise than parsing the second precision alone. + // https://github.com/hashicorp/terraform/pull/37818 ts, err := time.Parse("2006-01-02T15:04:05.000000Z07:00", log.Timestamp) if err != nil { // TODO: log @@ -788,10 +831,11 @@ func extractTimingSpan(log *terraformProvisionLog) (time.Time, *timingSpan, erro } return ts, &timingSpan{ - kind: typ, - action: log.Hook.Action, - provider: log.Hook.Resource.Provider, - resource: log.Hook.Resource.Addr, + kind: typ, + messageCode: log.MessageCode, + action: log.Hook.Action, + provider: log.Hook.Resource.Provider, + resource: log.Hook.Resource.Addr, }, nil } @@ -814,11 +858,14 @@ func convertTerraformLogLevel(logLevel string, sink logSink) proto.LogLevel { } type terraformProvisionLog struct { - Level string `json:"@level"` - Message string `json:"@message"` - Timestamp string `json:"@timestamp"` - Type string `json:"type"` - Hook terraformProvisionLogHook `json:"hook"` + Level string `json:"@level"` + Message string `json:"@message"` + Timestamp string `json:"@timestamp"` + Type string 
`json:"type"` + // MessageCode is only set for init phase messages after Terraform 1.9.0 + // This field is not used by plan/apply. + MessageCode initMessageCode `json:"message_code,omitempty"` + Hook terraformProvisionLogHook `json:"hook"` Diagnostic *tfjson.Diagnostic `json:"diagnostic,omitempty"` } diff --git a/provisioner/terraform/executor_internal_test.go b/provisioner/terraform/executor_internal_test.go index a39d8758893b8..04d57a1e4c9f1 100644 --- a/provisioner/terraform/executor_internal_test.go +++ b/provisioner/terraform/executor_internal_test.go @@ -2,12 +2,14 @@ package terraform import ( "encoding/json" + "os" "testing" tfjson "github.com/hashicorp/terraform-json" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/testutil" ) type mockLogger struct { @@ -171,3 +173,46 @@ func TestOnlyDataResources(t *testing.T) { }) } } + +func TestChecksumFileCRC32(t *testing.T) { + t.Parallel() + + t.Run("file exists", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + logger := testutil.Logger(t) + + tmpfile, err := os.CreateTemp("", "lockfile-*.hcl") + require.NoError(t, err) + defer os.Remove(tmpfile.Name()) + + content := []byte("provider \"aws\" { version = \"5.0.0\" }") + _, err = tmpfile.Write(content) + require.NoError(t, err) + tmpfile.Close() + + // Calculate checksum - expected value for this specific content + expectedChecksum := uint32(0x08f39f51) + checksum := checksumFileCRC32(ctx, logger, tmpfile.Name()) + require.Equal(t, expectedChecksum, checksum) + + // Modify file + err = os.WriteFile(tmpfile.Name(), []byte("modified content"), 0o600) + require.NoError(t, err) + + // Checksum should be different + modifiedChecksum := checksumFileCRC32(ctx, logger, tmpfile.Name()) + require.NotEqual(t, expectedChecksum, modifiedChecksum) + }) + + t.Run("file does not exist", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + 
logger := testutil.Logger(t) + + checksum := checksumFileCRC32(ctx, logger, "/nonexistent/file.hcl") + require.Zero(t, checksum) + }) +} diff --git a/provisioner/terraform/inittimings.go b/provisioner/terraform/inittimings.go new file mode 100644 index 0000000000000..e72d237b5268f --- /dev/null +++ b/provisioner/terraform/inittimings.go @@ -0,0 +1,166 @@ +package terraform + +import ( + "slices" + "time" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/provisionersdk/proto" +) + +const ( + // defaultInitAction is a human-readable action for init timing spans. The coder + // frontend displays the action, which would be an empty string if not set to + // this constant. Setting it to "load" gives more context to users about what is + // happening during init. The init steps either "load" from disk or http. + defaultInitAction = "load" +) + +var ( + // resourceName maps init message codes to human-readable resource names. + // This is purely for better readability in the timing spans. + resourceName = map[initMessageCode]string{ + initInitializingBackendMessage: "backend", + initInitializingStateStoreMessage: "backend", + + initInitializingModulesMessage: "modules", + initUpgradingModulesMessage: "modules", + + initInitializingProviderPluginMessage: "provider plugins", + } + + // executionOrder is the expected sequential steps during `terraform init`. + // Some steps of the init have more than 1 possible "initMessageCode". + // + // In practice, since Coder has a defined way of running Terraform, only + // one code per step is expected. However, this allows for future-proofing + // in case Coder adds more Terraform init configurations. 
+ executionOrder = [][]initMessageCode{ + { + initInitializingBackendMessage, + initInitializingStateStoreMessage, // If using a state store backend + }, + { + initInitializingModulesMessage, + initUpgradingModulesMessage, // if "-upgrade" flag provided + }, + {initInitializingProviderPluginMessage}, + { + initOutputInitSuccessMessage, + initOutputInitSuccessCloudMessage, // If using terraform cloud + }, + } +) + +// ingestInitTiming handles ingesting timing spans from `terraform init` logs. +// These logs are formatted differently from plan/apply logs, so they need their +// own ingestion logic. +// +// The logs are also less granular, only indicating the start of major init +// steps, rather than per-resource actions. Since initialization is done +// serially, we can infer the end time of each stage from the start time of the +// next stage. +func (t *timingAggregator) ingestInitTiming(ts time.Time, s *timingSpan) { + switch s.messageCode { + case initInitializingBackendMessage, initInitializingStateStoreMessage: + // Backend loads the tfstate from the backend data source. For coder, this is + // always a state file on disk, making it nearly an instantaneous operation. + s.start = ts + s.state = proto.TimingState_STARTED + case initInitializingModulesMessage, initUpgradingModulesMessage: + s.start = ts + s.state = proto.TimingState_STARTED + case initInitializingProviderPluginMessage: + s.start = ts + s.state = proto.TimingState_STARTED + case initOutputInitSuccessMessage, initOutputInitSuccessCloudMessage: + // The final message indicates successful completion of init. There is no start + // message for this, but we want to continue the pattern such that this completes + // the previous stage. + s.end = ts + s.state = proto.TimingState_COMPLETED + default: + return + } + + // Init logs should be assigned to the init stage. + // Ideally the executor could use an `init` stage aggregator directly, but + // that would require a larger refactor. 
+ s.stage = database.ProvisionerJobTimingStageInit + // The default action is an empty string. Set it to "load" for some human readability. + s.action = defaultInitAction + // Resource name is an empty string. Name it something more useful. + s.resource = resourceName[s.messageCode] + + // finishPrevious completes the previous step in the init sequence, if applicable. + t.finishPrevious(ts, s) + + t.lookupMu.Lock() + // Memoize this span by its unique attributes and the determined state. + // This will be used in aggregate() to determine the duration of the resource action. + t.stateLookup[s.hashByState(s.state)] = s + t.lookupMu.Unlock() +} + +func (t *timingAggregator) finishPrevious(ts time.Time, s *timingSpan) { + index := slices.IndexFunc(executionOrder, func(codes []initMessageCode) bool { + return slices.Contains(codes, s.messageCode) + }) + if index <= 0 { + // If the index is not found or is the first item, nothing to complete. + return + } + + // Complete the previous message. + previousSteps := executionOrder[index-1] + + t.lookupMu.Lock() + // Complete the previous step. We are not tracking the state of these steps, so + // we cannot tell for sure what the previous step `MessageCode` was. The + // aggregator only reports timings that have a start & end. So if we end all + // possible previous step `MessageCodes`, the aggregator will only report the one + // that was actually started. + // + // This is a bit of a hack, but it works given the constraints of the init logs. + // Ideally we would store more state about the init steps. Or loop over the + // stored timings to find the one that was started. This is just simpler and + // accomplishes the same goal. 
+ for _, step := range previousSteps { + cpy := *s + cpy.start = time.Time{} + cpy.end = ts + cpy.messageCode = step + cpy.resource = resourceName[step] + cpy.state = proto.TimingState_COMPLETED + t.stateLookup[cpy.hashByState(cpy.state)] = &cpy + } + + t.lookupMu.Unlock() +} + +// mergeInitTimings merges manual init timings with existing timings that are +// sourced by the logs. This is done because prior to Terraform v1.9, init logs +// did not have a `-json` formatting option. +// So before v1.9, the init stage is manually timed outside the `terraform init`. +// After v1.9, the init stage is timed via logs. +func mergeInitTimings(manualInit []*proto.Timing, existing []*proto.Timing) []*proto.Timing { + initFailed := slices.ContainsFunc(existing, func(timing *proto.Timing) bool { + return timing.State == proto.TimingState_FAILED + }) + + if initFailed { + // The init logs do not provide enough information for failed init timings. + // So use the manual timings in this case. + return append(manualInit, existing...) + } + + hasInitStage := slices.ContainsFunc(existing, func(timing *proto.Timing) bool { + return timing.Stage == string(database.ProvisionerJobTimingStageInit) + }) + + if hasInitStage { + return existing + } + + return append(manualInit, existing...) +} diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 50648a4d3ef1e..ec9f96c3ed397 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -170,7 +170,7 @@ func (s *server) Plan( // Prepend init timings since they occur prior to plan timings. // Order is irrelevant; this is merely indicative. - resp.Timings = append(initTimings.aggregate(), resp.Timings...) 
+ resp.Timings = mergeInitTimings(initTimings.aggregate(), resp.Timings) resp.Modules = modules return resp } @@ -266,6 +266,8 @@ func provisionEnv( "CODER_WORKSPACE_TEMPLATE_NAME="+metadata.GetTemplateName(), "CODER_WORKSPACE_TEMPLATE_VERSION="+metadata.GetTemplateVersion(), "CODER_WORKSPACE_BUILD_ID="+metadata.GetWorkspaceBuildId(), + "CODER_TASK_ID="+metadata.GetTaskId(), + "CODER_TASK_PROMPT="+metadata.GetTaskPrompt(), ) if metadata.GetPrebuiltWorkspaceBuildStage().IsPrebuild() { env = append(env, provider.IsPrebuildEnvironmentVariable()+"=true") diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index 90a34e6d03a8c..450dd04b061a6 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -1048,29 +1048,6 @@ func TestProvision(t *testing.T) { }}, }, }, - { - Name: "ai-task-required-prompt-param", - Files: map[string]string{ - "main.tf": `terraform { - required_providers { - coder = { - source = "coder/coder" - version = ">= 2.7.0" - } - } - } - resource "coder_ai_task" "a" { - sidebar_app { - id = "7128be08-8722-44cb-bbe1-b5a391c4d94b" # fake ID, irrelevant here anyway but needed for validation - } - } - `, - }, - Request: &proto.PlanRequest{}, - Response: &proto.PlanComplete{ - Error: fmt.Sprintf("plan resources: coder_parameter named '%s' is required when 'coder_ai_task' resource is defined", provider.TaskPromptParameterName), - }, - }, { Name: "ai-task-multiple-allowed-in-plan", Files: map[string]string{ @@ -1160,6 +1137,39 @@ func TestProvision(t *testing.T) { }, SkipCacheProviders: true, }, + { + Name: "ai-task-app-id", + Files: map[string]string{ + "main.tf": `terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.12.0" + } + } + } + resource "coder_ai_task" "my-task" { + app_id = "7128be08-8722-44cb-bbe1-b5a391c4d94b" # fake ID, irrelevant here anyway but needed for validation + } + `, + }, + Response: &proto.PlanComplete{ + 
Resources: []*proto.Resource{ + { + Name: "my-task", + Type: "coder_ai_task", + }, + }, + AiTasks: []*proto.AITask{ + { + Id: "my-task", + AppId: "7128be08-8722-44cb-bbe1-b5a391c4d94b", + }, + }, + HasAiTasks: true, + }, + SkipCacheProviders: true, + }, } // Remove unused cache dirs before running tests. diff --git a/provisioner/terraform/resources.go b/provisioner/terraform/resources.go index 1b6c099038910..a65615e5f233e 100644 --- a/provisioner/terraform/resources.go +++ b/provisioner/terraform/resources.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "math" - "slices" "strings" "github.com/awalterschulze/gographviz" @@ -1023,14 +1022,16 @@ func ConvertState(ctx context.Context, modules []*tfjson.StateModule, rawGraph s return nil, xerrors.Errorf("decode coder_ai_task attributes: %w", err) } - if len(task.SidebarApp) < 1 { - return nil, xerrors.Errorf("coder_ai_task has no sidebar_app defined") + appID := task.AppID + if appID == "" && len(task.SidebarApp) > 0 { + appID = task.SidebarApp[0].ID } aiTasks = append(aiTasks, &proto.AITask{ - Id: task.ID, + Id: task.ID, + AppId: appID, SidebarApp: &proto.AITaskSidebarApp{ - Id: task.SidebarApp[0].ID, + Id: appID, }, }) } @@ -1066,14 +1067,6 @@ func ConvertState(ctx context.Context, modules []*tfjson.StateModule, rawGraph s } hasAITasks := hasAITaskResources(graph) - if hasAITasks { - hasPromptParam := slices.ContainsFunc(parameters, func(param *proto.RichParameter) bool { - return param.Name == provider.TaskPromptParameterName - }) - if !hasPromptParam { - return nil, xerrors.Errorf("coder_parameter named '%s' is required when 'coder_ai_task' resource is defined", provider.TaskPromptParameterName) - } - } return &State{ Resources: resources, diff --git a/provisioner/terraform/resources_test.go b/provisioner/terraform/resources_test.go index 715055c00cad9..a2c5b536ac2db 100644 --- a/provisioner/terraform/resources_test.go +++ b/provisioner/terraform/resources_test.go @@ -16,8 +16,6 @@ import ( 
"github.com/stretchr/testify/require" protobuf "google.golang.org/protobuf/proto" - "github.com/coder/terraform-provider-coder/v2/provider" - "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -1528,48 +1526,82 @@ func TestAITasks(t *testing.T) { t.Parallel() ctx, logger := ctxAndLogger(t) - t.Run("Prompt parameter is required", func(t *testing.T) { + t.Run("Multiple tasks can be defined", func(t *testing.T) { t.Parallel() // nolint:dogsled _, filename, _, _ := runtime.Caller(0) - dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "ai-tasks-missing-prompt") - tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "ai-tasks-missing-prompt.tfplan.json")) + dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "ai-tasks-multiple") + tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "ai-tasks-multiple.tfplan.json")) require.NoError(t, err) var tfPlan tfjson.Plan err = json.Unmarshal(tfPlanRaw, &tfPlan) require.NoError(t, err) - tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "ai-tasks-missing-prompt.tfplan.dot")) + tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "ai-tasks-multiple.tfplan.dot")) require.NoError(t, err) state, err := terraform.ConvertState(ctx, []*tfjson.StateModule{tfPlan.PlannedValues.RootModule, tfPlan.PriorState.Values.RootModule}, string(tfPlanGraph), logger) - require.Nil(t, state) - require.ErrorContains(t, err, fmt.Sprintf("coder_parameter named '%s' is required when 'coder_ai_task' resource is defined", provider.TaskPromptParameterName)) + require.NotNil(t, state) + require.NoError(t, err) + require.True(t, state.HasAITasks) + // Multiple coder_ai_tasks resources can be defined, but only 1 is allowed. + // This is validated once all parameters are resolved etc as part of the workspace build, but for now we can allow it. 
+ require.Len(t, state.AITasks, 2) }) - t.Run("Multiple tasks can be defined", func(t *testing.T) { + t.Run("Can use sidebar app ID", func(t *testing.T) { t.Parallel() // nolint:dogsled _, filename, _, _ := runtime.Caller(0) - dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "ai-tasks-multiple") - tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "ai-tasks-multiple.tfplan.json")) + dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "ai-tasks-sidebar") + tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "ai-tasks-sidebar.tfplan.json")) require.NoError(t, err) var tfPlan tfjson.Plan err = json.Unmarshal(tfPlanRaw, &tfPlan) require.NoError(t, err) - tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "ai-tasks-multiple.tfplan.dot")) + tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "ai-tasks-sidebar.tfplan.dot")) require.NoError(t, err) state, err := terraform.ConvertState(ctx, []*tfjson.StateModule{tfPlan.PlannedValues.RootModule, tfPlan.PriorState.Values.RootModule}, string(tfPlanGraph), logger) require.NotNil(t, state) require.NoError(t, err) require.True(t, state.HasAITasks) - // Multiple coder_ai_tasks resources can be defined, but only 1 is allowed. - // This is validated once all parameters are resolved etc as part of the workspace build, but for now we can allow it. 
- require.Len(t, state.AITasks, 2) + require.Len(t, state.AITasks, 1) + + sidebarApp := state.AITasks[0].GetSidebarApp() + require.NotNil(t, sidebarApp) + require.Equal(t, "5ece4674-dd35-4f16-88c8-82e40e72e2fd", sidebarApp.GetId()) + require.Equal(t, "5ece4674-dd35-4f16-88c8-82e40e72e2fd", state.AITasks[0].AppId) + }) + + t.Run("Can use app ID", func(t *testing.T) { + t.Parallel() + + // nolint:dogsled + _, filename, _, _ := runtime.Caller(0) + + dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "ai-tasks-app") + tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "ai-tasks-app.tfplan.json")) + require.NoError(t, err) + var tfPlan tfjson.Plan + err = json.Unmarshal(tfPlanRaw, &tfPlan) + require.NoError(t, err) + tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "ai-tasks-app.tfplan.dot")) + require.NoError(t, err) + + state, err := terraform.ConvertState(ctx, []*tfjson.StateModule{tfPlan.PlannedValues.RootModule, tfPlan.PriorState.Values.RootModule}, string(tfPlanGraph), logger) + require.NotNil(t, state) + require.NoError(t, err) + require.True(t, state.HasAITasks) + require.Len(t, state.AITasks, 1) + + sidebarApp := state.AITasks[0].GetSidebarApp() + require.NotNil(t, sidebarApp) + require.Equal(t, "5ece4674-dd35-4f16-88c8-82e40e72e2fd", sidebarApp.GetId()) + require.Equal(t, "5ece4674-dd35-4f16-88c8-82e40e72e2fd", state.AITasks[0].AppId) }) } diff --git a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.dot b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.dot similarity index 87% rename from provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.dot rename to provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.dot index 758e6c990ec1e..c36ff5323696a 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.dot +++ 
b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.dot @@ -2,19 +2,17 @@ digraph { compound = "true" newrank = "true" subgraph "root" { - "[root] coder_agent.main (expand)" [label = "coder_agent.main", shape = "box"] "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] - "[root] coder_agent.main (expand)" -> "[root] data.coder_provisioner.me (expand)" "[root] coder_ai_task.a (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" - "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_agent.main (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" diff --git a/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.json 
b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.json new file mode 100644 index 0000000000000..2669980027ba0 --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfplan.json @@ -0,0 +1,187 @@ +{ + "format_version": "1.2", + "terraform_version": "1.13.0", + "planned_values": { + "root_module": { + "resources": [ + { + "address": "coder_ai_task.a[0]", + "mode": "managed", + "type": "coder_ai_task", + "name": "a", + "index": 0, + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "sidebar_app": [] + }, + "sensitive_values": { + "sidebar_app": [] + } + } + ] + } + }, + "resource_changes": [ + { + "address": "coder_ai_task.a[0]", + "mode": "managed", + "type": "coder_ai_task", + "name": "a", + "index": 0, + "provider_name": "registry.terraform.io/coder/coder", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "sidebar_app": [] + }, + "after_unknown": { + "id": true, + "prompt": true, + "sidebar_app": [] + }, + "before_sensitive": false, + "after_sensitive": { + "sidebar_app": [] + } + } + } + ], + "prior_state": { + "format_version": "1.0", + "terraform_version": "1.13.0", + "values": { + "root_module": { + "resources": [ + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "arch": "amd64", + "id": "6e0bee77-2319-4094-a29e-6d14412399d2", + "os": "linux" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "access_port": 443, + "access_url": "https://dev.coder.com/", + "id": 
"5c06d6ea-101b-4069-8d14-7179df66ebcc", + "is_prebuild": false, + "is_prebuild_claim": false, + "name": "coder", + "prebuild_count": 0, + "start_count": 1, + "template_id": "", + "template_name": "", + "template_version": "", + "transition": "start" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 0, + "values": { + "email": "default@example.com", + "full_name": "coder", + "groups": [], + "id": "8796d8d7-88f1-445a-bea7-65f5cf530b95", + "login_type": null, + "name": "default", + "oidc_access_token": "", + "rbac_roles": [], + "session_token": "", + "ssh_private_key": "", + "ssh_public_key": "" + }, + "sensitive_values": { + "groups": [], + "oidc_access_token": true, + "rbac_roles": [], + "session_token": true, + "ssh_private_key": true + } + } + ] + } + } + }, + "configuration": { + "provider_config": { + "coder": { + "name": "coder", + "full_name": "registry.terraform.io/coder/coder", + "version_constraint": ">= 2.0.0" + } + }, + "root_module": { + "resources": [ + { + "address": "coder_ai_task.a", + "mode": "managed", + "type": "coder_ai_task", + "name": "a", + "provider_config_key": "coder", + "expressions": { + "app_id": { + "constant_value": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + }, + "schema_version": 1, + "count_expression": { + "constant_value": 1 + } + }, + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_config_key": "coder", + "schema_version": 1 + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_config_key": "coder", + "schema_version": 1 + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + "name": "me", + "provider_config_key": "coder", + "schema_version": 0 + } + ] + } + 
}, + "timestamp": "2025-10-09T14:27:27Z", + "applyable": true, + "complete": true, + "errored": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.dot b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.dot similarity index 87% rename from provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.dot rename to provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.dot index 758e6c990ec1e..c36ff5323696a 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.dot +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.dot @@ -2,19 +2,17 @@ digraph { compound = "true" newrank = "true" subgraph "root" { - "[root] coder_agent.main (expand)" [label = "coder_agent.main", shape = "box"] "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] - "[root] coder_agent.main (expand)" -> "[root] data.coder_provisioner.me (expand)" "[root] coder_ai_task.a (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" - "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] 
coder_agent.main (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" diff --git a/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.json b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.json new file mode 100644 index 0000000000000..a883d2143586c --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/ai-tasks-app.tfstate.json @@ -0,0 +1,93 @@ +{ + "format_version": "1.0", + "terraform_version": "1.13.0", + "values": { + "root_module": { + "resources": [ + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "arch": "amd64", + "id": "a1fe389c-ac5e-4e9d-ba76-bc23fe275cc0", + "os": "linux" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "access_port": 443, + "access_url": "https://dev.coder.com/", + "id": "bca94359-107b-43c9-a272-99af4b239aad", + "is_prebuild": false, + "is_prebuild_claim": false, + "name": "coder", + "prebuild_count": 0, + "start_count": 1, + "template_id": "", + "template_name": "", + "template_version": "", + "transition": "start" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + 
"name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 0, + "values": { + "email": "default@example.com", + "full_name": "coder", + "groups": [], + "id": "cb8c55f2-7f66-4e69-a584-eb08f4a7cf04", + "login_type": null, + "name": "default", + "oidc_access_token": "", + "rbac_roles": [], + "session_token": "", + "ssh_private_key": "", + "ssh_public_key": "" + }, + "sensitive_values": { + "groups": [], + "oidc_access_token": true, + "rbac_roles": [], + "session_token": true, + "ssh_private_key": true + } + }, + { + "address": "coder_ai_task.a[0]", + "mode": "managed", + "type": "coder_ai_task", + "name": "a", + "index": 0, + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "id": "c4f032b8-97e4-42b0-aa2f-30a9e698f8d4", + "prompt": "default", + "sidebar_app": [] + }, + "sensitive_values": { + "sidebar_app": [] + } + } + ] + } + } +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-app/main.tf b/provisioner/terraform/testdata/resources/ai-tasks-app/main.tf new file mode 100644 index 0000000000000..475e0560aec2b --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/main.tf @@ -0,0 +1,17 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.0.0" + } + } +} + +data "coder_provisioner" "me" {} +data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} + +resource "coder_ai_task" "a" { + count = 1 + app_id = "5ece4674-dd35-4f16-88c8-82e40e72e2fd" # fake ID to satisfy requirement, irrelevant otherwise +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.dot b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.dot index 4e660599e7a9b..2c05504b42460 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.dot +++ 
b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.dot @@ -4,20 +4,17 @@ digraph { subgraph "root" { "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] "[root] coder_ai_task.b (expand)" [label = "coder_ai_task.b", shape = "box"] - "[root] data.coder_parameter.prompt (expand)" [label = "data.coder_parameter.prompt", shape = "box"] "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] "[root] coder_ai_task.a (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] coder_ai_task.b (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" - "[root] data.coder_parameter.prompt (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.b (expand)" - "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_parameter.prompt (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" "[root] 
provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.json b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.json index c9f8fded6c192..f83c8646d7ae3 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.json +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfplan.json @@ -34,16 +34,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 1, "values": { - "sidebar_app": [ - { - "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" - } - ] + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "sidebar_app": [] }, "sensitive_values": { - "sidebar_app": [ - {} - ] + "sidebar_app": [] } } ] @@ -70,7 +65,9 @@ ] }, "after_unknown": { + "app_id": true, "id": true, + "prompt": true, "sidebar_app": [ {} ] @@ -96,23 +93,17 @@ ], "before": null, "after": { - "sidebar_app": [ - { - "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" - } - ] + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "sidebar_app": [] }, "after_unknown": { "id": true, - "sidebar_app": [ - {} - ] + "prompt": true, + "sidebar_app": [] }, "before_sensitive": false, "after_sensitive": { - "sidebar_app": [ - {} - ] + "sidebar_app": [] } } } @@ -123,35 +114,6 @@ "values": { "root_module": { "resources": [ - { - "address": "data.coder_parameter.prompt", - "mode": "data", - "type": "coder_parameter", - "name": "prompt", - "provider_name": "registry.terraform.io/coder/coder", - "schema_version": 1, - "values": { - "default": null, - "description": null, - "display_name": null, - "ephemeral": false, - "form_type": "input", - "icon": null, - "id": "f9502ef2-226e-49a6-8831-99a74f8415e7", - "mutable": false, - "name": "AI Prompt", - "option": null, - "optional": false, - "order": null, - "styling": "{}", - "type": "string", - "validation": [], - 
"value": "" - }, - "sensitive_values": { - "validation": [] - } - }, { "address": "data.coder_provisioner.me", "mode": "data", @@ -256,35 +218,15 @@ "name": "b", "provider_config_key": "coder", "expressions": { - "sidebar_app": [ - { - "id": { - "constant_value": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" - } - } - ] + "app_id": { + "constant_value": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } }, "schema_version": 1, "count_expression": { "constant_value": 1 } }, - { - "address": "data.coder_parameter.prompt", - "mode": "data", - "type": "coder_parameter", - "name": "prompt", - "provider_config_key": "coder", - "expressions": { - "name": { - "constant_value": "AI Prompt" - }, - "type": { - "constant_value": "string" - } - }, - "schema_version": 1 - }, { "address": "data.coder_provisioner.me", "mode": "data", diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.dot b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.dot index 4e660599e7a9b..2c05504b42460 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.dot +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.dot @@ -4,20 +4,17 @@ digraph { subgraph "root" { "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] "[root] coder_ai_task.b (expand)" [label = "coder_ai_task.b", shape = "box"] - "[root] data.coder_parameter.prompt (expand)" [label = "data.coder_parameter.prompt", shape = "box"] "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] "[root] coder_ai_task.a 
(expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] coder_ai_task.b (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" - "[root] data.coder_parameter.prompt (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.b (expand)" - "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_parameter.prompt (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.json b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.json index 8eb9ccecd7636..d97cffd45725e 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.json +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/ai-tasks-multiple.tfstate.json @@ -4,35 +4,6 @@ "values": { "root_module": { "resources": [ - { - "address": "data.coder_parameter.prompt", - "mode": "data", - "type": "coder_parameter", - "name": "prompt", - "provider_name": "registry.terraform.io/coder/coder", - "schema_version": 1, - "values": { - "default": null, - "description": null, - 
"display_name": null, - "ephemeral": false, - "form_type": "input", - "icon": null, - "id": "d3c415c0-c6bc-42e3-806e-8c792a7e7b3b", - "mutable": false, - "name": "AI Prompt", - "option": null, - "optional": false, - "order": null, - "styling": "{}", - "type": "string", - "validation": [], - "value": "" - }, - "sensitive_values": { - "validation": [] - } - }, { "address": "data.coder_provisioner.me", "mode": "data", @@ -105,7 +76,9 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 1, "values": { + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", "id": "89e6ab36-2e98-4d13-9b4c-69b7588b7e1d", + "prompt": "default", "sidebar_app": [ { "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" @@ -127,17 +100,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 1, "values": { - "id": "d4643699-519d-44c8-a878-556789256cbd", - "sidebar_app": [ - { - "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" - } - ] + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "prompt": "default", + "sidebar_app": [] }, "sensitive_values": { - "sidebar_app": [ - {} - ] + "sidebar_app": [] } } ] diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/main.tf b/provisioner/terraform/testdata/resources/ai-tasks-multiple/main.tf index 6c15ee4dc5a69..805b16ab313d8 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-multiple/main.tf +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/main.tf @@ -11,11 +11,6 @@ data "coder_provisioner" "me" {} data "coder_workspace" "me" {} data "coder_workspace_owner" "me" {} -data "coder_parameter" "prompt" { - name = "AI Prompt" - type = "string" -} - resource "coder_ai_task" "a" { count = 1 sidebar_app { @@ -24,8 +19,6 @@ resource "coder_ai_task" "a" { } resource "coder_ai_task" "b" { - count = 1 - sidebar_app { - id = "5ece4674-dd35-4f16-88c8-82e40e72e2fd" # fake ID to satisfy requirement, irrelevant otherwise - } + count = 1 + app_id = 
"5ece4674-dd35-4f16-88c8-82e40e72e2fd" # fake ID to satisfy requirement, irrelevant otherwise } diff --git a/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.dot b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.dot new file mode 100644 index 0000000000000..c36ff5323696a --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.dot @@ -0,0 +1,20 @@ +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] + "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] + "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] + "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] coder_ai_task.a (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + 
} +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.json b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.json similarity index 60% rename from provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.json rename to provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.json index ad255c7db7624..6a507463d1292 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfplan.json +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfplan.json @@ -5,41 +5,11 @@ "root_module": { "resources": [ { - "address": "coder_agent.main", - "mode": "managed", - "type": "coder_agent", - "name": "main", - "provider_name": "registry.terraform.io/coder/coder", - "schema_version": 1, - "values": { - "api_key_scope": "all", - "arch": "amd64", - "auth": "token", - "connection_timeout": 120, - "dir": null, - "env": null, - "metadata": [], - "motd_file": null, - "order": null, - "os": "linux", - "resources_monitoring": [], - "shutdown_script": null, - "startup_script": null, - "startup_script_behavior": "non-blocking", - "troubleshooting_url": null - }, - "sensitive_values": { - "display_apps": [], - "metadata": [], - "resources_monitoring": [], - "token": true - } - }, - { - "address": "coder_ai_task.a", + "address": "coder_ai_task.a[0]", "mode": "managed", "type": "coder_ai_task", "name": "a", + "index": 0, "provider_name": "registry.terraform.io/coder/coder", "schema_version": 1, "values": { @@ -60,55 +30,11 @@ }, "resource_changes": [ { - "address": "coder_agent.main", - "mode": "managed", - "type": "coder_agent", - "name": "main", - "provider_name": "registry.terraform.io/coder/coder", - "change": { - "actions": [ - "create" - ], - "before": null, - "after": { - "api_key_scope": "all", - "arch": "amd64", - "auth": "token", - "connection_timeout": 120, 
- "dir": null, - "env": null, - "metadata": [], - "motd_file": null, - "order": null, - "os": "linux", - "resources_monitoring": [], - "shutdown_script": null, - "startup_script": null, - "startup_script_behavior": "non-blocking", - "troubleshooting_url": null - }, - "after_unknown": { - "display_apps": true, - "id": true, - "init_script": true, - "metadata": [], - "resources_monitoring": [], - "token": true - }, - "before_sensitive": false, - "after_sensitive": { - "display_apps": [], - "metadata": [], - "resources_monitoring": [], - "token": true - } - } - }, - { - "address": "coder_ai_task.a", + "address": "coder_ai_task.a[0]", "mode": "managed", "type": "coder_ai_task", "name": "a", + "index": 0, "provider_name": "registry.terraform.io/coder/coder", "change": { "actions": [ @@ -123,7 +49,9 @@ ] }, "after_unknown": { + "app_id": true, "id": true, + "prompt": true, "sidebar_app": [ {} ] @@ -152,7 +80,7 @@ "schema_version": 1, "values": { "arch": "amd64", - "id": "5a6ecb8b-fd26-4cfc-91b1-651d06bee98c", + "id": "6b538d81-f0db-4e2b-8d85-4b87a1563d89", "os": "linux" }, "sensitive_values": {} @@ -167,7 +95,7 @@ "values": { "access_port": 443, "access_url": "https://dev.coder.com/", - "id": "bf9ee323-4f3a-4d45-9841-2dcd6265e830", + "id": "344575c1-55b9-43bb-89b5-35f547e2cf08", "is_prebuild": false, "is_prebuild_claim": false, "name": "sebenza-nonix", @@ -191,7 +119,7 @@ "email": "default@example.com", "full_name": "default", "groups": [], - "id": "0b8cbfb8-3925-41fe-9f21-21b76d21edc7", + "id": "acb465b5-2709-4392-9486-4ad6eb1c06e0", "login_type": null, "name": "default", "oidc_access_token": "", @@ -220,28 +148,6 @@ }, "root_module": { "resources": [ - { - "address": "coder_agent.main", - "mode": "managed", - "type": "coder_agent", - "name": "main", - "provider_config_key": "coder", - "expressions": { - "arch": { - "references": [ - "data.coder_provisioner.me.arch", - "data.coder_provisioner.me" - ] - }, - "os": { - "references": [ - "data.coder_provisioner.me.os", - 
"data.coder_provisioner.me" - ] - } - }, - "schema_version": 1 - }, { "address": "coder_ai_task.a", "mode": "managed", @@ -257,7 +163,10 @@ } ] }, - "schema_version": 1 + "schema_version": 1, + "count_expression": { + "constant_value": 1 + } }, { "address": "data.coder_provisioner.me", @@ -286,20 +195,6 @@ ] } }, - "relevant_attributes": [ - { - "resource": "data.coder_provisioner.me", - "attribute": [ - "arch" - ] - }, - { - "resource": "data.coder_provisioner.me", - "attribute": [ - "os" - ] - } - ], "timestamp": "2025-06-19T14:30:00Z", "applyable": true, "complete": true, diff --git a/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.dot b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.dot new file mode 100644 index 0000000000000..c36ff5323696a --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.dot @@ -0,0 +1,20 @@ +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_ai_task.a (expand)" [label = "coder_ai_task.a", shape = "box"] + "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] + "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] + "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] coder_ai_task.a (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] 
provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_ai_task.a (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + } +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.json b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.json similarity index 58% rename from provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.json rename to provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.json index 4d3affb7a31ed..947e3ee1e9485 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/ai-tasks-missing-prompt.tfstate.json +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/ai-tasks-sidebar.tfstate.json @@ -13,7 +13,7 @@ "schema_version": 1, "values": { "arch": "amd64", - "id": "e838328b-8dc2-49d0-b16c-42d6375cfb34", + "id": "764f8b0b-d931-4356-b1a8-446fa95fbeb0", "os": "linux" }, "sensitive_values": {} @@ -28,7 +28,7 @@ "values": { "access_port": 443, "access_url": "https://dev.coder.com/", - "id": "6a64b978-bd81-424a-92e5-d4004296f420", + "id": "b6713709-6736-4d2f-b3da-7b5b242df5f4", "is_prebuild": false, "is_prebuild_claim": false, "name": "sebenza-nonix", @@ -52,7 +52,7 @@ "email": "default@example.com", "full_name": "default", "groups": [], - "id": "ffb2e99a-efa7-4fb9-bb2c-aa282bb636c9", + "id": "0cc15fa2-24fc-4249-bdc7-56cf0af0f782", "login_type": null, "name": "default", "oidc_access_token": "", @@ -68,62 +68,17 @@ } }, { - "address": "coder_agent.main", - "mode": 
"managed", - "type": "coder_agent", - "name": "main", - "provider_name": "registry.terraform.io/coder/coder", - "schema_version": 1, - "values": { - "api_key_scope": "all", - "arch": "amd64", - "auth": "token", - "connection_timeout": 120, - "dir": null, - "display_apps": [ - { - "port_forwarding_helper": true, - "ssh_helper": true, - "vscode": true, - "vscode_insiders": false, - "web_terminal": true - } - ], - "env": null, - "id": "0c95434c-9e4b-40aa-bd9b-2a0cc86f11af", - "init_script": "", - "metadata": [], - "motd_file": null, - "order": null, - "os": "linux", - "resources_monitoring": [], - "shutdown_script": null, - "startup_script": null, - "startup_script_behavior": "non-blocking", - "token": "ac32e23e-336a-4e63-a7a4-71ab85f16831", - "troubleshooting_url": null - }, - "sensitive_values": { - "display_apps": [ - {} - ], - "metadata": [], - "resources_monitoring": [], - "token": true - }, - "depends_on": [ - "data.coder_provisioner.me" - ] - }, - { - "address": "coder_ai_task.a", + "address": "coder_ai_task.a[0]", "mode": "managed", "type": "coder_ai_task", "name": "a", + "index": 0, "provider_name": "registry.terraform.io/coder/coder", "schema_version": 1, "values": { - "id": "b83734ee-765f-45db-a37b-a1e89414be5f", + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "id": "89e6ab36-2e98-4d13-9b4c-69b7588b7e1d", + "prompt": "default", "sidebar_app": [ { "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" diff --git a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/main.tf b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/main.tf similarity index 76% rename from provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/main.tf rename to provisioner/terraform/testdata/resources/ai-tasks-sidebar/main.tf index 236baa1c9535b..6f1428eb83e99 100644 --- a/provisioner/terraform/testdata/resources/ai-tasks-missing-prompt/main.tf +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/main.tf @@ -11,12 +11,8 @@ data 
"coder_provisioner" "me" {} data "coder_workspace" "me" {} data "coder_workspace_owner" "me" {} -resource "coder_agent" "main" { - arch = data.coder_provisioner.me.arch - os = data.coder_provisioner.me.os -} - resource "coder_ai_task" "a" { + count = 1 sidebar_app { id = "5ece4674-dd35-4f16-88c8-82e40e72e2fd" # fake ID to satisfy requirement, irrelevant otherwise } diff --git a/provisioner/terraform/testdata/timings-aggregation/complete.txtar b/provisioner/terraform/testdata/timings-aggregation/complete.txtar index 40acb9ae06a65..564bbd45bf82a 100644 --- a/provisioner/terraform/testdata/timings-aggregation/complete.txtar +++ b/provisioner/terraform/testdata/timings-aggregation/complete.txtar @@ -1,5 +1,27 @@ A successful build which results in successful plan and apply timings. - +-- init -- +{"@level":"info","@message":"Terraform 1.13.3","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.576675-05:00","terraform":"1.13.3","type":"version","ui":"1.2"} +{"@level":"info","@message":"Initializing the backend...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_backend_message","type":"init_output"} +{"@level":"info","@message":"Initializing modules...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_modules_message","type":"init_output"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/cursor/coder 1.3.2 for cursor...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.780639-05:00","type":"log"} +{"@level":"info","@message":"- cursor in .terraform/modules/cursor","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.982904-05:00","type":"log"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/jetbrains/coder 1.1.0 for jetbrains...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.039894-05:00","type":"log"} +{"@level":"info","@message":"- jetbrains in 
.terraform/modules/jetbrains","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202355-05:00","type":"log"} +{"@level":"info","@message":"Downloading git::https://github.com/coder/large-module.git for large-5mb-module...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202394-05:00","type":"log"} +{"@level":"info","@message":"- large-5mb-module in .terraform/modules/large-5mb-module","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.799988-05:00","type":"log"} +{"@level":"info","@message":"Initializing provider plugins...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:31.000000Z","message_code":"initializing_provider_plugin_message","type":"init_output"} +{"@level":"info","@message":"kreuzwerker/docker: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.801342-05:00","type":"log"} +{"@level":"info","@message":"hashicorp/http: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.868885-05:00","type":"log"} +{"@level":"info","@message":"coder/coder: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.894724-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: hashicorp/http v3.5.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.081468-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: hashicorp/http v3.5.0 (signed by HashiCorp)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.375580-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: coder/coder v2.11.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.869110-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: coder/coder v2.11.0 (signed by a HashiCorp partnerkey_id: 
93C75807601AA0EC)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.350069-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: kreuzwerker/docker v3.6.2...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.572112-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: kreuzwerker/docker v3.6.2 (self-signedkey_id: BD080C4571C6104C)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458153-05:00","type":"log"} +{"@level":"info","@message":"Partner and community providers are signed by their developers.\nIf you'd like to know more about provider signing, you can read about it here:\nhttps://developer.hashicorp.com/terraform/cli/plugins/signing","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458177-05:00","type":"log"} +{"@level":"info","@message":"Terraform has been successfully initialized!","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34.000000Z","message_code":"output_init_success_message","type":"init_output"} +{"@level":"info","@message":"You may now begin working with Terraform. Try running \"terraform plan\" to see\nany changes that are required for your infrastructure. All Terraform commands\nshould now work.\n\nIf you ever set or change modules or backend configuration for Terraform,\nrerun this command to reinitialize your working directory. 
If you forget, other\ncommands will detect it and remind you to do so if necessary.","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34Z","message_code":"output_init_success_cli_message","type":"init_output"} -- plan -- {"@level":"info","@message":"Terraform 1.9.2","@module":"terraform.ui","@timestamp":"2024-08-15T10:26:38.097648+02:00","terraform":"1.9.2","type":"version","ui":"1.2"} {"@level":"info","@message":"data.coder_workspace.me: Refreshing...","@module":"terraform.ui","@timestamp":"2024-08-15T10:26:39.194726+02:00","hook":{"resource":{"addr":"data.coder_workspace.me","module":"","resource":"data.coder_workspace.me","implied_provider":"coder","resource_type":"coder_workspace","resource_name":"me","resource_key":null},"action":"read"},"type":"apply_start"} @@ -30,10 +52,13 @@ A successful build which results in successful plan and apply timings. {"@level":"info","@message":"Apply complete! Resources: 4 added, 0 changed, 0 destroyed.","@module":"terraform.ui","@timestamp":"2024-08-15T10:26:40.204593+02:00","changes":{"add":4,"change":0,"import":0,"remove":0,"operation":"apply"},"type":"change_summary"} {"@level":"info","@message":"Outputs: 0","@module":"terraform.ui","@timestamp":"2024-08-15T10:26:40.205051+02:00","outputs":{},"type":"outputs"} -- timings -- +{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:31Z","action":"load","resource":"modules","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:29Z","action":"load","resource":"backend","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:31Z","end":"2025-10-22T17:48:34Z","action":"load","resource":"provider plugins","stage":"init","state":"COMPLETED"} {"start":"2024-08-15T08:26:39.194726Z","end":"2024-08-15T08:26:39.195820Z","action":"read","source":"coder","resource":"data.coder_workspace.me","stage":"plan","state":"COMPLETED"} 
{"start":"2024-08-15T08:26:39.194726Z","end":"2024-08-15T08:26:39.195712Z","action":"read","source":"coder","resource":"data.coder_provisioner.me","stage":"plan","state":"COMPLETED"} {"start":"2024-08-15T08:26:39.194726Z","end":"2024-08-15T08:26:39.195836Z","action":"read","source":"coder","resource":"data.coder_parameter.memory_size","stage":"plan","state":"COMPLETED"} {"start":"2024-08-15T08:26:39.616546Z","end":"2024-08-15T08:26:39.618045Z","action":"create","source":"coder","resource":"coder_agent.main","stage":"apply","state":"COMPLETED"} {"start":"2024-08-15T08:26:39.626722Z","end":"2024-08-15T08:26:39.669954Z","action":"create","source":"docker","resource":"docker_image.main","stage":"apply","state":"COMPLETED"} {"start":"2024-08-15T08:26:39.627335Z","end":"2024-08-15T08:26:39.660616Z","action":"create","source":"docker","resource":"docker_volume.home_volume","stage":"apply","state":"COMPLETED"} -{"start":"2024-08-15T08:26:39.682223Z","end":"2024-08-15T08:26:40.186482Z","action":"create","source":"docker","resource":"docker_container.workspace[0]","stage":"apply","state":"COMPLETED"} \ No newline at end of file +{"start":"2024-08-15T08:26:39.682223Z","end":"2024-08-15T08:26:40.186482Z","action":"create","source":"docker","resource":"docker_container.workspace[0]","stage":"apply","state":"COMPLETED"} diff --git a/provisioner/terraform/testdata/timings-aggregation/fake-terraform.sh b/provisioner/terraform/testdata/timings-aggregation/fake-terraform.sh index 4eb0d11ad0ec6..582df28c62161 100755 --- a/provisioner/terraform/testdata/timings-aggregation/fake-terraform.sh +++ b/provisioner/terraform/testdata/timings-aggregation/fake-terraform.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash function terraform_version() { cat <<'EOL' @@ -58,24 +58,28 @@ EOL function terraform_init() { cat <<'EOL' - -Initializing the backend... - -Initializing provider plugins... 
-- Reusing previous version of coder/coder from the dependency lock file -- Reusing previous version of kreuzwerker/docker from the dependency lock file -- Using previously-installed coder/coder v1.0.1 -- Using previously-installed kreuzwerker/docker v3.0.2 - -Terraform has been successfully initialized! - -You may now begin working with Terraform. Try running "terraform plan" to see -any changes that are required for your infrastructure. All Terraform commands -should now work. - -If you ever set or change modules or backend configuration for Terraform, -rerun this command to reinitialize your working directory. If you forget, other -commands will detect it and remind you to do so if necessary. +{"@level":"info","@message":"Terraform 1.13.3","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.576675-05:00","terraform":"1.13.3","type":"version","ui":"1.2"} +{"@level":"info","@message":"Initializing the backend...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_backend_message","type":"init_output"} +{"@level":"info","@message":"Initializing modules...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_modules_message","type":"init_output"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/cursor/coder 1.3.2 for cursor...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.780639-05:00","type":"log"} +{"@level":"info","@message":"- cursor in .terraform/modules/cursor","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.982904-05:00","type":"log"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/jetbrains/coder 1.1.0 for jetbrains...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.039894-05:00","type":"log"} +{"@level":"info","@message":"- jetbrains in .terraform/modules/jetbrains","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202355-05:00","type":"log"} 
+{"@level":"info","@message":"Downloading git::https://github.com/coder/large-module.git for large-5mb-module...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202394-05:00","type":"log"} +{"@level":"info","@message":"- large-5mb-module in .terraform/modules/large-5mb-module","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.799988-05:00","type":"log"} +{"@level":"info","@message":"Initializing provider plugins...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:31.000000Z","message_code":"initializing_provider_plugin_message","type":"init_output"} +{"@level":"info","@message":"kreuzwerker/docker: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.801342-05:00","type":"log"} +{"@level":"info","@message":"hashicorp/http: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.868885-05:00","type":"log"} +{"@level":"info","@message":"coder/coder: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.894724-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: hashicorp/http v3.5.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.081468-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: hashicorp/http v3.5.0 (signed by HashiCorp)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.375580-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: coder/coder v2.11.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.869110-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: coder/coder v2.11.0 (signed by a HashiCorp partnerkey_id: 93C75807601AA0EC)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.350069-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: kreuzwerker/docker 
v3.6.2...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.572112-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: kreuzwerker/docker v3.6.2 (self-signedkey_id: BD080C4571C6104C)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458153-05:00","type":"log"} +{"@level":"info","@message":"Partner and community providers are signed by their developers.\nIf you'd like to know more about provider signing, you can read about it here:\nhttps://developer.hashicorp.com/terraform/cli/plugins/signing","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458177-05:00","type":"log"} +{"@level":"info","@message":"Terraform has been successfully initialized!","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34.000000Z","message_code":"output_init_success_message","type":"init_output"} +{"@level":"info","@message":"You may now begin working with Terraform. Try running \"terraform plan\" to see\nany changes that are required for your infrastructure. All Terraform commands\nshould now work.\n\nIf you ever set or change modules or backend configuration for Terraform,\nrerun this command to reinitialize your working directory. If you forget, other\ncommands will detect it and remind you to do so if necessary.","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34Z","message_code":"output_init_success_cli_message","type":"init_output"} EOL } diff --git a/provisioner/terraform/testdata/timings-aggregation/init.txtar b/provisioner/terraform/testdata/timings-aggregation/init.txtar index df9db78255d51..a4b0f640c6707 100644 --- a/provisioner/terraform/testdata/timings-aggregation/init.txtar +++ b/provisioner/terraform/testdata/timings-aggregation/init.txtar @@ -2,8 +2,6 @@ Init produces JSON logs, but not with discrete fields which we can parse. 
It only gained the ability to output JSON logs in v1.9.0 (https://github.com/hashicorp/terraform/blob/v1.9/CHANGELOG.md#190-june-26-2024), so I've included the non-JSON logs as well. -Neither one produces any timings. - -- init -- # Before v1.9.0 Initializing the backend... @@ -24,15 +22,30 @@ If you ever set or change modules or backend configuration for Terraform, rerun this command to reinitialize your working directory. If you forget, other commands will detect it and remind you to do so if necessary. -# After v1.9.0 -{"@level":"info","@message":"Terraform 1.9.2","@module":"terraform.ui","@timestamp":"2024-08-15T09:19:30.835464+02:00","terraform":"1.9.2","type":"version","ui":"1.2"} -{"@level":"info","@message":"Initializing the backend...","@module":"terraform.ui","@timestamp":"2024-08-15T07:19:30Z","message_code":"initializing_backend_message","type":"init_output"} -{"@level":"info","@message":"Initializing modules...","@module":"terraform.ui","@timestamp":"2024-08-15T07:19:30Z","message_code":"initializing_modules_message","type":"init_output"} -{"@level":"info","@message":"Initializing provider plugins...","@module":"terraform.ui","@timestamp":"2024-08-15T07:19:30Z","message_code":"initializing_provider_plugin_message","type":"init_output"} -{"@level":"info","@message":"coder/coder: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2024-08-15T09:19:30.870861+02:00","type":"log"} -{"@level":"info","@message":"hashicorp/http: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2024-08-15T09:19:31.282247+02:00","type":"log"} -{"@level":"info","@message":"coder/coder v1.0.1: Using previously-installed provider version","@module":"terraform.ui","@timestamp":"2024-08-15T09:19:31.466355+02:00","type":"log"} -{"@level":"info","@message":"hashicorp/http v3.4.4: Using previously-installed provider 
version","@module":"terraform.ui","@timestamp":"2024-08-15T09:19:31.479221+02:00","type":"log"} -{"@level":"info","@message":"Terraform has been successfully initialized!","@module":"terraform.ui","@timestamp":"2024-08-15T07:19:31Z","message_code":"output_init_success_message","type":"init_output"} -{"@level":"info","@message":"You may now begin working with Terraform. Try running \"terraform plan\" to see\nany changes that are required for your infrastructure. All Terraform commands\nshould now work.\n\nIf you ever set or change modules or backend configuration for Terraform,\nrerun this command to reinitialize your working directory. If you forget, other\ncommands will detect it and remind you to do so if necessary.","@module":"terraform.ui","@timestamp":"2024-08-15T07:19:31Z","message_code":"output_init_success_cli_message","type":"init_output"} --- timings -- \ No newline at end of file +# After v1.9.0, uncached +{"@level":"info","@message":"Terraform 1.13.3","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.576675-05:00","terraform":"1.13.3","type":"version","ui":"1.2"} +{"@level":"info","@message":"Initializing the backend...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_backend_message","type":"init_output"} +{"@level":"info","@message":"Initializing modules...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:29.000000Z","message_code":"initializing_modules_message","type":"init_output"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/cursor/coder 1.3.2 for cursor...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.780639-05:00","type":"log"} +{"@level":"info","@message":"- cursor in .terraform/modules/cursor","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:29.982904-05:00","type":"log"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/jetbrains/coder 1.1.0 for 
jetbrains...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.039894-05:00","type":"log"} +{"@level":"info","@message":"- jetbrains in .terraform/modules/jetbrains","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202355-05:00","type":"log"} +{"@level":"info","@message":"Downloading git::https://github.com/coder/large-module.git for large-5mb-module...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:30.202394-05:00","type":"log"} +{"@level":"info","@message":"- large-5mb-module in .terraform/modules/large-5mb-module","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.799988-05:00","type":"log"} +{"@level":"info","@message":"Initializing provider plugins...","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:31.000000Z","message_code":"initializing_provider_plugin_message","type":"init_output"} +{"@level":"info","@message":"kreuzwerker/docker: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.801342-05:00","type":"log"} +{"@level":"info","@message":"hashicorp/http: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.868885-05:00","type":"log"} +{"@level":"info","@message":"coder/coder: Reusing previous version from the dependency lock file","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:31.894724-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: hashicorp/http v3.5.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.081468-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: hashicorp/http v3.5.0 (signed by HashiCorp)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.375580-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: coder/coder v2.11.0...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:32.869110-05:00","type":"log"} +{"@level":"info","@message":"Installed provider 
version: coder/coder v2.11.0 (signed by a HashiCorp partnerkey_id: 93C75807601AA0EC)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.350069-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: kreuzwerker/docker v3.6.2...","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:33.572112-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: kreuzwerker/docker v3.6.2 (self-signedkey_id: BD080C4571C6104C)","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458153-05:00","type":"log"} +{"@level":"info","@message":"Partner and community providers are signed by their developers.\nIf you'd like to know more about provider signing, you can read about it here:\nhttps://developer.hashicorp.com/terraform/cli/plugins/signing","@module":"terraform.ui","@timestamp":"2025-10-22T12:48:34.458177-05:00","type":"log"} +{"@level":"info","@message":"Terraform has been successfully initialized!","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34.000000Z","message_code":"output_init_success_message","type":"init_output"} +{"@level":"info","@message":"You may now begin working with Terraform. Try running \"terraform plan\" to see\nany changes that are required for your infrastructure. All Terraform commands\nshould now work.\n\nIf you ever set or change modules or backend configuration for Terraform,\nrerun this command to reinitialize your working directory. 
If you forget, other\ncommands will detect it and remind you to do so if necessary.","@module":"terraform.ui","@timestamp":"2025-10-22T17:48:34Z","message_code":"output_init_success_cli_message","type":"init_output"} +-- timings -- +{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:31Z","action":"load","resource":"modules","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:29Z","action":"load","resource":"backend","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:31Z","end":"2025-10-22T17:48:34Z","action":"load","resource":"provider plugins","stage":"init","state":"COMPLETED"} diff --git a/provisioner/terraform/testdata/timings-aggregation/initupgrade.txtar b/provisioner/terraform/testdata/timings-aggregation/initupgrade.txtar new file mode 100644 index 0000000000000..25472b1a3728e --- /dev/null +++ b/provisioner/terraform/testdata/timings-aggregation/initupgrade.txtar @@ -0,0 +1,29 @@ +# terraform init -upgrade -json +-- init -- +{"@level":"info","@message":"Terraform 1.13.3","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:51.988373-05:00","terraform":"1.13.3","type":"version","ui":"1.2"} +{"@level":"info","@message":"Initializing the backend...","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:51.000000Z","message_code":"initializing_backend_message","type":"init_output"} +{"@level":"info","@message":"Upgrading modules...","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:51.000000Z","message_code":"upgrading_modules_message","type":"init_output"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/cursor/coder 1.3.2 for cursor...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:52.152388-05:00","type":"log"} +{"@level":"info","@message":"- cursor in .terraform/modules/cursor","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:52.394592-05:00","type":"log"} +{"@level":"info","@message":"Downloading registry.coder.com/coder/jetbrains/coder 1.1.0 for 
jetbrains...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:52.450002-05:00","type":"log"} +{"@level":"info","@message":"- jetbrains in .terraform/modules/jetbrains","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:52.686200-05:00","type":"log"} +{"@level":"info","@message":"Downloading git::https://github.com/coder/large-module.git for large-5mb-module...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:52.686229-05:00","type":"log"} +{"@level":"info","@message":"- large-5mb-module in .terraform/modules/large-5mb-module","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.298240-05:00","type":"log"} +{"@level":"info","@message":"Initializing provider plugins...","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:54.000000Z","message_code":"initializing_provider_plugin_message","type":"init_output"} +{"@level":"info","@message":"Finding matching versions for provider: hashicorp/http, version_constraint: \"\u003e= 3.0.0\"","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.299465-05:00","type":"log"} +{"@level":"info","@message":"Finding matching versions for provider: coder/coder, version_constraint: \"\u003e= 2.5.0, ~\u003e 2.9\"","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.364986-05:00","type":"log"} +{"@level":"info","@message":"Finding matching versions for provider: kreuzwerker/docker, version_constraint: \"~\u003e 3.0\"","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.391509-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: hashicorp/http v3.5.0...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.605182-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: hashicorp/http v3.5.0 (signed by HashiCorp)","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:54.892077-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: coder/coder 
v2.12.0...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:55.246866-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: coder/coder v2.12.0 (signed by a HashiCorp partnerkey_id: 93C75807601AA0EC)","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:55.641603-05:00","type":"log"} +{"@level":"info","@message":"Installing provider version: kreuzwerker/docker v3.6.2...","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:55.862015-05:00","type":"log"} +{"@level":"info","@message":"Installed provider version: kreuzwerker/docker v3.6.2 (self-signedkey_id: BD080C4571C6104C)","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:56.699002-05:00","type":"log"} +{"@level":"info","@message":"Partner and community providers are signed by their developers.\nIf you'd like to know more about provider signing, you can read about it here:\nhttps://developer.hashicorp.com/terraform/cli/plugins/signing","@module":"terraform.ui","@timestamp":"2025-10-27T14:00:56.699025-05:00","type":"log"} +{"@level":"info","@message":"Terraform has made some changes to the provider dependency selections recorded\nin the .terraform.lock.hcl file. Review those changes and commit them to your\nversion control system if they represent changes you intended to make.","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:56Z","message_code":"dependencies_lock_changes_info","type":"init_output"} +{"@level":"info","@message":"Terraform has been successfully initialized!","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:56.000000Z","message_code":"output_init_success_message","type":"init_output"} +{"@level":"info","@message":"You may now begin working with Terraform. Try running \"terraform plan\" to see\nany changes that are required for your infrastructure. All Terraform commands\nshould now work.\n\nIf you ever set or change modules or backend configuration for Terraform,\nrerun this command to reinitialize your working directory. 
If you forget, other\ncommands will detect it and remind you to do so if necessary.","@module":"terraform.ui","@timestamp":"2025-10-27T19:00:56Z","message_code":"output_init_success_cli_message","type":"init_output"} +-- timings -- +{"start":"2025-10-27T19:00:51Z","end":"2025-10-27T19:00:54Z","action":"load","resource":"modules","stage":"init","state":"COMPLETED"} +{"start":"2025-10-27T19:00:51Z","end":"2025-10-27T19:00:51Z","action":"load","resource":"backend","stage":"init","state":"COMPLETED"} +{"start":"2025-10-27T19:00:54Z","end":"2025-10-27T19:00:56Z","action":"load","resource":"provider plugins","stage":"init","state":"COMPLETED"} diff --git a/provisioner/terraform/timings.go b/provisioner/terraform/timings.go index 370836229dd73..d2fe74239826e 100644 --- a/provisioner/terraform/timings.go +++ b/provisioner/terraform/timings.go @@ -48,6 +48,28 @@ const ( timingInitOutput timingKind = "init_output" ) +// Source: https://github.com/hashicorp/terraform/blob/6b73f710f8152ef4808e4de5bdfb35314442f4a5/internal/command/views/init.go#L267-L321 +type initMessageCode string + +const ( + initCopyingConfigurationMessage initMessageCode = "copying_configuration_message" + initEmptyMessage initMessageCode = "empty_message" + initOutputInitEmptyMessage initMessageCode = "output_init_empty_message" + initOutputInitSuccessMessage initMessageCode = "output_init_success_message" + initOutputInitSuccessCloudMessage initMessageCode = "output_init_success_cloud_message" + initOutputInitSuccessCLIMessage initMessageCode = "output_init_success_cli_message" + initOutputInitSuccessCLICloudMessage initMessageCode = "output_init_success_cli_cloud_message" + initUpgradingModulesMessage initMessageCode = "upgrading_modules_message" + initInitializingTerraformCloudMessage initMessageCode = "initializing_terraform_cloud_message" + initInitializingModulesMessage initMessageCode = "initializing_modules_message" + initInitializingBackendMessage initMessageCode = "initializing_backend_message" 
+ initInitializingStateStoreMessage initMessageCode = "initializing_state_store_message" + initDefaultWorkspaceCreatedMessage initMessageCode = "default_workspace_created_message" + initInitializingProviderPluginMessage initMessageCode = "initializing_provider_plugin_message" + initLockInfo initMessageCode = "lock_info" + initDependenciesLockChangesInfo initMessageCode = "dependencies_lock_changes_info" +) + type timingAggregator struct { stage database.ProvisionerJobTimingStage @@ -57,7 +79,9 @@ type timingAggregator struct { } type timingSpan struct { - kind timingKind + kind timingKind + // messageCode is only present in `terraform init` timings. + messageCode initMessageCode start, end time.Time stage database.ProvisionerJobTimingStage action, provider, resource string @@ -94,6 +118,10 @@ func (t *timingAggregator) ingest(ts time.Time, s *timingSpan) { case timingApplyErrored, timingProvisionErrored, timingInitErrored, timingGraphErrored: s.end = ts s.state = proto.TimingState_FAILED + case timingInitOutput: + // init timings are based on the init message code. + t.ingestInitTiming(ts, s) + return default: // We just want start/end timings, ignore all other events. return @@ -182,7 +210,7 @@ func (l timingKind) Valid() bool { // if all other attributes are identical. func (l timingKind) Category() string { switch l { - case timingInitStart, timingInitComplete, timingInitErrored: + case timingInitStart, timingInitComplete, timingInitErrored, timingInitOutput: return "init" case timingGraphStart, timingGraphComplete, timingGraphErrored: return "graph" @@ -201,6 +229,9 @@ func (l timingKind) Category() string { // The combination of resource and provider names MUST be unique across entries. 
func (e *timingSpan) hashByState(state proto.TimingState) uint64 { id := fmt.Sprintf("%s:%s:%s:%s:%s", e.kind.Category(), state.String(), e.action, e.resource, e.provider) + if e.messageCode != "" { + id += ":" + string(e.messageCode) + } return xxhash.Sum64String(id) } @@ -226,7 +257,7 @@ func createInitTimingsEvent(event timingKind) (time.Time, *timingSpan) { kind: event, action: "initializing terraform", provider: "terraform", - resource: "state file", + resource: "init", } } diff --git a/provisioner/terraform/timings_internal_test.go b/provisioner/terraform/timings_internal_test.go index 95dc47318e2d0..552bec5a1953e 100644 --- a/provisioner/terraform/timings_internal_test.go +++ b/provisioner/terraform/timings_internal_test.go @@ -22,6 +22,8 @@ var ( inputSimple []byte //go:embed testdata/timings-aggregation/init.txtar inputInit []byte + //go:embed testdata/timings-aggregation/initupgrade.txtar + inputInitUpgrade []byte //go:embed testdata/timings-aggregation/error.txtar inputError []byte //go:embed testdata/timings-aggregation/complete.txtar @@ -45,6 +47,10 @@ func TestAggregation(t *testing.T) { name: "init", input: inputInit, }, + { + name: "initupgrade", + input: inputInitUpgrade, + }, { name: "simple", input: inputSimple, diff --git a/provisioner/terraform/timings_test.go b/provisioner/terraform/timings_test.go index ec91caf301831..fe167b830ff05 100644 --- a/provisioner/terraform/timings_test.go +++ b/provisioner/terraform/timings_test.go @@ -98,6 +98,9 @@ func TestTimingsFromProvision(t *testing.T) { // Then: the received timings should match the expected values below. // NOTE: These timings have been encoded to JSON format to make the tests more readable. 
+ initTimings := terraform_internal.ParseTimingLines(t, []byte(`{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:31Z","action":"load","resource":"modules","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:29Z","end":"2025-10-22T17:48:29Z","action":"load","resource":"backend","stage":"init","state":"COMPLETED"} +{"start":"2025-10-22T17:48:31Z","end":"2025-10-22T17:48:34Z","action":"load","resource":"provider plugins","stage":"init","state":"COMPLETED"}`)) planTimings := terraform_internal.ParseTimingLines(t, []byte(`{"start":"2024-08-15T08:26:39.194726Z", "end":"2024-08-15T08:26:39.195836Z", "action":"read", "source":"coder", "resource":"data.coder_parameter.memory_size", "stage":"plan", "state":"COMPLETED"} {"start":"2024-08-15T08:26:39.194726Z", "end":"2024-08-15T08:26:39.195712Z", "action":"read", "source":"coder", "resource":"data.coder_provisioner.me", "stage":"plan", "state":"COMPLETED"} {"start":"2024-08-15T08:26:39.194726Z", "end":"2024-08-15T08:26:39.195820Z", "action":"read", "source":"coder", "resource":"data.coder_workspace.me", "stage":"plan", "state":"COMPLETED"}`)) @@ -105,10 +108,10 @@ func TestTimingsFromProvision(t *testing.T) { {"start":"2024-08-15T08:26:39.626722Z", "end":"2024-08-15T08:26:39.669954Z", "action":"create", "source":"docker", "resource":"docker_image.main", "stage":"apply", "state":"COMPLETED"} {"start":"2024-08-15T08:26:39.627335Z", "end":"2024-08-15T08:26:39.660616Z", "action":"create", "source":"docker", "resource":"docker_volume.home_volume", "stage":"apply", "state":"COMPLETED"} {"start":"2024-08-15T08:26:39.682223Z", "end":"2024-08-15T08:26:40.186482Z", "action":"create", "source":"docker", "resource":"docker_container.workspace[0]", "stage":"apply", "state":"COMPLETED"}`)) - initTiming := terraform_internal.ParseTimingLines(t, []byte(`{"start":"2000-01-01T01:01:01.123456Z", "end":"2000-01-01T01:01:01.123456Z", "action":"initializing terraform", "source":"terraform", "resource":"state file", 
"stage":"init", "state":"COMPLETED"}`))[0] - graphTiming := terraform_internal.ParseTimingLines(t, []byte(`{"start":"2000-01-01T01:01:01.123456Z", "end":"2000-01-01T01:01:01.123456Z", "action":"building terraform dependency graph", "source":"terraform", "resource":"state file", "stage":"graph", "state":"COMPLETED"}`))[0] + graphTimings := terraform_internal.ParseTimingLines(t, []byte(`{"start":"2000-01-01T01:01:01.123456Z", "end":"2000-01-01T01:01:01.123456Z", "action":"building terraform dependency graph", "source":"terraform", "resource":"state file", "stage":"graph", "state":"COMPLETED"}`)) + graphTiming := graphTimings[0] - require.Len(t, timings, len(planTimings)+len(applyTimings)+2) + require.Len(t, timings, len(initTimings)+len(planTimings)+len(applyTimings)+len(graphTimings)) // init/graph timings are computed dynamically during provisioning whereas plan/apply come from the logs (fixtures) in // provisioner/terraform/testdata/timings-aggregation/fake-terraform.sh. @@ -117,11 +120,12 @@ func TestTimingsFromProvision(t *testing.T) { // We manually override the init/graph timings' timestamps so that the equality check works (all other fields should be as expected). 
pCursor := 0 aCursor := 0 + iCursor := 0 for _, tim := range timings { switch tim.Stage { case string(database.ProvisionerJobTimingStageInit): - tim.Start, tim.End = initTiming.Start, initTiming.End - require.True(t, terraform_internal.TimingsAreEqual(t, []*proto.Timing{initTiming}, []*proto.Timing{tim})) + require.True(t, terraform_internal.TimingsAreEqual(t, []*proto.Timing{initTimings[iCursor]}, []*proto.Timing{tim})) + iCursor++ case string(database.ProvisionerJobTimingStageGraph): tim.Start, tim.End = graphTiming.Start, graphTiming.End require.True(t, terraform_internal.TimingsAreEqual(t, []*proto.Timing{graphTiming}, []*proto.Timing{tim})) diff --git a/provisionerd/proto/version.go b/provisionerd/proto/version.go index dfe38cff30b7b..a7ea326d0f466 100644 --- a/provisionerd/proto/version.go +++ b/provisionerd/proto/version.go @@ -53,9 +53,13 @@ import "github.com/coder/coder/v2/apiversion" // // API v1.10: // - Added new field `tooltip` in `App` +// +// API v1.11: +// - Added new fields `task_id` and `task_prompt` to `Manifest`. +// - Added new field `app_id` to `AITask` const ( CurrentMajor = 1 - CurrentMinor = 10 + CurrentMinor = 11 ) // CurrentVersion is the current provisionerd API version. 
diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 905ab583613c2..b884f5a21aca6 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -2860,7 +2860,8 @@ type AITask struct { unknownFields protoimpl.UnknownFields Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - SidebarApp *AITaskSidebarApp `protobuf:"bytes,2,opt,name=sidebar_app,json=sidebarApp,proto3" json:"sidebar_app,omitempty"` + SidebarApp *AITaskSidebarApp `protobuf:"bytes,2,opt,name=sidebar_app,json=sidebarApp,proto3,oneof" json:"sidebar_app,omitempty"` + AppId string `protobuf:"bytes,3,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` } func (x *AITask) Reset() { @@ -2909,6 +2910,13 @@ func (x *AITask) GetSidebarApp() *AITaskSidebarApp { return nil } +func (x *AITask) GetAppId() string { + if x != nil { + return x.AppId + } + return "" +} + // Metadata is information about a workspace used in the execution of a build type Metadata struct { state protoimpl.MessageState @@ -2936,6 +2944,8 @@ type Metadata struct { WorkspaceOwnerRbacRoles []*Role `protobuf:"bytes,19,rep,name=workspace_owner_rbac_roles,json=workspaceOwnerRbacRoles,proto3" json:"workspace_owner_rbac_roles,omitempty"` PrebuiltWorkspaceBuildStage PrebuiltWorkspaceBuildStage `protobuf:"varint,20,opt,name=prebuilt_workspace_build_stage,json=prebuiltWorkspaceBuildStage,proto3,enum=provisioner.PrebuiltWorkspaceBuildStage" json:"prebuilt_workspace_build_stage,omitempty"` // Indicates that a prebuilt workspace is being built. 
RunningAgentAuthTokens []*RunningAgentAuthToken `protobuf:"bytes,21,rep,name=running_agent_auth_tokens,json=runningAgentAuthTokens,proto3" json:"running_agent_auth_tokens,omitempty"` + TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + TaskPrompt string `protobuf:"bytes,23,opt,name=task_prompt,json=taskPrompt,proto3" json:"task_prompt,omitempty"` } func (x *Metadata) Reset() { @@ -3117,6 +3127,20 @@ func (x *Metadata) GetRunningAgentAuthTokens() []*RunningAgentAuthToken { return nil } +func (x *Metadata) GetTaskId() string { + if x != nil { + return x.TaskId + } + return "" +} + +func (x *Metadata) GetTaskPrompt() string { + if x != nil { + return x.TaskPrompt + } + return "" +} + // Config represents execution configuration shared by all subsequent requests in the Session type Config struct { state protoimpl.MessageState @@ -4734,334 +4758,340 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x22, 0x0a, 0x10, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x58, 0x0a, 0x06, 0x41, 0x49, 0x54, 0x61, 0x73, - 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, - 0x64, 0x12, 0x3e, 0x0a, 0x0b, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, 0x65, 0x62, - 0x61, 0x72, 0x41, 0x70, 0x70, 0x52, 0x0a, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, - 0x70, 0x22, 0xca, 0x09, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, - 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 
0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x25, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x49, - 0x64, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, - 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 
0x61, 0x74, - 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f, 0x69, 0x64, 0x63, 0x5f, 0x61, 0x63, - 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x4f, 0x69, 0x64, 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x41, 0x0a, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, - 0x65, 0x72, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, - 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, - 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, - 0x0e, 0x20, 0x03, 0x28, 
0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, 0x42, 0x0a, 0x1e, 0x77, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, - 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x0f, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x84, 0x01, 0x0a, 0x06, 0x41, 0x49, 0x54, 0x61, + 0x73, 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x43, 0x0a, 0x0b, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, + 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, 0x65, + 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, + 0x72, 0x41, 0x70, 0x70, 0x88, 0x01, 0x01, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x70, 0x70, 0x5f, 0x69, + 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x70, 0x70, 0x49, 0x64, 0x42, 0x0e, + 0x0a, 0x0c, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, 0x22, 0x84, + 0x0a, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 
0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, + 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x21, 0x0a, + 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, + 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x32, + 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, + 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x45, 0x6d, 0x61, + 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f, 0x69, 0x64, 0x63, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, + 
0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1d, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4f, 0x69, 0x64, + 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x41, 0x0a, 0x1d, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, - 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, - 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, - 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, - 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, - 0x64, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x4e, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, - 0x6e, 0x65, 0x72, 0x5f, 0x72, 0x62, 0x61, 0x63, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x18, 0x13, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 
0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x52, 0x62, 0x61, 0x63, 0x52, 0x6f, 0x6c, 0x65, 0x73, - 0x12, 0x6d, 0x0a, 0x1e, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x5f, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x73, 0x74, 0x61, - 0x67, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, - 0x67, 0x65, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, - 0x5d, 0x0a, 0x19, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, - 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, - 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x16, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, - 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x22, 0x8a, - 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, - 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 
0x01, 0x28, 0x0c, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, - 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, - 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, - 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, - 0x45, 0x6e, 0x74, 
0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, + 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0c, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x64, + 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x0e, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, + 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, 0x42, 0x0a, 0x1e, 0x77, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, + 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 
0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, + 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x1f, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, + 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, + 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, + 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, + 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, + 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x12, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, + 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x4e, 0x0a, + 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, + 0x5f, 0x72, 0x62, 0x61, 0x63, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, + 0x77, 0x6e, 0x65, 0x72, 0x52, 0x62, 0x61, 0x63, 0x52, 0x6f, 0x6c, 0x65, 0x73, 0x12, 0x6d, 0x0a, + 0x1e, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 
0x18, + 0x14, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, + 0x1b, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x5d, 0x0a, 0x19, + 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x61, 0x75, + 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x75, + 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x52, 0x16, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, + 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x74, + 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x16, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, + 0x73, 0x6b, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x61, 0x73, 0x6b, 0x50, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32, + 0x0a, 0x15, 0x70, 0x72, 0x6f, 
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, + 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, + 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, + 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, + 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, + 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, + 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, + 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, + 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 
0x6b, 0x65, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, + 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, + 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, + 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, + 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, + 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, + 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 
0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, + 0x12, 0x5b, 0x0a, 0x19, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, - 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, - 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x5b, 0x0a, 0x19, 0x70, 0x72, 0x65, - 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 
0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, - 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x6d, 0x69, 0x74, 0x5f, 0x6d, - 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, - 0x65, 0x73, 0x22, 0xc1, 0x05, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, - 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, - 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 
0x2d, 0x0a, - 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, - 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, - 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, - 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, - 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x55, - 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, - 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, - 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x0c, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, - 0x48, 0x61, 0x73, 0x68, 
0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, - 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, - 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, - 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, - 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x68, 0x61, 0x73, 0x5f, 0x65, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xee, 0x02, 0x0a, 0x0d, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 
0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, - 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, - 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, - 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, - 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, - 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, - 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x54, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, - 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, - 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, - 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 
0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, - 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, - 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, - 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, - 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, - 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 
0x52, 0x05, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, - 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, - 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, - 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, - 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, - 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, - 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, - 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, - 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, - 0x64, 0x61, 
0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x64, 0x61, 0x74, - 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x66, 0x75, - 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, - 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x2a, 0xa8, 0x01, 0x0a, 0x11, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x46, 0x6f, 0x72, 0x6d, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x0e, - 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x09, - 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x52, 0x4f, - 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x50, 0x55, 0x54, - 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, 0x54, 0x41, 0x52, 0x45, 0x41, 0x10, 0x05, - 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, 0x52, 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, - 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x57, - 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x41, 0x47, 0x53, 0x45, 0x4c, - 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x53, 0x45, - 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, - 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, - 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, - 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, - 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 
0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, - 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, - 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, - 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, - 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, - 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, - 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, - 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, - 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, - 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, - 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, 0x4e, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, - 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x43, 0x4c, 0x41, - 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, - 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, - 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x02, 0x2a, 0x47, 0x0a, 0x0e, 0x44, - 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, - 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, - 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, 0x18, 0x55, 0x50, 0x4c, 0x4f, 
0x41, 0x44, - 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x55, 0x4c, 0x45, 0x5f, 0x46, 0x49, 0x4c, - 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, - 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, + 0x11, 0x6f, 0x6d, 0x69, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, + 0x65, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, + 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x22, 0xc1, 0x05, 0x0a, 0x0c, 0x50, 0x6c, + 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 
0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, + 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, + 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, + 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, + 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 
0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, + 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, + 0x2a, 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, + 0x68, 0x61, 0x73, 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, + 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, + 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, + 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, + 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, + 0x13, 0x68, 0x61, 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x41, 0x0a, + 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 
0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x22, 0xee, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, + 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, + 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, + 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, + 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 
0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, + 0x73, 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, + 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, + 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, + 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, + 0x0a, 0x0d, 0x43, 
0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, + 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, + 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, + 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, + 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, + 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, + 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, + 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 
0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, + 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, + 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, + 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, + 0x74, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, + 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, + 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, + 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, + 0x69, 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, + 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, + 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 
0x1b, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, + 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, + 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, + 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x05, 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, + 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, + 0x75, 0x6c, 0x6c, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, + 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x2a, 0xa8, 0x01, 0x0a, 0x11, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x46, 0x6f, 0x72, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, + 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, + 0x52, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 0x10, 0x02, + 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x52, 0x4f, 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, + 0x0a, 0x05, 0x49, 0x4e, 0x50, 0x55, 0x54, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, + 0x54, 0x41, 0x52, 0x45, 0x41, 
0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, + 0x52, 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, + 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x57, 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 0x0a, + 0x09, 0x54, 0x41, 0x47, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, + 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, + 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, + 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, + 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, + 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, + 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, + 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, + 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, + 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, + 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, + 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, + 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, + 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, + 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, + 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, + 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, + 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 
0x73, 0x70, 0x61, 0x63, 0x65, + 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, + 0x4e, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, + 0x12, 0x09, 0x0a, 0x05, 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, + 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, + 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, + 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, + 0x10, 0x02, 0x2a, 0x47, 0x0a, 0x0e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, + 0x18, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, + 0x55, 0x4c, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, + 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, + 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -5814,6 +5844,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { 
(*Agent_Token)(nil), (*Agent_InstanceId)(nil), } + file_provisionersdk_proto_provisioner_proto_msgTypes[32].OneofWrappers = []interface{}{} file_provisionersdk_proto_provisioner_proto_msgTypes[43].OneofWrappers = []interface{}{ (*Request_Config)(nil), (*Request_Parse)(nil), diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index e38edd3aba1c2..803f3e2197ecd 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -335,7 +335,8 @@ message AITaskSidebarApp { message AITask { string id = 1; - AITaskSidebarApp sidebar_app = 2; + optional AITaskSidebarApp sidebar_app = 2; + string app_id = 3; } // Metadata is information about a workspace used in the execution of a build @@ -361,6 +362,8 @@ message Metadata { repeated Role workspace_owner_rbac_roles = 19; PrebuiltWorkspaceBuildStage prebuilt_workspace_build_stage = 20; // Indicates that a prebuilt workspace is being built. repeated RunningAgentAuthToken running_agent_auth_tokens = 21; + string task_id = 22; + string task_prompt = 23; } // Config represents execution configuration shared by all subsequent requests in the Session diff --git a/pty/pty_windows.go b/pty/pty_windows.go index 93fea12019627..987ef02eb281d 100644 --- a/pty/pty_windows.go +++ b/pty/pty_windows.go @@ -54,10 +54,19 @@ func newPty(opt ...Option) (*ptyWindows, error) { return nil, err } - consoleSize := uintptr(80) + (uintptr(80) << 16) + // Default dimensions + width, height := 80, 80 if opts.sshReq != nil { - consoleSize = uintptr(opts.sshReq.Window.Width) + (uintptr(opts.sshReq.Window.Height) << 16) + if w := opts.sshReq.Window.Width; w > 0 && w <= 65535 { + width = w + } + if h := opts.sshReq.Window.Height; h > 0 && h <= 65535 { + height = h + } } + + consoleSize := uintptr(width) + (uintptr(height) << 16) + ret, _, err := procCreatePseudoConsole.Call( consoleSize, uintptr(pty.inputRead.Fd()), diff --git a/scaletest/autostart/config.go 
b/scaletest/autostart/config.go new file mode 100644 index 0000000000000..ad804a0b89666 --- /dev/null +++ b/scaletest/autostart/config.go @@ -0,0 +1,75 @@ +package autostart + +import ( + "sync" + "time" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/workspacebuild" +) + +type Config struct { + // User is the configuration for the user to create. + User createusers.Config `json:"user"` + + // Workspace is the configuration for the workspace to create. The workspace + // will be built using the new user. + // + // OrganizationID is ignored and set to the new user's organization ID. + Workspace workspacebuild.Config `json:"workspace"` + + // WorkspaceJobTimeout is how long to wait for any one workspace job + // (start or stop) to complete. + WorkspaceJobTimeout time.Duration `json:"workspace_job_timeout"` + + // AutostartDelay is how long after all the workspaces have been stopped + // to schedule them to be started again. + AutostartDelay time.Duration `json:"autostart_delay"` + + // AutostartTimeout is how long to wait for the autostart build to be + // initiated after the scheduled time. + AutostartTimeout time.Duration `json:"autostart_timeout"` + + Metrics *Metrics `json:"-"` + + // SetupBarrier is used to ensure all runners own stopped workspaces + // before setting the autostart schedule on each. + SetupBarrier *sync.WaitGroup `json:"-"` +} + +func (c Config) Validate() error { + if err := c.User.Validate(); err != nil { + return xerrors.Errorf("user config: %w", err) + } + c.Workspace.OrganizationID = c.User.OrganizationID + // This value will be overwritten during the test. 
+ c.Workspace.UserID = codersdk.Me + if err := c.Workspace.Validate(); err != nil { + return xerrors.Errorf("workspace config: %w", err) + } + + if c.SetupBarrier == nil { + return xerrors.New("setup barrier must be set") + } + + if c.WorkspaceJobTimeout <= 0 { + return xerrors.New("workspace_job_timeout must be greater than 0") + } + + if c.AutostartDelay < time.Minute*2 { + return xerrors.New("autostart_delay must be at least 2 minutes") + } + + if c.AutostartTimeout <= 0 { + return xerrors.New("autostart_timeout must be greater than 0") + } + + if c.Metrics == nil { + return xerrors.New("metrics must be set") + } + + return nil +} diff --git a/scaletest/autostart/metrics.go b/scaletest/autostart/metrics.go new file mode 100644 index 0000000000000..d1ff94e7898c4 --- /dev/null +++ b/scaletest/autostart/metrics.go @@ -0,0 +1,65 @@ +package autostart + +import ( + "time" + + "github.com/prometheus/client_golang/prometheus" +) + +type Metrics struct { + AutostartJobCreationLatencySeconds prometheus.HistogramVec + AutostartJobAcquiredLatencySeconds prometheus.HistogramVec + AutostartTotalLatencySeconds prometheus.HistogramVec + AutostartErrorsTotal prometheus.CounterVec +} + +func NewMetrics(reg prometheus.Registerer) *Metrics { + m := &Metrics{ + AutostartJobCreationLatencySeconds: *prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "autostart_job_creation_latency_seconds", + Help: "Time from when the workspace is scheduled to be autostarted to when the autostart job has been created.", + }, []string{"username", "workspace_name"}), + AutostartJobAcquiredLatencySeconds: *prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "autostart_job_acquired_latency_seconds", + Help: "Time from when the workspace is scheduled to be autostarted to when the job has been acquired by a provisioner daemon.", + }, []string{"username", "workspace_name"}), + 
AutostartTotalLatencySeconds: *prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "autostart_total_latency_seconds", + Help: "Time from when the workspace is scheduled to be autostarted to when the autostart build has finished.", + }, []string{"username", "workspace_name"}), + AutostartErrorsTotal: *prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "autostart_errors_total", + Help: "Total number of autostart errors", + }, []string{"username", "action"}), + } + + reg.MustRegister(m.AutostartTotalLatencySeconds) + reg.MustRegister(m.AutostartJobCreationLatencySeconds) + reg.MustRegister(m.AutostartJobAcquiredLatencySeconds) + reg.MustRegister(m.AutostartErrorsTotal) + return m +} + +func (m *Metrics) RecordCompletion(elapsed time.Duration, username string, workspace string) { + m.AutostartTotalLatencySeconds.WithLabelValues(username, workspace).Observe(elapsed.Seconds()) +} + +func (m *Metrics) RecordJobCreation(elapsed time.Duration, username string, workspace string) { + m.AutostartJobCreationLatencySeconds.WithLabelValues(username, workspace).Observe(elapsed.Seconds()) +} + +func (m *Metrics) RecordJobAcquired(elapsed time.Duration, username string, workspace string) { + m.AutostartJobAcquiredLatencySeconds.WithLabelValues(username, workspace).Observe(elapsed.Seconds()) +} + +func (m *Metrics) AddError(username string, action string) { + m.AutostartErrorsTotal.WithLabelValues(username, action).Inc() +} diff --git a/scaletest/autostart/run.go b/scaletest/autostart/run.go new file mode 100644 index 0000000000000..c37d843ad95c2 --- /dev/null +++ b/scaletest/autostart/run.go @@ -0,0 +1,246 @@ +package autostart + +import ( + "context" + "fmt" + "io" + "time" + + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/codersdk" + 
"github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/loadtestutil" + "github.com/coder/coder/v2/scaletest/workspacebuild" +) + +type Runner struct { + client *codersdk.Client + cfg Config + + createUserRunner *createusers.Runner + workspacebuildRunner *workspacebuild.Runner + + autostartTotalLatency time.Duration + autostartJobCreationLatency time.Duration + autostartJobAcquiredLatency time.Duration +} + +func NewRunner(client *codersdk.Client, cfg Config) *Runner { + return &Runner{ + client: client, + cfg: cfg, + } +} + +var ( + _ harness.Runnable = &Runner{} + _ harness.Cleanable = &Runner{} + _ harness.Collectable = &Runner{} +) + +func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error { + ctx, span := tracing.StartSpan(ctx) + defer span.End() + + reachedBarrier := false + defer func() { + if !reachedBarrier { + r.cfg.SetupBarrier.Done() + } + }() + + logs = loadtestutil.NewSyncWriter(logs) + logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug) + r.client.SetLogger(logger) + r.client.SetLogBodies(true) + + r.createUserRunner = createusers.NewRunner(r.client, r.cfg.User) + newUserAndToken, err := r.createUserRunner.RunReturningUser(ctx, id, logs) + if err != nil { + r.cfg.Metrics.AddError("", "create_user") + return xerrors.Errorf("create user: %w", err) + } + newUser := newUserAndToken.User + + newUserClient := codersdk.New(r.client.URL, + codersdk.WithSessionToken(newUserAndToken.SessionToken), + codersdk.WithLogger(logger), + codersdk.WithLogBodies()) + + //nolint:gocritic // short log is fine + logger.Info(ctx, "user created", slog.F("username", newUser.Username), slog.F("user_id", newUser.ID.String())) + + workspaceBuildConfig := r.cfg.Workspace + workspaceBuildConfig.OrganizationID = r.cfg.User.OrganizationID + workspaceBuildConfig.UserID = newUser.ID.String() + // We'll wait for the build ourselves to avoid multiple API requests + 
workspaceBuildConfig.NoWaitForBuild = true + workspaceBuildConfig.NoWaitForAgents = true + + r.workspacebuildRunner = workspacebuild.NewRunner(newUserClient, workspaceBuildConfig) + workspace, err := r.workspacebuildRunner.RunReturningWorkspace(ctx, id, logs) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "create_workspace") + return xerrors.Errorf("create workspace: %w", err) + } + + watchCtx, cancel := context.WithCancel(ctx) + defer cancel() + workspaceUpdates, err := newUserClient.WatchWorkspace(watchCtx, workspace.ID) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "watch_workspace") + return xerrors.Errorf("watch workspace: %w", err) + } + + createWorkspaceCtx, cancel2 := context.WithTimeout(ctx, r.cfg.WorkspaceJobTimeout) + defer cancel2() + + err = waitForWorkspaceUpdate(createWorkspaceCtx, logger, workspaceUpdates, func(ws codersdk.Workspace) bool { + return ws.LatestBuild.Transition == codersdk.WorkspaceTransitionStart && + ws.LatestBuild.Job.Status == codersdk.ProvisionerJobSucceeded + }) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "wait_for_initial_build") + return xerrors.Errorf("timeout waiting for initial workspace build to complete: %w", err) + } + + logger.Info(ctx, "stopping workspace", slog.F("workspace_name", workspace.Name)) + + _, err = newUserClient.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionStop, + }) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "create_stop_build") + return xerrors.Errorf("create stop build: %w", err) + } + + stopBuildCtx, cancel3 := context.WithTimeout(ctx, r.cfg.WorkspaceJobTimeout) + defer cancel3() + + err = waitForWorkspaceUpdate(stopBuildCtx, logger, workspaceUpdates, func(ws codersdk.Workspace) bool { + return ws.LatestBuild.Transition == codersdk.WorkspaceTransitionStop && + ws.LatestBuild.Job.Status == codersdk.ProvisionerJobSucceeded + }) + if err != nil { + 
r.cfg.Metrics.AddError(newUser.Username, "wait_for_stop_build") + return xerrors.Errorf("timeout waiting for stop build to complete: %w", err) + } + + logger.Info(ctx, "workspace stopped successfully", slog.F("workspace_name", workspace.Name)) + + logger.Info(ctx, "waiting for all runners to reach barrier") + reachedBarrier = true + r.cfg.SetupBarrier.Done() + r.cfg.SetupBarrier.Wait() + logger.Info(ctx, "all runners reached barrier, proceeding with autostart schedule") + + testStartTime := time.Now().UTC() + autostartTime := testStartTime.Add(r.cfg.AutostartDelay).Round(time.Minute) + schedule := fmt.Sprintf("CRON_TZ=UTC %d %d * * *", autostartTime.Minute(), autostartTime.Hour()) + + logger.Info(ctx, "setting autostart schedule for workspace", slog.F("workspace_name", workspace.Name), slog.F("schedule", schedule)) + + err = newUserClient.UpdateWorkspaceAutostart(ctx, workspace.ID, codersdk.UpdateWorkspaceAutostartRequest{ + Schedule: &schedule, + }) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "update_workspace_autostart") + return xerrors.Errorf("update workspace autostart: %w", err) + } + + logger.Info(ctx, "waiting for workspace to autostart", slog.F("workspace_name", workspace.Name)) + + autostartInitiateCtx, cancel4 := context.WithDeadline(ctx, autostartTime.Add(r.cfg.AutostartDelay)) + defer cancel4() + + logger.Info(ctx, "listening for workspace updates to detect autostart build") + + err = waitForWorkspaceUpdate(autostartInitiateCtx, logger, workspaceUpdates, func(ws codersdk.Workspace) bool { + if ws.LatestBuild.Transition != codersdk.WorkspaceTransitionStart { + return false + } + + // The job has been created, but it might be pending + if r.autostartJobCreationLatency == 0 { + r.autostartJobCreationLatency = time.Since(autostartTime) + r.cfg.Metrics.RecordJobCreation(r.autostartJobCreationLatency, newUser.Username, workspace.Name) + } + + if ws.LatestBuild.Job.Status == codersdk.ProvisionerJobRunning || + ws.LatestBuild.Job.Status == 
codersdk.ProvisionerJobSucceeded { + // Job is no longer pending, but it might not have finished + if r.autostartJobAcquiredLatency == 0 { + r.autostartJobAcquiredLatency = time.Since(autostartTime) + r.cfg.Metrics.RecordJobAcquired(r.autostartJobAcquiredLatency, newUser.Username, workspace.Name) + } + return ws.LatestBuild.Job.Status == codersdk.ProvisionerJobSucceeded + } + + return false + }) + if err != nil { + r.cfg.Metrics.AddError(newUser.Username, "wait_for_autostart_build") + return xerrors.Errorf("timeout waiting for autostart build to be created: %w", err) + } + + r.autostartTotalLatency = time.Since(autostartTime) + + logger.Info(ctx, "autostart workspace build complete", slog.F("duration", r.autostartTotalLatency)) + r.cfg.Metrics.RecordCompletion(r.autostartTotalLatency, newUser.Username, workspace.Name) + + return nil +} + +func waitForWorkspaceUpdate(ctx context.Context, logger slog.Logger, updates <-chan codersdk.Workspace, shouldBreak func(codersdk.Workspace) bool) error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case updatedWorkspace, ok := <-updates: + if !ok { + return xerrors.New("workspace updates channel closed") + } + logger.Debug(ctx, "received workspace update", slog.F("update", updatedWorkspace)) + if shouldBreak(updatedWorkspace) { + return nil + } + } + } +} + +func (r *Runner) Cleanup(ctx context.Context, id string, logs io.Writer) error { + if r.workspacebuildRunner != nil { + _, _ = fmt.Fprintln(logs, "Cleaning up workspace...") + if err := r.workspacebuildRunner.Cleanup(ctx, id, logs); err != nil { + return xerrors.Errorf("cleanup workspace: %w", err) + } + } + + if r.createUserRunner != nil { + _, _ = fmt.Fprintln(logs, "Cleaning up user...") + if err := r.createUserRunner.Cleanup(ctx, id, logs); err != nil { + return xerrors.Errorf("cleanup user: %w", err) + } + } + + return nil +} + +const ( + AutostartTotalLatencyMetric = "autostart_total_latency_seconds" + AutostartJobCreationLatencyMetric = 
"autostart_job_creation_latency_seconds" + AutostartJobAcquiredLatencyMetric = "autostart_job_acquired_latency_seconds" +) + +func (r *Runner) GetMetrics() map[string]any { + return map[string]any{ + AutostartTotalLatencyMetric: r.autostartTotalLatency.Seconds(), + AutostartJobCreationLatencyMetric: r.autostartJobCreationLatency.Seconds(), + AutostartJobAcquiredLatencyMetric: r.autostartJobAcquiredLatency.Seconds(), + } +} diff --git a/scaletest/autostart/run_test.go b/scaletest/autostart/run_test.go new file mode 100644 index 0000000000000..dc0fb9fea018e --- /dev/null +++ b/scaletest/autostart/run_test.go @@ -0,0 +1,158 @@ +package autostart_test + +import ( + "io" + "strconv" + "sync" + "testing" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/sync/errgroup" + + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/provisioner/echo" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/scaletest/autostart" + "github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/workspacebuild" + "github.com/coder/coder/v2/testutil" +) + +func TestRun(t *testing.T) { + t.Parallel() + numUsers := 2 + autoStartDelay := 2 * time.Minute + + // Faking a workspace autostart schedule start time at the coderd level + // is difficult and error-prone. 
+ t.Skip("This test takes several minutes to run, and is intended as a manual regression test") + + ctx := testutil.Context(t, time.Minute*3) + + client := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + AutobuildTicker: time.NewTicker(time.Second * 1).C, + }) + user := coderdtest.CreateFirstUser(t, client) + + authToken := uuid.NewString() + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: echo.PlanComplete, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Name: "example", + Type: "aws_instance", + Agents: []*proto.Agent{ + { + Id: uuid.NewString(), + Name: "agent", + Auth: &proto.Agent_Token{ + Token: authToken, + }, + Apps: []*proto.App{}, + }, + }, + }, + }, + }, + }, + }, + }, + }) + + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + + barrier := new(sync.WaitGroup) + barrier.Add(numUsers) + metrics := autostart.NewMetrics(prometheus.NewRegistry()) + + eg, runCtx := errgroup.WithContext(ctx) + + runners := make([]*autostart.Runner, 0, numUsers) + for i := range numUsers { + cfg := autostart.Config{ + User: createusers.Config{ + OrganizationID: user.OrganizationID, + }, + Workspace: workspacebuild.Config{ + OrganizationID: user.OrganizationID, + Request: codersdk.CreateWorkspaceRequest{ + TemplateID: template.ID, + }, + NoWaitForAgents: true, + }, + WorkspaceJobTimeout: testutil.WaitMedium, + AutostartDelay: autoStartDelay, + AutostartTimeout: testutil.WaitShort, + Metrics: metrics, + SetupBarrier: barrier, + } + err := cfg.Validate() + require.NoError(t, err) + + runner := autostart.NewRunner(client, cfg) + runners = append(runners, runner) + eg.Go(func() error { + return runner.Run(runCtx, strconv.Itoa(i), io.Discard) + }) + } + + err := eg.Wait() + 
require.NoError(t, err) + + users, err := client.Users(ctx, codersdk.UsersRequest{}) + require.NoError(t, err) + require.Len(t, users.Users, 1+numUsers) // owner + created users + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{}) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, numUsers) // one workspace per user + + // Verify that workspaces have autostart schedules set and are running + for _, workspace := range workspaces.Workspaces { + require.NotNil(t, workspace.AutostartSchedule) + require.Equal(t, codersdk.WorkspaceTransitionStart, workspace.LatestBuild.Transition) + require.Equal(t, codersdk.ProvisionerJobSucceeded, workspace.LatestBuild.Job.Status) + } + + cleanupEg, cleanupCtx := errgroup.WithContext(ctx) + for i, runner := range runners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, strconv.Itoa(i), io.Discard) + }) + } + err = cleanupEg.Wait() + require.NoError(t, err) + + workspaces, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{}) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 0) + + users, err = client.Users(ctx, codersdk.UsersRequest{}) + require.NoError(t, err) + require.Len(t, users.Users, 1) // owner + + for _, runner := range runners { + metrics := runner.GetMetrics() + require.Contains(t, metrics, autostart.AutostartTotalLatencyMetric) + latency, ok := metrics[autostart.AutostartTotalLatencyMetric].(float64) + require.True(t, ok) + jobCreationLatency, ok := metrics[autostart.AutostartJobCreationLatencyMetric].(float64) + require.True(t, ok) + jobAcquiredLatency, ok := metrics[autostart.AutostartJobAcquiredLatencyMetric].(float64) + require.True(t, ok) + require.Greater(t, latency, float64(0)) + require.Greater(t, jobCreationLatency, float64(0)) + require.Greater(t, jobAcquiredLatency, float64(0)) + } +} diff --git a/scaletest/dynamicparameters/config.go b/scaletest/dynamicparameters/config.go index 176b1245b23b1..5bd10f1b25a70 100644 --- 
a/scaletest/dynamicparameters/config.go +++ b/scaletest/dynamicparameters/config.go @@ -4,7 +4,6 @@ import "github.com/google/uuid" type Config struct { TemplateVersion uuid.UUID `json:"template_version"` - SessionToken string `json:"session_token"` Metrics *Metrics `json:"-"` MetricLabelValues []string `json:"metric_label_values"` } diff --git a/scaletest/dynamicparameters/run.go b/scaletest/dynamicparameters/run.go index cec482bb4129d..12dd4099817e6 100644 --- a/scaletest/dynamicparameters/run.go +++ b/scaletest/dynamicparameters/run.go @@ -22,12 +22,8 @@ type Runner struct { var _ harness.Runnable = &Runner{} func NewRunner(client *codersdk.Client, cfg Config) *Runner { - clone := codersdk.New(client.URL) - clone.HTTPClient = client.HTTPClient - clone.SetLogger(client.Logger()) - clone.SetSessionToken(cfg.SessionToken) return &Runner{ - client: clone, + client: client, cfg: cfg, } } diff --git a/scaletest/dynamicparameters/run_test.go b/scaletest/dynamicparameters/run_test.go index 57577d27434d7..2c280e5f960e3 100644 --- a/scaletest/dynamicparameters/run_test.go +++ b/scaletest/dynamicparameters/run_test.go @@ -37,7 +37,6 @@ func TestRun(t *testing.T) { reg := prometheus.NewRegistry() cfg := dynamicparameters.Config{ TemplateVersion: version.ID, - SessionToken: userClient.SessionToken(), Metrics: dynamicparameters.NewMetrics(reg, "template", "test_label_name"), MetricLabelValues: []string{template.Name, "test_label_value"}, } diff --git a/scaletest/dynamicparameters/template.go b/scaletest/dynamicparameters/template.go index 464fba2f7f7cd..5faf67e531320 100644 --- a/scaletest/dynamicparameters/template.go +++ b/scaletest/dynamicparameters/template.go @@ -1,14 +1,30 @@ package dynamicparameters import ( + "archive/tar" + "bytes" + "context" _ "embed" "encoding/json" + "fmt" + "io" + "path/filepath" + "slices" "strings" "text/template" + "time" + "github.com/google/uuid" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/codersdk" 
"github.com/coder/coder/v2/cryptorand" + "github.com/coder/quartz" ) +var ErrNoProvisionersMatched = xerrors.New("no provisioners matched") + //go:embed tf/main.tf var templateContent string @@ -72,3 +88,276 @@ func GetModuleFiles() map[string][]byte { ".terraform/modules/modules.json": modulesJSONBytes, } } + +func createTarFromFiles(files map[string][]byte) ([]byte, error) { + buf := new(bytes.Buffer) + writer := tar.NewWriter(buf) + dirs := []string{} + for name, content := range files { + // We need to add directories before any files that use them. But, we only need to do this + // once. + dir := filepath.Dir(name) + if dir != "." && !slices.Contains(dirs, dir) { + dirs = append(dirs, dir) + err := writer.WriteHeader(&tar.Header{ + Name: dir, + Mode: 0o755, + Typeflag: tar.TypeDir, + }) + if err != nil { + return nil, err + } + } + + err := writer.WriteHeader(&tar.Header{ + Name: name, + Size: int64(len(content)), + Mode: 0o644, + }) + if err != nil { + return nil, err + } + + _, err = writer.Write(content) + if err != nil { + return nil, err + } + } + // `writer.Close()` function flushes the writer buffer, and adds extra padding to create a legal tarball. 
+ err := writer.Close() + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func TemplateTarData() ([]byte, error) { + mainTF, err := TemplateContent() + if err != nil { + return nil, xerrors.Errorf("failed to generate main.tf: %w", err) + } + moduleFiles := GetModuleFiles() + + files := map[string][]byte{ + "main.tf": []byte(mainTF), + } + for k, v := range moduleFiles { + files[k] = v + } + tarData, err := createTarFromFiles(files) + if err != nil { + return nil, xerrors.Errorf("failed to create tarball: %w", err) + } + + return tarData, nil +} + +type Partition struct { + TemplateVersion codersdk.TemplateVersion + ConcurrentEvaluations int +} + +type SDKForDynamicParametersSetup interface { + TemplateByName(ctx context.Context, orgID uuid.UUID, templateName string) (codersdk.Template, error) + CreateTemplate(ctx context.Context, orgID uuid.UUID, createReq codersdk.CreateTemplateRequest) (codersdk.Template, error) + CreateTemplateVersion(ctx context.Context, orgID uuid.UUID, createReq codersdk.CreateTemplateVersionRequest) (codersdk.TemplateVersion, error) + Upload(ctx context.Context, contentType string, reader io.Reader) (codersdk.UploadResponse, error) + TemplateVersion(ctx context.Context, versionID uuid.UUID) (codersdk.TemplateVersion, error) +} + +// partitioner is an internal struct to hold context and arguments for partition setup +// and to provide methods for all sub-steps. 
+type partitioner struct { + ctx context.Context + client SDKForDynamicParametersSetup + orgID uuid.UUID + templateName string + provisionerTags map[string]string + numEvals int64 + logger slog.Logger + + // for testing + clock quartz.Clock +} + +func SetupPartitions( + ctx context.Context, client SDKForDynamicParametersSetup, + orgID uuid.UUID, templateName string, provisionerTags map[string]string, + numEvals int64, + logger slog.Logger, +) ([]Partition, error) { + p := &partitioner{ + ctx: ctx, + client: client, + orgID: orgID, + templateName: templateName, + provisionerTags: provisionerTags, + numEvals: numEvals, + logger: logger, + clock: quartz.NewReal(), + } + return p.run() +} + +func (p *partitioner) run() ([]Partition, error) { + var ( + err error + coderError *codersdk.Error + templ codersdk.Template + tempVersion codersdk.TemplateVersion + ) + templ, err = p.client.TemplateByName(p.ctx, p.orgID, p.templateName) + if xerrors.As(err, &coderError) && coderError.StatusCode() == 404 { + tempVersion, err = p.createTemplateVersion(uuid.Nil) + if err != nil { + return nil, xerrors.Errorf("failed to create template version: %w", err) + } + p.logger.Info(p.ctx, "created template version", slog.F("version_id", tempVersion.ID)) + createReq := codersdk.CreateTemplateRequest{ + Name: p.templateName, + DisplayName: "Scaletest Dynamic Parameters", + Description: "`coder exp scaletest dynamic parameters test` template", + VersionID: tempVersion.ID, + } + templ, err = p.client.CreateTemplate(p.ctx, p.orgID, createReq) + if err != nil { + return nil, xerrors.Errorf("failed to create template: %w", err) + } + p.logger.Info(p.ctx, "created template", slog.F("template_id", templ.ID), slog.F("name", p.templateName)) + } else if err != nil { + return nil, xerrors.Errorf("failed to get template: %w", err) + } + + // Partition the number into a list decreasing by half each time + evalParts := partitionEvaluations(int(p.numEvals)) + p.logger.Info(p.ctx, "partitioned evaluations", 
slog.F("num_evals", p.numEvals), slog.F("eval_parts", evalParts)) + + // If tempVersion is not empty (i.e. we created it above), use it as the first version. + partitions := make([]Partition, 0, len(evalParts)) + if tempVersion.ID != uuid.Nil { + partitions = append(partitions, Partition{ + TemplateVersion: tempVersion, + ConcurrentEvaluations: evalParts[0], + }) + evalParts = evalParts[1:] + } + + for _, num := range evalParts { + version, err := p.createTemplateVersion(templ.ID) + if err != nil { + return nil, xerrors.Errorf("failed to create template version: %w", err) + } + partitions = append(partitions, Partition{ + TemplateVersion: version, + ConcurrentEvaluations: num, + }) + p.logger.Info(p.ctx, "created template version", slog.F("version_id", version.ID)) + } + + err = p.waitForTemplateVersionJobs(partitions) + if err != nil { + return nil, xerrors.Errorf("one or more template version jobs did not succeed: %w", err) + } + return partitions, nil +} + +func (p *partitioner) createTemplateVersion(templateID uuid.UUID) (codersdk.TemplateVersion, error) { + tarData, err := TemplateTarData() + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("failed to create template tarball: %w", err) + } + + // Upload tarball + uploadResp, err := p.client.Upload(p.ctx, codersdk.ContentTypeTar, bytes.NewReader(tarData)) + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("failed to upload template tar: %w", err) + } + + // Create template version + versionReq := codersdk.CreateTemplateVersionRequest{ + TemplateID: templateID, + FileID: uploadResp.ID, + Message: "Initial version for scaletest dynamic parameters", + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeTerraform, + ProvisionerTags: p.provisionerTags, + } + version, err := p.client.CreateTemplateVersion(p.ctx, p.orgID, versionReq) + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("failed to create template version: 
%w", err) + } + if version.MatchedProvisioners != nil && version.MatchedProvisioners.Count == 0 { + return codersdk.TemplateVersion{}, ErrNoProvisionersMatched + } + return version, nil +} + +func (p *partitioner) waitForTemplateVersionJobs(partitions []Partition) error { + const pollInterval = 2 * time.Second + done := xerrors.New("done") + + pending := make(map[uuid.UUID]int) + for i, part := range partitions { + pending[part.TemplateVersion.ID] = i + } + + tkr := p.clock.TickerFunc(p.ctx, pollInterval, func() error { + for versionID := range pending { + version, err := p.client.TemplateVersion(p.ctx, versionID) + if err != nil { + return xerrors.Errorf("failed to fetch template version %s: %w", versionID, err) + } + status := version.Job.Status + p.logger.Info(p.ctx, "polled template version job", slog.F("version_id", versionID), slog.F("status", status)) + switch status { + case codersdk.ProvisionerJobSucceeded: + delete(pending, versionID) + case codersdk.ProvisionerJobPending, codersdk.ProvisionerJobRunning: + continue + default: + return ProvisionerJobUnexpectedStatusError{ + TemplateVersionID: versionID, + Status: status, + JobError: version.Job.Error, + } + } + } + if len(pending) == 0 { + return done + } + return nil + }, "waitForTemplateVersionJobs") + err := tkr.Wait() + if xerrors.Is(err, done) { + return nil + } + return err +} + +func partitionEvaluations(total int) []int { + var parts []int + remaining := total + for remaining > 0 { + next := remaining / 2 + // round up + if next*2 != remaining { + next++ + } + if next > remaining { + next = remaining + } + parts = append(parts, next) + remaining -= next + } + return parts +} + +type ProvisionerJobUnexpectedStatusError struct { + TemplateVersionID uuid.UUID + Status codersdk.ProvisionerJobStatus + JobError string +} + +func (e ProvisionerJobUnexpectedStatusError) Error() string { + return fmt.Sprintf("template version %s job in unexpected status %q, error '%s'", e.TemplateVersionID, e.Status, 
e.JobError) +} diff --git a/scaletest/dynamicparameters/template_internal_test.go b/scaletest/dynamicparameters/template_internal_test.go new file mode 100644 index 0000000000000..6b1230eeae75e --- /dev/null +++ b/scaletest/dynamicparameters/template_internal_test.go @@ -0,0 +1,297 @@ +package dynamicparameters + +import ( + "context" + "io" + "net/http" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" +) + +func TestPartitionEvaluations(t *testing.T) { + t.Parallel() + tests := []struct { + name string + input int + expected []int + }{ + { + name: "10", + input: 10, + expected: []int{5, 3, 1, 1}, + }, + { + name: "11", + input: 11, + expected: []int{6, 3, 1, 1}, + }, + { + name: "12", + input: 12, + expected: []int{6, 3, 2, 1}, + }, + { + name: "600", + input: 600, + expected: []int{300, 150, 75, 38, 19, 9, 5, 2, 1, 1}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := partitionEvaluations(tc.input) + require.Equal(t, tc.expected, got) + total := 0 + for _, v := range got { + total += v + } + require.Equal(t, tc.input, total) + }) + } +} + +func TestSetupPartitions_TemplateExists(t *testing.T) { + t.Parallel() + logger := testutil.Logger(t).Leveled(slog.LevelDebug) + ctx := testutil.Context(t, testutil.WaitShort) + + orgID := uuid.New() + fClient := &fakeClient{ + t: t, + expectedTemplateName: "test-template", + expectedOrgID: orgID, + expectedTags: map[string]string{"foo": "bar"}, + matchedProvisioners: 1, + templateVersionJobStatus: codersdk.ProvisionerJobSucceeded, + } + mClock := quartz.NewMock(t) + trap := mClock.Trap().TickerFunc("waitForTemplateVersionJobs") + defer trap.Close() + uut := partitioner{ + ctx: ctx, + client: fClient, + orgID: orgID, + templateName: "test-template", + provisionerTags: map[string]string{"foo": "bar"}, + 
numEvals: 600, + logger: logger, + clock: mClock, + } + var partitions []Partition + errCh := make(chan error, 1) + go func() { + var err error + partitions, err = uut.run() + errCh <- err + }() + trap.MustWait(ctx).MustRelease(ctx) + mClock.Advance(time.Second * 2).MustWait(ctx) + err := testutil.RequireReceive(ctx, t, errCh) + require.NoError(t, err) + // 600 evaluations should be partitioned into 10 parts: []int{300, 150, 75, 38, 19, 9, 5, 2, 1, 1} + // c.f. TestPartitionEvaluations. That's 10 template versions and associated uploads. + require.Equal(t, 10, len(partitions)) + require.Equal(t, 10, fClient.templateVersionsCount) + require.Equal(t, 10, fClient.uploadsCount) + require.Equal(t, 1, fClient.templateByNameCount) + require.Equal(t, 0, fClient.createTemplateCount) +} + +func TestSetupPartitions_TemplateDoesntExist(t *testing.T) { + t.Parallel() + logger := testutil.Logger(t).Leveled(slog.LevelDebug) + ctx := testutil.Context(t, testutil.WaitShort) + + orgID := uuid.New() + fClient := &fakeClient{ + t: t, + expectedTemplateName: "test-template", + expectedOrgID: orgID, + templateByNameError: codersdk.NewTestError(http.StatusNotFound, "", ""), + matchedProvisioners: 1, + templateVersionJobStatus: codersdk.ProvisionerJobSucceeded, + } + mClock := quartz.NewMock(t) + trap := mClock.Trap().TickerFunc("waitForTemplateVersionJobs") + defer trap.Close() + uut := partitioner{ + ctx: ctx, + client: fClient, + orgID: orgID, + templateName: "test-template", + numEvals: 600, + logger: logger, + clock: mClock, + } + var partitions []Partition + errCh := make(chan error, 1) + go func() { + var err error + partitions, err = uut.run() + errCh <- err + }() + trap.MustWait(ctx).MustRelease(ctx) + mClock.Advance(time.Second * 2).MustWait(ctx) + err := testutil.RequireReceive(ctx, t, errCh) + require.NoError(t, err) + // 600 evaluations should be partitioned into 10 parts: []int{300, 150, 75, 38, 19, 9, 5, 2, 1, 1} + // c.f. TestPartitionEvaluations. 
That's 10 template versions and associated uploads. + require.Equal(t, 10, len(partitions)) + require.Equal(t, 10, fClient.templateVersionsCount) + require.Equal(t, 10, fClient.uploadsCount) + require.Equal(t, 1, fClient.templateByNameCount) + require.Equal(t, 1, fClient.createTemplateCount) +} + +func TestSetupPartitions_NoMatchedProvisioners(t *testing.T) { + t.Parallel() + logger := testutil.Logger(t).Leveled(slog.LevelDebug) + ctx := testutil.Context(t, testutil.WaitShort) + + orgID := uuid.New() + fClient := &fakeClient{ + t: t, + expectedTemplateName: "test-template", + expectedOrgID: orgID, + matchedProvisioners: 0, + templateVersionJobStatus: codersdk.ProvisionerJobSucceeded, + } + mClock := quartz.NewMock(t) + uut := partitioner{ + ctx: ctx, + client: fClient, + orgID: orgID, + templateName: "test-template", + numEvals: 600, + logger: logger, + clock: mClock, + } + errCh := make(chan error, 1) + go func() { + _, err := uut.run() + errCh <- err + }() + err := testutil.RequireReceive(ctx, t, errCh) + require.ErrorIs(t, err, ErrNoProvisionersMatched) + require.Equal(t, 1, fClient.templateVersionsCount) + require.Equal(t, 1, fClient.uploadsCount) + require.Equal(t, 1, fClient.templateByNameCount) + require.Equal(t, 0, fClient.createTemplateCount) +} + +func TestSetupPartitions_JobFailed(t *testing.T) { + t.Parallel() + logger := testutil.Logger(t).Leveled(slog.LevelDebug) + ctx := testutil.Context(t, testutil.WaitShort) + + orgID := uuid.New() + fClient := &fakeClient{ + t: t, + expectedTemplateName: "test-template", + expectedOrgID: orgID, + matchedProvisioners: 1, + templateVersionJobStatus: codersdk.ProvisionerJobFailed, + } + mClock := quartz.NewMock(t) + trap := mClock.Trap().TickerFunc("waitForTemplateVersionJobs") + defer trap.Close() + uut := partitioner{ + ctx: ctx, + client: fClient, + orgID: orgID, + templateName: "test-template", + numEvals: 600, + logger: logger, + clock: mClock, + } + errCh := make(chan error, 1) + go func() { + _, err := 
uut.run() + errCh <- err + }() + trap.MustWait(ctx).MustRelease(ctx) + mClock.Advance(time.Second * 2).MustWait(ctx) + err := testutil.RequireReceive(ctx, t, errCh) + require.ErrorAs(t, err, &ProvisionerJobUnexpectedStatusError{}) + require.Equal(t, 10, fClient.templateVersionsCount) + require.Equal(t, 10, fClient.uploadsCount) + require.Equal(t, 1, fClient.templateByNameCount) + require.Equal(t, 0, fClient.createTemplateCount) +} + +type fakeClient struct { + t testing.TB + + expectedTemplateName string + expectedOrgID uuid.UUID + templateByNameError error + + expectedTags map[string]string + matchedProvisioners int + templateVersionJobStatus codersdk.ProvisionerJobStatus + + createTemplateCount int + templateVersionsCount int + uploadsCount int + templateByNameCount int +} + +func (f *fakeClient) TemplateByName(ctx context.Context, orgID uuid.UUID, templateName string) (codersdk.Template, error) { + f.templateByNameCount++ + require.Equal(f.t, f.expectedOrgID, orgID) + require.Equal(f.t, f.expectedTemplateName, templateName) + + if f.templateByNameError != nil { + return codersdk.Template{}, f.templateByNameError + } + return codersdk.Template{ + ID: uuid.New(), + Name: f.expectedTemplateName, + }, nil +} + +func (f *fakeClient) CreateTemplate(ctx context.Context, orgID uuid.UUID, createReq codersdk.CreateTemplateRequest) (codersdk.Template, error) { + f.createTemplateCount++ + require.Equal(f.t, f.expectedOrgID, orgID) + require.Equal(f.t, f.expectedTemplateName, createReq.Name) + + return codersdk.Template{ + ID: uuid.New(), + Name: f.expectedTemplateName, + }, nil +} + +func (f *fakeClient) CreateTemplateVersion(ctx context.Context, orgID uuid.UUID, createReq codersdk.CreateTemplateVersionRequest) (codersdk.TemplateVersion, error) { + f.templateVersionsCount++ + require.Equal(f.t, f.expectedTags, createReq.ProvisionerTags) + return codersdk.TemplateVersion{ + ID: uuid.New(), + Name: f.expectedTemplateName, + MatchedProvisioners: 
&codersdk.MatchedProvisioners{Count: f.matchedProvisioners}, + }, nil +} + +func (f *fakeClient) Upload(ctx context.Context, contentType string, reader io.Reader) (codersdk.UploadResponse, error) { + f.uploadsCount++ + return codersdk.UploadResponse{ + ID: uuid.New(), + }, nil +} + +func (f *fakeClient) TemplateVersion(ctx context.Context, versionID uuid.UUID) (codersdk.TemplateVersion, error) { + return codersdk.TemplateVersion{ + ID: versionID, + Job: codersdk.ProvisionerJob{Status: f.templateVersionJobStatus}, + MatchedProvisioners: &codersdk.MatchedProvisioners{Count: f.matchedProvisioners}, + }, nil +} diff --git a/scaletest/notifications/config.go b/scaletest/notifications/config.go new file mode 100644 index 0000000000000..ac8daeb9ef9cb --- /dev/null +++ b/scaletest/notifications/config.go @@ -0,0 +1,73 @@ +package notifications + +import ( + "sync" + "time" + + "golang.org/x/xerrors" + + "github.com/google/uuid" + + "github.com/coder/coder/v2/scaletest/createusers" +) + +type Config struct { + // User is the configuration for the user to create. + User createusers.Config `json:"user"` + + // Roles are the roles to assign to the user. + Roles []string `json:"roles"` + + // NotificationTimeout is how long to wait for notifications after triggering. + NotificationTimeout time.Duration `json:"notification_timeout"` + + // DialTimeout is how long to wait for websocket connection. + DialTimeout time.Duration `json:"dial_timeout"` + + // ExpectedNotificationsIDs is the list of notification template IDs to expect. + ExpectedNotificationsIDs map[uuid.UUID]struct{} `json:"-"` + + Metrics *Metrics `json:"-"` + + // DialBarrier ensures all runners are connected before notifications are triggered. + DialBarrier *sync.WaitGroup `json:"-"` + + // ReceivingWatchBarrier is the barrier for receiving users. Regular users wait on this to disconnect after receiving users complete. 
+ ReceivingWatchBarrier *sync.WaitGroup `json:"-"` + + // SMTPApiUrl is the URL of the SMTP mock HTTP API + SMTPApiURL string `json:"smtp_api_url"` +} + +func (c Config) Validate() error { + // The runner always needs an org; ensure we propagate it into the user config. + if c.User.OrganizationID == uuid.Nil { + return xerrors.New("user organization_id must be set") + } + + if err := c.User.Validate(); err != nil { + return xerrors.Errorf("user config: %w", err) + } + + if c.DialBarrier == nil { + return xerrors.New("dial barrier must be set") + } + + if c.ReceivingWatchBarrier == nil { + return xerrors.New("receiving_watch_barrier must be set") + } + + if c.NotificationTimeout <= 0 { + return xerrors.New("notification_timeout must be greater than 0") + } + + if c.DialTimeout <= 0 { + return xerrors.New("dial_timeout must be greater than 0") + } + + if c.Metrics == nil { + return xerrors.New("metrics must be set") + } + + return nil +} diff --git a/scaletest/notifications/metrics.go b/scaletest/notifications/metrics.go new file mode 100644 index 0000000000000..0bf3ebad74044 --- /dev/null +++ b/scaletest/notifications/metrics.go @@ -0,0 +1,53 @@ +package notifications + +import ( + "time" + + "github.com/prometheus/client_golang/prometheus" +) + +type NotificationType string + +const ( + NotificationTypeWebsocket NotificationType = "websocket" + NotificationTypeSMTP NotificationType = "smtp" +) + +type Metrics struct { + notificationLatency *prometheus.HistogramVec + notificationErrors *prometheus.CounterVec +} + +func NewMetrics(reg prometheus.Registerer) *Metrics { + if reg == nil { + reg = prometheus.DefaultRegisterer + } + + latency := prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "notification_delivery_latency_seconds", + Help: "Time between notification-creating action and receipt of notification by client", + }, []string{"notification_id", "notification_type"}) + errors := 
prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "notification_delivery_errors_total", + Help: "Total number of notification delivery errors", + }, []string{"action"}) + + reg.MustRegister(latency, errors) + + return &Metrics{ + notificationLatency: latency, + notificationErrors: errors, + } +} + +func (m *Metrics) RecordLatency(latency time.Duration, notificationID string, notificationType NotificationType) { + m.notificationLatency.WithLabelValues(notificationID, string(notificationType)).Observe(latency.Seconds()) +} + +func (m *Metrics) AddError(action string) { + m.notificationErrors.WithLabelValues(action).Inc() +} diff --git a/scaletest/notifications/run.go b/scaletest/notifications/run.go new file mode 100644 index 0000000000000..abe844574659e --- /dev/null +++ b/scaletest/notifications/run.go @@ -0,0 +1,394 @@ +package notifications + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "maps" + "net/http" + "sync" + "time" + + "github.com/google/uuid" + "golang.org/x/sync/errgroup" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + + "github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/loadtestutil" + "github.com/coder/coder/v2/scaletest/smtpmock" + "github.com/coder/quartz" + "github.com/coder/websocket" +) + +type Runner struct { + client *codersdk.Client + cfg Config + + createUserRunner *createusers.Runner + + // websocketReceiptTimes stores the receipt time for websocket notifications + websocketReceiptTimes map[uuid.UUID]time.Time + websocketReceiptTimesMu sync.RWMutex + + // smtpReceiptTimes stores the receipt time for SMTP notifications + smtpReceiptTimes map[uuid.UUID]time.Time + smtpReceiptTimesMu sync.RWMutex + + clock quartz.Clock +} + +func NewRunner(client *codersdk.Client, 
cfg Config) *Runner { + return &Runner{ + client: client, + cfg: cfg, + websocketReceiptTimes: make(map[uuid.UUID]time.Time), + smtpReceiptTimes: make(map[uuid.UUID]time.Time), + clock: quartz.NewReal(), + } +} + +func (r *Runner) WithClock(clock quartz.Clock) *Runner { + r.clock = clock + return r +} + +var ( + _ harness.Runnable = &Runner{} + _ harness.Cleanable = &Runner{} + _ harness.Collectable = &Runner{} +) + +func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error { + ctx, span := tracing.StartSpan(ctx) + defer span.End() + + reachedBarrier := false + defer func() { + if !reachedBarrier { + r.cfg.DialBarrier.Done() + } + }() + + reachedReceivingWatchBarrier := false + defer func() { + if len(r.cfg.ExpectedNotificationsIDs) > 0 && !reachedReceivingWatchBarrier { + r.cfg.ReceivingWatchBarrier.Done() + } + }() + + logs = loadtestutil.NewSyncWriter(logs) + logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug) + r.client.SetLogger(logger) + r.client.SetLogBodies(true) + + r.createUserRunner = createusers.NewRunner(r.client, r.cfg.User) + newUserAndToken, err := r.createUserRunner.RunReturningUser(ctx, id, logs) + if err != nil { + r.cfg.Metrics.AddError("create_user") + return xerrors.Errorf("create user: %w", err) + } + newUser := newUserAndToken.User + newUserClient := codersdk.New(r.client.URL, + codersdk.WithSessionToken(newUserAndToken.SessionToken), + codersdk.WithLogger(logger), + codersdk.WithLogBodies()) + + logger.Info(ctx, "runner user created", slog.F("username", newUser.Username), slog.F("user_id", newUser.ID.String())) + + if len(r.cfg.Roles) > 0 { + logger.Info(ctx, "assigning roles to user", slog.F("roles", r.cfg.Roles)) + + _, err := r.client.UpdateUserRoles(ctx, newUser.ID.String(), codersdk.UpdateRoles{ + Roles: r.cfg.Roles, + }) + if err != nil { + r.cfg.Metrics.AddError("assign_roles") + return xerrors.Errorf("assign roles: %w", err) + } + } + + logger.Info(ctx, "notification runner is ready") + + dialCtx, 
cancel := context.WithTimeout(ctx, r.cfg.DialTimeout) + defer cancel() + + logger.Info(ctx, "connecting to notification websocket") + conn, err := r.dialNotificationWebsocket(dialCtx, newUserClient, logger) + if err != nil { + return xerrors.Errorf("dial notification websocket: %w", err) + } + defer conn.Close(websocket.StatusNormalClosure, "done") + logger.Info(ctx, "connected to notification websocket") + + reachedBarrier = true + r.cfg.DialBarrier.Done() + r.cfg.DialBarrier.Wait() + + if len(r.cfg.ExpectedNotificationsIDs) == 0 { + logger.Info(ctx, "maintaining websocket connection, waiting for receiving users to complete") + + // Wait for receiving users to complete + done := make(chan struct{}) + go func() { + r.cfg.ReceivingWatchBarrier.Wait() + close(done) + }() + + select { + case <-done: + logger.Info(ctx, "receiving users complete, closing connection") + case <-ctx.Done(): + logger.Info(ctx, "context canceled, closing connection") + } + return nil + } + + logger.Info(ctx, "waiting for notifications", slog.F("timeout", r.cfg.NotificationTimeout)) + + watchCtx, cancel := context.WithTimeout(ctx, r.cfg.NotificationTimeout) + defer cancel() + + eg, egCtx := errgroup.WithContext(watchCtx) + + eg.Go(func() error { + return r.watchNotifications(egCtx, conn, newUser, logger, r.cfg.ExpectedNotificationsIDs) + }) + + if r.cfg.SMTPApiURL != "" { + logger.Info(ctx, "running SMTP notification watcher") + eg.Go(func() error { + return r.watchNotificationsSMTP(egCtx, newUser, logger, r.cfg.ExpectedNotificationsIDs) + }) + } + + if err := eg.Wait(); err != nil { + return xerrors.Errorf("notification watch failed: %w", err) + } + + reachedReceivingWatchBarrier = true + r.cfg.ReceivingWatchBarrier.Done() + + return nil +} + +func (r *Runner) Cleanup(ctx context.Context, id string, logs io.Writer) error { + if r.createUserRunner != nil { + _, _ = fmt.Fprintln(logs, "Cleaning up user...") + if err := r.createUserRunner.Cleanup(ctx, id, logs); err != nil { + return 
xerrors.Errorf("cleanup user: %w", err) + } + } + + return nil +} + +const ( + WebsocketNotificationReceiptTimeMetric = "notification_websocket_receipt_time" + SMTPNotificationReceiptTimeMetric = "notification_smtp_receipt_time" +) + +func (r *Runner) GetMetrics() map[string]any { + r.websocketReceiptTimesMu.RLock() + websocketReceiptTimes := maps.Clone(r.websocketReceiptTimes) + r.websocketReceiptTimesMu.RUnlock() + + r.smtpReceiptTimesMu.RLock() + smtpReceiptTimes := maps.Clone(r.smtpReceiptTimes) + r.smtpReceiptTimesMu.RUnlock() + + return map[string]any{ + WebsocketNotificationReceiptTimeMetric: websocketReceiptTimes, + SMTPNotificationReceiptTimeMetric: smtpReceiptTimes, + } +} + +func (r *Runner) dialNotificationWebsocket(ctx context.Context, client *codersdk.Client, logger slog.Logger) (*websocket.Conn, error) { + u, err := client.URL.Parse("/api/v2/notifications/inbox/watch") + if err != nil { + logger.Error(ctx, "parse notification URL", slog.Error(err)) + r.cfg.Metrics.AddError("parse_url") + return nil, xerrors.Errorf("parse notification URL: %w", err) + } + + conn, resp, err := websocket.Dial(ctx, u.String(), &websocket.DialOptions{ + HTTPHeader: http.Header{ + "Coder-Session-Token": []string{client.SessionToken()}, + }, + }) + if err != nil { + if resp != nil { + defer resp.Body.Close() + if resp.StatusCode != http.StatusSwitchingProtocols { + err = codersdk.ReadBodyAsError(resp) + } + } + logger.Error(ctx, "dial notification websocket", slog.Error(err)) + r.cfg.Metrics.AddError("dial") + return nil, xerrors.Errorf("dial notification websocket: %w", err) + } + + return conn, nil +} + +// watchNotifications reads notifications from the websocket and returns error or nil +// once all expected notifications are received. 
+func (r *Runner) watchNotifications(ctx context.Context, conn *websocket.Conn, user codersdk.User, logger slog.Logger, expectedNotifications map[uuid.UUID]struct{}) error { + logger.Info(ctx, "waiting for notifications", + slog.F("username", user.Username), + slog.F("expected_count", len(expectedNotifications))) + + receivedNotifications := make(map[uuid.UUID]struct{}) + + for { + select { + case <-ctx.Done(): + return xerrors.Errorf("context canceled while waiting for notifications: %w", ctx.Err()) + default: + } + + if len(receivedNotifications) == len(expectedNotifications) { + logger.Info(ctx, "received all expected notifications") + return nil + } + + notif, err := readNotification(ctx, conn) + if err != nil { + logger.Error(ctx, "read notification", slog.Error(err)) + r.cfg.Metrics.AddError("read_notification_websocket") + return xerrors.Errorf("read notification: %w", err) + } + + templateID := notif.Notification.TemplateID + if _, exists := expectedNotifications[templateID]; exists { + if _, received := receivedNotifications[templateID]; !received { + receiptTime := time.Now() + r.websocketReceiptTimesMu.Lock() + r.websocketReceiptTimes[templateID] = receiptTime + r.websocketReceiptTimesMu.Unlock() + receivedNotifications[templateID] = struct{}{} + + logger.Info(ctx, "received expected notification", + slog.F("template_id", templateID), + slog.F("title", notif.Notification.Title), + slog.F("receipt_time", receiptTime)) + } + } else { + logger.Debug(ctx, "received notification not being tested", + slog.F("template_id", templateID), + slog.F("title", notif.Notification.Title)) + } + } +} + +// watchNotificationsSMTP polls the SMTP HTTP API for notifications and returns error or nil +// once all expected notifications are received. 
+func (r *Runner) watchNotificationsSMTP(ctx context.Context, user codersdk.User, logger slog.Logger, expectedNotifications map[uuid.UUID]struct{}) error { + logger.Info(ctx, "polling SMTP API for notifications", + slog.F("email", user.Email), + slog.F("expected_count", len(expectedNotifications)), + ) + receivedNotifications := make(map[uuid.UUID]struct{}) + + apiURL := fmt.Sprintf("%s/messages?email=%s", r.cfg.SMTPApiURL, user.Email) + httpClient := &http.Client{ + Timeout: 10 * time.Second, + } + + const smtpPollInterval = 2 * time.Second + done := xerrors.New("done") + + tkr := r.clock.TickerFunc(ctx, smtpPollInterval, func() error { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, apiURL, nil) + if err != nil { + logger.Error(ctx, "create SMTP API request", slog.Error(err)) + r.cfg.Metrics.AddError("smtp_create_request") + return xerrors.Errorf("create SMTP API request: %w", err) + } + + resp, err := httpClient.Do(req) + if err != nil { + logger.Error(ctx, "poll smtp api for notifications", slog.Error(err)) + r.cfg.Metrics.AddError("smtp_poll") + return xerrors.Errorf("poll smtp api: %w", err) + } + + if resp.StatusCode != http.StatusOK { + _ = resp.Body.Close() + logger.Error(ctx, "smtp api returned non-200 status", slog.F("status", resp.StatusCode)) + r.cfg.Metrics.AddError("smtp_bad_status") + return xerrors.Errorf("smtp api returned status %d", resp.StatusCode) + } + + var summaries []smtpmock.EmailSummary + if err := json.NewDecoder(resp.Body).Decode(&summaries); err != nil { + _ = resp.Body.Close() + logger.Error(ctx, "decode smtp api response", slog.Error(err)) + r.cfg.Metrics.AddError("smtp_decode") + return xerrors.Errorf("decode smtp api response: %w", err) + } + _ = resp.Body.Close() + + // Process each email summary + for _, summary := range summaries { + notificationID := summary.NotificationTemplateID + if notificationID == uuid.Nil { + continue + } + + if _, exists := expectedNotifications[notificationID]; exists { + if _, received 
:= receivedNotifications[notificationID]; !received { + receiptTime := summary.Date + if receiptTime.IsZero() { + receiptTime = time.Now() + } + + r.smtpReceiptTimesMu.Lock() + r.smtpReceiptTimes[notificationID] = receiptTime + r.smtpReceiptTimesMu.Unlock() + receivedNotifications[notificationID] = struct{}{} + + logger.Info(ctx, "received expected notification via SMTP", + slog.F("notification_id", notificationID), + slog.F("subject", summary.Subject), + slog.F("receipt_time", receiptTime)) + } + } + } + + if len(receivedNotifications) == len(expectedNotifications) { + logger.Info(ctx, "received all expected notifications via SMTP") + return done + } + + return nil + }, "smtp") + + err := tkr.Wait() + if errors.Is(err, done) { + return nil + } + + return err +} + +func readNotification(ctx context.Context, conn *websocket.Conn) (codersdk.GetInboxNotificationResponse, error) { + _, message, err := conn.Read(ctx) + if err != nil { + return codersdk.GetInboxNotificationResponse{}, err + } + + var notif codersdk.GetInboxNotificationResponse + if err := json.Unmarshal(message, ¬if); err != nil { + return codersdk.GetInboxNotificationResponse{}, xerrors.Errorf("unmarshal notification: %w", err) + } + + return notif, nil +} diff --git a/scaletest/notifications/run_test.go b/scaletest/notifications/run_test.go new file mode 100644 index 0000000000000..1e198e9edd91d --- /dev/null +++ b/scaletest/notifications/run_test.go @@ -0,0 +1,343 @@ +package notifications_test + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strconv" + "sync" + "testing" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/sync/errgroup" + + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + notificationsLib "github.com/coder/coder/v2/coderd/notifications" + 
"github.com/coder/coder/v2/coderd/notifications/dispatch" + "github.com/coder/coder/v2/coderd/notifications/types" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/createusers" + "github.com/coder/coder/v2/scaletest/notifications" + "github.com/coder/coder/v2/scaletest/smtpmock" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" +) + +func TestRun(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + logger := testutil.Logger(t) + db, ps := dbtestutil.NewDB(t) + + inboxHandler := dispatch.NewInboxHandler(logger.Named("inbox"), db, ps) + + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, + }) + firstUser := coderdtest.CreateFirstUser(t, client) + + const numReceivingUsers = 2 + const numRegularUsers = 2 + dialBarrier := new(sync.WaitGroup) + receivingWatchBarrier := new(sync.WaitGroup) + dialBarrier.Add(numReceivingUsers + numRegularUsers) + receivingWatchBarrier.Add(numReceivingUsers) + metrics := notifications.NewMetrics(prometheus.NewRegistry()) + + eg, runCtx := errgroup.WithContext(ctx) + + expectedNotificationsIDs := map[uuid.UUID]struct{}{ + notificationsLib.TemplateUserAccountCreated: {}, + notificationsLib.TemplateUserAccountDeleted: {}, + } + + // Start receiving runners who will receive notifications + receivingRunners := make([]*notifications.Runner, 0, numReceivingUsers) + for i := range numReceivingUsers { + runnerCfg := notifications.Config{ + User: createusers.Config{ + OrganizationID: firstUser.OrganizationID, + Username: "receiving-user-" + strconv.Itoa(i), + }, + Roles: []string{codersdk.RoleOwner}, + NotificationTimeout: testutil.WaitLong, + DialTimeout: testutil.WaitLong, + Metrics: metrics, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: receivingWatchBarrier, + ExpectedNotificationsIDs: expectedNotificationsIDs, + } + err := runnerCfg.Validate() + require.NoError(t, err) + + runner := notifications.NewRunner(client, runnerCfg) + 
receivingRunners = append(receivingRunners, runner) + eg.Go(func() error { + return runner.Run(runCtx, "receiving-"+strconv.Itoa(i), io.Discard) + }) + } + + // Start regular user runners who will maintain websocket connections + regularRunners := make([]*notifications.Runner, 0, numRegularUsers) + for i := range numRegularUsers { + runnerCfg := notifications.Config{ + User: createusers.Config{ + OrganizationID: firstUser.OrganizationID, + }, + Roles: []string{}, + NotificationTimeout: testutil.WaitLong, + DialTimeout: testutil.WaitLong, + Metrics: metrics, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: receivingWatchBarrier, + } + err := runnerCfg.Validate() + require.NoError(t, err) + + runner := notifications.NewRunner(client, runnerCfg) + regularRunners = append(regularRunners, runner) + eg.Go(func() error { + return runner.Run(runCtx, "regular-"+strconv.Itoa(i), io.Discard) + }) + } + + // Trigger notifications by creating and deleting a user + eg.Go(func() error { + // Wait for all runners to connect + dialBarrier.Wait() + + for i := 0; i < numReceivingUsers; i++ { + err := sendInboxNotification(runCtx, t, db, inboxHandler, "receiving-user-"+strconv.Itoa(i), notificationsLib.TemplateUserAccountCreated) + require.NoError(t, err) + err = sendInboxNotification(runCtx, t, db, inboxHandler, "receiving-user-"+strconv.Itoa(i), notificationsLib.TemplateUserAccountDeleted) + require.NoError(t, err) + } + + return nil + }) + + err := eg.Wait() + require.NoError(t, err, "runner execution should complete successfully") + + cleanupEg, cleanupCtx := errgroup.WithContext(ctx) + for i, runner := range receivingRunners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, "receiving-"+strconv.Itoa(i), io.Discard) + }) + } + for i, runner := range regularRunners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, "regular-"+strconv.Itoa(i), io.Discard) + }) + } + err = cleanupEg.Wait() + require.NoError(t, err) + + users, err := 
client.Users(ctx, codersdk.UsersRequest{}) + require.NoError(t, err) + require.Len(t, users.Users, 1) + require.Equal(t, firstUser.UserID, users.Users[0].ID) + + for _, runner := range receivingRunners { + metrics := runner.GetMetrics() + websocketReceiptTimes := metrics[notifications.WebsocketNotificationReceiptTimeMetric].(map[uuid.UUID]time.Time) + + require.Contains(t, websocketReceiptTimes, notificationsLib.TemplateUserAccountCreated) + require.Contains(t, websocketReceiptTimes, notificationsLib.TemplateUserAccountDeleted) + } +} + +func TestRunWithSMTP(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + logger := testutil.Logger(t) + db, ps := dbtestutil.NewDB(t) + + inboxHandler := dispatch.NewInboxHandler(logger.Named("inbox"), db, ps) + + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, + }) + firstUser := coderdtest.CreateFirstUser(t, client) + + smtpAPIMux := http.NewServeMux() + smtpAPIMux.HandleFunc("/messages", func(w http.ResponseWriter, r *http.Request) { + summaries := []smtpmock.EmailSummary{ + { + Subject: "TemplateUserAccountCreated", + Date: time.Now(), + NotificationTemplateID: notificationsLib.TemplateUserAccountCreated, + }, + { + Subject: "TemplateUserAccountDeleted", + Date: time.Now(), + NotificationTemplateID: notificationsLib.TemplateUserAccountDeleted, + }, + } + + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(summaries) + }) + + smtpAPIServer := httptest.NewServer(smtpAPIMux) + defer smtpAPIServer.Close() + + const numReceivingUsers = 2 + const numRegularUsers = 2 + dialBarrier := new(sync.WaitGroup) + receivingWatchBarrier := new(sync.WaitGroup) + dialBarrier.Add(numReceivingUsers + numRegularUsers) + receivingWatchBarrier.Add(numReceivingUsers) + metrics := notifications.NewMetrics(prometheus.NewRegistry()) + + eg, runCtx := errgroup.WithContext(ctx) + + expectedNotificationsIDs := map[uuid.UUID]struct{}{ + 
notificationsLib.TemplateUserAccountCreated: {}, + notificationsLib.TemplateUserAccountDeleted: {}, + } + + mClock := quartz.NewMock(t) + smtpTrap := mClock.Trap().TickerFunc("smtp") + defer smtpTrap.Close() + + // Start receiving runners who will receive notifications + receivingRunners := make([]*notifications.Runner, 0, numReceivingUsers) + for i := range numReceivingUsers { + runnerCfg := notifications.Config{ + User: createusers.Config{ + OrganizationID: firstUser.OrganizationID, + Username: "receiving-user-" + strconv.Itoa(i), + }, + Roles: []string{codersdk.RoleOwner}, + NotificationTimeout: testutil.WaitLong, + DialTimeout: testutil.WaitLong, + Metrics: metrics, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: receivingWatchBarrier, + ExpectedNotificationsIDs: expectedNotificationsIDs, + SMTPApiURL: smtpAPIServer.URL, + } + err := runnerCfg.Validate() + require.NoError(t, err) + + runner := notifications.NewRunner(client, runnerCfg).WithClock(mClock) + receivingRunners = append(receivingRunners, runner) + eg.Go(func() error { + return runner.Run(runCtx, "receiving-"+strconv.Itoa(i), io.Discard) + }) + } + + // Start regular user runners who will maintain websocket connections + regularRunners := make([]*notifications.Runner, 0, numRegularUsers) + for i := range numRegularUsers { + runnerCfg := notifications.Config{ + User: createusers.Config{ + OrganizationID: firstUser.OrganizationID, + }, + Roles: []string{}, + NotificationTimeout: testutil.WaitLong, + DialTimeout: testutil.WaitLong, + Metrics: metrics, + DialBarrier: dialBarrier, + ReceivingWatchBarrier: receivingWatchBarrier, + } + err := runnerCfg.Validate() + require.NoError(t, err) + + runner := notifications.NewRunner(client, runnerCfg) + regularRunners = append(regularRunners, runner) + eg.Go(func() error { + return runner.Run(runCtx, "regular-"+strconv.Itoa(i), io.Discard) + }) + } + + // Trigger notifications by creating and deleting a user + eg.Go(func() error { + // Wait for all runners to 
connect + dialBarrier.Wait() + + for i := 0; i < numReceivingUsers; i++ { + smtpTrap.MustWait(runCtx).MustRelease(runCtx) + } + + for i := 0; i < numReceivingUsers; i++ { + err := sendInboxNotification(runCtx, t, db, inboxHandler, "receiving-user-"+strconv.Itoa(i), notificationsLib.TemplateUserAccountCreated) + require.NoError(t, err) + err = sendInboxNotification(runCtx, t, db, inboxHandler, "receiving-user-"+strconv.Itoa(i), notificationsLib.TemplateUserAccountDeleted) + require.NoError(t, err) + } + + _, w := mClock.AdvanceNext() + w.MustWait(runCtx) + + return nil + }) + + err := eg.Wait() + require.NoError(t, err, "runner execution with SMTP should complete successfully") + + cleanupEg, cleanupCtx := errgroup.WithContext(ctx) + for i, runner := range receivingRunners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, "receiving-"+strconv.Itoa(i), io.Discard) + }) + } + for i, runner := range regularRunners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, "regular-"+strconv.Itoa(i), io.Discard) + }) + } + err = cleanupEg.Wait() + require.NoError(t, err) + + users, err := client.Users(ctx, codersdk.UsersRequest{}) + require.NoError(t, err) + require.Len(t, users.Users, 1) + require.Equal(t, firstUser.UserID, users.Users[0].ID) + + // Verify that notifications were received via both websocket and SMTP + for _, runner := range receivingRunners { + metrics := runner.GetMetrics() + websocketReceiptTimes := metrics[notifications.WebsocketNotificationReceiptTimeMetric].(map[uuid.UUID]time.Time) + smtpReceiptTimes := metrics[notifications.SMTPNotificationReceiptTimeMetric].(map[uuid.UUID]time.Time) + + require.Contains(t, websocketReceiptTimes, notificationsLib.TemplateUserAccountCreated) + require.Contains(t, websocketReceiptTimes, notificationsLib.TemplateUserAccountDeleted) + require.Contains(t, smtpReceiptTimes, notificationsLib.TemplateUserAccountCreated) + require.Contains(t, smtpReceiptTimes, 
notificationsLib.TemplateUserAccountDeleted) + } +} + +func sendInboxNotification(ctx context.Context, t *testing.T, db database.Store, inboxHandler *dispatch.InboxHandler, username string, templateID uuid.UUID) error { + user, err := db.GetUserByEmailOrUsername(ctx, database.GetUserByEmailOrUsernameParams{ + Username: username, + }) + require.NoError(t, err) + + dispatchFunc, err := inboxHandler.Dispatcher(types.MessagePayload{ + UserID: user.ID.String(), + NotificationTemplateID: templateID.String(), + }, "", "", nil) + if err != nil { + return err + } + + _, err = dispatchFunc(ctx, uuid.New()) + if err != nil { + return err + } + + return nil +} diff --git a/scaletest/smtpmock/server.go b/scaletest/smtpmock/server.go new file mode 100644 index 0000000000000..26f5b65ffbfb5 --- /dev/null +++ b/scaletest/smtpmock/server.go @@ -0,0 +1,247 @@ +package smtpmock + +import ( + "bufio" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "mime/quotedprintable" + "net" + "net/http" + "net/mail" + "regexp" + "slices" + "strings" + "time" + + "github.com/google/uuid" + smtpmocklib "github.com/mocktools/go-smtp-mock/v2" + "golang.org/x/xerrors" + + "cdr.dev/slog" +) + +// Server wraps the SMTP mock server and provides an HTTP API to retrieve emails. 
+type Server struct { + smtpServer *smtpmocklib.Server + httpServer *http.Server + httpListener net.Listener + logger slog.Logger + + hostAddress string + smtpPort int + apiPort int +} + +type Config struct { + HostAddress string + SMTPPort int + APIPort int + Logger slog.Logger +} + +type EmailSummary struct { + Subject string `json:"subject"` + Date time.Time `json:"date"` + NotificationTemplateID uuid.UUID `json:"notification_template_id,omitempty"` +} + +var notificationTemplateIDRegex = regexp.MustCompile(`notifications\?disabled=([a-f0-9-]+)`) + +func (s *Server) Start(ctx context.Context, cfg Config) error { + s.hostAddress = cfg.HostAddress + s.smtpPort = cfg.SMTPPort + s.apiPort = cfg.APIPort + s.logger = cfg.Logger + + s.smtpServer = smtpmocklib.New(smtpmocklib.ConfigurationAttr{ + LogToStdout: false, + LogServerActivity: true, + HostAddress: s.hostAddress, + PortNumber: s.smtpPort, + }) + if err := s.smtpServer.Start(); err != nil { + return xerrors.Errorf("start SMTP server: %w", err) + } + s.smtpPort = s.smtpServer.PortNumber() + + if err := s.startAPIServer(ctx); err != nil { + _ = s.smtpServer.Stop() + return xerrors.Errorf("start API server: %w", err) + } + + return nil +} + +func (s *Server) Stop() error { + var httpErr, smtpErr error + + if s.httpServer != nil { + shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if err := s.httpServer.Shutdown(shutdownCtx); err != nil { + httpErr = xerrors.Errorf("shutdown HTTP server: %w", err) + } + } + + if s.smtpServer != nil { + if err := s.smtpServer.Stop(); err != nil { + smtpErr = xerrors.Errorf("stop SMTP server: %w", err) + } + } + + return errors.Join(httpErr, smtpErr) +} + +func (s *Server) SMTPAddress() string { + return fmt.Sprintf("%s:%d", s.hostAddress, s.smtpPort) +} + +func (s *Server) APIAddress() string { + return fmt.Sprintf("http://%s:%d", s.hostAddress, s.apiPort) +} + +func (s *Server) MessageCount() int { + if s.smtpServer == nil { + return 
0 + } + return len(s.smtpServer.Messages()) +} + +func (s *Server) Purge() { + if s.smtpServer != nil { + s.smtpServer.MessagesAndPurge() + } +} + +func (s *Server) startAPIServer(ctx context.Context) error { + mux := http.NewServeMux() + mux.HandleFunc("POST /purge", s.handlePurge) + mux.HandleFunc("GET /messages", s.handleMessages) + + s.httpServer = &http.Server{ + Handler: mux, + ReadHeaderTimeout: 10 * time.Second, + } + + listener, err := net.Listen("tcp", fmt.Sprintf("%s:%d", s.hostAddress, s.apiPort)) + if err != nil { + return xerrors.Errorf("listen on %s:%d: %w", s.hostAddress, s.apiPort, err) + } + s.httpListener = listener + + tcpAddr, valid := listener.Addr().(*net.TCPAddr) + if !valid { + err := listener.Close() + if err != nil { + s.logger.Error(ctx, "failed to close listener", slog.Error(err)) + } + return xerrors.Errorf("listener returned invalid address: %T", listener.Addr()) + } + s.apiPort = tcpAddr.Port + + go func() { + if err := s.httpServer.Serve(listener); err != nil && !errors.Is(err, http.ErrServerClosed) { + s.logger.Error(ctx, "http API server error", slog.Error(err)) + } + }() + + return nil +} + +func (s *Server) handlePurge(w http.ResponseWriter, _ *http.Request) { + s.smtpServer.MessagesAndPurge() + w.WriteHeader(http.StatusOK) +} + +func (s *Server) handleMessages(w http.ResponseWriter, r *http.Request) { + email := r.URL.Query().Get("email") + msgs := s.smtpServer.Messages() + + var summaries []EmailSummary + for _, msg := range msgs { + recipients := msg.RcpttoRequestResponse() + if !matchesRecipient(recipients, email) { + continue + } + + summary, err := parseEmailSummary(msg.MsgRequest()) + if err != nil { + s.logger.Warn(r.Context(), "failed to parse email summary", slog.Error(err)) + continue + } + summaries = append(summaries, summary) + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(summaries); err != nil { + s.logger.Warn(r.Context(), "failed to encode JSON response", 
slog.Error(err)) + } +} + +func matchesRecipient(recipients [][]string, email string) bool { + if email == "" { + return true + } + return slices.ContainsFunc(recipients, func(rcptPair []string) bool { + if len(rcptPair) == 0 { + return false + } + + addrPart, ok := strings.CutPrefix(rcptPair[0], "RCPT TO:") + if !ok { + return false + } + + addr, err := mail.ParseAddress(addrPart) + if err != nil { + return false + } + + return strings.EqualFold(addr.Address, email) + }) +} + +func parseEmailSummary(message string) (EmailSummary, error) { + var summary EmailSummary + + // Decode quoted-printable message + reader := quotedprintable.NewReader(strings.NewReader(message)) + content, err := io.ReadAll(reader) + if err != nil { + return summary, xerrors.Errorf("decode email content: %w", err) + } + + contentStr := string(content) + scanner := bufio.NewScanner(strings.NewReader(contentStr)) + + // Extract Subject and Date from headers. + // Date is used to measure latency. + for scanner.Scan() { + line := scanner.Text() + if line == "" { + break + } + if prefix, found := strings.CutPrefix(line, "Subject: "); found { + summary.Subject = prefix + } else if prefix, found := strings.CutPrefix(line, "Date: "); found { + if parsedDate, err := time.Parse(time.RFC1123Z, prefix); err == nil { + summary.Date = parsedDate + } + } + } + + // Extract notification ID from decoded email content + // Notification ID is present in the email footer like this + //

Stop receiving emails like this

+ if matches := notificationTemplateIDRegex.FindStringSubmatch(contentStr); len(matches) > 1 { + summary.NotificationTemplateID, err = uuid.Parse(matches[1]) + if err != nil { + return summary, xerrors.Errorf("parse notification ID: %w", err) + } + } + + return summary, nil +} diff --git a/scaletest/smtpmock/server_test.go b/scaletest/smtpmock/server_test.go new file mode 100644 index 0000000000000..7136c5ab9ee59 --- /dev/null +++ b/scaletest/smtpmock/server_test.go @@ -0,0 +1,203 @@ +package smtpmock_test + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/smtp" + "strings" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/scaletest/smtpmock" + "github.com/coder/coder/v2/testutil" +) + +func TestServer_StartStop(t *testing.T) { + t.Parallel() + + ctx := context.Background() + srv := new(smtpmock.Server) + err := srv.Start(ctx, smtpmock.Config{ + HostAddress: "127.0.0.1", + SMTPPort: 0, + APIPort: 0, + Logger: slogtest.Make(t, nil), + }) + require.NoError(t, err) + require.NotEmpty(t, srv.SMTPAddress()) + require.NotEmpty(t, srv.APIAddress()) + + err = srv.Stop() + require.NoError(t, err) +} + +func TestServer_SendAndReceiveEmail(t *testing.T) { + t.Parallel() + + ctx := context.Background() + srv := new(smtpmock.Server) + err := srv.Start(ctx, smtpmock.Config{ + HostAddress: "127.0.0.1", + SMTPPort: 0, + APIPort: 0, + Logger: slogtest.Make(t, nil), + }) + require.NoError(t, err) + defer srv.Stop() + + err = sendTestEmail(srv.SMTPAddress(), "test@example.com", "Test Subject", "Test Body") + require.NoError(t, err) + + require.Eventually(t, func() bool { + return srv.MessageCount() == 1 + }, testutil.WaitShort, testutil.IntervalMedium) + + url := fmt.Sprintf("%s/messages", srv.APIAddress()) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + require.NoError(t, err) + + resp, err := http.DefaultClient.Do(req) + 
require.NoError(t, err) + defer resp.Body.Close() + + require.Equal(t, http.StatusOK, resp.StatusCode) + + var summaries []smtpmock.EmailSummary + err = json.NewDecoder(resp.Body).Decode(&summaries) + require.NoError(t, err) + require.Len(t, summaries, 1) + require.Equal(t, "Test Subject", summaries[0].Subject) +} + +func TestServer_FilterByEmail(t *testing.T) { + t.Parallel() + + ctx := context.Background() + srv := new(smtpmock.Server) + err := srv.Start(ctx, smtpmock.Config{ + HostAddress: "127.0.0.1", + SMTPPort: 0, + APIPort: 0, + Logger: slogtest.Make(t, nil), + }) + require.NoError(t, err) + defer srv.Stop() + + err = sendTestEmail(srv.SMTPAddress(), "admin@coder.com", "Email for admin", "Body 1") + require.NoError(t, err) + + err = sendTestEmail(srv.SMTPAddress(), "test-user@coder.com", "Email for test-user", "Body 2") + require.NoError(t, err) + + require.Eventually(t, func() bool { + return srv.MessageCount() == 2 + }, testutil.WaitShort, testutil.IntervalMedium) + + url := fmt.Sprintf("%s/messages?email=admin@coder.com", srv.APIAddress()) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + require.NoError(t, err) + + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + defer resp.Body.Close() + + var summaries []smtpmock.EmailSummary + err = json.NewDecoder(resp.Body).Decode(&summaries) + require.NoError(t, err) + require.Len(t, summaries, 1) + require.Equal(t, "Email for admin", summaries[0].Subject) +} + +func TestServer_NotificationTemplateID(t *testing.T) { + t.Parallel() + + ctx := context.Background() + srv := new(smtpmock.Server) + err := srv.Start(ctx, smtpmock.Config{ + HostAddress: "127.0.0.1", + SMTPPort: 0, + APIPort: 0, + Logger: slogtest.Make(t, nil), + }) + require.NoError(t, err) + defer srv.Stop() + + notificationID := uuid.New() + body := fmt.Sprintf(`

Unsubscribe

`, notificationID.String()) + + err = sendTestEmail(srv.SMTPAddress(), "test-user@coder.com", "Notification", body) + require.NoError(t, err) + + require.Eventually(t, func() bool { + return srv.MessageCount() == 1 + }, testutil.WaitShort, testutil.IntervalMedium) + + url := fmt.Sprintf("%s/messages", srv.APIAddress()) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + require.NoError(t, err) + + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + defer resp.Body.Close() + + var summaries []smtpmock.EmailSummary + err = json.NewDecoder(resp.Body).Decode(&summaries) + require.NoError(t, err) + require.Len(t, summaries, 1) + require.Equal(t, notificationID, summaries[0].NotificationTemplateID) +} + +func TestServer_Purge(t *testing.T) { + t.Parallel() + + ctx := context.Background() + srv := new(smtpmock.Server) + err := srv.Start(ctx, smtpmock.Config{ + HostAddress: "127.0.0.1", + SMTPPort: 0, + APIPort: 0, + Logger: slogtest.Make(t, nil), + }) + require.NoError(t, err) + defer srv.Stop() + + err = sendTestEmail(srv.SMTPAddress(), "test-user@coder.com", "Test", "Body") + require.NoError(t, err) + + require.Eventually(t, func() bool { + return srv.MessageCount() == 1 + }, testutil.WaitShort, testutil.IntervalMedium) + + url := fmt.Sprintf("%s/purge", srv.APIAddress()) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, nil) + require.NoError(t, err) + + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + defer resp.Body.Close() + require.Equal(t, http.StatusOK, resp.StatusCode) + + require.Equal(t, 0, srv.MessageCount()) +} + +func sendTestEmail(smtpAddr, to, subject, body string) error { + from := "noreply@coder.com" + now := time.Now().Format(time.RFC1123Z) + + msg := strings.Builder{} + _, _ = msg.WriteString(fmt.Sprintf("From: %s\r\n", from)) + _, _ = msg.WriteString(fmt.Sprintf("To: %s\r\n", to)) + _, _ = msg.WriteString(fmt.Sprintf("Subject: %s\r\n", subject)) + _, _ = 
msg.WriteString(fmt.Sprintf("Date: %s\r\n", now)) + _, _ = msg.WriteString("Content-Type: text/html; charset=UTF-8\r\n") + _, _ = msg.WriteString("\r\n") + _, _ = msg.WriteString(body) + + return smtp.SendMail(smtpAddr, nil, from, []string{to}, []byte(msg.String())) +} diff --git a/scaletest/workspacebuild/config.go b/scaletest/workspacebuild/config.go index 90184dacf84e8..105883ba3b7a8 100644 --- a/scaletest/workspacebuild/config.go +++ b/scaletest/workspacebuild/config.go @@ -19,6 +19,9 @@ type Config struct { // NoWaitForAgents determines whether the test should wait for the workspace // agents to connect before returning. NoWaitForAgents bool `json:"no_wait_for_agents"` + // NoWaitForBuild determines whether the test should wait for the workspace + // build to complete before returning. + NoWaitForBuild bool `json:"no_wait_for_build"` // Retry determines how many times to retry starting a workspace build if it // fails. Retry int `json:"retry"` diff --git a/scaletest/workspacebuild/run.go b/scaletest/workspacebuild/run.go index c9a59a18d69cb..308c18f0b6a03 100644 --- a/scaletest/workspacebuild/run.go +++ b/scaletest/workspacebuild/run.go @@ -58,27 +58,31 @@ func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.W } r.workspaceID = workspace.ID - err = waitForBuild(ctx, logs, r.client, workspace.LatestBuild.ID) - if err != nil { - for i := 0; i < r.cfg.Retry; i++ { - _, _ = fmt.Fprintf(logs, "Retrying build %d/%d...\n", i+1, r.cfg.Retry) - - workspace.LatestBuild, err = r.client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{ - Transition: codersdk.WorkspaceTransitionStart, - RichParameterValues: req.RichParameterValues, - TemplateVersionID: req.TemplateVersionID, - }) - if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("create workspace build: %w", err) + if r.cfg.NoWaitForBuild { + _, _ = fmt.Fprintln(logs, "Skipping waiting for build") + } else { + err = waitForBuild(ctx, logs, r.client, 
workspace.LatestBuild.ID) + if err != nil { + for i := 0; i < r.cfg.Retry; i++ { + _, _ = fmt.Fprintf(logs, "Retrying build %d/%d...\n", i+1, r.cfg.Retry) + + workspace.LatestBuild, err = r.client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionStart, + RichParameterValues: req.RichParameterValues, + TemplateVersionID: req.TemplateVersionID, + }) + if err != nil { + return codersdk.Workspace{}, xerrors.Errorf("create workspace build: %w", err) + } + err = waitForBuild(ctx, logs, r.client, workspace.LatestBuild.ID) + if err == nil { + break + } } - err = waitForBuild(ctx, logs, r.client, workspace.LatestBuild.ID) - if err == nil { - break + if err != nil { + return codersdk.Workspace{}, xerrors.Errorf("wait for build: %w", err) } } - if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("wait for build: %w", err) - } } if r.cfg.NoWaitForAgents { diff --git a/scaletest/workspaceupdates/run_test.go b/scaletest/workspaceupdates/run_test.go index 01e4b7f621bd1..b31a6050dbbad 100644 --- a/scaletest/workspaceupdates/run_test.go +++ b/scaletest/workspaceupdates/run_test.go @@ -24,7 +24,7 @@ import ( func TestRun(t *testing.T) { t.Parallel() - ctx := testutil.Context(t, testutil.WaitMedium) + ctx := testutil.Context(t, testutil.WaitSuperLong) client := coderdtest.New(t, &coderdtest.Options{ IncludeProvisionerDaemon: true, diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 53c999301e410..d14d88e1a544d 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -1,7 +1,7 @@ # This is the base image used for Coder images. It's a multi-arch image that is # built in depot.dev for all supported architectures. Since it's built on real # hardware and not cross-compiled, it can have "RUN" commands. 
-FROM alpine:3.21.3@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c +FROM alpine:3.22.2@sha256:4b7ce07002c69e8f3d704a9c5d6fd3053be500b7f1c69fc0d80990c2ad8dd412 # We use a single RUN command to reduce the number of layers in the image. # NOTE: Keep the Terraform version in sync with minTerraformVersion and diff --git a/scripts/apikeyscopesgen/main.go b/scripts/apikeyscopesgen/main.go index 988c4cb2f0e10..b2c74c72c0adf 100644 --- a/scripts/apikeyscopesgen/main.go +++ b/scripts/apikeyscopesgen/main.go @@ -25,8 +25,8 @@ func main() { } func generate() ([]byte, error) { - names := rbac.ExternalScopeNames() - slices.Sort(names) + allNames := collectAllScopeNames() + publicNames := rbac.ExternalScopeNames() var b bytes.Buffer if _, err := b.WriteString("// Code generated by scripts/apikeyscopesgen. DO NOT EDIT.\n"); err != nil { @@ -61,13 +61,9 @@ func generate() ([]byte, error) { if _, err := b.WriteString("\tAPIKeyScopeApplicationConnect APIKeyScope = \"application_connect\"\n"); err != nil { return nil, err } - for _, n := range names { - res, act := splitRA(n) - if act == policy.WildcardSymbol { - act = "All" - } - constName := fmt.Sprintf("APIKeyScope%s%s", pascal(res), pascal(act)) - if _, err := fmt.Fprintf(&b, "\t%s APIKeyScope = \"%s\"\n", constName, n); err != nil { + for _, name := range allNames { + constName := constNameForScope(name) + if _, err := fmt.Fprintf(&b, "\t%s APIKeyScope = \"%s\"\n", constName, name); err != nil { return nil, err } } @@ -82,12 +78,8 @@ func generate() ([]byte, error) { if _, err := b.WriteString("var PublicAPIKeyScopes = []APIKeyScope{\n"); err != nil { return nil, err } - for _, n := range names { - res, act := splitRA(n) - if act == policy.WildcardSymbol { - act = "All" - } - constName := fmt.Sprintf("APIKeyScope%s%s", pascal(res), pascal(act)) + for _, name := range publicNames { + constName := constNameForScope(name) if _, err := fmt.Fprintf(&b, "\t%s,\n", constName); err != nil { return nil, err } @@ 
-99,6 +91,54 @@ func generate() ([]byte, error) { return format.Source(b.Bytes()) } +func collectAllScopeNames() []string { + seen := make(map[string]struct{}) + var names []string + add := func(name string) { + if name == "" { + return + } + if _, ok := seen[name]; ok { + return + } + seen[name] = struct{}{} + names = append(names, name) + } + + for resource, def := range policy.RBACPermissions { + if resource == policy.WildcardSymbol { + continue + } + add(resource + ":" + policy.WildcardSymbol) + for action := range def.Actions { + add(resource + ":" + string(action)) + } + } + + for _, name := range rbac.CompositeScopeNames() { + add(name) + } + + for _, name := range rbac.BuiltinScopeNames() { + s := string(name) + if !strings.Contains(s, ":") { + continue + } + add(s) + } + + slices.Sort(names) + return names +} + +func constNameForScope(name string) string { + resource, action := splitRA(name) + if action == policy.WildcardSymbol { + action = "All" + } + return fmt.Sprintf("APIKeyScope%s%s", pascal(resource), pascal(action)) +} + func splitRA(name string) (resource string, action string) { parts := strings.SplitN(name, ":", 2) if len(parts) != 2 { diff --git a/scripts/apitypings/main.go b/scripts/apitypings/main.go index 1a2bab59a662b..65483a34bc9a8 100644 --- a/scripts/apitypings/main.go +++ b/scripts/apitypings/main.go @@ -17,6 +17,9 @@ func main() { log.Fatalf("new convert: %v", err) } + // Include golang comments to typescript output. + gen.PreserveComments() + generateDirectories := map[string]string{ "github.com/coder/coder/v2/codersdk": "", "github.com/coder/coder/v2/coderd/healthcheck/health": "Health", @@ -56,7 +59,7 @@ func main() { log.Fatalf("to typescript: %v", err) } - TsMutations(ts) + TSMutations(ts) output, err := ts.Serialize() if err != nil { @@ -65,7 +68,7 @@ func main() { _, _ = fmt.Println(output) } -func TsMutations(ts *guts.Typescript) { +func TSMutations(ts *guts.Typescript) { ts.ApplyMutations( // TODO: Remove 'NotNullMaps'. 
This is hiding potential bugs // of referencing maps that are actually null. diff --git a/scripts/apitypings/main_test.go b/scripts/apitypings/main_test.go index 1bb89c7ba5423..77b304e21518b 100644 --- a/scripts/apitypings/main_test.go +++ b/scripts/apitypings/main_test.go @@ -42,13 +42,20 @@ func TestGeneration(t *testing.T) { err = gen.IncludeGenerate("./" + dir) require.NoError(t, err) + // Include minimal references needed for tests that use external types. + for pkg, prefix := range map[string]string{ + "github.com/google/uuid": "", + } { + require.NoError(t, gen.IncludeReference(pkg, prefix)) + } + err = TypeMappings(gen) require.NoError(t, err) ts, err := gen.ToTypescript() require.NoError(t, err) - TsMutations(ts) + TSMutations(ts) output, err := ts.Serialize() require.NoError(t, err) diff --git a/scripts/check-scopes/main.go b/scripts/check-scopes/main.go index e79be506ae804..56ba0d4657e31 100644 --- a/scripts/check-scopes/main.go +++ b/scripts/check-scopes/main.go @@ -58,23 +58,37 @@ func main() { os.Exit(1) } -// expectedFromRBAC returns the set of : pairs derived from RBACPermissions. +// expectedFromRBAC returns the set of scope names the DB enum must support. func expectedFromRBAC() map[string]struct{} { want := make(map[string]struct{}) - // Low-level : + add := func(name string) { + if name == "" { + return + } + want[name] = struct{}{} + } + // Low-level : and synthesized :* wildcards for resource, def := range policy.RBACPermissions { if resource == policy.WildcardSymbol { // Ignore wildcard entry; it has no concrete : pairs. 
continue } + add(resource + ":" + policy.WildcardSymbol) for action := range def.Actions { - key := resource + ":" + string(action) - want[key] = struct{}{} + add(resource + ":" + string(action)) } } // Composite coder:* names for _, n := range rbac.CompositeScopeNames() { - want[n] = struct{}{} + add(n) + } + // Built-in coder-prefixed scopes such as coder:all + for _, n := range rbac.BuiltinScopeNames() { + s := string(n) + if !strings.Contains(s, ":") { + continue + } + add(s) } return want } diff --git a/scripts/generate_api_key_scope_enum/main.go b/scripts/generate_api_key_scope_enum/main.go deleted file mode 100644 index 130dd865334b8..0000000000000 --- a/scripts/generate_api_key_scope_enum/main.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "fmt" - "sort" - - "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/coderd/rbac/policy" -) - -func main() { - seen := map[string]struct{}{} - var vals []string - for resource, def := range policy.RBACPermissions { - if resource == policy.WildcardSymbol { - continue - } - for action := range def.Actions { - vals = append(vals, fmt.Sprintf("%s:%s", resource, action)) - } - } - // Include composite coder:* scopes as first-class enum values - vals = append(vals, rbac.CompositeScopeNames()...) 
- sort.Strings(vals) - for _, v := range vals { - if _, ok := seen[v]; ok { - continue - } - seen[v] = struct{}{} - _, _ = fmt.Printf("ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS '%s';\n", v) - } -} diff --git a/scripts/image_tag.sh b/scripts/image_tag.sh index 68dfbcebf99cb..8767a22cb199c 100755 --- a/scripts/image_tag.sh +++ b/scripts/image_tag.sh @@ -51,10 +51,7 @@ fi image="${CODER_IMAGE_BASE:-ghcr.io/coder/coder}" -# use CODER_IMAGE_TAG_PREFIX if set as a prefix for the tag -tag_prefix="${CODER_IMAGE_TAG_PREFIX:-}" - -tag="${tag_prefix:+$tag_prefix-}v$version" +tag="v$version" if [[ "$version" == "latest" ]]; then tag="latest" diff --git a/scripts/metricsdocgen/metrics b/scripts/metricsdocgen/metrics index 20e24d9caa136..ba9a991fc2a06 100644 --- a/scripts/metricsdocgen/metrics +++ b/scripts/metricsdocgen/metrics @@ -707,6 +707,22 @@ coderd_provisionerd_job_timings_seconds_count{provisioner="terraform",status="su # HELP coderd_provisionerd_jobs_current The number of currently running provisioner jobs. # TYPE coderd_provisionerd_jobs_current gauge coderd_provisionerd_jobs_current{provisioner="terraform"} 0 +# HELP coderd_provisionerd_num_daemons The number of provisioner daemons. +# TYPE coderd_provisionerd_num_daemons gauge +coderd_provisionerd_num_daemons 3 +# HELP coderd_provisionerd_workspace_build_timings_seconds The time taken for a workspace to build. 
+# TYPE coderd_provisionerd_workspace_build_timings_seconds histogram +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="1"} 0 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="10"} 0 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="30"} 0 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="60"} 1 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="300"} 1 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="600"} 1 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="1800"} 1 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="3600"} 1 +coderd_provisionerd_workspace_build_timings_seconds_bucket{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START",le="+Inf"} 1 +coderd_provisionerd_workspace_build_timings_seconds_sum{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START"} 31.042659852 +coderd_provisionerd_workspace_build_timings_seconds_count{status="success",template_name="docker",template_version="gallant_wright0",workspace_transition="START"} 1 # HELP 
coderd_workspace_latest_build_status The current workspace statuses by template, transition, and owner. # TYPE coderd_workspace_latest_build_status gauge coderd_workspace_latest_build_status{status="failed",template_name="docker",template_version="sweet_gould9",workspace_owner="admin",workspace_transition="stop"} 1 @@ -729,8 +745,8 @@ coderd_workspace_creation_duration_seconds_bucket{organization_name="{organizati coderd_workspace_creation_duration_seconds_bucket{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",type="prebuild",le="1800"} 1 coderd_workspace_creation_duration_seconds_bucket{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",type="prebuild",le="3600"} 1 coderd_workspace_creation_duration_seconds_bucket{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",type="prebuild",le="+Inf"} 1 -coderd_workspace_creation_duration_seconds_sum{organization_name="{organization}",preset_name="Falkenstein",template_name="template-example",type="prebuild"} 4.406214 -coderd_workspace_creation_duration_seconds_count{organization_name="{organization}",preset_name="Falkenstein",template_name="template-example",type="prebuild"} 1 +coderd_workspace_creation_duration_seconds_sum{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",type="prebuild"} 4.406214 +coderd_workspace_creation_duration_seconds_count{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",type="prebuild"} 1 # HELP coderd_prebuilt_workspace_claim_duration_seconds Time to claim a prebuilt workspace by organization, template, and preset. 
# TYPE coderd_prebuilt_workspace_claim_duration_seconds histogram coderd_prebuilt_workspace_claim_duration_seconds_bucket{organization_name="{organization}",preset_name="Falkenstein",template_name="docker",le="1"} 0 diff --git a/scripts/rules.go b/scripts/rules.go index 7fd3c0ca445c9..0a7c75925d1f9 100644 --- a/scripts/rules.go +++ b/scripts/rules.go @@ -133,7 +133,10 @@ func databaseImport(m dsl.Matcher) { m.Import("github.com/coder/coder/v2/coderd/database") m.Match("database.$_"). Report("Do not import any database types into codersdk"). - Where(m.File().PkgPath.Matches("github.com/coder/coder/v2/codersdk")) + Where( + m.File().PkgPath.Matches("github.com/coder/coder/v2/codersdk") && + !m.File().Name.Matches(`_test\.go$`), + ) } // publishInTransaction detects calls to Publish inside database transactions diff --git a/scripts/should_deploy.sh b/scripts/should_deploy.sh new file mode 100755 index 0000000000000..3122192956b8d --- /dev/null +++ b/scripts/should_deploy.sh @@ -0,0 +1,68 @@ +#!/usr/bin/env bash + +# This script determines if a commit in either the main branch or a +# `release/x.y` branch should be deployed to dogfood. +# +# To avoid masking unrelated failures, this script will return 0 in either case, +# and will print `DEPLOY` or `NOOP` to stdout. + +set -euo pipefail +# shellcheck source=scripts/lib.sh +source "$(dirname "${BASH_SOURCE[0]}")/lib.sh" +cdroot + +deploy_branch=main + +# Determine the current branch name and check that it is one of the supported +# branch names. +branch_name=$(git branch --show-current) +if [[ "$branch_name" != "main" && ! 
"$branch_name" =~ ^release/[0-9]+\.[0-9]+$ ]]; then + error "Current branch '$branch_name' is not a supported branch name for dogfood, must be 'main' or 'release/x.y'" +fi +log "Current branch '$branch_name'" + +# Determine the remote name +remote=$(git remote -v | grep coder/coder | awk '{print $1}' | head -n1) +if [[ -z "${remote}" ]]; then + error "Could not find remote for coder/coder" +fi +log "Using remote '$remote'" + +# Step 1: List all release branches and sort them by major/minor so we can find +# the latest release branch. +release_branches=$( + git branch -r --format='%(refname:short)' | + grep -E "${remote}/release/[0-9]+\.[0-9]+$" | + sed "s|${remote}/||" | + sort -V +) + +# As a sanity check, release/2.26 should exist. +if ! echo "$release_branches" | grep "release/2.26" >/dev/null; then + error "Could not find existing release branches. Did you run 'git fetch -ap ${remote}'?" +fi + +latest_release_branch=$(echo "$release_branches" | tail -n 1) +latest_release_branch_version=${latest_release_branch#release/} +log "Latest release branch: $latest_release_branch" +log "Latest release branch version: $latest_release_branch_version" + +# Step 2: check if a matching tag `v.0` exists. If it does not, we will +# use the release branch as the deploy branch. +if ! git rev-parse "refs/tags/v${latest_release_branch_version}.0" >/dev/null 2>&1; then + log "Tag 'v${latest_release_branch_version}.0' does not exist, using release branch as deploy branch" + deploy_branch=$latest_release_branch +else + log "Matching tag 'v${latest_release_branch_version}.0' exists, using main as deploy branch" +fi +log "Deploy branch: $deploy_branch" + +# Finally, check if the current branch is the deploy branch. 
+log +if [[ "$branch_name" != "$deploy_branch" ]]; then + log "VERDICT: DO NOT DEPLOY" + echo "NOOP" # stdout +else + log "VERDICT: DEPLOY" + echo "DEPLOY" # stdout +fi diff --git a/site/biome.jsonc b/site/biome.jsonc index 4c9cb18aa482b..be24c66617a6e 100644 --- a/site/biome.jsonc +++ b/site/biome.jsonc @@ -3,5 +3,5 @@ "files": { "includes": ["!e2e/**/*Generated.ts"] }, - "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json" + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json" } diff --git a/site/e2e/api.ts b/site/e2e/api.ts index 342b08cb28914..92469aa2f177e 100644 --- a/site/e2e/api.ts +++ b/site/e2e/api.ts @@ -199,6 +199,7 @@ export const createCustomRole = async ( }, ], user_permissions: [], + organization_member_permissions: [], }); return role; }; @@ -213,7 +214,7 @@ export async function verifyConfigFlagBoolean( const value = opt.value ? "Enabled" : "Disabled"; const configOption = page.locator( - `div.options-table .option-${flag} .${type}`, + `table.options-table .option-${flag} .${type}`, ); await expect(configOption).toHaveText(value); } @@ -225,7 +226,7 @@ export async function verifyConfigFlagNumber( ) { const opt = findConfigOption(config, flag); const configOption = page.locator( - `div.options-table .option-${flag} .option-value-number`, + `table.options-table .option-${flag} .option-value-number`, ); await expect(configOption).toHaveText(String(opt.value)); } @@ -238,7 +239,7 @@ export async function verifyConfigFlagString( const opt = findConfigOption(config, flag); const configOption = page.locator( - `div.options-table .option-${flag} .option-value-string`, + `table.options-table .option-${flag} .option-value-string`, ); // biome-ignore lint/suspicious/noExplicitAny: opt.value is any await expect(configOption).toHaveText(opt.value as any); @@ -251,7 +252,7 @@ export async function verifyConfigFlagArray( ) { const opt = findConfigOption(config, flag); const configOption = page.locator( - `div.options-table .option-${flag} 
.option-array`, + `table.options-table .option-${flag} .option-array`, ); // Verify array of options with simple dots @@ -268,7 +269,7 @@ export async function verifyConfigFlagEntries( ) { const opt = findConfigOption(config, flag); const configOption = page.locator( - `div.options-table .option-${flag} .option-array`, + `table.options-table .option-${flag} .option-array`, ); // Verify array of options with green marks. @@ -296,7 +297,7 @@ export async function verifyConfigFlagDuration( ); } const configOption = page.locator( - `div.options-table .option-${flag} .option-value-string`, + `table.options-table .option-${flag} .option-value-string`, ); await expect(configOption).toHaveText(humanDuration(opt.value / 1e6)); } diff --git a/site/e2e/google/protobuf/timestampGenerated.ts b/site/e2e/google/protobuf/timestampGenerated.ts index 6dd4b08e96087..6cddbb0b0b781 100644 --- a/site/e2e/google/protobuf/timestampGenerated.ts +++ b/site/e2e/google/protobuf/timestampGenerated.ts @@ -1,3 +1,9 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v1.181.2 +// protoc v4.23.4 +// source: google/protobuf/timestamp.proto + /* eslint-disable */ import * as _m0 from "protobufjs/minimal"; diff --git a/site/e2e/provisionerGenerated.ts b/site/e2e/provisionerGenerated.ts index 82fd25db9258d..c5a7d16274a1c 100644 --- a/site/e2e/provisionerGenerated.ts +++ b/site/e2e/provisionerGenerated.ts @@ -1,3 +1,9 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v1.181.2 +// protoc v4.23.4 +// source: provisioner.proto + /* eslint-disable */ import * as _m0 from "protobufjs/minimal"; import { Observable } from "rxjs"; @@ -369,7 +375,8 @@ export interface AITaskSidebarApp { export interface AITask { id: string; - sidebarApp: AITaskSidebarApp | undefined; + sidebarApp?: AITaskSidebarApp | undefined; + appId: string; } /** Metadata is information about a workspace used in the execution of a build */ @@ -396,6 +403,8 @@ export interface Metadata { /** Indicates that a prebuilt workspace is being built. */ prebuiltWorkspaceBuildStage: PrebuiltWorkspaceBuildStage; runningAgentAuthTokens: RunningAgentAuthToken[]; + taskId: string; + taskPrompt: string; } /** Config represents execution configuration shared by all subsequent requests in the Session */ @@ -560,10 +569,10 @@ export const TemplateVariable = { if (message.defaultValue !== "") { writer.uint32(34).string(message.defaultValue); } - if (message.required === true) { + if (message.required !== false) { writer.uint32(40).bool(message.required); } - if (message.sensitive === true) { + if (message.sensitive !== false) { writer.uint32(48).bool(message.sensitive); } return writer; @@ -599,7 +608,7 @@ export const RichParameter = { if (message.type !== "") { writer.uint32(26).string(message.type); } - if (message.mutable === true) { + if (message.mutable !== false) { writer.uint32(32).bool(message.mutable); } if (message.defaultValue !== "") { @@ -626,7 +635,7 @@ export const RichParameter = { if (message.validationMonotonic !== "") { writer.uint32(98).string(message.validationMonotonic); } - if (message.required === true) { + if (message.required !== false) { writer.uint32(104).bool(message.required); } if (message.displayName !== "") { @@ -635,7 +644,7 @@ export const RichParameter = { if (message.order !== 0) { writer.uint32(128).int32(message.order); } - if (message.ephemeral === true) { + if (message.ephemeral !== false) { 
writer.uint32(136).bool(message.ephemeral); } if (message.formType !== 0) { @@ -716,7 +725,7 @@ export const Preset = { if (message.prebuild !== undefined) { Prebuild.encode(message.prebuild, writer.uint32(26).fork()).ldelim(); } - if (message.default === true) { + if (message.default !== false) { writer.uint32(32).bool(message.default); } if (message.description !== "") { @@ -761,7 +770,7 @@ export const VariableValue = { if (message.value !== "") { writer.uint32(18).string(message.value); } - if (message.sensitive === true) { + if (message.sensitive !== false) { writer.uint32(24).bool(message.sensitive); } return writer; @@ -794,7 +803,7 @@ export const ExternalAuthProviderResource = { if (message.id !== "") { writer.uint32(10).string(message.id); } - if (message.optional === true) { + if (message.optional !== false) { writer.uint32(16).bool(message.optional); } return writer; @@ -929,7 +938,7 @@ export const ResourcesMonitoring = { export const MemoryResourceMonitor = { encode(message: MemoryResourceMonitor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.enabled === true) { + if (message.enabled !== false) { writer.uint32(8).bool(message.enabled); } if (message.threshold !== 0) { @@ -944,7 +953,7 @@ export const VolumeResourceMonitor = { if (message.path !== "") { writer.uint32(10).string(message.path); } - if (message.enabled === true) { + if (message.enabled !== false) { writer.uint32(16).bool(message.enabled); } if (message.threshold !== 0) { @@ -956,19 +965,19 @@ export const VolumeResourceMonitor = { export const DisplayApps = { encode(message: DisplayApps, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.vscode === true) { + if (message.vscode !== false) { writer.uint32(8).bool(message.vscode); } - if (message.vscodeInsiders === true) { + if (message.vscodeInsiders !== false) { writer.uint32(16).bool(message.vscodeInsiders); } - if (message.webTerminal === true) { + if (message.webTerminal !== false) { 
writer.uint32(24).bool(message.webTerminal); } - if (message.sshHelper === true) { + if (message.sshHelper !== false) { writer.uint32(32).bool(message.sshHelper); } - if (message.portForwardingHelper === true) { + if (message.portForwardingHelper !== false) { writer.uint32(40).bool(message.portForwardingHelper); } return writer; @@ -1001,13 +1010,13 @@ export const Script = { if (message.cron !== "") { writer.uint32(34).string(message.cron); } - if (message.startBlocksLogin === true) { + if (message.startBlocksLogin !== false) { writer.uint32(40).bool(message.startBlocksLogin); } - if (message.runOnStart === true) { + if (message.runOnStart !== false) { writer.uint32(48).bool(message.runOnStart); } - if (message.runOnStop === true) { + if (message.runOnStop !== false) { writer.uint32(56).bool(message.runOnStop); } if (message.timeoutSeconds !== 0) { @@ -1052,7 +1061,7 @@ export const App = { if (message.icon !== "") { writer.uint32(42).string(message.icon); } - if (message.subdomain === true) { + if (message.subdomain !== false) { writer.uint32(48).bool(message.subdomain); } if (message.healthcheck !== undefined) { @@ -1061,13 +1070,13 @@ export const App = { if (message.sharingLevel !== 0) { writer.uint32(64).int32(message.sharingLevel); } - if (message.external === true) { + if (message.external !== false) { writer.uint32(72).bool(message.external); } if (message.order !== 0) { writer.uint32(80).int64(message.order); } - if (message.hidden === true) { + if (message.hidden !== false) { writer.uint32(88).bool(message.hidden); } if (message.openIn !== 0) { @@ -1115,7 +1124,7 @@ export const Resource = { for (const v of message.metadata) { Resource_Metadata.encode(v!, writer.uint32(34).fork()).ldelim(); } - if (message.hide === true) { + if (message.hide !== false) { writer.uint32(40).bool(message.hide); } if (message.icon !== "") { @@ -1142,10 +1151,10 @@ export const Resource_Metadata = { if (message.value !== "") { writer.uint32(18).string(message.value); } - if 
(message.sensitive === true) { + if (message.sensitive !== false) { writer.uint32(24).bool(message.sensitive); } - if (message.isNull === true) { + if (message.isNull !== false) { writer.uint32(32).bool(message.isNull); } return writer; @@ -1211,6 +1220,9 @@ export const AITask = { if (message.sidebarApp !== undefined) { AITaskSidebarApp.encode(message.sidebarApp, writer.uint32(18).fork()).ldelim(); } + if (message.appId !== "") { + writer.uint32(26).string(message.appId); + } return writer; }, }; @@ -1280,6 +1292,12 @@ export const Metadata = { for (const v of message.runningAgentAuthTokens) { RunningAgentAuthToken.encode(v!, writer.uint32(170).fork()).ldelim(); } + if (message.taskId !== "") { + writer.uint32(178).string(message.taskId); + } + if (message.taskPrompt !== "") { + writer.uint32(186).string(message.taskPrompt); + } return writer; }, }; @@ -1352,7 +1370,7 @@ export const PlanRequest = { for (const v of message.previousParameterValues) { RichParameterValue.encode(v!, writer.uint32(42).fork()).ldelim(); } - if (message.omitModuleFiles === true) { + if (message.omitModuleFiles !== false) { writer.uint32(48).bool(message.omitModuleFiles); } return writer; @@ -1394,13 +1412,13 @@ export const PlanComplete = { if (message.moduleFilesHash.length !== 0) { writer.uint32(98).bytes(message.moduleFilesHash); } - if (message.hasAiTasks === true) { + if (message.hasAiTasks !== false) { writer.uint32(104).bool(message.hasAiTasks); } for (const v of message.aiTasks) { AITask.encode(v!, writer.uint32(114).fork()).ldelim(); } - if (message.hasExternalAgents === true) { + if (message.hasExternalAgents !== false) { writer.uint32(120).bool(message.hasExternalAgents); } return writer; @@ -1571,7 +1589,7 @@ export interface Provisioner { } function toTimestamp(date: Date): Timestamp { - const seconds = date.getTime() / 1_000; + const seconds = Math.trunc(date.getTime() / 1_000); const nanos = (date.getTime() % 1_000) * 1_000_000; return { seconds, nanos }; } diff --git 
a/site/e2e/tests/deployment/appearance.spec.ts b/site/e2e/tests/deployment/appearance.spec.ts index c2b129c632cb3..83b743814e70e 100644 --- a/site/e2e/tests/deployment/appearance.spec.ts +++ b/site/e2e/tests/deployment/appearance.spec.ts @@ -83,6 +83,7 @@ test("set service banner", async ({ page }) => { await page.goto("/workspaces", { waitUntil: "domcontentloaded" }); await expectUrl(page).toHavePathName("/workspaces"); - const bar = page.locator("div.service-banner", { hasText: message }); - await expect(bar).toBeVisible(); + const banner = page.getByTestId("service-banner"); + await expect(banner).toBeVisible(); + await expect(banner).toHaveText(message); }); diff --git a/site/e2e/tests/deployment/general.spec.ts b/site/e2e/tests/deployment/general.spec.ts index 40c8342e89929..a1dca0a820327 100644 --- a/site/e2e/tests/deployment/general.spec.ts +++ b/site/e2e/tests/deployment/general.spec.ts @@ -19,7 +19,7 @@ test("experiments", async ({ page }) => { await page.goto("/deployment/overview", { waitUntil: "domcontentloaded" }); const experimentsLocator = page.locator( - "div.options-table tr.option-experiments ul.option-array", + "table.options-table tr.option-experiments ul.option-array", ); await expect(experimentsLocator).toBeVisible(); diff --git a/site/e2e/tests/deployment/security.spec.ts b/site/e2e/tests/deployment/security.spec.ts index 2d8bb0032691b..3f5e9a9b5c38f 100644 --- a/site/e2e/tests/deployment/security.spec.ts +++ b/site/e2e/tests/deployment/security.spec.ts @@ -47,7 +47,7 @@ async function verifyStrictTransportSecurity( } const configOption = page.locator( - `div.options-table .option-${flag} .option-value-string`, + `table.options-table .option-${flag} .option-value-string`, ); await expect(configOption).toHaveText("Disabled"); } diff --git a/site/package.json b/site/package.json index 7a52d5545eb8d..7b74cfae58254 100644 --- a/site/package.json +++ b/site/package.json @@ -43,11 +43,10 @@ "@emotion/styled": "11.14.1", 
"@fontsource-variable/inter": "5.1.1", "@fontsource/fira-code": "5.2.7", - "@fontsource/ibm-plex-mono": "5.1.1", + "@fontsource/ibm-plex-mono": "5.2.7", "@fontsource/jetbrains-mono": "5.2.5", "@fontsource/source-code-pro": "5.2.5", "@monaco-editor/react": "4.7.0", - "@mui/icons-material": "5.18.0", "@mui/material": "5.18.0", "@mui/system": "5.18.0", "@mui/utils": "5.17.1", @@ -55,7 +54,7 @@ "@radix-ui/react-avatar": "1.1.2", "@radix-ui/react-checkbox": "1.1.4", "@radix-ui/react-collapsible": "1.1.2", - "@radix-ui/react-dialog": "1.1.15", + "@radix-ui/react-dialog": "1.1.4", "@radix-ui/react-dropdown-menu": "2.1.4", "@radix-ui/react-label": "2.1.0", "@radix-ui/react-popover": "1.1.5", @@ -76,14 +75,14 @@ "@xterm/xterm": "5.5.0", "ansi-to-html": "0.7.2", "axios": "1.12.0", - "chroma-js": "2.4.2", + "chroma-js": "2.6.0", "class-variance-authority": "0.7.1", "clsx": "2.1.1", "cmdk": "1.0.4", "color-convert": "2.0.1", "cron-parser": "4.9.0", "cronstrue": "2.50.0", - "dayjs": "1.11.13", + "dayjs": "1.11.18", "emoji-mart": "5.6.0", "file-saver": "2.0.5", "formik": "2.4.6", @@ -91,30 +90,30 @@ "humanize-duration": "3.32.2", "jszip": "3.10.1", "lodash": "4.17.21", - "lucide-react": "0.474.0", - "monaco-editor": "0.52.2", + "lucide-react": "0.545.0", + "monaco-editor": "0.53.0", "pretty-bytes": "6.1.1", "react": "19.1.1", "react-color": "2.19.3", - "react-confetti": "6.2.2", + "react-confetti": "6.4.0", "react-date-range": "1.4.0", "react-dom": "19.1.1", "react-markdown": "9.1.0", "react-query": "npm:@tanstack/react-query@5.77.0", - "react-resizable-panels": "3.0.3", + "react-resizable-panels": "3.0.6", "react-router": "7.8.0", "react-syntax-highlighter": "15.6.1", "react-textarea-autosize": "8.5.9", - "react-virtualized-auto-sizer": "1.0.24", + "react-virtualized-auto-sizer": "1.0.26", "react-window": "1.8.11", "recharts": "2.15.0", - "remark-gfm": "4.0.0", + "remark-gfm": "4.0.1", "resize-observer-polyfill": "1.5.1", "semver": "7.7.2", "tailwind-merge": "2.6.0", 
"tailwindcss-animate": "1.0.7", "tzdata": "1.0.46", - "ua-parser-js": "1.0.40", + "ua-parser-js": "1.0.41", "ufuzzy": "npm:@leeoniya/ufuzzy@1.0.10", "undici": "6.21.3", "unique-names-generator": "4.7.1", @@ -123,10 +122,10 @@ "yup": "1.6.1" }, "devDependencies": { - "@biomejs/biome": "2.2.0", + "@biomejs/biome": "2.2.4", "@chromatic-com/storybook": "4.1.0", "@octokit/types": "12.3.0", - "@playwright/test": "1.55.1", + "@playwright/test": "1.50.1", "@storybook/addon-docs": "9.1.2", "@storybook/addon-links": "9.1.2", "@storybook/addon-themes": "9.1.2", @@ -143,44 +142,44 @@ "@types/file-saver": "2.0.7", "@types/humanize-duration": "3.27.4", "@types/jest": "29.5.14", - "@types/lodash": "4.17.15", + "@types/lodash": "4.17.20", "@types/node": "20.17.16", - "@types/react": "19.1.13", + "@types/react": "19.1.17", "@types/react-color": "3.0.13", "@types/react-date-range": "1.4.4", - "@types/react-dom": "19.1.9", + "@types/react-dom": "19.1.11", "@types/react-syntax-highlighter": "15.5.13", - "@types/react-virtualized-auto-sizer": "1.0.4", + "@types/react-virtualized-auto-sizer": "1.0.8", "@types/react-window": "1.8.8", "@types/semver": "7.7.1", "@types/ssh2": "1.15.5", "@types/ua-parser-js": "0.7.36", "@types/uuid": "9.0.2", "@vitejs/plugin-react": "5.0.4", - "autoprefixer": "10.4.20", + "autoprefixer": "10.4.21", "chromatic": "11.29.0", "dpdm": "3.14.0", "express": "4.21.2", "jest": "29.7.0", "jest-canvas-mock": "2.5.2", "jest-environment-jsdom": "29.5.0", - "jest-fixed-jsdom": "0.0.9", + "jest-fixed-jsdom": "0.0.10", "jest-location-mock": "2.0.0", "jest-websocket-mock": "2.5.0", "jest_workaround": "0.1.14", "knip": "5.64.1", - "msw": "2.11.3", - "postcss": "8.5.1", + "msw": "2.4.8", + "postcss": "8.5.6", "protobufjs": "7.4.0", "rollup-plugin-visualizer": "5.14.0", "rxjs": "7.8.1", "ssh2": "1.17.0", "storybook": "9.1.2", "storybook-addon-remix-react-router": "5.0.0", - "tailwindcss": "3.4.17", - "ts-proto": "1.164.0", + "tailwindcss": "3.4.18", + "ts-proto": "1.181.2", 
"typescript": "5.6.3", - "vite": "7.1.7", + "vite": "7.1.11", "vite-plugin-checker": "0.11.0" }, "browserslist": [ diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index c77df41bc3ede..c70eff146bcee 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -33,10 +33,10 @@ importers: version: 11.13.5 '@emotion/react': specifier: 11.14.0 - version: 11.14.0(@types/react@19.1.13)(react@19.1.1) + version: 11.14.0(@types/react@19.1.17)(react@19.1.1) '@emotion/styled': specifier: 11.14.1 - version: 11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) + version: 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) '@fontsource-variable/inter': specifier: 5.1.1 version: 5.1.1 @@ -44,8 +44,8 @@ importers: specifier: 5.2.7 version: 5.2.7 '@fontsource/ibm-plex-mono': - specifier: 5.1.1 - version: 5.1.1 + specifier: 5.2.7 + version: 5.2.7 '@fontsource/jetbrains-mono': specifier: 5.2.5 version: 5.2.5 @@ -54,67 +54,64 @@ importers: version: 5.2.5 '@monaco-editor/react': specifier: 4.7.0 - version: 4.7.0(monaco-editor@0.52.2)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@mui/icons-material': - specifier: 5.18.0 - version: 5.18.0(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) + version: 4.7.0(monaco-editor@0.53.0)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@mui/material': specifier: 5.18.0 - version: 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 
5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@mui/system': specifier: 5.18.0 - version: 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) + version: 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) '@mui/utils': specifier: 5.17.1 - version: 5.17.1(@types/react@19.1.13)(react@19.1.1) + version: 5.17.1(@types/react@19.1.17)(react@19.1.1) '@mui/x-tree-view': specifier: 7.29.10 - version: 7.29.10(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 
7.29.10(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-avatar': specifier: 1.1.2 - version: 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-checkbox': specifier: 1.1.4 - version: 1.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-collapsible': specifier: 1.1.2 - version: 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-dialog': - specifier: 1.1.15 - version: 1.1.15(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.4 + version: 
1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-dropdown-menu': specifier: 2.1.4 - version: 2.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-label': specifier: 2.1.0 - version: 2.1.0(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 2.1.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-popover': specifier: 1.1.5 - version: 1.1.5(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.5(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-radio-group': specifier: 1.2.3 - version: 1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-scroll-area': specifier: 1.2.3 - version: 1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-select': specifier: 2.2.6 - version: 2.2.6(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 2.2.6(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) 
'@radix-ui/react-separator': specifier: 1.1.7 - version: 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-slider': specifier: 1.2.2 - version: 1.2.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.2.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-slot': specifier: 1.2.3 - version: 1.2.3(@types/react@19.1.13)(react@19.1.1) + version: 1.2.3(@types/react@19.1.17)(react@19.1.1) '@radix-ui/react-switch': specifier: 1.1.1 - version: 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-tooltip': specifier: 1.1.7 - version: 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@tanstack/react-query-devtools': specifier: 5.77.0 version: 5.77.0(@tanstack/react-query@5.77.0(react@19.1.1))(react@19.1.1) @@ -143,8 +140,8 @@ importers: specifier: 1.12.0 version: 1.12.0 chroma-js: - specifier: 2.4.2 - version: 2.4.2 + specifier: 2.6.0 + version: 2.6.0 class-variance-authority: specifier: 0.7.1 version: 0.7.1 @@ -153,7 +150,7 @@ importers: version: 2.1.1 cmdk: specifier: 1.0.4 - version: 1.0.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 
1.0.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) color-convert: specifier: 2.0.1 version: 2.0.1 @@ -164,8 +161,8 @@ importers: specifier: 2.50.0 version: 2.50.0 dayjs: - specifier: 1.11.13 - version: 1.11.13 + specifier: 1.11.18 + version: 1.11.18 emoji-mart: specifier: 5.6.0 version: 5.6.0 @@ -188,11 +185,11 @@ importers: specifier: 4.17.21 version: 4.17.21 lucide-react: - specifier: 0.474.0 - version: 0.474.0(react@19.1.1) + specifier: 0.545.0 + version: 0.545.0(react@19.1.1) monaco-editor: - specifier: 0.52.2 - version: 0.52.2 + specifier: 0.53.0 + version: 0.53.0 pretty-bytes: specifier: 6.1.1 version: 6.1.1 @@ -203,8 +200,8 @@ importers: specifier: 2.19.3 version: 2.19.3(react@19.1.1) react-confetti: - specifier: 6.2.2 - version: 6.2.2(react@19.1.1) + specifier: 6.4.0 + version: 6.4.0(react@19.1.1) react-date-range: specifier: 1.4.0 version: 1.4.0(date-fns@2.30.0)(react@19.1.1) @@ -213,13 +210,13 @@ importers: version: 19.1.1(react@19.1.1) react-markdown: specifier: 9.1.0 - version: 9.1.0(@types/react@19.1.13)(react@19.1.1) + version: 9.1.0(@types/react@19.1.17)(react@19.1.1) react-query: specifier: npm:@tanstack/react-query@5.77.0 version: '@tanstack/react-query@5.77.0(react@19.1.1)' react-resizable-panels: - specifier: 3.0.3 - version: 3.0.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 3.0.6 + version: 3.0.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react-router: specifier: 7.8.0 version: 7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -228,10 +225,10 @@ importers: version: 15.6.1(react@19.1.1) react-textarea-autosize: specifier: 8.5.9 - version: 8.5.9(@types/react@19.1.13)(react@19.1.1) + version: 8.5.9(@types/react@19.1.17)(react@19.1.1) react-virtualized-auto-sizer: - specifier: 1.0.24 - version: 1.0.24(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.0.26 + version: 1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react-window: specifier: 
1.8.11 version: 1.8.11(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -239,8 +236,8 @@ importers: specifier: 2.15.0 version: 2.15.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) remark-gfm: - specifier: 4.0.0 - version: 4.0.0 + specifier: 4.0.1 + version: 4.0.1 resize-observer-polyfill: specifier: 1.5.1 version: 1.5.1 @@ -252,13 +249,13 @@ importers: version: 2.6.0 tailwindcss-animate: specifier: 1.0.7 - version: 1.0.7(tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3))) + version: 1.0.7(tailwindcss@3.4.18(yaml@2.7.0)) tzdata: specifier: 1.0.46 version: 1.0.46 ua-parser-js: - specifier: 1.0.40 - version: 1.0.40 + specifier: 1.0.41 + version: 1.0.41 ufuzzy: specifier: npm:@leeoniya/ufuzzy@1.0.10 version: '@leeoniya/ufuzzy@1.0.10' @@ -279,29 +276,29 @@ importers: version: 1.6.1 devDependencies: '@biomejs/biome': - specifier: 2.2.0 - version: 2.2.0 + specifier: 2.2.4 + version: 2.2.4 '@chromatic-com/storybook': specifier: 4.1.0 - version: 4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + version: 4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@octokit/types': specifier: 12.3.0 version: 12.3.0 '@playwright/test': - specifier: 1.55.1 - version: 1.55.1 + specifier: 1.50.1 + version: 1.50.1 '@storybook/addon-docs': specifier: 9.1.2 - version: 9.1.2(@types/react@19.1.13)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + version: 9.1.2(@types/react@19.1.17)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/addon-links': specifier: 9.1.2 - version: 
9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + version: 9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/addon-themes': specifier: 9.1.2 - version: 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + version: 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/react-vite': specifier: 9.1.2 - version: 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.3)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + version: 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.5)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) '@swc/core': specifier: 1.3.38 version: 1.3.38 @@ -310,13 +307,13 @@ importers: version: 0.2.37(@swc/core@1.3.38) '@tailwindcss/typography': specifier: 0.5.16 - version: 0.5.16(tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3))) + version: 0.5.16(tailwindcss@3.4.18(yaml@2.7.0)) '@testing-library/jest-dom': specifier: 6.6.3 version: 6.6.3 '@testing-library/react': specifier: 14.3.1 - version: 14.3.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 
14.3.1(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@testing-library/user-event': specifier: 14.6.1 version: 14.6.1(@testing-library/dom@10.4.0) @@ -339,29 +336,29 @@ importers: specifier: 29.5.14 version: 29.5.14 '@types/lodash': - specifier: 4.17.15 - version: 4.17.15 + specifier: 4.17.20 + version: 4.17.20 '@types/node': specifier: 20.17.16 version: 20.17.16 '@types/react': - specifier: 19.1.13 - version: 19.1.13 + specifier: 19.1.17 + version: 19.1.17 '@types/react-color': specifier: 3.0.13 - version: 3.0.13(@types/react@19.1.13) + version: 3.0.13(@types/react@19.1.17) '@types/react-date-range': specifier: 1.4.4 version: 1.4.4 '@types/react-dom': - specifier: 19.1.9 - version: 19.1.9(@types/react@19.1.13) + specifier: 19.1.11 + version: 19.1.11(@types/react@19.1.17) '@types/react-syntax-highlighter': specifier: 15.5.13 version: 15.5.13 '@types/react-virtualized-auto-sizer': - specifier: 1.0.4 - version: 1.0.4 + specifier: 1.0.8 + version: 1.0.8(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@types/react-window': specifier: 1.8.8 version: 1.8.8 @@ -379,10 +376,10 @@ importers: version: 9.0.2 '@vitejs/plugin-react': specifier: 5.0.4 - version: 5.0.4(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + version: 5.0.4(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) autoprefixer: - specifier: 10.4.20 - version: 10.4.20(postcss@8.5.1) + specifier: 10.4.21 + version: 10.4.21(postcss@8.5.6) chromatic: specifier: 11.29.0 version: 11.29.0 @@ -402,8 +399,8 @@ importers: specifier: 29.5.0 version: 29.5.0 jest-fixed-jsdom: - specifier: 0.0.9 - version: 0.0.9(jest-environment-jsdom@29.5.0) + specifier: 0.0.10 + version: 0.0.10(jest-environment-jsdom@29.5.0) jest-location-mock: specifier: 2.0.0 version: 2.0.0 @@ -417,17 +414,17 @@ importers: specifier: 5.64.1 version: 5.64.1(@types/node@20.17.16)(typescript@5.6.3) msw: - specifier: 2.11.3 - version: 2.11.3(@types/node@20.17.16)(typescript@5.6.3) + specifier: 2.4.8 + version: 
2.4.8(typescript@5.6.3) postcss: - specifier: 8.5.1 - version: 8.5.1 + specifier: 8.5.6 + version: 8.5.6 protobufjs: specifier: 7.4.0 version: 7.4.0 rollup-plugin-visualizer: specifier: 5.14.0 - version: 5.14.0(rollup@4.52.3) + version: 5.14.0(rollup@4.52.5) rxjs: specifier: 7.8.1 version: 7.8.1 @@ -436,25 +433,25 @@ importers: version: 1.17.0 storybook: specifier: 9.1.2 - version: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + version: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) storybook-addon-remix-react-router: specifier: 5.0.0 - version: 5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + version: 5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) tailwindcss: - specifier: 3.4.17 - version: 3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) + specifier: 3.4.18 + version: 3.4.18(yaml@2.7.0) ts-proto: - specifier: 1.164.0 - version: 1.164.0 + specifier: 1.181.2 + version: 1.181.2 typescript: specifier: 5.6.3 version: 5.6.3 vite: - specifier: 7.1.7 - version: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + specifier: 7.1.11 + version: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) vite-plugin-checker: specifier: 0.11.0 - version: 
0.11.0(@biomejs/biome@2.2.0)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + version: 0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) packages: @@ -658,55 +655,55 @@ packages: '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==, tarball: https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz} - '@biomejs/biome@2.2.0': - resolution: {integrity: sha512-3On3RSYLsX+n9KnoSgfoYlckYBoU6VRM22cw1gB4Y0OuUVSYd/O/2saOJMrA4HFfA1Ff0eacOvMN1yAAvHtzIw==, tarball: https://registry.npmjs.org/@biomejs/biome/-/biome-2.2.0.tgz} + '@biomejs/biome@2.2.4': + resolution: {integrity: sha512-TBHU5bUy/Ok6m8c0y3pZiuO/BZoY/OcGxoLlrfQof5s8ISVwbVBdFINPQZyFfKwil8XibYWb7JMwnT8wT4WVPg==, tarball: https://registry.npmjs.org/@biomejs/biome/-/biome-2.2.4.tgz} engines: {node: '>=14.21.3'} hasBin: true - '@biomejs/cli-darwin-arm64@2.2.0': - resolution: {integrity: sha512-zKbwUUh+9uFmWfS8IFxmVD6XwqFcENjZvEyfOxHs1epjdH3wyyMQG80FGDsmauPwS2r5kXdEM0v/+dTIA9FXAg==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.2.0.tgz} + '@biomejs/cli-darwin-arm64@2.2.4': + resolution: {integrity: sha512-RJe2uiyaloN4hne4d2+qVj3d3gFJFbmrr5PYtkkjei1O9c+BjGXgpUPVbi8Pl8syumhzJjFsSIYkcLt2VlVLMA==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [darwin] - '@biomejs/cli-darwin-x64@2.2.0': - resolution: {integrity: sha512-+OmT4dsX2eTfhD5crUOPw3RPhaR+SKVspvGVmSdZ9y9O/AgL8pla6T4hOn1q+VAFBHuHhsdxDRJgFCSC7RaMOw==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.2.0.tgz} + '@biomejs/cli-darwin-x64@2.2.4': + resolution: {integrity: 
sha512-cFsdB4ePanVWfTnPVaUX+yr8qV8ifxjBKMkZwN7gKb20qXPxd/PmwqUH8mY5wnM9+U0QwM76CxFyBRJhC9tQwg==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [darwin] - '@biomejs/cli-linux-arm64-musl@2.2.0': - resolution: {integrity: sha512-egKpOa+4FL9YO+SMUMLUvf543cprjevNc3CAgDNFLcjknuNMcZ0GLJYa3EGTCR2xIkIUJDVneBV3O9OcIlCEZQ==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.0.tgz} + '@biomejs/cli-linux-arm64-musl@2.2.4': + resolution: {integrity: sha512-7TNPkMQEWfjvJDaZRSkDCPT/2r5ESFPKx+TEev+I2BXDGIjfCZk2+b88FOhnJNHtksbOZv8ZWnxrA5gyTYhSsQ==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-arm64@2.2.0': - resolution: {integrity: sha512-6eoRdF2yW5FnW9Lpeivh7Mayhq0KDdaDMYOJnH9aT02KuSIX5V1HmWJCQQPwIQbhDh68Zrcpl8inRlTEan0SXw==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.2.0.tgz} + '@biomejs/cli-linux-arm64@2.2.4': + resolution: {integrity: sha512-M/Iz48p4NAzMXOuH+tsn5BvG/Jb07KOMTdSVwJpicmhN309BeEyRyQX+n1XDF0JVSlu28+hiTQ2L4rZPvu7nMw==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-x64-musl@2.2.0': - resolution: {integrity: sha512-I5J85yWwUWpgJyC1CcytNSGusu2p9HjDnOPAFG4Y515hwRD0jpR9sT9/T1cKHtuCvEQ/sBvx+6zhz9l9wEJGAg==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.0.tgz} + '@biomejs/cli-linux-x64-musl@2.2.4': + resolution: {integrity: sha512-m41nFDS0ksXK2gwXL6W6yZTYPMH0LughqbsxInSKetoH6morVj43szqKx79Iudkp8WRT5SxSh7qVb8KCUiewGg==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-linux-x64@2.2.0': - resolution: {integrity: 
sha512-5UmQx/OZAfJfi25zAnAGHUMuOd+LOsliIt119x2soA2gLggQYrVPA+2kMUxR6Mw5M1deUF/AWWP2qpxgH7Nyfw==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.2.0.tgz} + '@biomejs/cli-linux-x64@2.2.4': + resolution: {integrity: sha512-orr3nnf2Dpb2ssl6aihQtvcKtLySLta4E2UcXdp7+RTa7mfJjBgIsbS0B9GC8gVu0hjOu021aU8b3/I1tn+pVQ==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-win32-arm64@2.2.0': - resolution: {integrity: sha512-n9a1/f2CwIDmNMNkFs+JI0ZjFnMO0jdOyGNtihgUNFnlmd84yIYY2KMTBmMV58ZlVHjgmY5Y6E1hVTnSRieggA==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.2.0.tgz} + '@biomejs/cli-win32-arm64@2.2.4': + resolution: {integrity: sha512-NXnfTeKHDFUWfxAefa57DiGmu9VyKi0cDqFpdI+1hJWQjGJhJutHPX0b5m+eXvTKOaf+brU+P0JrQAZMb5yYaQ==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [win32] - '@biomejs/cli-win32-x64@2.2.0': - resolution: {integrity: sha512-Nawu5nHjP/zPKTIryh2AavzTc/KEg4um/MxWdXW0A6P/RZOyIpa7+QSjeXwAwX/utJGaCoXRPWtF3m5U/bB3Ww==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.2.0.tgz} + '@biomejs/cli-win32-x64@2.2.4': + resolution: {integrity: sha512-3Y4V4zVRarVh/B/eSHczR4LYoSVyv3Dfuvm3cWs5w/HScccS0+Wt/lHOcDTRYeHjQmMYVC3rIRWqyN2EI52+zg==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.2.4.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [win32] @@ -717,6 +714,9 @@ packages: '@bundled-es-modules/statuses@1.0.1': resolution: {integrity: sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==, tarball: https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz} + '@bundled-es-modules/tough-cookie@0.1.6': + resolution: {integrity: 
sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw==, tarball: https://registry.npmjs.org/@bundled-es-modules/tough-cookie/-/tough-cookie-0.1.6.tgz} + '@chromatic-com/storybook@4.1.0': resolution: {integrity: sha512-B9XesFX5lQUdP81/QBTtkiYOFqEsJwQpzkZlcYPm2n/L1S/8ZabSPbz6NoY8hOJTXWZ2p7grygUlxyGy+gAvfQ==, tarball: https://registry.npmjs.org/@chromatic-com/storybook/-/storybook-4.1.0.tgz} engines: {node: '>=20.0.0', yarn: '>=1.22.18'} @@ -802,8 +802,8 @@ packages: '@emotion/weak-memoize@0.4.0': resolution: {integrity: sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==, tarball: https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz} - '@esbuild/aix-ppc64@0.25.10': - resolution: {integrity: sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz} + '@esbuild/aix-ppc64@0.25.11': + resolution: {integrity: sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.11.tgz} engines: {node: '>=18'} cpu: [ppc64] os: [aix] @@ -814,8 +814,8 @@ packages: cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.25.10': - resolution: {integrity: sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.10.tgz} + '@esbuild/android-arm64@0.25.11': + resolution: {integrity: sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [android] @@ -826,8 +826,8 @@ packages: cpu: [arm64] os: [android] - '@esbuild/android-arm@0.25.10': - resolution: {integrity: 
sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.10.tgz} + '@esbuild/android-arm@0.25.11': + resolution: {integrity: sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm] os: [android] @@ -838,8 +838,8 @@ packages: cpu: [arm] os: [android] - '@esbuild/android-x64@0.25.10': - resolution: {integrity: sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.10.tgz} + '@esbuild/android-x64@0.25.11': + resolution: {integrity: sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [android] @@ -850,8 +850,8 @@ packages: cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.25.10': - resolution: {integrity: sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz} + '@esbuild/darwin-arm64@0.25.11': + resolution: {integrity: sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [darwin] @@ -862,8 +862,8 @@ packages: cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.25.10': - resolution: {integrity: sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz} + '@esbuild/darwin-x64@0.25.11': + resolution: 
{integrity: sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [darwin] @@ -874,8 +874,8 @@ packages: cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.25.10': - resolution: {integrity: sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.10.tgz} + '@esbuild/freebsd-arm64@0.25.11': + resolution: {integrity: sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] @@ -886,8 +886,8 @@ packages: cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.10': - resolution: {integrity: sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.10.tgz} + '@esbuild/freebsd-x64@0.25.11': + resolution: {integrity: sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [freebsd] @@ -898,8 +898,8 @@ packages: cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.25.10': - resolution: {integrity: sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.10.tgz} + '@esbuild/linux-arm64@0.25.11': + resolution: {integrity: sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] 
os: [linux] @@ -910,8 +910,8 @@ packages: cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.25.10': - resolution: {integrity: sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.10.tgz} + '@esbuild/linux-arm@0.25.11': + resolution: {integrity: sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm] os: [linux] @@ -922,8 +922,8 @@ packages: cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.25.10': - resolution: {integrity: sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.10.tgz} + '@esbuild/linux-ia32@0.25.11': + resolution: {integrity: sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.11.tgz} engines: {node: '>=18'} cpu: [ia32] os: [linux] @@ -934,8 +934,8 @@ packages: cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.25.10': - resolution: {integrity: sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.10.tgz} + '@esbuild/linux-loong64@0.25.11': + resolution: {integrity: sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.11.tgz} engines: {node: '>=18'} cpu: [loong64] os: [linux] @@ -946,8 +946,8 @@ packages: cpu: [loong64] os: [linux] - '@esbuild/linux-mips64el@0.25.10': - resolution: {integrity: sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==, tarball: 
https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.10.tgz} + '@esbuild/linux-mips64el@0.25.11': + resolution: {integrity: sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.11.tgz} engines: {node: '>=18'} cpu: [mips64el] os: [linux] @@ -958,8 +958,8 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.25.10': - resolution: {integrity: sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.10.tgz} + '@esbuild/linux-ppc64@0.25.11': + resolution: {integrity: sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.11.tgz} engines: {node: '>=18'} cpu: [ppc64] os: [linux] @@ -970,8 +970,8 @@ packages: cpu: [ppc64] os: [linux] - '@esbuild/linux-riscv64@0.25.10': - resolution: {integrity: sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.10.tgz} + '@esbuild/linux-riscv64@0.25.11': + resolution: {integrity: sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.11.tgz} engines: {node: '>=18'} cpu: [riscv64] os: [linux] @@ -982,8 +982,8 @@ packages: cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.25.10': - resolution: {integrity: sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.10.tgz} + '@esbuild/linux-s390x@0.25.11': + resolution: {integrity: 
sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.11.tgz} engines: {node: '>=18'} cpu: [s390x] os: [linux] @@ -994,8 +994,8 @@ packages: cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.25.10': - resolution: {integrity: sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz} + '@esbuild/linux-x64@0.25.11': + resolution: {integrity: sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [linux] @@ -1006,8 +1006,8 @@ packages: cpu: [x64] os: [linux] - '@esbuild/netbsd-arm64@0.25.10': - resolution: {integrity: sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==, tarball: https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.10.tgz} + '@esbuild/netbsd-arm64@0.25.11': + resolution: {integrity: sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] @@ -1018,8 +1018,8 @@ packages: cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.10': - resolution: {integrity: sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.10.tgz} + '@esbuild/netbsd-x64@0.25.11': + resolution: {integrity: sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [netbsd] @@ -1030,8 +1030,8 @@ 
packages: cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.25.10': - resolution: {integrity: sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==, tarball: https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.10.tgz} + '@esbuild/openbsd-arm64@0.25.11': + resolution: {integrity: sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==, tarball: https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] @@ -1042,8 +1042,8 @@ packages: cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.10': - resolution: {integrity: sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.10.tgz} + '@esbuild/openbsd-x64@0.25.11': + resolution: {integrity: sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [openbsd] @@ -1054,14 +1054,14 @@ packages: cpu: [x64] os: [openbsd] - '@esbuild/openharmony-arm64@0.25.10': - resolution: {integrity: sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==, tarball: https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.10.tgz} + '@esbuild/openharmony-arm64@0.25.11': + resolution: {integrity: sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==, tarball: https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [openharmony] - '@esbuild/sunos-x64@0.25.10': - resolution: {integrity: sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==, tarball: 
https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.10.tgz} + '@esbuild/sunos-x64@0.25.11': + resolution: {integrity: sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [sunos] @@ -1072,8 +1072,8 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.25.10': - resolution: {integrity: sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.10.tgz} + '@esbuild/win32-arm64@0.25.11': + resolution: {integrity: sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [win32] @@ -1084,8 +1084,8 @@ packages: cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.25.10': - resolution: {integrity: sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.10.tgz} + '@esbuild/win32-ia32@0.25.11': + resolution: {integrity: sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.11.tgz} engines: {node: '>=18'} cpu: [ia32] os: [win32] @@ -1096,8 +1096,8 @@ packages: cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.25.10': - resolution: {integrity: sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz} + '@esbuild/win32-x64@0.25.11': + resolution: {integrity: sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==, tarball: 
https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [win32] @@ -1162,8 +1162,8 @@ packages: '@fontsource/fira-code@5.2.7': resolution: {integrity: sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==, tarball: https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz} - '@fontsource/ibm-plex-mono@5.1.1': - resolution: {integrity: sha512-1aayqPe/ZkD3MlvqpmOHecfA3f2B8g+fAEkgvcCd3lkPP0pS1T0xG5Zmn2EsJQqr1JURtugPUH+5NqvKyfFZMQ==, tarball: https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.1.1.tgz} + '@fontsource/ibm-plex-mono@5.2.7': + resolution: {integrity: sha512-MKAb8qV+CaiMQn2B0dIi1OV3565NYzp3WN5b4oT6LTkk+F0jR6j0ZN+5BKJiIhffDC3rtBULsYZE65+0018z9w==, tarball: https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz} '@fontsource/jetbrains-mono@5.2.5': resolution: {integrity: sha512-TPZ9b/uq38RMdrlZZkl0RwN8Ju9JxuqMETrw76pUQFbGtE1QbwQaNsLlnUrACNNBNbd0NZRXiJJSkC8ajPgbew==, tarball: https://registry.npmjs.org/@fontsource/jetbrains-mono/-/jetbrains-mono-5.2.5.tgz} @@ -1189,40 +1189,25 @@ packages: peerDependencies: react: '*' - '@inquirer/ansi@1.0.0': - resolution: {integrity: sha512-JWaTfCxI1eTmJ1BIv86vUfjVatOdxwD0DAVKYevY8SazeUUZtW+tNbsdejVO1GYE0GXJW1N1ahmiC3TFd+7wZA==, tarball: https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.0.tgz} + '@inquirer/confirm@3.2.0': + resolution: {integrity: sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw==, tarball: https://registry.npmjs.org/@inquirer/confirm/-/confirm-3.2.0.tgz} engines: {node: '>=18'} - '@inquirer/confirm@5.1.18': - resolution: {integrity: sha512-MilmWOzHa3Ks11tzvuAmFoAd/wRuaP3SwlT1IZhyMke31FKLxPiuDWcGXhU+PKveNOpAc4axzAgrgxuIJJRmLw==, tarball: https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.18.tgz} + '@inquirer/core@9.2.1': + resolution: {integrity: 
sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg==, tarball: https://registry.npmjs.org/@inquirer/core/-/core-9.2.1.tgz} engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/core@10.2.2': - resolution: {integrity: sha512-yXq/4QUnk4sHMtmbd7irwiepjB8jXU0kkFRL4nr/aDBA2mDz13cMakEWdDwX3eSCTkk03kwcndD1zfRAIlELxA==, tarball: https://registry.npmjs.org/@inquirer/core/-/core-10.2.2.tgz} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true '@inquirer/figures@1.0.13': resolution: {integrity: sha512-lGPVU3yO9ZNqA7vTYz26jny41lE7yoQansmqdMLBEfqaGsmdg7V3W9mK9Pvb5IL4EVZ9GnSDGMO/cJXud5dMaw==, tarball: https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.13.tgz} engines: {node: '>=18'} - '@inquirer/type@3.0.8': - resolution: {integrity: sha512-lg9Whz8onIHRthWaN1Q9EGLa/0LFJjyM8mEUbL1eTi6yMGvBf8gvyDLtxSXztQsxMvhxxNpJYrwa1YHdq+w4Jw==, tarball: https://registry.npmjs.org/@inquirer/type/-/type-3.0.8.tgz} + '@inquirer/type@1.5.5': + resolution: {integrity: sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA==, tarball: https://registry.npmjs.org/@inquirer/type/-/type-1.5.5.tgz} + engines: {node: '>=18'} + + '@inquirer/type@2.0.0': + resolution: {integrity: sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag==, tarball: https://registry.npmjs.org/@inquirer/type/-/type-2.0.0.tgz} engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==, tarball: https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz} @@ -1333,10 +1318,6 @@ packages: '@jridgewell/gen-mapping@0.3.13': resolution: 
{integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==, tarball: https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz} - '@jridgewell/gen-mapping@0.3.8': - resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==, tarball: https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz} - engines: {node: '>=6.0.0'} - '@jridgewell/remapping@2.3.5': resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==, tarball: https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz} @@ -1344,10 +1325,6 @@ packages: resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==, tarball: https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz} engines: {node: '>=6.0.0'} - '@jridgewell/set-array@1.2.1': - resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==, tarball: https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz} - engines: {node: '>=6.0.0'} - '@jridgewell/sourcemap-codec@1.5.0': resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==, tarball: https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz} @@ -1391,24 +1368,13 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@mswjs/interceptors@0.39.7': - resolution: {integrity: sha512-sURvQbbKsq5f8INV54YJgJEdk8oxBanqkTiXXd33rKmofFCwZLhLRszPduMZ9TA9b8/1CHc/IJmOlBHJk2Q5AQ==, tarball: https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.39.7.tgz} + '@mswjs/interceptors@0.35.9': + resolution: {integrity: 
sha512-SSnyl/4ni/2ViHKkiZb8eajA/eN1DNFaHjhGiLUdZvDz6PKF4COSf/17xqSz64nOo2Ia29SA6B2KNCsyCbVmaQ==, tarball: https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.35.9.tgz} engines: {node: '>=18'} '@mui/core-downloads-tracker@5.18.0': resolution: {integrity: sha512-jbhwoQ1AY200PSSOrNXmrFCaSDSJWP7qk6urkTmIirvRXDROkqe+QwcLlUiw/PrREwsIF/vm3/dAXvjlMHF0RA==, tarball: https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.18.0.tgz} - '@mui/icons-material@5.18.0': - resolution: {integrity: sha512-1s0vEZj5XFXDMmz3Arl/R7IncFqJ+WQ95LDp1roHWGDE2oCO3IS4/hmiOv1/8SD9r6B7tv9GLiqVZYHo+6PkTg==, tarball: https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.18.0.tgz} - engines: {node: '>=12.0.0'} - peerDependencies: - '@mui/material': ^5.0.0 - '@types/react': ^17.0.0 || ^18.0.0 || ^19.0.0 - react: ^17.0.0 || ^18.0.0 || ^19.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - '@mui/material@5.18.0': resolution: {integrity: sha512-bbH/HaJZpFtXGvWg3TsBWG4eyt3gah3E7nCNU8GLyRjVoWcA91Vm/T+sjHfUcwgJSw9iLtucfHBoq+qW/T30aA==, tarball: https://registry.npmjs.org/@mui/material/-/material-5.18.0.tgz} engines: {node: '>=12.0.0'} @@ -1637,8 +1603,8 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==, tarball: https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz} engines: {node: '>=14'} - '@playwright/test@1.55.1': - resolution: {integrity: sha512-IVAh/nOJaw6W9g+RJVlIQJ6gSiER+ae6mKQ5CX1bERzQgbC1VSeBlwdvczT7pxb0GWiyrxH4TGKbMfDb4Sq/ig==, tarball: https://registry.npmjs.org/@playwright/test/-/test-1.55.1.tgz} + '@playwright/test@1.50.1': + resolution: {integrity: sha512-Jii3aBg+CEDpgnuDxEp/h7BimHcUTDlpEtce89xEumlJ5ef2hqepZ+PWp1DDpYC/VO9fmWVI1IlEaoI5fK9FXQ==, tarball: https://registry.npmjs.org/@playwright/test/-/test-1.50.1.tgz} engines: {node: '>=18'} hasBin: true @@ -1839,8 +1805,8 @@ packages: '@types/react': optional: true - 
'@radix-ui/react-dialog@1.1.15': - resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==, tarball: https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz} + '@radix-ui/react-dialog@1.1.4': + resolution: {integrity: sha512-Ur7EV1IwQGCyaAuyDRiOLA5JIUZxELJljF+MbM/2NC0BYwfuRrbpS30BiQBJrVruscgUkieKkqXYDOoByaxIoA==, tarball: https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.4.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2088,19 +2054,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-presence@1.1.5': - resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==, tarball: https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-primitive@2.0.0': resolution: {integrity: sha512-ZSpFm0/uHa8zTvKBDjLFWLo8dkr4MBsiDLz0g3gMUwqgLHz9rTaRRGYDgvZPtBJgYCBKXkS9fzmoySgr8CO6Cw==, tarball: https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.0.tgz} peerDependencies: @@ -2485,113 +2438,113 @@ packages: rollup: optional: true - '@rollup/rollup-android-arm-eabi@4.52.3': - resolution: {integrity: sha512-h6cqHGZ6VdnwliFG1NXvMPTy/9PS3h8oLh7ImwR+kl+oYnQizgjxsONmmPSb2C66RksfkfIxEVtDSEcJiO0tqw==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.3.tgz} + '@rollup/rollup-android-arm-eabi@4.52.5': + resolution: {integrity: sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==, tarball: 
https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.5.tgz} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.52.3': - resolution: {integrity: sha512-wd+u7SLT/u6knklV/ifG7gr5Qy4GUbH2hMWcDauPFJzmCZUAJ8L2bTkVXC2niOIxp8lk3iH/QX8kSrUxVZrOVw==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.3.tgz} + '@rollup/rollup-android-arm64@4.52.5': + resolution: {integrity: sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.5.tgz} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.52.3': - resolution: {integrity: sha512-lj9ViATR1SsqycwFkJCtYfQTheBdvlWJqzqxwc9f2qrcVrQaF/gCuBRTiTolkRWS6KvNxSk4KHZWG7tDktLgjg==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.3.tgz} + '@rollup/rollup-darwin-arm64@4.52.5': + resolution: {integrity: sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.5.tgz} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.52.3': - resolution: {integrity: sha512-+Dyo7O1KUmIsbzx1l+4V4tvEVnVQqMOIYtrxK7ncLSknl1xnMHLgn7gddJVrYPNZfEB8CIi3hK8gq8bDhb3h5A==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.3.tgz} + '@rollup/rollup-darwin-x64@4.52.5': + resolution: {integrity: sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.5.tgz} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.52.3': - resolution: {integrity: sha512-u9Xg2FavYbD30g3DSfNhxgNrxhi6xVG4Y6i9Ur1C7xUuGDW3banRbXj+qgnIrwRN4KeJ396jchwy9bCIzbyBEQ==, tarball: 
https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.3.tgz} + '@rollup/rollup-freebsd-arm64@4.52.5': + resolution: {integrity: sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==, tarball: https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.5.tgz} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.52.3': - resolution: {integrity: sha512-5M8kyi/OX96wtD5qJR89a/3x5x8x5inXBZO04JWhkQb2JWavOWfjgkdvUqibGJeNNaz1/Z1PPza5/tAPXICI6A==, tarball: https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.3.tgz} + '@rollup/rollup-freebsd-x64@4.52.5': + resolution: {integrity: sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==, tarball: https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.5.tgz} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.52.3': - resolution: {integrity: sha512-IoerZJ4l1wRMopEHRKOO16e04iXRDyZFZnNZKrWeNquh5d6bucjezgd+OxG03mOMTnS1x7hilzb3uURPkJ0OfA==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.3.tgz} + '@rollup/rollup-linux-arm-gnueabihf@4.52.5': + resolution: {integrity: sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.5.tgz} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.52.3': - resolution: {integrity: sha512-ZYdtqgHTDfvrJHSh3W22TvjWxwOgc3ThK/XjgcNGP2DIwFIPeAPNsQxrJO5XqleSlgDux2VAoWQ5iJrtaC1TbA==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.3.tgz} + '@rollup/rollup-linux-arm-musleabihf@4.52.5': + resolution: {integrity: sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==, tarball: 
https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.5.tgz} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.52.3': - resolution: {integrity: sha512-NcViG7A0YtuFDA6xWSgmFb6iPFzHlf5vcqb2p0lGEbT+gjrEEz8nC/EeDHvx6mnGXnGCC1SeVV+8u+smj0CeGQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.3.tgz} + '@rollup/rollup-linux-arm64-gnu@4.52.5': + resolution: {integrity: sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.5.tgz} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.52.3': - resolution: {integrity: sha512-d3pY7LWno6SYNXRm6Ebsq0DJGoiLXTb83AIPCXl9fmtIQs/rXoS8SJxxUNtFbJ5MiOvs+7y34np77+9l4nfFMw==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.3.tgz} + '@rollup/rollup-linux-arm64-musl@4.52.5': + resolution: {integrity: sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.5.tgz} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loong64-gnu@4.52.3': - resolution: {integrity: sha512-3y5GA0JkBuirLqmjwAKwB0keDlI6JfGYduMlJD/Rl7fvb4Ni8iKdQs1eiunMZJhwDWdCvrcqXRY++VEBbvk6Eg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.3.tgz} + '@rollup/rollup-linux-loong64-gnu@4.52.5': + resolution: {integrity: sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.5.tgz} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-ppc64-gnu@4.52.3': - resolution: {integrity: 
sha512-AUUH65a0p3Q0Yfm5oD2KVgzTKgwPyp9DSXc3UA7DtxhEb/WSPfbG4wqXeSN62OG5gSo18em4xv6dbfcUGXcagw==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.3.tgz} + '@rollup/rollup-linux-ppc64-gnu@4.52.5': + resolution: {integrity: sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.5.tgz} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.52.3': - resolution: {integrity: sha512-1makPhFFVBqZE+XFg3Dkq+IkQ7JvmUrwwqaYBL2CE+ZpxPaqkGaiWFEWVGyvTwZace6WLJHwjVh/+CXbKDGPmg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.3.tgz} + '@rollup/rollup-linux-riscv64-gnu@4.52.5': + resolution: {integrity: sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.5.tgz} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.52.3': - resolution: {integrity: sha512-OOFJa28dxfl8kLOPMUOQBCO6z3X2SAfzIE276fwT52uXDWUS178KWq0pL7d6p1kz7pkzA0yQwtqL0dEPoVcRWg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.3.tgz} + '@rollup/rollup-linux-riscv64-musl@4.52.5': + resolution: {integrity: sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.5.tgz} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.52.3': - resolution: {integrity: sha512-jMdsML2VI5l+V7cKfZx3ak+SLlJ8fKvLJ0Eoa4b9/vCUrzXKgoKxvHqvJ/mkWhFiyp88nCkM5S2v6nIwRtPcgg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.3.tgz} + '@rollup/rollup-linux-s390x-gnu@4.52.5': + resolution: {integrity: 
sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.5.tgz} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.52.3': - resolution: {integrity: sha512-tPgGd6bY2M2LJTA1uGq8fkSPK8ZLYjDjY+ZLK9WHncCnfIz29LIXIqUgzCR0hIefzy6Hpbe8Th5WOSwTM8E7LA==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.3.tgz} + '@rollup/rollup-linux-x64-gnu@4.52.5': + resolution: {integrity: sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.5.tgz} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.52.3': - resolution: {integrity: sha512-BCFkJjgk+WFzP+tcSMXq77ymAPIxsX9lFJWs+2JzuZTLtksJ2o5hvgTdIcZ5+oKzUDMwI0PfWzRBYAydAHF2Mw==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.3.tgz} + '@rollup/rollup-linux-x64-musl@4.52.5': + resolution: {integrity: sha512-arCGIcuNKjBoKAXD+y7XomR9gY6Mw7HnFBv5Rw7wQRvwYLR7gBAgV7Mb2QTyjXfTveBNFAtPt46/36vV9STLNg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.5.tgz} cpu: [x64] os: [linux] - '@rollup/rollup-openharmony-arm64@4.52.3': - resolution: {integrity: sha512-KTD/EqjZF3yvRaWUJdD1cW+IQBk4fbQaHYJUmP8N4XoKFZilVL8cobFSTDnjTtxWJQ3JYaMgF4nObY/+nYkumA==, tarball: https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.3.tgz} + '@rollup/rollup-openharmony-arm64@4.52.5': + resolution: {integrity: sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==, tarball: https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.5.tgz} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.52.3': - resolution: {integrity: 
sha512-+zteHZdoUYLkyYKObGHieibUFLbttX2r+58l27XZauq0tcWYYuKUwY2wjeCN9oK1Um2YgH2ibd6cnX/wFD7DuA==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.3.tgz} + '@rollup/rollup-win32-arm64-msvc@4.52.5': + resolution: {integrity: sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.5.tgz} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.52.3': - resolution: {integrity: sha512-of1iHkTQSo3kr6dTIRX6t81uj/c/b15HXVsPcEElN5sS859qHrOepM5p9G41Hah+CTqSh2r8Bm56dL2z9UQQ7g==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.3.tgz} + '@rollup/rollup-win32-ia32-msvc@4.52.5': + resolution: {integrity: sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.5.tgz} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.52.3': - resolution: {integrity: sha512-s0hybmlHb56mWVZQj8ra9048/WZTPLILKxcvcq+8awSZmyiSUZjjem1AhU3Tf4ZKpYhK4mg36HtHDOe8QJS5PQ==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.3.tgz} + '@rollup/rollup-win32-x64-gnu@4.52.5': + resolution: {integrity: sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.5.tgz} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.52.3': - resolution: {integrity: sha512-zGIbEVVXVtauFgl3MRwGWEN36P5ZGenHRMgNw88X5wEhEBpq0XrMEZwOn07+ICrwM17XO5xfMZqh0OldCH5VTA==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.3.tgz} + '@rollup/rollup-win32-x64-msvc@4.52.5': + resolution: {integrity: 
sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.5.tgz} cpu: [x64] os: [win32] @@ -2949,11 +2902,8 @@ packages: '@types/jsdom@20.0.1': resolution: {integrity: sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ==, tarball: https://registry.npmjs.org/@types/jsdom/-/jsdom-20.0.1.tgz} - '@types/lodash@4.17.15': - resolution: {integrity: sha512-w/P33JFeySuhN6JLkysYUK2gEmy9kHHFN7E8ro0tkfmlDOgxBDzWEZ/J8cWA+fHqFevpswDTFZnDx+R9lbL6xw==, tarball: https://registry.npmjs.org/@types/lodash/-/lodash-4.17.15.tgz} - - '@types/mdast@4.0.3': - resolution: {integrity: sha512-LsjtqsyF+d2/yFOYaN22dHZI1Cpwkrj+g06G8+qtUKlhovPW89YhqSnfKtMbkgmEtYpH2gydRNULd6y8mciAFg==, tarball: https://registry.npmjs.org/@types/mdast/-/mdast-4.0.3.tgz} + '@types/lodash@4.17.20': + resolution: {integrity: sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==, tarball: https://registry.npmjs.org/@types/lodash/-/lodash-4.17.20.tgz} '@types/mdast@4.0.4': resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==, tarball: https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz} @@ -2970,12 +2920,18 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==, tarball: https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz} + '@types/mute-stream@0.0.4': + resolution: {integrity: sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==, tarball: https://registry.npmjs.org/@types/mute-stream/-/mute-stream-0.0.4.tgz} + '@types/node@18.19.129': resolution: {integrity: sha512-hrmi5jWt2w60ayox3iIXwpMEnfUvOLJCRtrOPbHtH15nTjvO7uhnelvrdAs0dO0/zl5DZ3ZbahiaXEVb54ca/A==, tarball: 
https://registry.npmjs.org/@types/node/-/node-18.19.129.tgz} '@types/node@20.17.16': resolution: {integrity: sha512-vOTpLduLkZXePLxHiHsBLp98mHGnl8RptV4YAO3HfKO5UHjDvySGbxKtpYfy8Sx5+WKcgc45qNreJJRVM3L6mw==, tarball: https://registry.npmjs.org/@types/node/-/node-20.17.16.tgz} + '@types/node@22.18.8': + resolution: {integrity: sha512-pAZSHMiagDR7cARo/cch1f3rXy0AEXwsVsVH09FcyeJVAzCnGgmYis7P3JidtTUjyadhTeSo8TgRPswstghDaw==, tarball: https://registry.npmjs.org/@types/node/-/node-22.18.8.tgz} + '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==, tarball: https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz} @@ -2996,11 +2952,13 @@ packages: '@types/react-date-range@1.4.4': resolution: {integrity: sha512-9Y9NyNgaCsEVN/+O4HKuxzPbVjRVBGdOKRxMDcsTRWVG62lpYgnxefNckTXDWup8FvczoqPW0+ESZR6R1yymDg==, tarball: https://registry.npmjs.org/@types/react-date-range/-/react-date-range-1.4.4.tgz} - '@types/react-dom@18.3.1': - resolution: {integrity: sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz} + '@types/react-dom@18.3.7': + resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz} + peerDependencies: + '@types/react': ^18.0.0 - '@types/react-dom@19.1.9': - resolution: {integrity: sha512-qXRuZaOsAdXKFyOhRBg6Lqqc0yay13vN7KrIg4L7N4aaHN68ma9OK3NE1BoDFgFOTfM7zg+3/8+2n8rLUH3OKQ==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.9.tgz} + '@types/react-dom@19.1.11': + resolution: {integrity: sha512-3BKc/yGdNTYQVVw4idqHtSOcFsgGuBbMveKCOgF8wQ5QtrYOc3jDIlzg3jef04zcXFIHLelyGlj0T+BJ8+KN+w==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.11.tgz} peerDependencies: '@types/react': 
^19.0.0 @@ -3012,14 +2970,15 @@ packages: peerDependencies: '@types/react': '*' - '@types/react-virtualized-auto-sizer@1.0.4': - resolution: {integrity: sha512-nhYwlFiYa8M3S+O2T9QO/e1FQUYMr/wJENUdf/O0dhRi1RS/93rjrYQFYdbUqtdFySuhrtnEDX29P6eKOttY+A==, tarball: https://registry.npmjs.org/@types/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.4.tgz} + '@types/react-virtualized-auto-sizer@1.0.8': + resolution: {integrity: sha512-keJpNyhiwfl2+N12G1ocCVA5ZDBArbPLe/S90X3kt7fam9naeHdaYYWbpe2sHczp70JWJ+2QLhBE8kLvLuVNjA==, tarball: https://registry.npmjs.org/@types/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.8.tgz} + deprecated: This is a stub types definition. react-virtualized-auto-sizer provides its own type definitions, so you do not need this installed. '@types/react-window@1.8.8': resolution: {integrity: sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==, tarball: https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz} - '@types/react@19.1.13': - resolution: {integrity: sha512-hHkbU/eoO3EG5/MZkuFSKmYqPbSVk5byPFa3e7y/8TybHiLMACgI8seVYlicwk7H5K/rI2px9xrQp/C+AUDTiQ==, tarball: https://registry.npmjs.org/@types/react/-/react-19.1.13.tgz} + '@types/react@19.1.17': + resolution: {integrity: sha512-Qec1E3mhALmaspIrhWt9jkQMNdw6bReVu64mjvhbhq2NFPftLPVr+l1SZgmw/66WwBNpDh7ao5AT6gF5v41PFA==, tarball: https://registry.npmjs.org/@types/react/-/react-19.1.17.tgz} '@types/reactcss@1.2.13': resolution: {integrity: sha512-gi3S+aUi6kpkF5vdhUsnkwbiSEIU/BEJyD7kBy2SudWBUuKmJk8AQKE0OVcQQeEy40Azh0lV6uynxlikYIJuwg==, tarball: https://registry.npmjs.org/@types/reactcss/-/reactcss-1.2.13.tgz} @@ -3053,21 +3012,27 @@ packages: '@types/tough-cookie@4.0.2': resolution: {integrity: sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==, tarball: https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz} + '@types/tough-cookie@4.0.5': + resolution: 
{integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==, tarball: https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz} + + '@types/trusted-types@1.0.6': + resolution: {integrity: sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw==, tarball: https://registry.npmjs.org/@types/trusted-types/-/trusted-types-1.0.6.tgz} + '@types/ua-parser-js@0.7.36': resolution: {integrity: sha512-N1rW+njavs70y2cApeIw1vLMYXRwfBy+7trgavGuuTfOd7j1Yh7QTRc/yqsPl6ncokt72ZXuxEU0PiCp9bSwNQ==, tarball: https://registry.npmjs.org/@types/ua-parser-js/-/ua-parser-js-0.7.36.tgz} '@types/unist@2.0.11': resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==, tarball: https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz} - '@types/unist@3.0.2': - resolution: {integrity: sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==, tarball: https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz} - '@types/unist@3.0.3': resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==, tarball: https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz} '@types/uuid@9.0.2': resolution: {integrity: sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==, tarball: https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz} + '@types/wrap-ansi@3.0.0': + resolution: {integrity: sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==, tarball: https://registry.npmjs.org/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz} + '@types/yargs-parser@21.0.2': resolution: {integrity: sha512-5qcvofLPbfjmBfKaLfj/+f+Sbd6pN4zl7w7VSVI5uz7m9QZTuB2aZAa2uo1wHFBNN2x6g/SoTkXmd8mQnQF2Cw==, tarball: https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.2.tgz} @@ -3276,8 +3241,8 
@@ packages: asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==, tarball: https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz} - autoprefixer@10.4.20: - resolution: {integrity: sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==, tarball: https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.20.tgz} + autoprefixer@10.4.21: + resolution: {integrity: sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==, tarball: https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz} engines: {node: ^10 || ^12 || >=14} hasBin: true peerDependencies: @@ -3328,8 +3293,8 @@ packages: base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==, tarball: https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz} - baseline-browser-mapping@2.8.9: - resolution: {integrity: sha512-hY/u2lxLrbecMEWSB0IpGzGyDyeoMFQhCvZd2jGFSE5I17Fh01sYUBPCJtkWERw7zrac9+cIghxm/ytJa2X8iA==, tarball: https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.9.tgz} + baseline-browser-mapping@2.8.10: + resolution: {integrity: sha512-uLfgBi+7IBNay8ECBO2mVMGZAc1VgZWEChxm4lv+TobGdG82LnXMjuNGo/BSSZZL4UmkWhxEHP2f5ziLNwGWMA==, tarball: https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.10.tgz} hasBin: true bcrypt-pbkdf@1.0.2: @@ -3357,13 +3322,8 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==, tarball: https://registry.npmjs.org/braces/-/braces-3.0.3.tgz} engines: {node: '>=8'} - browserslist@4.24.2: - resolution: {integrity: sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==, tarball: https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz} - engines: {node: 
^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - - browserslist@4.26.2: - resolution: {integrity: sha512-ECFzp6uFOSB+dcZ5BK/IBaGWssbSYBHvuMeMt3MMFyhI0Z8SqGgEkBLARgpRH3hutIgPVsALcMwbDrJqPxQ65A==, tarball: https://registry.npmjs.org/browserslist/-/browserslist-4.26.2.tgz} + browserslist@4.26.3: + resolution: {integrity: sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==, tarball: https://registry.npmjs.org/browserslist/-/browserslist-4.26.3.tgz} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -3416,9 +3376,6 @@ packages: resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==, tarball: https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz} engines: {node: '>=10'} - caniuse-lite@1.0.30001717: - resolution: {integrity: sha512-auPpttCq6BDEG8ZAuHJIplGw6GODhjw+/11e7IjpnYCxZcW/ONgPs0KVBJ0d1bY3e2+7PRe5RCLyP+PfwVgkYw==, tarball: https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001717.tgz} - caniuse-lite@1.0.30001746: resolution: {integrity: sha512-eA7Ys/DGw+pnkWWSE/id29f2IcPHVoE8wxtvE5JdvD2V28VTDPy1yEeo11Guz0sJ4ZeGRcm3uaTcAqK1LXaphA==, tarball: https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001746.tgz} @@ -3478,8 +3435,8 @@ packages: resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==, tarball: https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz} engines: {node: '>= 14.16.0'} - chroma-js@2.4.2: - resolution: {integrity: sha512-U9eDw6+wt7V8z5NncY2jJfZa+hUH8XEj8FQHgFJTrUFnJfXYf4Ml4adI2vXZOjqRDpFWtYVWypDfZwnJ+HIR4A==, tarball: https://registry.npmjs.org/chroma-js/-/chroma-js-2.4.2.tgz} + chroma-js@2.6.0: + resolution: {integrity: sha512-BLHvCB9s8Z1EV4ethr6xnkl/P2YRFOGqfgvuMG/MyCbZPrTA+NeiByY6XvgF0zP4/2deU2CXnWyMa3zu1LqQ3A==, tarball: https://registry.npmjs.org/chroma-js/-/chroma-js-2.6.0.tgz} chromatic@11.29.0: 
resolution: {integrity: sha512-yisBlntp9hHVj19lIQdpTlcYIXuU9H/DbFuu6tyWHmj6hWT2EtukCCcxYXL78XdQt1vm2GfIrtgtKpj/Rzmo4A==, tarball: https://registry.npmjs.org/chromatic/-/chromatic-11.29.0.tgz} @@ -3718,8 +3675,8 @@ packages: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==, tarball: https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz} engines: {node: '>=0.11'} - dayjs@1.11.13: - resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==, tarball: https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz} + dayjs@1.11.18: + resolution: {integrity: sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA==, tarball: https://registry.npmjs.org/dayjs/-/dayjs-1.11.18.tgz} debug@2.6.9: resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, tarball: https://registry.npmjs.org/debug/-/debug-2.6.9.tgz} @@ -3753,9 +3710,6 @@ packages: decimal.js@10.4.3: resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==, tarball: https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz} - decode-named-character-reference@1.0.2: - resolution: {integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==, tarball: https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz} - decode-named-character-reference@1.2.0: resolution: {integrity: sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==, tarball: https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz} @@ -3887,9 +3841,6 @@ packages: electron-to-chromium@1.5.228: resolution: {integrity: 
sha512-nxkiyuqAn4MJ1QbobwqJILiDtu/jk14hEAWaMiJmNPh1Z+jqoFlBFZjdXwLWGeVSeu9hGLg6+2G9yJaW8rBIFA==, tarball: https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.228.tgz} - electron-to-chromium@1.5.50: - resolution: {integrity: sha512-eMVObiUQ2LdgeO1F/ySTXsvqvxb6ZH2zPGaMYsWzRDdOddUa77tdmI0ltg+L16UpbWdhPmuF3wIQYyQq65WfZw==, tarball: https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.50.tgz} - emittery@0.13.1: resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==, tarball: https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz} engines: {node: '>=12'} @@ -3945,8 +3896,8 @@ packages: peerDependencies: esbuild: ^0.25.0 - esbuild@0.25.10: - resolution: {integrity: sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz} + esbuild@0.25.11: + resolution: {integrity: sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.25.11.tgz} engines: {node: '>=18'} hasBin: true @@ -4654,8 +4605,8 @@ packages: resolution: {integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==, tarball: https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - jest-fixed-jsdom@0.0.9: - resolution: {integrity: sha512-KPfqh2+sn5q2B+7LZktwDcwhCpOpUSue8a1I+BcixWLOQoEVyAjAGfH+IYZGoxZsziNojoHGRTC8xRbB1wDD4g==, tarball: https://registry.npmjs.org/jest-fixed-jsdom/-/jest-fixed-jsdom-0.0.9.tgz} + jest-fixed-jsdom@0.0.10: + resolution: {integrity: sha512-WaEVX+FripJh+Hn/7dysIgqP66h0KT1NNC22NGmNYANExtCoYNk1q2yjwwcdSboBMkkhn0NtmvKad/cmisnCLg==, tarball: https://registry.npmjs.org/jest-fixed-jsdom/-/jest-fixed-jsdom-0.0.10.tgz} engines: {node: '>=18.0.0'} peerDependencies: 
jest-environment-jsdom: '>=28.0.0' @@ -4898,6 +4849,9 @@ packages: long@5.2.3: resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==, tarball: https://registry.npmjs.org/long/-/long-5.2.3.tgz} + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==, tarball: https://registry.npmjs.org/long/-/long-5.3.2.tgz} + longest-streak@3.1.0: resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==, tarball: https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz} @@ -4917,8 +4871,8 @@ packages: lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==, tarball: https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz} - lucide-react@0.474.0: - resolution: {integrity: sha512-CmghgHkh0OJNmxGKWc0qfPJCYHASPMVSyGY8fj3xgk4v84ItqDg64JNKFZn5hC6E0vHi6gxnbCgwhyVB09wQtA==, tarball: https://registry.npmjs.org/lucide-react/-/lucide-react-0.474.0.tgz} + lucide-react@0.545.0: + resolution: {integrity: sha512-7r1/yUuflQDSt4f1bpn5ZAocyIxcTyVyBBChSVtBKn5M+392cPmI5YJMWOJKk/HUWGm5wg83chlAZtCcGbEZtw==, tarball: https://registry.npmjs.org/lucide-react/-/lucide-react-0.545.0.tgz} peerDependencies: react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -4943,8 +4897,8 @@ packages: makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==, tarball: https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz} - markdown-table@3.0.3: - resolution: {integrity: sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==, tarball: https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz} + markdown-table@3.0.4: + resolution: {integrity: 
sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==, tarball: https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz} material-colors@1.2.6: resolution: {integrity: sha512-6qE4B9deFBIa9YSpOc9O0Sgc43zTeVYbgDT5veRKSlB2+ZuHNoVVxA1L/ckMUayV9Ay9y7Z/SZCLcGteW9i7bg==, tarball: https://registry.npmjs.org/material-colors/-/material-colors-1.2.6.tgz} @@ -4953,20 +4907,17 @@ packages: resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==, tarball: https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz} engines: {node: '>= 0.4'} - mdast-util-find-and-replace@3.0.1: - resolution: {integrity: sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==, tarball: https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz} - - mdast-util-from-markdown@2.0.0: - resolution: {integrity: sha512-n7MTOr/z+8NAX/wmhhDji8O3bRvPTV/U0oTCaZJkjhPSKTPhS3xufVhKGF8s1pJ7Ox4QgoIU7KHseh09S+9rTA==, tarball: https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.0.tgz} + mdast-util-find-and-replace@3.0.2: + resolution: {integrity: sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==, tarball: https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz} mdast-util-from-markdown@2.0.2: resolution: {integrity: sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==, tarball: https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz} - mdast-util-gfm-autolink-literal@2.0.0: - resolution: {integrity: sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==, tarball: https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.0.tgz} + 
mdast-util-gfm-autolink-literal@2.0.1: + resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==, tarball: https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz} - mdast-util-gfm-footnote@2.0.0: - resolution: {integrity: sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==, tarball: https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz} + mdast-util-gfm-footnote@2.1.0: + resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==, tarball: https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz} mdast-util-gfm-strikethrough@2.0.0: resolution: {integrity: sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==, tarball: https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz} @@ -4977,8 +4928,8 @@ packages: mdast-util-gfm-task-list-item@2.0.0: resolution: {integrity: sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==, tarball: https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz} - mdast-util-gfm@3.0.0: - resolution: {integrity: sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==, tarball: https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz} + mdast-util-gfm@3.1.0: + resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==, tarball: https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz} mdast-util-mdx-expression@2.0.1: resolution: {integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==, tarball: 
https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz} @@ -4989,18 +4940,12 @@ packages: mdast-util-mdxjs-esm@2.0.1: resolution: {integrity: sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==, tarball: https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz} - mdast-util-phrasing@4.0.0: - resolution: {integrity: sha512-xadSsJayQIucJ9n053dfQwVu1kuXg7jCTdYsMK8rqzKZh52nLfSH/k0sAxE0u+pj/zKZX+o5wB+ML5mRayOxFA==, tarball: https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.0.0.tgz} - mdast-util-phrasing@4.1.0: resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==, tarball: https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz} mdast-util-to-hast@13.2.0: resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==, tarball: https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz} - mdast-util-to-markdown@2.1.0: - resolution: {integrity: sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==, tarball: https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz} - mdast-util-to-markdown@2.1.2: resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==, tarball: https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz} @@ -5028,144 +4973,87 @@ packages: resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==, tarball: https://registry.npmjs.org/methods/-/methods-1.1.2.tgz} engines: {node: '>= 0.6'} - micromark-core-commonmark@2.0.0: - resolution: {integrity: sha512-jThOz/pVmAYUtkroV3D5c1osFXAMv9e0ypGDOIZuCeAe91/sD6BoE2Sjzt30yuXtwOYUmySOhMas/PVyh02itA==, tarball: 
https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.0.tgz} - micromark-core-commonmark@2.0.3: resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==, tarball: https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz} - micromark-extension-gfm-autolink-literal@2.0.0: - resolution: {integrity: sha512-rTHfnpt/Q7dEAK1Y5ii0W8bhfJlVJFnJMHIPisfPK3gpVNuOP0VnRl96+YJ3RYWV/P4gFeQoGKNlT3RhuvpqAg==, tarball: https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.0.0.tgz} + micromark-extension-gfm-autolink-literal@2.1.0: + resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz} - micromark-extension-gfm-footnote@2.0.0: - resolution: {integrity: sha512-6Rzu0CYRKDv3BfLAUnZsSlzx3ak6HAoI85KTiijuKIz5UxZxbUI+pD6oHgw+6UtQuiRwnGRhzMmPRv4smcz0fg==, tarball: https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.0.0.tgz} + micromark-extension-gfm-footnote@2.1.0: + resolution: {integrity: sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz} - micromark-extension-gfm-strikethrough@2.0.0: - resolution: {integrity: sha512-c3BR1ClMp5fxxmwP6AoOY2fXO9U8uFMKs4ADD66ahLTNcwzSCyRVU4k7LPV5Nxo/VJiR4TdzxRQY2v3qIUceCw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.0.0.tgz} + micromark-extension-gfm-strikethrough@2.1.0: + resolution: {integrity: sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==, tarball: 
https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz} - micromark-extension-gfm-table@2.0.0: - resolution: {integrity: sha512-PoHlhypg1ItIucOaHmKE8fbin3vTLpDOUg8KAr8gRCF1MOZI9Nquq2i/44wFvviM4WuxJzc3demT8Y3dkfvYrw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.0.0.tgz} + micromark-extension-gfm-table@2.1.1: + resolution: {integrity: sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==, tarball: https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz} micromark-extension-gfm-tagfilter@2.0.0: resolution: {integrity: sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==, tarball: https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz} - micromark-extension-gfm-task-list-item@2.0.1: - resolution: {integrity: sha512-cY5PzGcnULaN5O7T+cOzfMoHjBW7j+T9D2sucA5d/KbsBTPcYdebm9zUd9zzdgJGCwahV+/W78Z3nbulBYVbTw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.0.1.tgz} + micromark-extension-gfm-task-list-item@2.1.0: + resolution: {integrity: sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==, tarball: https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz} micromark-extension-gfm@3.0.0: resolution: {integrity: sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==, tarball: https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz} - micromark-factory-destination@2.0.0: - resolution: {integrity: sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==, tarball: 
https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz} - micromark-factory-destination@2.0.1: resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==, tarball: https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz} - micromark-factory-label@2.0.0: - resolution: {integrity: sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==, tarball: https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz} - micromark-factory-label@2.0.1: resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==, tarball: https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz} - micromark-factory-space@2.0.0: - resolution: {integrity: sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==, tarball: https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz} - micromark-factory-space@2.0.1: resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==, tarball: https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz} - micromark-factory-title@2.0.0: - resolution: {integrity: sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==, tarball: https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz} - micromark-factory-title@2.0.1: resolution: {integrity: sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==, tarball: https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz} - micromark-factory-whitespace@2.0.0: - resolution: {integrity: 
sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==, tarball: https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz} - micromark-factory-whitespace@2.0.1: resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==, tarball: https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz} - micromark-util-character@2.0.1: - resolution: {integrity: sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==, tarball: https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz} - micromark-util-character@2.1.1: resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==, tarball: https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz} - micromark-util-chunked@2.0.0: - resolution: {integrity: sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==, tarball: https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz} - micromark-util-chunked@2.0.1: resolution: {integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==, tarball: https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz} - micromark-util-classify-character@2.0.0: - resolution: {integrity: sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==, tarball: https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz} - micromark-util-classify-character@2.0.1: resolution: {integrity: sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==, tarball: 
https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz} - micromark-util-combine-extensions@2.0.0: - resolution: {integrity: sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==, tarball: https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz} - micromark-util-combine-extensions@2.0.1: resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==, tarball: https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz} - micromark-util-decode-numeric-character-reference@2.0.1: - resolution: {integrity: sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==, tarball: https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz} - micromark-util-decode-numeric-character-reference@2.0.2: resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==, tarball: https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz} - micromark-util-decode-string@2.0.0: - resolution: {integrity: sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA==, tarball: https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz} - micromark-util-decode-string@2.0.1: resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==, tarball: https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz} micromark-util-encode@2.0.1: resolution: {integrity: 
sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==, tarball: https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz} - micromark-util-html-tag-name@2.0.0: - resolution: {integrity: sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==, tarball: https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz} - micromark-util-html-tag-name@2.0.1: resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==, tarball: https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz} - micromark-util-normalize-identifier@2.0.0: - resolution: {integrity: sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==, tarball: https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz} - micromark-util-normalize-identifier@2.0.1: resolution: {integrity: sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==, tarball: https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz} - micromark-util-resolve-all@2.0.0: - resolution: {integrity: sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==, tarball: https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz} - micromark-util-resolve-all@2.0.1: resolution: {integrity: sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==, tarball: https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz} micromark-util-sanitize-uri@2.0.1: resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==, tarball: 
https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz} - micromark-util-subtokenize@2.0.0: - resolution: {integrity: sha512-vc93L1t+gpR3p8jxeVdaYlbV2jTYteDje19rNSS/H5dlhxUYll5Fy6vJ2cDwP8RnsXi818yGty1ayP55y3W6fg==, tarball: https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.0.tgz} - micromark-util-subtokenize@2.1.0: resolution: {integrity: sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==, tarball: https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz} - micromark-util-symbol@2.0.0: - resolution: {integrity: sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==, tarball: https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz} - micromark-util-symbol@2.0.1: resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==, tarball: https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz} - micromark-util-types@2.0.0: - resolution: {integrity: sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==, tarball: https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz} - micromark-util-types@2.0.2: resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==, tarball: https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz} - micromark@4.0.0: - resolution: {integrity: sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==, tarball: https://registry.npmjs.org/micromark/-/micromark-4.0.0.tgz} - micromark@4.0.2: resolution: {integrity: sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==, tarball: 
https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz} @@ -5212,8 +5100,8 @@ packages: resolution: {integrity: sha512-qxBgB7Qa2sEQgHFjj0dSigq7fX4k6Saisd5Nelwp2q8mlbAFh5dHV9JTTlF8viYJLSSWgMCZFUom8PJcMNBoJw==, tarball: https://registry.npmjs.org/mock-socket/-/mock-socket-9.3.1.tgz} engines: {node: '>= 8'} - monaco-editor@0.52.2: - resolution: {integrity: sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz} + monaco-editor@0.53.0: + resolution: {integrity: sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.53.0.tgz} moo-color@1.0.3: resolution: {integrity: sha512-i/+ZKXMDf6aqYtBhuOcej71YSlbjT3wCO/4H1j8rPvxDJEifdwgg5MaFyu6iYAT8GBZJg2z0dkgK4YMzvURALQ==, tarball: https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz} @@ -5224,8 +5112,8 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, tarball: https://registry.npmjs.org/ms/-/ms-2.1.3.tgz} - msw@2.11.3: - resolution: {integrity: sha512-878imp8jxIpfzuzxYfX0qqTq1IFQz/1/RBHs/PyirSjzi+xKM/RRfIpIqHSCWjH0GxidrjhgiiXC+DWXNDvT9w==, tarball: https://registry.npmjs.org/msw/-/msw-2.11.3.tgz} + msw@2.4.8: + resolution: {integrity: sha512-a+FUW1m5yT8cV9GBy0L/cbNg0EA4//SKEzgu3qFrpITrWYeZmqfo7dqtM74T2lAl69jjUjjCaEhZKaxG2Ns8DA==, tarball: https://registry.npmjs.org/msw/-/msw-2.4.8.tgz} engines: {node: '>=18'} hasBin: true peerDependencies: @@ -5234,9 +5122,9 @@ packages: typescript: optional: true - mute-stream@2.0.0: - resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==, tarball: https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz} - engines: {node: ^18.17.0 || >=20.5.0} + mute-stream@1.0.0: + resolution: {integrity: 
sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==, tarball: https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==, tarball: https://registry.npmjs.org/mz/-/mz-2.7.0.tgz} @@ -5249,11 +5137,6 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - nanoid@3.3.8: - resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==, tarball: https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - napi-postinstall@0.3.3: resolution: {integrity: sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==, tarball: https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.3.tgz} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} @@ -5269,9 +5152,6 @@ packages: node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==, tarball: https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz} - node-releases@2.0.18: - resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==, tarball: https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz} - node-releases@2.0.21: resolution: {integrity: sha512-5b0pgg78U3hwXkCM8Z9b2FJdPZlr9Psr9V2gQPESdGHqbntyFJKFW4r5TeWGFzafGY3hzs1JC62VEQMbl1JFkw==, tarball: https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz} @@ -5462,21 +5342,21 @@ packages: resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==, tarball: https://registry.npmjs.org/pify/-/pify-2.3.0.tgz} engines: {node: '>=0.10.0'} - 
pirates@4.0.6: - resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==, tarball: https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz} + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==, tarball: https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz} engines: {node: '>= 6'} pkg-dir@4.2.0: resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==, tarball: https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz} engines: {node: '>=8'} - playwright-core@1.55.1: - resolution: {integrity: sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==, tarball: https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz} + playwright-core@1.50.1: + resolution: {integrity: sha512-ra9fsNWayuYumt+NiM069M6OkcRb1FZSK8bgi66AtpFoWkg2+y0bJSNmkFrWhMbEBbVKC/EruAHH3g0zmtwGmQ==, tarball: https://registry.npmjs.org/playwright-core/-/playwright-core-1.50.1.tgz} engines: {node: '>=18'} hasBin: true - playwright@1.55.1: - resolution: {integrity: sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==, tarball: https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz} + playwright@1.50.1: + resolution: {integrity: sha512-G8rwsOQJ63XG6BbKj2w5rHeavFjy5zynBA9zsJMMtBoe/Uf757oG12NXz6e6OirF7RCrTVAKFXbLmn1RbL7Qaw==, tarball: https://registry.npmjs.org/playwright/-/playwright-1.50.1.tgz} engines: {node: '>=18'} hasBin: true @@ -5490,22 +5370,28 @@ packages: peerDependencies: postcss: ^8.0.0 - postcss-js@4.0.1: - resolution: {integrity: sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==, tarball: https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz} + postcss-js@4.1.0: + resolution: {integrity: 
sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==, tarball: https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz} engines: {node: ^12 || ^14 || >= 16} peerDependencies: postcss: ^8.4.21 - postcss-load-config@4.0.2: - resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==, tarball: https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz} - engines: {node: '>= 14'} + postcss-load-config@6.0.1: + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==, tarball: https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz} + engines: {node: '>= 18'} peerDependencies: + jiti: '>=1.21.0' postcss: '>=8.0.9' - ts-node: '>=9.0.0' + tsx: ^4.8.1 + yaml: ^2.4.2 peerDependenciesMeta: + jiti: + optional: true postcss: optional: true - ts-node: + tsx: + optional: true + yaml: optional: true postcss-nested@6.2.0: @@ -5525,10 +5411,6 @@ packages: postcss-value-parser@4.2.0: resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==, tarball: https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz} - postcss@8.5.1: - resolution: {integrity: sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.5.1.tgz} - engines: {node: ^10 || ^12 || >=14} - postcss@8.5.6: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz} engines: {node: ^10 || ^12 || >=14} @@ -5621,8 +5503,8 @@ packages: peerDependencies: react: '*' - react-confetti@6.2.2: - resolution: {integrity: sha512-K+kTyOPgX+ZujMZ+Rmb7pZdHBvg+DzinG/w4Eh52WOB8/pfO38efnnrtEZNJmjTvLxc16RBYO+tPM68Fg8viBA==, 
tarball: https://registry.npmjs.org/react-confetti/-/react-confetti-6.2.2.tgz} + react-confetti@6.4.0: + resolution: {integrity: sha512-5MdGUcqxrTU26I2EU7ltkWPwxvucQTuqMm8dUz72z2YMqTD6s9vMcDUysk7n9jnC+lXuCPeJJ7Knf98VEYE9Rg==, tarball: https://registry.npmjs.org/react-confetti/-/react-confetti-6.4.0.tgz} engines: {node: '>=16'} peerDependencies: react: ^16.3.0 || ^17.0.1 || ^18.0.0 || ^19.0.0 @@ -5712,8 +5594,8 @@ packages: '@types/react': optional: true - react-resizable-panels@3.0.3: - resolution: {integrity: sha512-7HA8THVBHTzhDK4ON0tvlGXyMAJN1zBeRpuyyremSikgYh2ku6ltD7tsGQOcXx4NKPrZtYCm/5CBr+dkruTGQw==, tarball: https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-3.0.3.tgz} + react-resizable-panels@3.0.6: + resolution: {integrity: sha512-b3qKHQ3MLqOgSS+FRYKapNkJZf5EQzuf6+RLiq1/IlTHw99YrZ2NJZLk4hQIzTnnIkRg2LUqyVinu6YWWpUYew==, tarball: https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-3.0.6.tgz} peerDependencies: react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc @@ -5761,11 +5643,11 @@ packages: react: '>=16.6.0' react-dom: '>=16.6.0' - react-virtualized-auto-sizer@1.0.24: - resolution: {integrity: sha512-3kCn7N9NEb3FlvJrSHWGQ4iVl+ydQObq2fHMn12i5wbtm74zHOPhz/i64OL3c1S1vi9i2GXtZqNqUJTQ+BnNfg==, tarball: https://registry.npmjs.org/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.24.tgz} + react-virtualized-auto-sizer@1.0.26: + resolution: {integrity: sha512-CblNyiNVw2o+hsa5/49NH2ogGxZ+t+3aweRvNSq7TVjDIlwk7ir4lencEg5HxHeSzwNarSkNkiu0qJSOXtxm5A==, tarball: https://registry.npmjs.org/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.26.tgz} peerDependencies: - react: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 - react-dom: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 + react: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0 
react-window@1.8.11: resolution: {integrity: sha512-+SRbUVT2scadgFSWx+R1P754xHPEqvcfSfVX10QYg6POOz+WNgkN48pS+BtZNIMGiL1HYrSEiCkwsMS15QogEQ==, tarball: https://registry.npmjs.org/react-window/-/react-window-1.8.11.tgz} @@ -5829,8 +5711,8 @@ packages: resolution: {integrity: sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==, tarball: https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz} engines: {node: '>= 0.4'} - remark-gfm@4.0.0: - resolution: {integrity: sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==, tarball: https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz} + remark-gfm@4.0.1: + resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==, tarball: https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz} remark-parse@11.0.0: resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==, tarball: https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz} @@ -5876,9 +5758,6 @@ packages: resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==, tarball: https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz} engines: {node: '>=8'} - rettime@0.7.0: - resolution: {integrity: sha512-LPRKoHnLKd/r3dVxcwO7vhCW+orkOGj9ViueosEBK6ie89CijnfRlhaDhHq/3Hxu4CkWQtxwlBG0mzTQY6uQjw==, tarball: https://registry.npmjs.org/rettime/-/rettime-0.7.0.tgz} - reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==, tarball: https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -5901,8 +5780,8 @@ packages: rollup: optional: true - rollup@4.52.3: - resolution: {integrity: 
sha512-RIDh866U8agLgiIcdpB+COKnlCreHJLfIhWC3LVflku5YHfpnsIKigRZeFfMfCc4dVcqNVfQQ5gO/afOck064A==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.52.3.tgz} + rollup@4.52.5: + resolution: {integrity: sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.52.5.tgz} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -6177,8 +6056,8 @@ packages: peerDependencies: tailwindcss: '>=3.0.0 || insiders' - tailwindcss@3.4.17: - resolution: {integrity: sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==, tarball: https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.17.tgz} + tailwindcss@3.4.18: + resolution: {integrity: sha512-6A2rnmW5xZMdw11LYjhcI5846rt9pbLSabY5XPxo+XWdxwZaFEn47Go4NzFiHu9sNNmr/kXivP1vStfvMaK1GQ==, tarball: https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.18.tgz} engines: {node: '>=14.0.0'} hasBin: true @@ -6220,13 +6099,6 @@ packages: resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==, tarball: https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz} engines: {node: '>=14.0.0'} - tldts-core@7.0.16: - resolution: {integrity: sha512-XHhPmHxphLi+LGbH0G/O7dmUH9V65OY20R7vH8gETHsp5AZCjBk9l8sqmRKLaGOxnETU7XNSDUPtewAy/K6jbA==, tarball: https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.16.tgz} - - tldts@7.0.16: - resolution: {integrity: sha512-5bdPHSwbKTeHmXrgecID4Ljff8rQjv7g8zKQPkCozRo2HWWni+p310FSn5ImI+9kWw9kK4lzOB5q/a6iv0IJsw==, tarball: https://registry.npmjs.org/tldts/-/tldts-7.0.16.tgz} - hasBin: true - tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==, tarball: https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz} @@ -6245,10 +6117,6 @@ packages: resolution: {integrity: 
sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==, tarball: https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz} engines: {node: '>=6'} - tough-cookie@6.0.0: - resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==, tarball: https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz} - engines: {node: '>=16'} - tr46@3.0.0: resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==, tarball: https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz} engines: {node: '>=12'} @@ -6256,9 +6124,6 @@ packages: trim-lines@3.0.1: resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==, tarball: https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz} - trough@2.1.0: - resolution: {integrity: sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==, tarball: https://registry.npmjs.org/trough/-/trough-2.1.0.tgz} - trough@2.2.0: resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==, tarball: https://registry.npmjs.org/trough/-/trough-2.2.0.tgz} @@ -6283,14 +6148,14 @@ packages: '@swc/wasm': optional: true - ts-poet@6.11.0: - resolution: {integrity: sha512-r5AGF8vvb+GjBsnqiTqbLhN1/U2FJt6BI+k0dfCrkKzWvUhNlwMmq9nDHuucHs45LomgHjZPvYj96dD3JawjJA==, tarball: https://registry.npmjs.org/ts-poet/-/ts-poet-6.11.0.tgz} + ts-poet@6.12.0: + resolution: {integrity: sha512-xo+iRNMWqyvXpFTaOAvLPA5QAWO6TZrSUs5s4Odaya3epqofBu/fMLHEWl8jPmjhA0s9sgj9sNvF1BmaQlmQkA==, tarball: https://registry.npmjs.org/ts-poet/-/ts-poet-6.12.0.tgz} - ts-proto-descriptors@1.15.0: - resolution: {integrity: sha512-TYyJ7+H+7Jsqawdv+mfsEpZPTIj9siDHS6EMCzG/z3b/PZiphsX+mWtqFfFVe5/N0Th6V3elK9lQqjnrgTOfrg==, tarball: 
https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-1.15.0.tgz} + ts-proto-descriptors@1.16.0: + resolution: {integrity: sha512-3yKuzMLpltdpcyQji1PJZRfoo4OJjNieKTYkQY8pF7xGKsYz/RHe3aEe4KiRxcinoBmnEhmuI+yJTxLb922ULA==, tarball: https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-1.16.0.tgz} - ts-proto@1.164.0: - resolution: {integrity: sha512-yIyMucjcozS7Vxtyy5mH6C8ltbY4gEBVNW4ymZ0kWiKlyMxsvhyUZ63CbxcF7dCKQVjHR+fLJ3SiorfgyhQ+AQ==, tarball: https://registry.npmjs.org/ts-proto/-/ts-proto-1.164.0.tgz} + ts-proto@1.181.2: + resolution: {integrity: sha512-knJ8dtjn2Pd0c5ZGZG8z9DMiD4PUY8iGI9T9tb8DvGdWRMkLpf0WcPO7G+7cmbZyxvNTAG6ci3fybEaFgMZIvg==, tarball: https://registry.npmjs.org/ts-proto/-/ts-proto-1.181.2.tgz} hasBin: true tsconfig-paths@4.2.0: @@ -6345,8 +6210,8 @@ packages: tzdata@1.0.46: resolution: {integrity: sha512-zJ4Jv3KCgN3dFeSADpIfHKt9bdIY7TjK3ELaij6oFvyyQBuIZ9LwMlR51vJvMQvRWQ9cS2v92xeZ0sQW4hXCWA==, tarball: https://registry.npmjs.org/tzdata/-/tzdata-1.0.46.tgz} - ua-parser-js@1.0.40: - resolution: {integrity: sha512-z6PJ8Lml+v3ichVojCiB8toQJBuwR42ySM4ezjXIqXK3M0HczmKQ3LF4rhU55PfD99KEEXQG6yb7iOMyvYuHew==, tarball: https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.40.tgz} + ua-parser-js@1.0.41: + resolution: {integrity: sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==, tarball: https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.41.tgz} hasBin: true undici-types@5.26.5: @@ -6355,6 +6220,9 @@ packages: undici-types@6.19.8: resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==, tarball: https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==, tarball: https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz} + undici@6.21.3: resolution: 
{integrity: sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==, tarball: https://registry.npmjs.org/undici/-/undici-6.21.3.tgz} engines: {node: '>=18.17'} @@ -6367,9 +6235,6 @@ packages: resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==, tarball: https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz} engines: {node: '>=18'} - unified@11.0.4: - resolution: {integrity: sha512-apMPnyLjAX+ty4OrNap7yumyVAMlKx5IWU2wlzzUdYJO9A8f1p9m/gywF/GM2ZDFcjQPrx59Mc90KwmxsoklxQ==, tarball: https://registry.npmjs.org/unified/-/unified-11.0.4.tgz} - unified@11.0.5: resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==, tarball: https://registry.npmjs.org/unified/-/unified-11.0.5.tgz} @@ -6407,15 +6272,6 @@ packages: unplugin@1.5.0: resolution: {integrity: sha512-9ZdRwbh/4gcm1JTOkp9lAkIDrtOyOxgHmY7cjuwI8L/2RTikMcVG25GsZwNAgRuap3iDw2jeq7eoqtAsz5rW3A==, tarball: https://registry.npmjs.org/unplugin/-/unplugin-1.5.0.tgz} - until-async@3.0.2: - resolution: {integrity: sha512-IiSk4HlzAMqTUseHHe3VhIGyuFmN90zMTpD3Z3y8jeQbzLIq500MVM7Jq2vUAnTKAFPJrqwkzr6PoTcPhGcOiw==, tarball: https://registry.npmjs.org/until-async/-/until-async-3.0.2.tgz} - - update-browserslist-db@1.1.1: - resolution: {integrity: sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==, tarball: https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - update-browserslist-db@1.1.3: resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==, tarball: https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz} hasBin: true @@ -6548,8 +6404,8 @@ packages: vue-tsc: optional: true - vite@7.1.7: - resolution: {integrity: 
sha512-VbA8ScMvAISJNJVbRDTJdCwqQoAareR/wutevKanhR2/1EkoXVZVkkORaYm/tNVCjP/UDTKtcw3bAkwOUdedmA==, tarball: https://registry.npmjs.org/vite/-/vite-7.1.7.tgz} + vite@7.1.11: + resolution: {integrity: sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==, tarball: https://registry.npmjs.org/vite/-/vite-7.1.11.tgz} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -6798,7 +6654,7 @@ snapshots: dependencies: '@babel/compat-data': 7.28.4 '@babel/helper-validator-option': 7.27.1 - browserslist: 4.26.2 + browserslist: 4.26.3 lru-cache: 5.1.1 semver: 7.7.2 @@ -6978,39 +6834,39 @@ snapshots: '@bcoe/v8-coverage@0.2.3': {} - '@biomejs/biome@2.2.0': + '@biomejs/biome@2.2.4': optionalDependencies: - '@biomejs/cli-darwin-arm64': 2.2.0 - '@biomejs/cli-darwin-x64': 2.2.0 - '@biomejs/cli-linux-arm64': 2.2.0 - '@biomejs/cli-linux-arm64-musl': 2.2.0 - '@biomejs/cli-linux-x64': 2.2.0 - '@biomejs/cli-linux-x64-musl': 2.2.0 - '@biomejs/cli-win32-arm64': 2.2.0 - '@biomejs/cli-win32-x64': 2.2.0 + '@biomejs/cli-darwin-arm64': 2.2.4 + '@biomejs/cli-darwin-x64': 2.2.4 + '@biomejs/cli-linux-arm64': 2.2.4 + '@biomejs/cli-linux-arm64-musl': 2.2.4 + '@biomejs/cli-linux-x64': 2.2.4 + '@biomejs/cli-linux-x64-musl': 2.2.4 + '@biomejs/cli-win32-arm64': 2.2.4 + '@biomejs/cli-win32-x64': 2.2.4 - '@biomejs/cli-darwin-arm64@2.2.0': + '@biomejs/cli-darwin-arm64@2.2.4': optional: true - '@biomejs/cli-darwin-x64@2.2.0': + '@biomejs/cli-darwin-x64@2.2.4': optional: true - '@biomejs/cli-linux-arm64-musl@2.2.0': + '@biomejs/cli-linux-arm64-musl@2.2.4': optional: true - '@biomejs/cli-linux-arm64@2.2.0': + '@biomejs/cli-linux-arm64@2.2.4': optional: true - '@biomejs/cli-linux-x64-musl@2.2.0': + '@biomejs/cli-linux-x64-musl@2.2.4': optional: true - '@biomejs/cli-linux-x64@2.2.0': + '@biomejs/cli-linux-x64@2.2.4': optional: true - '@biomejs/cli-win32-arm64@2.2.0': + '@biomejs/cli-win32-arm64@2.2.4': optional: true - '@biomejs/cli-win32-x64@2.2.0': + 
'@biomejs/cli-win32-x64@2.2.4': optional: true '@bundled-es-modules/cookie@2.0.1': @@ -7021,13 +6877,18 @@ snapshots: dependencies: statuses: 2.0.2 - '@chromatic-com/storybook@4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + '@bundled-es-modules/tough-cookie@0.1.6': + dependencies: + '@types/tough-cookie': 4.0.5 + tough-cookie: 4.1.4 + + '@chromatic-com/storybook@4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: '@neoconfetti/react': 1.0.0 chromatic: 12.2.0 filesize: 10.1.2 jsonfile: 6.1.0 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) strip-ansi: 7.1.0 transitivePeerDependencies: - '@chromatic-com/cypress' @@ -7103,7 +6964,7 @@ snapshots: '@emotion/memoize@0.9.0': {} - '@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1)': + '@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 '@emotion/babel-plugin': 11.13.5 @@ -7115,7 +6976,7 @@ snapshots: hoist-non-react-statics: 3.3.2 react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 transitivePeerDependencies: - supports-color @@ -7129,18 +6990,18 @@ snapshots: '@emotion/sheet@1.4.0': {} - '@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1)': + '@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 '@emotion/babel-plugin': 11.13.5 
'@emotion/is-prop-valid': 1.4.0 - '@emotion/react': 11.14.0(@types/react@19.1.13)(react@19.1.1) + '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) '@emotion/serialize': 1.3.3 '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.1) '@emotion/utils': 1.4.2 react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 transitivePeerDependencies: - supports-color @@ -7154,154 +7015,154 @@ snapshots: '@emotion/weak-memoize@0.4.0': {} - '@esbuild/aix-ppc64@0.25.10': + '@esbuild/aix-ppc64@0.25.11': optional: true '@esbuild/aix-ppc64@0.25.3': optional: true - '@esbuild/android-arm64@0.25.10': + '@esbuild/android-arm64@0.25.11': optional: true '@esbuild/android-arm64@0.25.3': optional: true - '@esbuild/android-arm@0.25.10': + '@esbuild/android-arm@0.25.11': optional: true '@esbuild/android-arm@0.25.3': optional: true - '@esbuild/android-x64@0.25.10': + '@esbuild/android-x64@0.25.11': optional: true '@esbuild/android-x64@0.25.3': optional: true - '@esbuild/darwin-arm64@0.25.10': + '@esbuild/darwin-arm64@0.25.11': optional: true '@esbuild/darwin-arm64@0.25.3': optional: true - '@esbuild/darwin-x64@0.25.10': + '@esbuild/darwin-x64@0.25.11': optional: true '@esbuild/darwin-x64@0.25.3': optional: true - '@esbuild/freebsd-arm64@0.25.10': + '@esbuild/freebsd-arm64@0.25.11': optional: true '@esbuild/freebsd-arm64@0.25.3': optional: true - '@esbuild/freebsd-x64@0.25.10': + '@esbuild/freebsd-x64@0.25.11': optional: true '@esbuild/freebsd-x64@0.25.3': optional: true - '@esbuild/linux-arm64@0.25.10': + '@esbuild/linux-arm64@0.25.11': optional: true '@esbuild/linux-arm64@0.25.3': optional: true - '@esbuild/linux-arm@0.25.10': + '@esbuild/linux-arm@0.25.11': optional: true '@esbuild/linux-arm@0.25.3': optional: true - '@esbuild/linux-ia32@0.25.10': + '@esbuild/linux-ia32@0.25.11': optional: true '@esbuild/linux-ia32@0.25.3': optional: true - '@esbuild/linux-loong64@0.25.10': + '@esbuild/linux-loong64@0.25.11': optional: true 
'@esbuild/linux-loong64@0.25.3': optional: true - '@esbuild/linux-mips64el@0.25.10': + '@esbuild/linux-mips64el@0.25.11': optional: true '@esbuild/linux-mips64el@0.25.3': optional: true - '@esbuild/linux-ppc64@0.25.10': + '@esbuild/linux-ppc64@0.25.11': optional: true '@esbuild/linux-ppc64@0.25.3': optional: true - '@esbuild/linux-riscv64@0.25.10': + '@esbuild/linux-riscv64@0.25.11': optional: true '@esbuild/linux-riscv64@0.25.3': optional: true - '@esbuild/linux-s390x@0.25.10': + '@esbuild/linux-s390x@0.25.11': optional: true '@esbuild/linux-s390x@0.25.3': optional: true - '@esbuild/linux-x64@0.25.10': + '@esbuild/linux-x64@0.25.11': optional: true '@esbuild/linux-x64@0.25.3': optional: true - '@esbuild/netbsd-arm64@0.25.10': + '@esbuild/netbsd-arm64@0.25.11': optional: true '@esbuild/netbsd-arm64@0.25.3': optional: true - '@esbuild/netbsd-x64@0.25.10': + '@esbuild/netbsd-x64@0.25.11': optional: true '@esbuild/netbsd-x64@0.25.3': optional: true - '@esbuild/openbsd-arm64@0.25.10': + '@esbuild/openbsd-arm64@0.25.11': optional: true '@esbuild/openbsd-arm64@0.25.3': optional: true - '@esbuild/openbsd-x64@0.25.10': + '@esbuild/openbsd-x64@0.25.11': optional: true '@esbuild/openbsd-x64@0.25.3': optional: true - '@esbuild/openharmony-arm64@0.25.10': + '@esbuild/openharmony-arm64@0.25.11': optional: true - '@esbuild/sunos-x64@0.25.10': + '@esbuild/sunos-x64@0.25.11': optional: true '@esbuild/sunos-x64@0.25.3': optional: true - '@esbuild/win32-arm64@0.25.10': + '@esbuild/win32-arm64@0.25.11': optional: true '@esbuild/win32-arm64@0.25.3': optional: true - '@esbuild/win32-ia32@0.25.10': + '@esbuild/win32-ia32@0.25.11': optional: true '@esbuild/win32-ia32@0.25.3': optional: true - '@esbuild/win32-x64@0.25.10': + '@esbuild/win32-x64@0.25.11': optional: true '@esbuild/win32-x64@0.25.3': @@ -7372,7 +7233,7 @@ snapshots: '@fontsource/fira-code@5.2.7': {} - '@fontsource/ibm-plex-mono@5.1.1': {} + '@fontsource/ibm-plex-mono@5.2.7': {} '@fontsource/jetbrains-mono@5.2.5': {} @@ 
-7397,33 +7258,35 @@ snapshots: dependencies: react: 19.1.1 - '@inquirer/ansi@1.0.0': {} - - '@inquirer/confirm@5.1.18(@types/node@20.17.16)': + '@inquirer/confirm@3.2.0': dependencies: - '@inquirer/core': 10.2.2(@types/node@20.17.16) - '@inquirer/type': 3.0.8(@types/node@20.17.16) - optionalDependencies: - '@types/node': 20.17.16 + '@inquirer/core': 9.2.1 + '@inquirer/type': 1.5.5 - '@inquirer/core@10.2.2(@types/node@20.17.16)': + '@inquirer/core@9.2.1': dependencies: - '@inquirer/ansi': 1.0.0 '@inquirer/figures': 1.0.13 - '@inquirer/type': 3.0.8(@types/node@20.17.16) + '@inquirer/type': 2.0.0 + '@types/mute-stream': 0.0.4 + '@types/node': 22.18.8 + '@types/wrap-ansi': 3.0.0 + ansi-escapes: 4.3.2 cli-width: 4.1.0 - mute-stream: 2.0.0 + mute-stream: 1.0.0 signal-exit: 4.1.0 + strip-ansi: 6.0.1 wrap-ansi: 6.2.0 yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 20.17.16 '@inquirer/figures@1.0.13': {} - '@inquirer/type@3.0.8(@types/node@20.17.16)': - optionalDependencies: - '@types/node': 20.17.16 + '@inquirer/type@1.5.5': + dependencies: + mute-stream: 1.0.0 + + '@inquirer/type@2.0.0': + dependencies: + mute-stream: 1.0.0 '@isaacs/cliui@8.0.2': dependencies: @@ -7613,7 +7476,7 @@ snapshots: jest-regex-util: 29.6.3 jest-util: 29.7.0 micromatch: 4.0.8 - pirates: 4.0.6 + pirates: 4.0.7 slash: 3.0.0 write-file-atomic: 4.0.2 transitivePeerDependencies: @@ -7637,12 +7500,12 @@ snapshots: '@types/yargs': 17.0.33 chalk: 4.1.2 - '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))': + '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: glob: 10.4.5 magic-string: 0.30.17 react-docgen-typescript: 2.2.2(typescript@5.6.3) - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) optionalDependencies: typescript: 5.6.3 @@ -7651,12 
+7514,6 @@ snapshots: '@jridgewell/sourcemap-codec': 1.5.5 '@jridgewell/trace-mapping': 0.3.31 - '@jridgewell/gen-mapping@0.3.8': - dependencies: - '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 - '@jridgewell/remapping@2.3.5': dependencies: '@jridgewell/gen-mapping': 0.3.13 @@ -7664,8 +7521,6 @@ snapshots: '@jridgewell/resolve-uri@3.1.2': {} - '@jridgewell/set-array@1.2.1': {} - '@jridgewell/sourcemap-codec@1.5.0': {} '@jridgewell/sourcemap-codec@1.5.5': {} @@ -7673,7 +7528,7 @@ snapshots: '@jridgewell/trace-mapping@0.3.25': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.5 '@jridgewell/trace-mapping@0.3.31': dependencies: @@ -7688,10 +7543,10 @@ snapshots: '@leeoniya/ufuzzy@1.0.10': {} - '@mdx-js/react@3.0.1(@types/react@19.1.13)(react@19.1.1)': + '@mdx-js/react@3.0.1(@types/react@19.1.17)(react@19.1.1)': dependencies: '@types/mdx': 2.0.9 - '@types/react': 19.1.13 + '@types/react': 19.1.17 react: 19.1.1 '@mjackson/form-data-parser@0.4.0': @@ -7708,14 +7563,14 @@ snapshots: dependencies: state-local: 1.0.7 - '@monaco-editor/react@4.7.0(monaco-editor@0.52.2)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@monaco-editor/react@4.7.0(monaco-editor@0.53.0)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@monaco-editor/loader': 1.5.0 - monaco-editor: 0.52.2 + monaco-editor: 0.53.0 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - '@mswjs/interceptors@0.39.7': + '@mswjs/interceptors@0.35.9': dependencies: '@open-draft/deferred-promise': 2.2.0 '@open-draft/logger': 0.3.0 @@ -7726,23 +7581,15 @@ snapshots: '@mui/core-downloads-tracker@5.18.0': {} - 
'@mui/icons-material@5.18.0(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@types/react@19.1.13)(react@19.1.1)': - dependencies: - '@babel/runtime': 7.26.10 - '@mui/material': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.13 - - '@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 '@mui/core-downloads-tracker': 5.18.0 - '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) - '@mui/types': 7.2.24(@types/react@19.1.13) - '@mui/utils': 5.17.1(@types/react@19.1.13)(react@19.1.1) + '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@mui/types': 7.2.24(@types/react@19.1.17) + '@mui/utils': 
5.17.1(@types/react@19.1.17)(react@19.1.1) '@popperjs/core': 2.11.8 - '@types/react-transition-group': 4.4.12(@types/react@19.1.13) + '@types/react-transition-group': 4.4.12(@types/react@19.1.17) clsx: 2.1.1 csstype: 3.1.3 prop-types: 15.8.1 @@ -7751,20 +7598,20 @@ snapshots: react-is: 19.1.1 react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.13)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) - '@types/react': 19.1.13 + '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@types/react': 19.1.17 - '@mui/private-theming@5.17.1(@types/react@19.1.13)(react@19.1.1)': + '@mui/private-theming@5.17.1(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 - '@mui/utils': 5.17.1(@types/react@19.1.13)(react@19.1.1) + '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) prop-types: 15.8.1 react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@mui/styled-engine@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(react@19.1.1)': + '@mui/styled-engine@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 '@emotion/cache': 11.14.0 @@ -7773,65 +7620,65 @@ snapshots: prop-types: 15.8.1 react: 19.1.1 optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.13)(react@19.1.1) - '@emotion/styled': 
11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) + '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) - '@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1)': + '@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 - '@mui/private-theming': 5.17.1(@types/react@19.1.13)(react@19.1.1) - '@mui/styled-engine': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(react@19.1.1) - '@mui/types': 7.2.24(@types/react@19.1.13) - '@mui/utils': 5.17.1(@types/react@19.1.13)(react@19.1.1) + '@mui/private-theming': 5.17.1(@types/react@19.1.17)(react@19.1.1) + '@mui/styled-engine': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(react@19.1.1) + '@mui/types': 7.2.24(@types/react@19.1.17) + '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) clsx: 2.1.1 csstype: 3.1.3 prop-types: 15.8.1 react: 19.1.1 optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.13)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) - '@types/react': 19.1.13 + '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) + '@emotion/styled': 
11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@types/react': 19.1.17 - '@mui/types@7.2.24(@types/react@19.1.13)': + '@mui/types@7.2.24(@types/react@19.1.17)': optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@mui/utils@5.17.1(@types/react@19.1.13)(react@19.1.1)': + '@mui/utils@5.17.1(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 - '@mui/types': 7.2.24(@types/react@19.1.13) + '@mui/types': 7.2.24(@types/react@19.1.17) '@types/prop-types': 15.7.15 clsx: 2.1.1 prop-types: 15.8.1 react: 19.1.1 react-is: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@mui/x-internals@7.29.0(@types/react@19.1.13)(react@19.1.1)': + '@mui/x-internals@7.29.0(@types/react@19.1.17)(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 - '@mui/utils': 5.17.1(@types/react@19.1.13)(react@19.1.1) + '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 transitivePeerDependencies: - '@types/react' - '@mui/x-tree-view@7.29.10(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + 
'@mui/x-tree-view@7.29.10(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 - '@mui/material': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) - '@mui/utils': 5.17.1(@types/react@19.1.13)(react@19.1.1) - '@mui/x-internals': 7.29.0(@types/react@19.1.13)(react@19.1.1) - '@types/react-transition-group': 4.4.12(@types/react@19.1.13) + '@mui/material': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@mui/system': 
5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) + '@mui/x-internals': 7.29.0(@types/react@19.1.17)(react@19.1.1) + '@types/react-transition-group': 4.4.12(@types/react@19.1.17) clsx: 2.1.1 prop-types: 15.8.1 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.13)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.13)(react@19.1.1))(@types/react@19.1.13)(react@19.1.1) + '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) transitivePeerDependencies: - '@types/react' @@ -7933,9 +7780,9 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@playwright/test@1.55.1': + '@playwright/test@1.50.1': dependencies: - playwright: 1.55.1 + playwright: 1.50.1 '@popperjs/core@2.11.8': {} @@ -7972,746 +7819,736 @@ snapshots: '@radix-ui/primitive@1.1.3': {} - '@radix-ui/react-arrow@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-arrow@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 
19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-avatar@1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-avatar@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + 
'@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-checkbox@1.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-checkbox@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 
1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-collapsible@1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-collapsible@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 
react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-collection@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-collection@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-collection@1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-collection@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 
2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.2(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.1.2(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-collection@1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-collection@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 
19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-compose-refs@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-compose-refs@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-compose-refs@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-compose-refs@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-context@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-context@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-context@1.1.2(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-context@1.1.2(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-dialog@1.1.15(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-dialog@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 
1.1.3(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/primitive': 1.1.1 + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 
1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) aria-hidden: 1.2.6 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-direction@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-direction@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-direction@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-direction@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + 
'@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-dismissable-layer@1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-dismissable-layer@1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-dismissable-layer@1.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + 
'@radix-ui/react-dismissable-layer@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-dropdown-menu@2.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-dropdown-menu@2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-menu': 2.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 
2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-menu': 2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-focus-guards@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-focus-guards@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-focus-guards@1.1.3(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-focus-guards@1.1.3(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-focus-scope@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-focus-scope@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 
2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-id@1.1.0(@types/react@19.1.13)(react@19.1.1)': + 
'@radix-ui/react-id@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-id@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-id@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-label@2.1.0(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-label@2.1.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-menu@2.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-menu@2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 
1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-roving-focus': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 
1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-roving-focus': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) aria-hidden: 1.2.6 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - 
'@radix-ui/react-popover@1.1.5(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-popover@1.1.5(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-dismissable-layer': 
1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) aria-hidden: 1.2.4 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.6.3(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll: 2.6.3(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-popper@1.2.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-popper@1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@floating-ui/react-dom': 2.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-arrow': 
1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-rect': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-arrow': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-rect': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) '@radix-ui/rect': 1.1.0 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-popper@1.2.8(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + 
'@radix-ui/react-popper@1.2.8(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@floating-ui/react-dom': 2.1.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-rect': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-rect': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.1.17)(react@19.1.1) '@radix-ui/rect': 1.1.1 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - 
'@radix-ui/react-portal@1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-portal@1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-portal@1.1.9(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-portal@1.1.9(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 
19.1.11(@types/react@19.1.17) - '@radix-ui/react-presence@1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-presence@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-presence@1.1.5(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-primitive@2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-slot': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-primitive@2.0.0(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-primitive@2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-slot': 
1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-primitive@2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-primitive@2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-slot': 1.1.2(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-primitive@2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-slot': 1.1.2(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.13)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: 
- '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) - - '@radix-ui/react-radio-group@1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-radio-group@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-roving-focus': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-roving-focus': 
1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-roving-focus@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-roving-focus@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + 
'@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-roving-focus@1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-roving-focus@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-collection': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 
1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-scroll-area@1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-scroll-area@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/number': 1.1.0 '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 
1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-select@2.2.6(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-select@2.2.6(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/number': 1.1.1 '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 
1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-popper': 
1.2.8(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) aria-hidden: 1.2.6 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-separator@1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-separator@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 
19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-slider@1.2.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-slider@1.2.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/number': 1.1.0 '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 
1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-slot@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-slot@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-slot@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-slot@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-slot@1.1.2(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-slot@1.1.2(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-slot@1.2.3(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-slot@1.2.3(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 
optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-switch@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-switch@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.0 - '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-tooltip@1.1.7(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + 
'@radix-ui/react-tooltip@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-visually-hidden': 1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-dismissable-layer': 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-popper': 
1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-visually-hidden': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-use-callback-ref@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-callback-ref@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-controllable-state@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-controllable-state@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 
optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-escape-keydown@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-escape-keydown@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - 
'@radix-ui/react-use-layout-effect@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-layout-effect@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-previous@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-previous@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-previous@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-previous@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-rect@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-rect@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: '@radix-ui/rect': 1.1.0 react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-rect@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-rect@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: '@radix-ui/rect': 1.1.1 react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-use-size@1.1.0(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-size@1.1.0(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - 
'@radix-ui/react-use-size@1.1.1(@types/react@19.1.13)(react@19.1.1)': + '@radix-ui/react-use-size@1.1.1(@types/react@19.1.17)(react@19.1.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.13)(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@radix-ui/react-visually-hidden@1.1.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-visually-hidden@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - '@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) - '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 - 
'@types/react-dom': 19.1.9(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/react-dom': 19.1.11(@types/react@19.1.17) '@radix-ui/rect@1.1.0': {} @@ -8719,78 +8556,78 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.38': {} - '@rollup/pluginutils@5.0.5(rollup@4.52.3)': + '@rollup/pluginutils@5.0.5(rollup@4.52.5)': dependencies: '@types/estree': 1.0.7 estree-walker: 2.0.2 picomatch: 2.3.1 optionalDependencies: - rollup: 4.52.3 + rollup: 4.52.5 - '@rollup/rollup-android-arm-eabi@4.52.3': + '@rollup/rollup-android-arm-eabi@4.52.5': optional: true - '@rollup/rollup-android-arm64@4.52.3': + '@rollup/rollup-android-arm64@4.52.5': optional: true - '@rollup/rollup-darwin-arm64@4.52.3': + '@rollup/rollup-darwin-arm64@4.52.5': optional: true - '@rollup/rollup-darwin-x64@4.52.3': + '@rollup/rollup-darwin-x64@4.52.5': optional: true - '@rollup/rollup-freebsd-arm64@4.52.3': + '@rollup/rollup-freebsd-arm64@4.52.5': optional: true - '@rollup/rollup-freebsd-x64@4.52.3': + '@rollup/rollup-freebsd-x64@4.52.5': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.52.3': + '@rollup/rollup-linux-arm-gnueabihf@4.52.5': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.52.3': + '@rollup/rollup-linux-arm-musleabihf@4.52.5': optional: true - '@rollup/rollup-linux-arm64-gnu@4.52.3': + '@rollup/rollup-linux-arm64-gnu@4.52.5': optional: true - '@rollup/rollup-linux-arm64-musl@4.52.3': + '@rollup/rollup-linux-arm64-musl@4.52.5': optional: true - '@rollup/rollup-linux-loong64-gnu@4.52.3': + '@rollup/rollup-linux-loong64-gnu@4.52.5': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.52.3': + '@rollup/rollup-linux-ppc64-gnu@4.52.5': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.52.3': + '@rollup/rollup-linux-riscv64-gnu@4.52.5': optional: true - '@rollup/rollup-linux-riscv64-musl@4.52.3': + '@rollup/rollup-linux-riscv64-musl@4.52.5': optional: true - '@rollup/rollup-linux-s390x-gnu@4.52.3': + '@rollup/rollup-linux-s390x-gnu@4.52.5': optional: true - 
'@rollup/rollup-linux-x64-gnu@4.52.3': + '@rollup/rollup-linux-x64-gnu@4.52.5': optional: true - '@rollup/rollup-linux-x64-musl@4.52.3': + '@rollup/rollup-linux-x64-musl@4.52.5': optional: true - '@rollup/rollup-openharmony-arm64@4.52.3': + '@rollup/rollup-openharmony-arm64@4.52.5': optional: true - '@rollup/rollup-win32-arm64-msvc@4.52.3': + '@rollup/rollup-win32-arm64-msvc@4.52.5': optional: true - '@rollup/rollup-win32-ia32-msvc@4.52.3': + '@rollup/rollup-win32-ia32-msvc@4.52.5': optional: true - '@rollup/rollup-win32-x64-gnu@4.52.3': + '@rollup/rollup-win32-x64-gnu@4.52.5': optional: true - '@rollup/rollup-win32-x64-msvc@4.52.3': + '@rollup/rollup-win32-x64-msvc@4.52.5': optional: true '@sinclair/typebox@0.27.8': {} @@ -8803,41 +8640,41 @@ snapshots: dependencies: '@sinonjs/commons': 3.0.0 - '@storybook/addon-docs@9.1.2(@types/react@19.1.13)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + '@storybook/addon-docs@9.1.2(@types/react@19.1.17)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - '@mdx-js/react': 3.0.1(@types/react@19.1.13)(react@19.1.1) - '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + '@mdx-js/react': 3.0.1(@types/react@19.1.17)(react@19.1.1) + '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/icons': 1.4.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@storybook/react-dom-shim': 
9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + '@storybook/react-dom-shim': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 transitivePeerDependencies: - '@types/react' - '@storybook/addon-links@9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + '@storybook/addon-links@9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: '@storybook/global': 5.0.0 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: react: 19.1.1 - '@storybook/addon-themes@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + 
'@storybook/addon-themes@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 - '@storybook/builder-vite@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))': + '@storybook/builder-vite@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: - '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) - 
'@storybook/csf-plugin@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + '@storybook/csf-plugin@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) unplugin: 1.5.0 '@storybook/global@5.0.0': {} @@ -8847,39 +8684,39 @@ snapshots: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - '@storybook/react-dom-shim@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))': + '@storybook/react-dom-shim@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) - 
'@storybook/react-vite@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.3)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))': + '@storybook/react-vite@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.5)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: - '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.1(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) - '@rollup/pluginutils': 5.0.5(rollup@4.52.3) - '@storybook/builder-vite': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) - '@storybook/react': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(typescript@5.6.3) + '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.1(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@rollup/pluginutils': 5.0.5(rollup@4.52.5) + '@storybook/builder-vite': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@storybook/react': 
9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3) find-up: 7.0.0 magic-string: 0.30.17 react: 19.1.1 react-docgen: 8.0.0 react-dom: 19.1.1(react@19.1.1) resolve: 1.22.10 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) tsconfig-paths: 4.2.0 - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) transitivePeerDependencies: - rollup - supports-color - typescript - '@storybook/react@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)))(typescript@5.6.3)': + '@storybook/react@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)': dependencies: '@storybook/global': 5.0.0 - '@storybook/react-dom-shim': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))) + '@storybook/react-dom-shim': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - storybook: 
9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: typescript: 5.6.3 @@ -8935,13 +8772,13 @@ snapshots: '@swc/counter': 0.1.3 jsonc-parser: 3.2.0 - '@tailwindcss/typography@0.5.16(tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)))': + '@tailwindcss/typography@0.5.16(tailwindcss@3.4.18(yaml@2.7.0))': dependencies: lodash.castarray: 4.4.0 lodash.isplainobject: 4.0.6 lodash.merge: 4.6.2 postcss-selector-parser: 6.0.10 - tailwindcss: 3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) + tailwindcss: 3.4.18(yaml@2.7.0) '@tanstack/query-core@5.77.0': {} @@ -8990,13 +8827,15 @@ snapshots: lodash: 4.17.21 redent: 3.0.0 - '@testing-library/react@14.3.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@testing-library/react@14.3.1(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@babel/runtime': 7.26.10 '@testing-library/dom': 9.3.3 - '@types/react-dom': 18.3.1 + '@types/react-dom': 18.3.7(@types/react@19.1.17) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) + transitivePeerDependencies: + - '@types/react' '@testing-library/user-event@14.6.1(@testing-library/dom@10.4.0)': dependencies: @@ -9143,7 +8982,7 @@ snapshots: '@types/hoist-non-react-statics@3.3.5': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 hoist-non-react-statics: 3.3.2 '@types/http-errors@2.0.1': {} @@ -9181,11 +9020,7 @@ snapshots: '@types/tough-cookie': 4.0.2 parse5: 7.1.2 - '@types/lodash@4.17.15': {} - - '@types/mdast@4.0.3': - dependencies: - '@types/unist': 3.0.2 + '@types/lodash@4.17.20': {} '@types/mdast@4.0.4': dependencies: @@ -9199,6 +9034,10 @@ snapshots: '@types/ms@2.1.0': {} + 
'@types/mute-stream@0.0.4': + dependencies: + '@types/node': 20.17.16 + '@types/node@18.19.129': dependencies: undici-types: 5.26.5 @@ -9207,6 +9046,10 @@ snapshots: dependencies: undici-types: 6.19.8 + '@types/node@22.18.8': + dependencies: + undici-types: 6.21.0 + '@types/parse-json@4.0.2': {} '@types/prop-types@15.7.15': {} @@ -9215,47 +9058,50 @@ snapshots: '@types/range-parser@1.2.4': {} - '@types/react-color@3.0.13(@types/react@19.1.13)': + '@types/react-color@3.0.13(@types/react@19.1.17)': dependencies: - '@types/react': 19.1.13 - '@types/reactcss': 1.2.13(@types/react@19.1.13) + '@types/react': 19.1.17 + '@types/reactcss': 1.2.13(@types/react@19.1.17) '@types/react-date-range@1.4.4': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 date-fns: 2.30.0 - '@types/react-dom@18.3.1': + '@types/react-dom@18.3.7(@types/react@19.1.17)': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@types/react-dom@19.1.9(@types/react@19.1.13)': + '@types/react-dom@19.1.11(@types/react@19.1.17)': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 '@types/react-syntax-highlighter@15.5.13': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@types/react-transition-group@4.4.12(@types/react@19.1.13)': + '@types/react-transition-group@4.4.12(@types/react@19.1.17)': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@types/react-virtualized-auto-sizer@1.0.4': + '@types/react-virtualized-auto-sizer@1.0.8(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@types/react': 19.1.13 + react-virtualized-auto-sizer: 1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + transitivePeerDependencies: + - react + - react-dom '@types/react-window@1.8.8': dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - '@types/react@19.1.13': + '@types/react@19.1.17': dependencies: csstype: 3.1.3 - '@types/reactcss@1.2.13(@types/react@19.1.13)': + '@types/reactcss@1.2.13(@types/react@19.1.17)': 
dependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 '@types/resolve@1.20.4': {} @@ -9284,16 +9130,20 @@ snapshots: '@types/tough-cookie@4.0.2': {} + '@types/tough-cookie@4.0.5': {} + + '@types/trusted-types@1.0.6': {} + '@types/ua-parser-js@0.7.36': {} '@types/unist@2.0.11': {} - '@types/unist@3.0.2': {} - '@types/unist@3.0.3': {} '@types/uuid@9.0.2': {} + '@types/wrap-ansi@3.0.0': {} + '@types/yargs-parser@21.0.2': {} '@types/yargs-parser@21.0.3': {} @@ -9308,7 +9158,7 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@vitejs/plugin-react@5.0.4(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))': + '@vitejs/plugin-react@5.0.4(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: '@babel/core': 7.28.4 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.4) @@ -9316,7 +9166,7 @@ snapshots: '@rolldown/pluginutils': 1.0.0-beta.38 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) transitivePeerDependencies: - supports-color @@ -9328,14 +9178,14 @@ snapshots: chai: 5.2.1 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))': + '@vitest/mocker@3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.17 optionalDependencies: - msw: 2.11.3(@types/node@20.17.16)(typescript@5.6.3) - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + msw: 2.4.8(typescript@5.6.3) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) '@vitest/pretty-format@3.2.4': dependencies: @@ -9494,14 +9344,14 @@ snapshots: asynckit@0.4.0: {} - autoprefixer@10.4.20(postcss@8.5.1): + autoprefixer@10.4.21(postcss@8.5.6): dependencies: - browserslist: 4.24.2 - caniuse-lite: 1.0.30001717 + browserslist: 
4.26.3 + caniuse-lite: 1.0.30001746 fraction.js: 4.3.7 normalize-range: 0.1.2 picocolors: 1.1.1 - postcss: 8.5.1 + postcss: 8.5.6 postcss-value-parser: 4.2.0 available-typed-arrays@1.0.7: @@ -9583,7 +9433,7 @@ snapshots: base64-js@1.5.1: {} - baseline-browser-mapping@2.8.9: {} + baseline-browser-mapping@2.8.10: {} bcrypt-pbkdf@1.0.2: dependencies: @@ -9627,20 +9477,13 @@ snapshots: dependencies: fill-range: 7.1.1 - browserslist@4.24.2: - dependencies: - caniuse-lite: 1.0.30001717 - electron-to-chromium: 1.5.50 - node-releases: 2.0.18 - update-browserslist-db: 1.1.1(browserslist@4.24.2) - - browserslist@4.26.2: + browserslist@4.26.3: dependencies: - baseline-browser-mapping: 2.8.9 + baseline-browser-mapping: 2.8.10 caniuse-lite: 1.0.30001746 electron-to-chromium: 1.5.228 node-releases: 2.0.21 - update-browserslist-db: 1.1.3(browserslist@4.26.2) + update-browserslist-db: 1.1.3(browserslist@4.26.3) bser@2.1.1: dependencies: @@ -9691,8 +9534,6 @@ snapshots: camelcase@6.3.0: {} - caniuse-lite@1.0.30001717: {} - caniuse-lite@1.0.30001746: {} case-anything@2.1.13: {} @@ -9751,7 +9592,7 @@ snapshots: dependencies: readdirp: 4.1.2 - chroma-js@2.4.2: {} + chroma-js@2.6.0: {} chromatic@11.29.0: {} @@ -9785,11 +9626,11 @@ snapshots: clsx@2.1.1: {} - cmdk@1.0.4(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + cmdk@1.0.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: - '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.13)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.9(@types/react@19.1.13))(@types/react@19.1.13)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-dialog': 
1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) use-sync-external-store: 1.4.0(react@19.1.1) @@ -9949,7 +9790,7 @@ snapshots: dependencies: '@babel/runtime': 7.26.10 - dayjs@1.11.13: {} + dayjs@1.11.18: {} debug@2.6.9: dependencies: @@ -9967,10 +9808,6 @@ snapshots: decimal.js@10.4.3: {} - decode-named-character-reference@1.0.2: - dependencies: - character-entities: 2.0.2 - decode-named-character-reference@1.2.0: dependencies: character-entities: 2.0.2 @@ -10103,8 +9940,6 @@ snapshots: electron-to-chromium@1.5.228: {} - electron-to-chromium@1.5.50: {} - emittery@0.13.1: {} emoji-mart@5.6.0: {} @@ -10159,34 +9994,34 @@ snapshots: transitivePeerDependencies: - supports-color - esbuild@0.25.10: + esbuild@0.25.11: optionalDependencies: - '@esbuild/aix-ppc64': 0.25.10 - '@esbuild/android-arm': 0.25.10 - '@esbuild/android-arm64': 0.25.10 - '@esbuild/android-x64': 0.25.10 - '@esbuild/darwin-arm64': 0.25.10 - '@esbuild/darwin-x64': 0.25.10 - '@esbuild/freebsd-arm64': 0.25.10 - '@esbuild/freebsd-x64': 0.25.10 - '@esbuild/linux-arm': 0.25.10 - '@esbuild/linux-arm64': 0.25.10 - '@esbuild/linux-ia32': 0.25.10 - '@esbuild/linux-loong64': 0.25.10 - '@esbuild/linux-mips64el': 0.25.10 - '@esbuild/linux-ppc64': 0.25.10 - '@esbuild/linux-riscv64': 0.25.10 - '@esbuild/linux-s390x': 0.25.10 - '@esbuild/linux-x64': 0.25.10 - '@esbuild/netbsd-arm64': 0.25.10 - '@esbuild/netbsd-x64': 0.25.10 - '@esbuild/openbsd-arm64': 0.25.10 - '@esbuild/openbsd-x64': 0.25.10 - '@esbuild/openharmony-arm64': 0.25.10 - '@esbuild/sunos-x64': 0.25.10 - '@esbuild/win32-arm64': 0.25.10 - '@esbuild/win32-ia32': 0.25.10 - '@esbuild/win32-x64': 0.25.10 + '@esbuild/aix-ppc64': 
0.25.11 + '@esbuild/android-arm': 0.25.11 + '@esbuild/android-arm64': 0.25.11 + '@esbuild/android-x64': 0.25.11 + '@esbuild/darwin-arm64': 0.25.11 + '@esbuild/darwin-x64': 0.25.11 + '@esbuild/freebsd-arm64': 0.25.11 + '@esbuild/freebsd-x64': 0.25.11 + '@esbuild/linux-arm': 0.25.11 + '@esbuild/linux-arm64': 0.25.11 + '@esbuild/linux-ia32': 0.25.11 + '@esbuild/linux-loong64': 0.25.11 + '@esbuild/linux-mips64el': 0.25.11 + '@esbuild/linux-ppc64': 0.25.11 + '@esbuild/linux-riscv64': 0.25.11 + '@esbuild/linux-s390x': 0.25.11 + '@esbuild/linux-x64': 0.25.11 + '@esbuild/netbsd-arm64': 0.25.11 + '@esbuild/netbsd-x64': 0.25.11 + '@esbuild/openbsd-arm64': 0.25.11 + '@esbuild/openbsd-x64': 0.25.11 + '@esbuild/openharmony-arm64': 0.25.11 + '@esbuild/sunos-x64': 0.25.11 + '@esbuild/win32-arm64': 0.25.11 + '@esbuild/win32-ia32': 0.25.11 + '@esbuild/win32-x64': 0.25.11 esbuild@0.25.3: optionalDependencies: @@ -11080,7 +10915,7 @@ snapshots: jest-mock: 29.7.0 jest-util: 29.7.0 - jest-fixed-jsdom@0.0.9(jest-environment-jsdom@29.5.0): + jest-fixed-jsdom@0.0.10(jest-environment-jsdom@29.5.0): dependencies: jest-environment-jsdom: 29.5.0 @@ -11475,6 +11310,8 @@ snapshots: long@5.2.3: {} + long@5.3.2: {} + longest-streak@3.1.0: {} loose-envify@1.4.0: @@ -11494,7 +11331,7 @@ snapshots: dependencies: yallist: 3.1.1 - lucide-react@0.474.0(react@19.1.1): + lucide-react@0.545.0(react@19.1.1): dependencies: react: 19.1.1 @@ -11517,36 +11354,19 @@ snapshots: dependencies: tmpl: 1.0.5 - markdown-table@3.0.3: {} + markdown-table@3.0.4: {} material-colors@1.2.6: {} math-intrinsics@1.1.0: {} - mdast-util-find-and-replace@3.0.1: + mdast-util-find-and-replace@3.0.2: dependencies: '@types/mdast': 4.0.4 escape-string-regexp: 5.0.0 unist-util-is: 6.0.0 unist-util-visit-parents: 6.0.1 - mdast-util-from-markdown@2.0.0: - dependencies: - '@types/mdast': 4.0.4 - '@types/unist': 3.0.3 - decode-named-character-reference: 1.0.2 - devlop: 1.1.0 - mdast-util-to-string: 4.0.0 - micromark: 4.0.0 - 
micromark-util-decode-numeric-character-reference: 2.0.1 - micromark-util-decode-string: 2.0.0 - micromark-util-normalize-identifier: 2.0.0 - micromark-util-symbol: 2.0.0 - micromark-util-types: 2.0.2 - unist-util-stringify-position: 4.0.0 - transitivePeerDependencies: - - supports-color - mdast-util-from-markdown@2.0.2: dependencies: '@types/mdast': 4.0.4 @@ -11564,21 +11384,21 @@ snapshots: transitivePeerDependencies: - supports-color - mdast-util-gfm-autolink-literal@2.0.0: + mdast-util-gfm-autolink-literal@2.0.1: dependencies: '@types/mdast': 4.0.4 ccount: 2.0.1 devlop: 1.1.0 - mdast-util-find-and-replace: 3.0.1 - micromark-util-character: 2.0.1 + mdast-util-find-and-replace: 3.0.2 + micromark-util-character: 2.1.1 - mdast-util-gfm-footnote@2.0.0: + mdast-util-gfm-footnote@2.1.0: dependencies: '@types/mdast': 4.0.4 devlop: 1.1.0 mdast-util-from-markdown: 2.0.2 - mdast-util-to-markdown: 2.1.0 - micromark-util-normalize-identifier: 2.0.0 + mdast-util-to-markdown: 2.1.2 + micromark-util-normalize-identifier: 2.0.1 transitivePeerDependencies: - supports-color @@ -11586,7 +11406,7 @@ snapshots: dependencies: '@types/mdast': 4.0.4 mdast-util-from-markdown: 2.0.2 - mdast-util-to-markdown: 2.1.0 + mdast-util-to-markdown: 2.1.2 transitivePeerDependencies: - supports-color @@ -11594,9 +11414,9 @@ snapshots: dependencies: '@types/mdast': 4.0.4 devlop: 1.1.0 - markdown-table: 3.0.3 + markdown-table: 3.0.4 mdast-util-from-markdown: 2.0.2 - mdast-util-to-markdown: 2.1.0 + mdast-util-to-markdown: 2.1.2 transitivePeerDependencies: - supports-color @@ -11605,19 +11425,19 @@ snapshots: '@types/mdast': 4.0.4 devlop: 1.1.0 mdast-util-from-markdown: 2.0.2 - mdast-util-to-markdown: 2.1.0 + mdast-util-to-markdown: 2.1.2 transitivePeerDependencies: - supports-color - mdast-util-gfm@3.0.0: + mdast-util-gfm@3.1.0: dependencies: - mdast-util-from-markdown: 2.0.0 - mdast-util-gfm-autolink-literal: 2.0.0 - mdast-util-gfm-footnote: 2.0.0 + mdast-util-from-markdown: 2.0.2 + 
mdast-util-gfm-autolink-literal: 2.0.1 + mdast-util-gfm-footnote: 2.1.0 mdast-util-gfm-strikethrough: 2.0.0 mdast-util-gfm-table: 2.0.0 mdast-util-gfm-task-list-item: 2.0.0 - mdast-util-to-markdown: 2.1.0 + mdast-util-to-markdown: 2.1.2 transitivePeerDependencies: - supports-color @@ -11660,11 +11480,6 @@ snapshots: transitivePeerDependencies: - supports-color - mdast-util-phrasing@4.0.0: - dependencies: - '@types/mdast': 4.0.4 - unist-util-is: 6.0.0 - mdast-util-phrasing@4.1.0: dependencies: '@types/mdast': 4.0.4 @@ -11682,17 +11497,6 @@ snapshots: unist-util-visit: 5.0.0 vfile: 6.0.3 - mdast-util-to-markdown@2.1.0: - dependencies: - '@types/mdast': 4.0.4 - '@types/unist': 3.0.3 - longest-streak: 3.1.0 - mdast-util-phrasing: 4.0.0 - mdast-util-to-string: 4.0.0 - micromark-util-decode-string: 2.0.0 - unist-util-visit: 5.0.0 - zwitch: 2.0.4 - mdast-util-to-markdown@2.1.2: dependencies: '@types/mdast': 4.0.4 @@ -11721,25 +11525,6 @@ snapshots: methods@1.1.2: {} - micromark-core-commonmark@2.0.0: - dependencies: - decode-named-character-reference: 1.2.0 - devlop: 1.1.0 - micromark-factory-destination: 2.0.0 - micromark-factory-label: 2.0.0 - micromark-factory-space: 2.0.0 - micromark-factory-title: 2.0.0 - micromark-factory-whitespace: 2.0.0 - micromark-util-character: 2.1.1 - micromark-util-chunked: 2.0.0 - micromark-util-classify-character: 2.0.0 - micromark-util-html-tag-name: 2.0.0 - micromark-util-normalize-identifier: 2.0.1 - micromark-util-resolve-all: 2.0.0 - micromark-util-subtokenize: 2.0.0 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-core-commonmark@2.0.3: dependencies: decode-named-character-reference: 1.2.0 @@ -11759,68 +11544,62 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-extension-gfm-autolink-literal@2.0.0: + micromark-extension-gfm-autolink-literal@2.1.0: dependencies: - micromark-util-character: 2.0.1 + micromark-util-character: 2.1.1 micromark-util-sanitize-uri: 2.0.1 - 
micromark-util-symbol: 2.0.0 + micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-extension-gfm-footnote@2.0.0: + micromark-extension-gfm-footnote@2.1.0: dependencies: devlop: 1.1.0 - micromark-core-commonmark: 2.0.0 - micromark-factory-space: 2.0.0 - micromark-util-character: 2.0.1 - micromark-util-normalize-identifier: 2.0.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-normalize-identifier: 2.0.1 micromark-util-sanitize-uri: 2.0.1 - micromark-util-symbol: 2.0.0 + micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-extension-gfm-strikethrough@2.0.0: + micromark-extension-gfm-strikethrough@2.1.0: dependencies: devlop: 1.1.0 - micromark-util-chunked: 2.0.0 - micromark-util-classify-character: 2.0.0 - micromark-util-resolve-all: 2.0.0 - micromark-util-symbol: 2.0.0 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-extension-gfm-table@2.0.0: + micromark-extension-gfm-table@2.1.1: dependencies: devlop: 1.1.0 - micromark-factory-space: 2.0.0 - micromark-util-character: 2.0.1 - micromark-util-symbol: 2.0.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 micromark-extension-gfm-tagfilter@2.0.0: dependencies: micromark-util-types: 2.0.2 - micromark-extension-gfm-task-list-item@2.0.1: + micromark-extension-gfm-task-list-item@2.1.0: dependencies: devlop: 1.1.0 - micromark-factory-space: 2.0.0 - micromark-util-character: 2.0.1 - micromark-util-symbol: 2.0.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 micromark-extension-gfm@3.0.0: dependencies: - micromark-extension-gfm-autolink-literal: 2.0.0 - micromark-extension-gfm-footnote: 2.0.0 - micromark-extension-gfm-strikethrough: 2.0.0 - 
micromark-extension-gfm-table: 2.0.0 + micromark-extension-gfm-autolink-literal: 2.1.0 + micromark-extension-gfm-footnote: 2.1.0 + micromark-extension-gfm-strikethrough: 2.1.0 + micromark-extension-gfm-table: 2.1.1 micromark-extension-gfm-tagfilter: 2.0.0 - micromark-extension-gfm-task-list-item: 2.0.1 - micromark-util-combine-extensions: 2.0.0 - micromark-util-types: 2.0.0 - - micromark-factory-destination@2.0.0: - dependencies: - micromark-util-character: 2.1.1 - micromark-util-symbol: 2.0.1 + micromark-extension-gfm-task-list-item: 2.1.0 + micromark-util-combine-extensions: 2.0.1 micromark-util-types: 2.0.2 micromark-factory-destination@2.0.1: @@ -11829,13 +11608,6 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-factory-label@2.0.0: - dependencies: - devlop: 1.1.0 - micromark-util-character: 2.1.1 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-factory-label@2.0.1: dependencies: devlop: 1.1.0 @@ -11843,23 +11615,11 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-factory-space@2.0.0: - dependencies: - micromark-util-character: 2.1.1 - micromark-util-types: 2.0.2 - micromark-factory-space@2.0.1: dependencies: micromark-util-character: 2.1.1 micromark-util-types: 2.0.2 - micromark-factory-title@2.0.0: - dependencies: - micromark-factory-space: 2.0.1 - micromark-util-character: 2.1.1 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-factory-title@2.0.1: dependencies: micromark-factory-space: 2.0.1 @@ -11867,13 +11627,6 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-factory-whitespace@2.0.0: - dependencies: - micromark-factory-space: 2.0.1 - micromark-util-character: 2.1.1 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-factory-whitespace@2.0.1: dependencies: micromark-factory-space: 2.0.1 @@ -11881,61 +11634,30 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - 
micromark-util-character@2.0.1: - dependencies: - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-util-character@2.1.1: dependencies: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-util-chunked@2.0.0: - dependencies: - micromark-util-symbol: 2.0.1 - micromark-util-chunked@2.0.1: dependencies: micromark-util-symbol: 2.0.1 - micromark-util-classify-character@2.0.0: - dependencies: - micromark-util-character: 2.1.1 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-util-classify-character@2.0.1: dependencies: micromark-util-character: 2.1.1 micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-util-combine-extensions@2.0.0: - dependencies: - micromark-util-chunked: 2.0.0 - micromark-util-types: 2.0.2 - micromark-util-combine-extensions@2.0.1: dependencies: micromark-util-chunked: 2.0.1 micromark-util-types: 2.0.2 - micromark-util-decode-numeric-character-reference@2.0.1: - dependencies: - micromark-util-symbol: 2.0.1 - micromark-util-decode-numeric-character-reference@2.0.2: dependencies: micromark-util-symbol: 2.0.1 - micromark-util-decode-string@2.0.0: - dependencies: - decode-named-character-reference: 1.2.0 - micromark-util-character: 2.1.1 - micromark-util-decode-numeric-character-reference: 2.0.2 - micromark-util-symbol: 2.0.1 - micromark-util-decode-string@2.0.1: dependencies: decode-named-character-reference: 1.2.0 @@ -11945,22 +11667,12 @@ snapshots: micromark-util-encode@2.0.1: {} - micromark-util-html-tag-name@2.0.0: {} - micromark-util-html-tag-name@2.0.1: {} - micromark-util-normalize-identifier@2.0.0: - dependencies: - micromark-util-symbol: 2.0.1 - micromark-util-normalize-identifier@2.0.1: dependencies: micromark-util-symbol: 2.0.1 - micromark-util-resolve-all@2.0.0: - dependencies: - micromark-util-types: 2.0.2 - micromark-util-resolve-all@2.0.1: dependencies: micromark-util-types: 2.0.2 @@ -11971,13 +11683,6 @@ snapshots: micromark-util-encode: 2.0.1 
micromark-util-symbol: 2.0.1 - micromark-util-subtokenize@2.0.0: - dependencies: - devlop: 1.1.0 - micromark-util-chunked: 2.0.1 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - micromark-util-subtokenize@2.1.0: dependencies: devlop: 1.1.0 @@ -11985,36 +11690,10 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - micromark-util-symbol@2.0.0: {} - micromark-util-symbol@2.0.1: {} - micromark-util-types@2.0.0: {} - micromark-util-types@2.0.2: {} - micromark@4.0.0: - dependencies: - '@types/debug': 4.1.12 - debug: 4.4.3 - decode-named-character-reference: 1.2.0 - devlop: 1.1.0 - micromark-core-commonmark: 2.0.0 - micromark-factory-space: 2.0.0 - micromark-util-character: 2.1.1 - micromark-util-chunked: 2.0.0 - micromark-util-combine-extensions: 2.0.0 - micromark-util-decode-numeric-character-reference: 2.0.2 - micromark-util-encode: 2.0.1 - micromark-util-normalize-identifier: 2.0.1 - micromark-util-resolve-all: 2.0.0 - micromark-util-sanitize-uri: 2.0.1 - micromark-util-subtokenize: 2.0.0 - micromark-util-symbol: 2.0.1 - micromark-util-types: 2.0.2 - transitivePeerDependencies: - - supports-color - micromark@4.0.2: dependencies: '@types/debug': 4.1.12 @@ -12068,7 +11747,9 @@ snapshots: mock-socket@9.3.1: {} - monaco-editor@0.52.2: {} + monaco-editor@0.53.0: + dependencies: + '@types/trusted-types': 1.0.6 moo-color@1.0.3: dependencies: @@ -12078,33 +11759,29 @@ snapshots: ms@2.1.3: {} - msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3): + msw@2.4.8(typescript@5.6.3): dependencies: '@bundled-es-modules/cookie': 2.0.1 '@bundled-es-modules/statuses': 1.0.1 - '@inquirer/confirm': 5.1.18(@types/node@20.17.16) - '@mswjs/interceptors': 0.39.7 - '@open-draft/deferred-promise': 2.2.0 + '@bundled-es-modules/tough-cookie': 0.1.6 + '@inquirer/confirm': 3.2.0 + '@mswjs/interceptors': 0.35.9 + '@open-draft/until': 2.1.0 '@types/cookie': 0.6.0 '@types/statuses': 2.0.6 + chalk: 4.1.2 graphql: 16.11.0 headers-polyfill: 4.0.3 is-node-process: 1.2.0 
outvariant: 1.4.3 path-to-regexp: 6.3.0 - picocolors: 1.1.1 - rettime: 0.7.0 strict-event-emitter: 0.5.1 - tough-cookie: 6.0.0 type-fest: 4.41.0 - until-async: 3.0.2 yargs: 17.7.2 optionalDependencies: typescript: 5.6.3 - transitivePeerDependencies: - - '@types/node' - mute-stream@2.0.0: {} + mute-stream@1.0.0: {} mz@2.7.0: dependencies: @@ -12117,8 +11794,6 @@ snapshots: nanoid@3.3.11: {} - nanoid@3.3.8: {} - napi-postinstall@0.3.3: {} natural-compare@1.4.0: {} @@ -12127,8 +11802,6 @@ snapshots: node-int64@0.4.0: {} - node-releases@2.0.18: {} - node-releases@2.0.21: {} normalize-path@3.0.0: {} @@ -12334,45 +12007,45 @@ snapshots: pify@2.3.0: {} - pirates@4.0.6: {} + pirates@4.0.7: {} pkg-dir@4.2.0: dependencies: find-up: 4.1.0 - playwright-core@1.55.1: {} + playwright-core@1.50.1: {} - playwright@1.55.1: + playwright@1.50.1: dependencies: - playwright-core: 1.55.1 + playwright-core: 1.50.1 optionalDependencies: fsevents: 2.3.2 possible-typed-array-names@1.0.0: {} - postcss-import@15.1.0(postcss@8.5.1): + postcss-import@15.1.0(postcss@8.5.6): dependencies: - postcss: 8.5.1 + postcss: 8.5.6 postcss-value-parser: 4.2.0 read-cache: 1.0.0 resolve: 1.22.10 - postcss-js@4.0.1(postcss@8.5.1): + postcss-js@4.1.0(postcss@8.5.6): dependencies: camelcase-css: 2.0.1 - postcss: 8.5.1 + postcss: 8.5.6 - postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)): + postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.7.0): dependencies: lilconfig: 3.1.3 - yaml: 2.7.0 optionalDependencies: - postcss: 8.5.1 - ts-node: 10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3) + jiti: 1.21.7 + postcss: 8.5.6 + yaml: 2.7.0 - postcss-nested@6.2.0(postcss@8.5.1): + postcss-nested@6.2.0(postcss@8.5.6): dependencies: - postcss: 8.5.1 + postcss: 8.5.6 postcss-selector-parser: 6.1.2 postcss-selector-parser@6.0.10: @@ -12387,12 +12060,6 @@ snapshots: postcss-value-parser@4.2.0: {} - postcss@8.5.1: - dependencies: - 
nanoid: 3.3.8 - picocolors: 1.1.1 - source-map-js: 1.2.1 - postcss@8.5.6: dependencies: nanoid: 3.3.11 @@ -12498,7 +12165,7 @@ snapshots: reactcss: 1.2.3(react@19.1.1) tinycolor2: 1.6.0 - react-confetti@6.2.2(react@19.1.1): + react-confetti@6.4.0(react@19.1.1): dependencies: react: 19.1.1 tween-functions: 1.2.0 @@ -12555,11 +12222,11 @@ snapshots: prop-types: 15.8.1 react: 19.1.1 - react-markdown@9.1.0(@types/react@19.1.13)(react@19.1.1): + react-markdown@9.1.0(@types/react@19.1.17)(react@19.1.1): dependencies: '@types/hast': 3.0.4 '@types/mdast': 4.0.4 - '@types/react': 19.1.13 + '@types/react': 19.1.17 devlop: 1.1.0 hast-util-to-jsx-runtime: 2.3.6 html-url-attributes: 3.0.1 @@ -12575,37 +12242,37 @@ snapshots: react-refresh@0.17.0: {} - react-remove-scroll-bar@2.3.8(@types/react@19.1.13)(react@19.1.1): + react-remove-scroll-bar@2.3.8(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 - react-style-singleton: 2.2.3(@types/react@19.1.13)(react@19.1.1) + react-style-singleton: 2.2.3(@types/react@19.1.17)(react@19.1.1) tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - react-remove-scroll@2.6.3(@types/react@19.1.13)(react@19.1.1): + react-remove-scroll@2.6.3(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 - react-remove-scroll-bar: 2.3.8(@types/react@19.1.13)(react@19.1.1) - react-style-singleton: 2.2.3(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll-bar: 2.3.8(@types/react@19.1.17)(react@19.1.1) + react-style-singleton: 2.2.3(@types/react@19.1.17)(react@19.1.1) tslib: 2.8.1 - use-callback-ref: 1.3.3(@types/react@19.1.13)(react@19.1.1) - use-sidecar: 1.1.3(@types/react@19.1.13)(react@19.1.1) + use-callback-ref: 1.3.3(@types/react@19.1.17)(react@19.1.1) + use-sidecar: 1.1.3(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - react-remove-scroll@2.7.1(@types/react@19.1.13)(react@19.1.1): + 
react-remove-scroll@2.7.1(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 - react-remove-scroll-bar: 2.3.8(@types/react@19.1.13)(react@19.1.1) - react-style-singleton: 2.2.3(@types/react@19.1.13)(react@19.1.1) + react-remove-scroll-bar: 2.3.8(@types/react@19.1.17)(react@19.1.1) + react-style-singleton: 2.2.3(@types/react@19.1.17)(react@19.1.1) tslib: 2.8.1 - use-callback-ref: 1.3.3(@types/react@19.1.13)(react@19.1.1) - use-sidecar: 1.1.3(@types/react@19.1.13)(react@19.1.1) + use-callback-ref: 1.3.3(@types/react@19.1.17)(react@19.1.1) + use-sidecar: 1.1.3(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - react-resizable-panels@3.0.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-resizable-panels@3.0.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) @@ -12626,13 +12293,13 @@ snapshots: react-dom: 19.1.1(react@19.1.1) react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react-style-singleton@2.2.3(@types/react@19.1.13)(react@19.1.1): + react-style-singleton@2.2.3(@types/react@19.1.17)(react@19.1.1): dependencies: get-nonce: 1.0.1 react: 19.1.1 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 react-syntax-highlighter@15.6.1(react@19.1.1): dependencies: @@ -12644,12 +12311,12 @@ snapshots: react: 19.1.1 refractor: 3.6.0 - react-textarea-autosize@8.5.9(@types/react@19.1.13)(react@19.1.1): + react-textarea-autosize@8.5.9(@types/react@19.1.17)(react@19.1.1): dependencies: '@babel/runtime': 7.26.10 react: 19.1.1 - use-composed-ref: 1.4.0(@types/react@19.1.13)(react@19.1.1) - use-latest: 1.3.0(@types/react@19.1.13)(react@19.1.1) + use-composed-ref: 1.4.0(@types/react@19.1.17)(react@19.1.1) + use-latest: 1.3.0(@types/react@19.1.17)(react@19.1.1) transitivePeerDependencies: - '@types/react' @@ -12662,7 +12329,7 @@ snapshots: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - 
react-virtualized-auto-sizer@1.0.24(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-virtualized-auto-sizer@1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) @@ -12751,14 +12418,14 @@ snapshots: define-properties: 1.2.1 set-function-name: 2.0.1 - remark-gfm@4.0.0: + remark-gfm@4.0.1: dependencies: - '@types/mdast': 4.0.3 - mdast-util-gfm: 3.0.0 + '@types/mdast': 4.0.4 + mdast-util-gfm: 3.1.0 micromark-extension-gfm: 3.0.0 remark-parse: 11.0.0 remark-stringify: 11.0.0 - unified: 11.0.4 + unified: 11.0.5 transitivePeerDependencies: - supports-color @@ -12782,7 +12449,7 @@ snapshots: remark-stringify@11.0.0: dependencies: '@types/mdast': 4.0.4 - mdast-util-to-markdown: 2.1.0 + mdast-util-to-markdown: 2.1.2 unified: 11.0.5 require-directory@2.1.1: {} @@ -12812,8 +12479,6 @@ snapshots: onetime: 5.1.2 signal-exit: 3.0.7 - rettime@0.7.0: {} - reusify@1.1.0: {} rimraf@3.0.2: @@ -12821,41 +12486,41 @@ snapshots: glob: 7.2.3 optional: true - rollup-plugin-visualizer@5.14.0(rollup@4.52.3): + rollup-plugin-visualizer@5.14.0(rollup@4.52.5): dependencies: open: 8.4.2 picomatch: 4.0.2 source-map: 0.7.4 yargs: 17.7.2 optionalDependencies: - rollup: 4.52.3 + rollup: 4.52.5 - rollup@4.52.3: + rollup@4.52.5: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.52.3 - '@rollup/rollup-android-arm64': 4.52.3 - '@rollup/rollup-darwin-arm64': 4.52.3 - '@rollup/rollup-darwin-x64': 4.52.3 - '@rollup/rollup-freebsd-arm64': 4.52.3 - '@rollup/rollup-freebsd-x64': 4.52.3 - '@rollup/rollup-linux-arm-gnueabihf': 4.52.3 - '@rollup/rollup-linux-arm-musleabihf': 4.52.3 - '@rollup/rollup-linux-arm64-gnu': 4.52.3 - '@rollup/rollup-linux-arm64-musl': 4.52.3 - '@rollup/rollup-linux-loong64-gnu': 4.52.3 - '@rollup/rollup-linux-ppc64-gnu': 4.52.3 - '@rollup/rollup-linux-riscv64-gnu': 4.52.3 - '@rollup/rollup-linux-riscv64-musl': 4.52.3 - '@rollup/rollup-linux-s390x-gnu': 4.52.3 - 
'@rollup/rollup-linux-x64-gnu': 4.52.3 - '@rollup/rollup-linux-x64-musl': 4.52.3 - '@rollup/rollup-openharmony-arm64': 4.52.3 - '@rollup/rollup-win32-arm64-msvc': 4.52.3 - '@rollup/rollup-win32-ia32-msvc': 4.52.3 - '@rollup/rollup-win32-x64-gnu': 4.52.3 - '@rollup/rollup-win32-x64-msvc': 4.52.3 + '@rollup/rollup-android-arm-eabi': 4.52.5 + '@rollup/rollup-android-arm64': 4.52.5 + '@rollup/rollup-darwin-arm64': 4.52.5 + '@rollup/rollup-darwin-x64': 4.52.5 + '@rollup/rollup-freebsd-arm64': 4.52.5 + '@rollup/rollup-freebsd-x64': 4.52.5 + '@rollup/rollup-linux-arm-gnueabihf': 4.52.5 + '@rollup/rollup-linux-arm-musleabihf': 4.52.5 + '@rollup/rollup-linux-arm64-gnu': 4.52.5 + '@rollup/rollup-linux-arm64-musl': 4.52.5 + '@rollup/rollup-linux-loong64-gnu': 4.52.5 + '@rollup/rollup-linux-ppc64-gnu': 4.52.5 + '@rollup/rollup-linux-riscv64-gnu': 4.52.5 + '@rollup/rollup-linux-riscv64-musl': 4.52.5 + '@rollup/rollup-linux-s390x-gnu': 4.52.5 + '@rollup/rollup-linux-x64-gnu': 4.52.5 + '@rollup/rollup-linux-x64-musl': 4.52.5 + '@rollup/rollup-openharmony-arm64': 4.52.5 + '@rollup/rollup-win32-arm64-msvc': 4.52.5 + '@rollup/rollup-win32-ia32-msvc': 4.52.5 + '@rollup/rollup-win32-x64-gnu': 4.52.5 + '@rollup/rollup-win32-x64-msvc': 4.52.5 fsevents: 2.3.3 run-parallel@1.2.0: @@ -13015,24 +12680,24 @@ snapshots: dependencies: internal-slot: 1.0.6 - storybook-addon-remix-react-router@5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0))): + storybook-addon-remix-react-router@5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))): dependencies: 
'@mjackson/form-data-parser': 0.4.0 compare-versions: 6.1.0 react-inspector: 6.0.2(react@19.1.1) react-router: 7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)): + storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): dependencies: '@storybook/global': 5.0.0 '@testing-library/jest-dom': 6.6.3 '@testing-library/user-event': 14.6.1(@testing-library/dom@10.4.0) '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(msw@2.11.3(@types/node@20.17.16)(typescript@5.6.3))(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)) + '@vitest/mocker': 3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) '@vitest/spy': 3.2.4 better-opn: 3.0.2 esbuild: 0.25.3 @@ -13124,12 +12789,12 @@ snapshots: sucrase@3.35.0: dependencies: - '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/gen-mapping': 0.3.13 commander: 4.1.1 glob: 10.4.5 lines-and-columns: 1.2.4 mz: 2.7.0 - pirates: 4.0.6 + pirates: 4.0.7 ts-interface-checker: 0.1.13 supports-color@7.2.0: @@ -13146,11 +12811,11 @@ snapshots: tailwind-merge@2.6.0: {} - tailwindcss-animate@1.0.7(tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3))): + tailwindcss-animate@1.0.7(tailwindcss@3.4.18(yaml@2.7.0)): dependencies: - tailwindcss: 
3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) + tailwindcss: 3.4.18(yaml@2.7.0) - tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)): + tailwindcss@3.4.18(yaml@2.7.0): dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 @@ -13166,16 +12831,17 @@ snapshots: normalize-path: 3.0.0 object-hash: 3.0.0 picocolors: 1.1.1 - postcss: 8.5.1 - postcss-import: 15.1.0(postcss@8.5.1) - postcss-js: 4.0.1(postcss@8.5.1) - postcss-load-config: 4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) - postcss-nested: 6.2.0(postcss@8.5.1) + postcss: 8.5.6 + postcss-import: 15.1.0(postcss@8.5.6) + postcss-js: 4.1.0(postcss@8.5.6) + postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.7.0) + postcss-nested: 6.2.0(postcss@8.5.6) postcss-selector-parser: 6.1.2 resolve: 1.22.10 sucrase: 3.35.0 transitivePeerDependencies: - - ts-node + - tsx + - yaml test-exclude@6.0.0: dependencies: @@ -13211,12 +12877,6 @@ snapshots: tinyspy@4.0.3: {} - tldts-core@7.0.16: {} - - tldts@7.0.16: - dependencies: - tldts-core: 7.0.16 - tmpl@1.0.5: {} to-regex-range@5.0.1: @@ -13234,18 +12894,12 @@ snapshots: universalify: 0.2.0 url-parse: 1.5.10 - tough-cookie@6.0.0: - dependencies: - tldts: 7.0.16 - tr46@3.0.0: dependencies: punycode: 2.3.1 trim-lines@3.0.1: {} - trough@2.1.0: {} - trough@2.2.0: {} ts-dedent@2.2.0: {} @@ -13273,21 +12927,21 @@ snapshots: '@swc/core': 1.3.38 optional: true - ts-poet@6.11.0: + ts-poet@6.12.0: dependencies: dprint-node: 1.0.8 - ts-proto-descriptors@1.15.0: + ts-proto-descriptors@1.16.0: dependencies: - long: 5.2.3 + long: 5.3.2 protobufjs: 7.4.0 - ts-proto@1.164.0: + ts-proto@1.181.2: dependencies: case-anything: 2.1.13 protobufjs: 7.4.0 - ts-poet: 6.11.0 - ts-proto-descriptors: 1.15.0 + ts-poet: 6.12.0 + ts-proto-descriptors: 1.16.0 tsconfig-paths@4.2.0: dependencies: @@ -13328,28 +12982,20 @@ snapshots: tzdata@1.0.46: {} - ua-parser-js@1.0.40: {} + 
ua-parser-js@1.0.41: {} undici-types@5.26.5: {} undici-types@6.19.8: {} + undici-types@6.21.0: {} + undici@6.21.3: {} unicorn-magic@0.1.0: {} unicorn-magic@0.3.0: {} - unified@11.0.4: - dependencies: - '@types/unist': 3.0.2 - bail: 2.0.2 - devlop: 1.1.0 - extend: 3.0.2 - is-plain-obj: 4.1.0 - trough: 2.1.0 - vfile: 6.0.3 - unified@11.0.5: dependencies: '@types/unist': 3.0.3 @@ -13398,17 +13044,9 @@ snapshots: webpack-sources: 3.2.3 webpack-virtual-modules: 0.5.0 - until-async@3.0.2: {} - - update-browserslist-db@1.1.1(browserslist@4.24.2): + update-browserslist-db@1.1.3(browserslist@4.26.3): dependencies: - browserslist: 4.24.2 - escalade: 3.2.0 - picocolors: 1.1.1 - - update-browserslist-db@1.1.3(browserslist@4.26.2): - dependencies: - browserslist: 4.26.2 + browserslist: 4.26.3 escalade: 3.2.0 picocolors: 1.1.1 @@ -13422,39 +13060,39 @@ snapshots: querystringify: 2.2.0 requires-port: 1.0.0 - use-callback-ref@1.3.3(@types/react@19.1.13)(react@19.1.1): + use-callback-ref@1.3.3(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - use-composed-ref@1.4.0(@types/react@19.1.13)(react@19.1.1): + use-composed-ref@1.4.0(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - use-isomorphic-layout-effect@1.2.1(@types/react@19.1.13)(react@19.1.1): + use-isomorphic-layout-effect@1.2.1(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - use-latest@1.3.0(@types/react@19.1.13)(react@19.1.1): + use-latest@1.3.0(@types/react@19.1.17)(react@19.1.1): dependencies: react: 19.1.1 - use-isomorphic-layout-effect: 1.2.1(@types/react@19.1.13)(react@19.1.1) + use-isomorphic-layout-effect: 1.2.1(@types/react@19.1.17)(react@19.1.1) optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 - 
use-sidecar@1.1.3(@types/react@19.1.13)(react@19.1.1): + use-sidecar@1.1.3(@types/react@19.1.17)(react@19.1.1): dependencies: detect-node-es: 1.1.0 react: 19.1.1 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.13 + '@types/react': 19.1.17 use-sync-external-store@1.4.0(react@19.1.1): dependencies: @@ -13504,7 +13142,7 @@ snapshots: d3-time: 3.1.0 d3-timer: 3.0.1 - vite-plugin-checker@0.11.0(@biomejs/biome@2.2.0)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0)): + vite-plugin-checker@0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): dependencies: '@babel/code-frame': 7.27.1 chokidar: 4.0.3 @@ -13513,26 +13151,26 @@ snapshots: picomatch: 4.0.3 tiny-invariant: 1.3.3 tinyglobby: 0.2.15 - vite: 7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0) + vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) vscode-uri: 3.1.0 optionalDependencies: - '@biomejs/biome': 2.2.0 + '@biomejs/biome': 2.2.4 eslint: 8.52.0 optionator: 0.9.3 typescript: 5.6.3 - vite@7.1.7(@types/node@20.17.16)(jiti@2.6.1)(yaml@2.7.0): + vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0): dependencies: - esbuild: 0.25.10 + esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 - rollup: 4.52.3 + rollup: 4.52.5 tinyglobby: 0.2.15 optionalDependencies: '@types/node': 20.17.16 fsevents: 2.3.3 - jiti: 2.6.1 + jiti: 1.21.7 yaml: 2.7.0 vscode-uri@3.1.0: {} @@ -13639,7 +13277,8 @@ snapshots: yaml@1.10.2: {} - yaml@2.7.0: {} + yaml@2.7.0: + optional: true yargs-parser@21.1.1: {} diff --git a/site/site_test.go b/site/site_test.go index fa3c0809f22a7..36ec124ef8bc8 100644 --- a/site/site_test.go +++ b/site/site_test.go @@ -232,6 +232,7 @@ func TestServingFiles(t *testing.T) { Database: db, })) defer srv.Close() + client := &http.Client{} // Create a context ctx, cancelFunc := context.WithTimeout(context.Background(), 
testutil.WaitShort) @@ -275,7 +276,7 @@ func TestServingFiles(t *testing.T) { req, err := http.NewRequestWithContext(ctx, "GET", path, nil) require.NoError(t, err) - resp, err := http.DefaultClient.Do(req) + resp, err := client.Do(req) require.NoError(t, err, "get file") data, _ := io.ReadAll(resp.Body) require.Equal(t, string(data), testCase.expected, "Verify file: "+testCase.path) @@ -521,6 +522,7 @@ func TestServingBin(t *testing.T) { compressor := middleware.NewCompressor(1, "text/*", "application/*") srv := httptest.NewServer(compressor.Handler(site)) defer srv.Close() + client := &http.Client{} // Create a context ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitShort) @@ -538,7 +540,7 @@ func TestServingBin(t *testing.T) { req.Header.Set("Accept-Encoding", "gzip") } - resp, err := http.DefaultClient.Do(req) + resp, err := client.Do(req) require.NoError(t, err, "http do failed") defer resp.Body.Close() diff --git a/site/src/@types/mui.d.ts b/site/src/@types/mui.d.ts index 49804d33f8971..daad165f7d335 100644 --- a/site/src/@types/mui.d.ts +++ b/site/src/@types/mui.d.ts @@ -13,16 +13,6 @@ declare module "@mui/material/styles" { } } -declare module "@mui/material/Button" { - interface ButtonPropsColorOverrides { - neutral: true; - } - - interface ButtonPropsSizeOverrides { - xlarge: true; - } -} - declare module "@mui/material/Checkbox" { interface CheckboxPropsSizeOverrides { xsmall: true; diff --git a/site/src/api/api.ts b/site/src/api/api.ts index f5b47ed824cde..4c02a96fe2129 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -21,7 +21,6 @@ */ import globalAxios, { type AxiosInstance, isAxiosError } from "axios"; import type dayjs from "dayjs"; -import type { Task } from "modules/tasks/tasks"; import userAgentParser from "ua-parser-js"; import { delay } from "../utils/delay"; import { OneWayWebSocket } from "../utils/OneWayWebSocket"; @@ -426,10 +425,6 @@ export type GetProvisionerDaemonsParams = { offline?: boolean; }; 
-export type TasksFilter = { - username?: string; -}; - /** * This is the container for all API methods. It's split off to make it more * clear where API methods should go, but it is eventually merged into the Api @@ -2390,19 +2385,6 @@ class ApiMethods { return response.data; }; - getWorkspaceParameters = async (workspace: TypesGen.Workspace) => { - const latestBuild = workspace.latest_build; - const [templateVersionRichParameters, buildParameters] = await Promise.all([ - this.getTemplateVersionRichParameters(latestBuild.template_version_id), - this.getWorkspaceBuildParameters(latestBuild.id), - ]); - - return { - templateVersionRichParameters, - buildParameters, - }; - }; - getInsightsUserLatency = async ( filters: InsightsParams, ): Promise => { @@ -2669,6 +2651,13 @@ class ApiMethods { // // All methods must be defined with arrow function syntax. See the docstring // above the ApiMethods class for a full explanation. + +export type TaskFeedbackRating = "good" | "okay" | "bad"; + +export type CreateTaskFeedbackRequest = { + rate: TaskFeedbackRating; + comment?: string; +}; class ExperimentalApiMethods { constructor(protected readonly axios: AxiosInstance) {} @@ -2705,33 +2694,49 @@ class ExperimentalApiMethods { return response.data; }; - getTasks = async (filter: TasksFilter): Promise => { - const queryExpressions = ["has-ai-task:true"]; - - if (filter.username) { - queryExpressions.push(`owner:${filter.username}`); + getTasks = async ( + filter: TypesGen.TasksFilter, + ): Promise => { + const query: string[] = []; + if (filter.owner) { + query.push(`owner:${filter.owner}`); + } + if (filter.status) { + query.push(`status:${filter.status}`); } - const res = await API.getWorkspaces({ - q: queryExpressions.join(" "), - }); - // Exclude prebuild workspaces as they are not user-facing. 
- const workspaces = res.workspaces.filter( - (workspace) => !workspace.is_prebuild, + const res = await this.axios.get( + "/api/experimental/tasks", + { + params: { + q: query.join(", "), + }, + }, ); - const prompts = await API.experimental.getAITasksPrompts( - workspaces.map((workspace) => workspace.latest_build.id), + + return res.data.tasks; + }; + + getTask = async (user: string, id: string): Promise => { + const response = await this.axios.get( + `/api/experimental/tasks/${user}/${id}`, ); - return workspaces.map((workspace) => ({ - workspace, - prompt: prompts.prompts[workspace.latest_build.id], - })); + return response.data; }; deleteTask = async (user: string, id: string): Promise => { await this.axios.delete(`/api/experimental/tasks/${user}/${id}`); }; + + createTaskFeedback = async ( + _taskId: string, + _req: CreateTaskFeedbackRequest, + ) => { + return new Promise((res) => { + setTimeout(() => res(), 500); + }); + }; } // This is a hard coded CSRF token/cookie pair for local development. 
In prod, diff --git a/site/src/api/queries/templates.ts b/site/src/api/queries/templates.ts index 8c3b294f7fad8..686611cb6cd41 100644 --- a/site/src/api/queries/templates.ts +++ b/site/src/api/queries/templates.ts @@ -249,9 +249,15 @@ export const templateVersionLogs = (versionId: string) => { }; }; +export const richParametersKey = (versionId: string) => [ + templateVersionRoot, + versionId, + "richParameters", +]; + export const richParameters = (versionId: string) => { return { - queryKey: [templateVersionRoot, versionId, "richParameters"], + queryKey: richParametersKey(versionId), queryFn: () => API.getTemplateVersionRichParameters(versionId), }; }; diff --git a/site/src/api/queries/workspaceBuilds.ts b/site/src/api/queries/workspaceBuilds.ts index 8f5e088b3a400..4617d988e3c8c 100644 --- a/site/src/api/queries/workspaceBuilds.ts +++ b/site/src/api/queries/workspaceBuilds.ts @@ -6,7 +6,7 @@ import type { } from "api/typesGenerated"; import type { QueryOptions, UseInfiniteQueryOptions } from "react-query"; -function workspaceBuildParametersKey(workspaceBuildId: string) { +export function workspaceBuildParametersKey(workspaceBuildId: string) { return ["workspaceBuilds", workspaceBuildId, "parameters"] as const; } diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts index e2a394894965f..ff7501665bb14 100644 --- a/site/src/api/rbacresourcesGenerated.ts +++ b/site/src/api/rbacresourcesGenerated.ts @@ -156,6 +156,12 @@ export const RBACResourceActions: Partial< read: "view info about a Tailnet coordinator", update: "update a Tailnet coordinator", }, + task: { + create: "create a new task", + delete: "delete task", + read: "read task data or output to view on the UI or CLI", + update: "edit task settings or send input to an existing task", + }, template: { create: "create a template", delete: "delete a template", @@ -195,6 +201,7 @@ export const RBACResourceActions: Partial< delete: "delete workspace", delete_agent: "delete an 
existing workspace agent", read: "read workspace data to view on the UI", + share: "share a workspace with other users or groups", ssh: "ssh into a given workspace", start: "allows starting a workspace", stop: "allows stopping a workspace", @@ -215,6 +222,7 @@ export const RBACResourceActions: Partial< delete: "delete workspace", delete_agent: "delete an existing workspace agent", read: "read workspace data to view on the UI", + share: "share a workspace with other users or groups", ssh: "ssh into a given workspace", start: "allows starting a workspace", stop: "allows stopping a workspace", diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 0519c9c136cec..8d86362b06227 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -1,6 +1,10 @@ // Code generated by 'guts'. DO NOT EDIT. // From codersdk/templates.go +/** + * ACLAvailable is a list of users and groups that can be added to a template + * ACL. + */ export interface ACLAvailable { readonly users: readonly ReducedUser[]; readonly groups: readonly Group[]; @@ -12,22 +16,33 @@ export interface AIBridgeAnthropicConfig { readonly key: string; } +// From codersdk/deployment.go +export interface AIBridgeBedrockConfig { + readonly region: string; + readonly access_key: string; + readonly access_key_secret: string; + readonly model: string; + readonly small_fast_model: string; +} + // From codersdk/deployment.go export interface AIBridgeConfig { readonly enabled: boolean; readonly openai: AIBridgeOpenAIConfig; readonly anthropic: AIBridgeAnthropicConfig; + readonly bedrock: AIBridgeBedrockConfig; } // From codersdk/aibridge.go export interface AIBridgeInterception { readonly id: string; - readonly initiator_id: string; + readonly initiator: MinimalUser; readonly provider: string; readonly model: string; // empty interface{} type, falling back to unknown readonly metadata: Record; readonly started_at: string; + readonly ended_at: string | null; readonly 
token_usages: readonly AIBridgeTokenUsage[]; readonly user_prompts: readonly AIBridgeUserPrompt[]; readonly tool_usages: readonly AIBridgeToolUsage[]; @@ -35,6 +50,7 @@ export interface AIBridgeInterception { // From codersdk/aibridge.go export interface AIBridgeListInterceptionsResponse { + readonly count: number; readonly results: readonly AIBridgeInterception[]; } @@ -88,14 +104,42 @@ export interface AIConfig { } // From codersdk/aitasks.go +/** + * AITaskPromptParameterName is the name of the parameter used to pass prompts + * to AI tasks. + * + * Experimental: This value is experimental and may change in the future. + */ export const AITaskPromptParameterName = "AI Prompt"; // From codersdk/aitasks.go +/** + * AITasksPromptsResponse represents the response from the AITaskPrompts method. + * + * Experimental: This method is experimental and may change in the future. + */ export interface AITasksPromptsResponse { + /** + * Prompts is a map of workspace build IDs to prompts. + */ readonly prompts: Record; } +// From codersdk/allowlist.go +/** + * APIAllowListTarget represents a single allow-list entry using the canonical + * string form ":". The wildcard symbol "*" is treated as a + * permissive match for either side. + */ +export interface APIAllowListTarget { + readonly type: RBACResource; + readonly id: string; +} + // From codersdk/apikey.go +/** + * APIKey: do not ever return the HashedSecret + */ export interface APIKey { readonly id: string; readonly user_id: string; @@ -104,14 +148,19 @@ export interface APIKey { readonly created_at: string; readonly updated_at: string; readonly login_type: LoginType; - readonly scope: APIKeyScope; + readonly scope: APIKeyScope; // Deprecated: use Scopes instead. 
readonly scopes: readonly APIKeyScope[]; readonly token_name: string; readonly lifetime_seconds: number; + readonly allow_list: readonly APIAllowListTarget[]; } // From codersdk/apikey.go export type APIKeyScope = + | "aibridge_interception:*" + | "aibridge_interception:create" + | "aibridge_interception:read" + | "aibridge_interception:update" | "all" | "api_key:*" | "api_key:create" @@ -119,6 +168,20 @@ export type APIKeyScope = | "api_key:read" | "api_key:update" | "application_connect" + | "assign_org_role:*" + | "assign_org_role:assign" + | "assign_org_role:create" + | "assign_org_role:delete" + | "assign_org_role:read" + | "assign_org_role:unassign" + | "assign_org_role:update" + | "assign_role:*" + | "assign_role:assign" + | "assign_role:read" + | "assign_role:unassign" + | "audit_log:*" + | "audit_log:create" + | "audit_log:read" | "coder:all" | "coder:apikeys.manage_self" | "coder:application_connect" @@ -128,33 +191,174 @@ export type APIKeyScope = | "coder:workspaces.create" | "coder:workspaces.delete" | "coder:workspaces.operate" + | "connection_log:*" + | "connection_log:read" + | "connection_log:update" + | "crypto_key:*" + | "crypto_key:create" + | "crypto_key:delete" + | "crypto_key:read" + | "crypto_key:update" + | "debug_info:*" + | "debug_info:read" + | "deployment_config:*" + | "deployment_config:read" + | "deployment_config:update" + | "deployment_stats:*" + | "deployment_stats:read" | "file:*" | "file:create" | "file:read" + | "group:*" + | "group:create" + | "group:delete" + | "group_member:*" + | "group_member:read" + | "group:read" + | "group:update" + | "idpsync_settings:*" + | "idpsync_settings:read" + | "idpsync_settings:update" + | "inbox_notification:*" + | "inbox_notification:create" + | "inbox_notification:read" + | "inbox_notification:update" + | "license:*" + | "license:create" + | "license:delete" + | "license:read" + | "notification_message:*" + | "notification_message:create" + | "notification_message:delete" + | 
"notification_message:read" + | "notification_message:update" + | "notification_preference:*" + | "notification_preference:read" + | "notification_preference:update" + | "notification_template:*" + | "notification_template:read" + | "notification_template:update" + | "oauth2_app:*" + | "oauth2_app_code_token:*" + | "oauth2_app_code_token:create" + | "oauth2_app_code_token:delete" + | "oauth2_app_code_token:read" + | "oauth2_app:create" + | "oauth2_app:delete" + | "oauth2_app:read" + | "oauth2_app_secret:*" + | "oauth2_app_secret:create" + | "oauth2_app_secret:delete" + | "oauth2_app_secret:read" + | "oauth2_app_secret:update" + | "oauth2_app:update" + | "organization:*" + | "organization:create" + | "organization:delete" + | "organization_member:*" + | "organization_member:create" + | "organization_member:delete" + | "organization_member:read" + | "organization_member:update" + | "organization:read" + | "organization:update" + | "prebuilt_workspace:*" + | "prebuilt_workspace:delete" + | "prebuilt_workspace:update" + | "provisioner_daemon:*" + | "provisioner_daemon:create" + | "provisioner_daemon:delete" + | "provisioner_daemon:read" + | "provisioner_daemon:update" + | "provisioner_jobs:*" + | "provisioner_jobs:create" + | "provisioner_jobs:read" + | "provisioner_jobs:update" + | "replicas:*" + | "replicas:read" + | "system:*" + | "system:create" + | "system:delete" + | "system:read" + | "system:update" + | "tailnet_coordinator:*" + | "tailnet_coordinator:create" + | "tailnet_coordinator:delete" + | "tailnet_coordinator:read" + | "tailnet_coordinator:update" + | "task:*" + | "task:create" + | "task:delete" + | "task:read" + | "task:update" | "template:*" | "template:create" | "template:delete" | "template:read" | "template:update" | "template:use" + | "template:view_insights" + | "usage_event:*" + | "usage_event:create" + | "usage_event:read" + | "usage_event:update" + | "user:*" + | "user:create" + | "user:delete" + | "user:read" | "user:read_personal" | 
"user_secret:*" | "user_secret:create" | "user_secret:delete" | "user_secret:read" | "user_secret:update" + | "user:update" | "user:update_personal" + | "webpush_subscription:*" + | "webpush_subscription:create" + | "webpush_subscription:delete" + | "webpush_subscription:read" + | "workspace_agent_devcontainers:*" + | "workspace_agent_devcontainers:create" + | "workspace_agent_resource_monitor:*" + | "workspace_agent_resource_monitor:create" + | "workspace_agent_resource_monitor:read" + | "workspace_agent_resource_monitor:update" | "workspace:*" | "workspace:application_connect" | "workspace:create" + | "workspace:create_agent" | "workspace:delete" + | "workspace:delete_agent" + | "workspace_dormant:*" + | "workspace_dormant:application_connect" + | "workspace_dormant:create" + | "workspace_dormant:create_agent" + | "workspace_dormant:delete" + | "workspace_dormant:delete_agent" + | "workspace_dormant:read" + | "workspace_dormant:share" + | "workspace_dormant:ssh" + | "workspace_dormant:start" + | "workspace_dormant:stop" + | "workspace_dormant:update" + | "workspace_proxy:*" + | "workspace_proxy:create" + | "workspace_proxy:delete" + | "workspace_proxy:read" + | "workspace_proxy:update" | "workspace:read" + | "workspace:share" | "workspace:ssh" | "workspace:start" | "workspace:stop" | "workspace:update"; export const APIKeyScopes: APIKeyScope[] = [ + "aibridge_interception:*", + "aibridge_interception:create", + "aibridge_interception:read", + "aibridge_interception:update", "all", "api_key:*", "api_key:create", @@ -162,6 +366,20 @@ export const APIKeyScopes: APIKeyScope[] = [ "api_key:read", "api_key:update", "application_connect", + "assign_org_role:*", + "assign_org_role:assign", + "assign_org_role:create", + "assign_org_role:delete", + "assign_org_role:read", + "assign_org_role:unassign", + "assign_org_role:update", + "assign_role:*", + "assign_role:assign", + "assign_role:read", + "assign_role:unassign", + "audit_log:*", + "audit_log:create", + 
"audit_log:read", "coder:all", "coder:apikeys.manage_self", "coder:application_connect", @@ -171,27 +389,164 @@ export const APIKeyScopes: APIKeyScope[] = [ "coder:workspaces.create", "coder:workspaces.delete", "coder:workspaces.operate", + "connection_log:*", + "connection_log:read", + "connection_log:update", + "crypto_key:*", + "crypto_key:create", + "crypto_key:delete", + "crypto_key:read", + "crypto_key:update", + "debug_info:*", + "debug_info:read", + "deployment_config:*", + "deployment_config:read", + "deployment_config:update", + "deployment_stats:*", + "deployment_stats:read", "file:*", "file:create", "file:read", + "group:*", + "group:create", + "group:delete", + "group_member:*", + "group_member:read", + "group:read", + "group:update", + "idpsync_settings:*", + "idpsync_settings:read", + "idpsync_settings:update", + "inbox_notification:*", + "inbox_notification:create", + "inbox_notification:read", + "inbox_notification:update", + "license:*", + "license:create", + "license:delete", + "license:read", + "notification_message:*", + "notification_message:create", + "notification_message:delete", + "notification_message:read", + "notification_message:update", + "notification_preference:*", + "notification_preference:read", + "notification_preference:update", + "notification_template:*", + "notification_template:read", + "notification_template:update", + "oauth2_app:*", + "oauth2_app_code_token:*", + "oauth2_app_code_token:create", + "oauth2_app_code_token:delete", + "oauth2_app_code_token:read", + "oauth2_app:create", + "oauth2_app:delete", + "oauth2_app:read", + "oauth2_app_secret:*", + "oauth2_app_secret:create", + "oauth2_app_secret:delete", + "oauth2_app_secret:read", + "oauth2_app_secret:update", + "oauth2_app:update", + "organization:*", + "organization:create", + "organization:delete", + "organization_member:*", + "organization_member:create", + "organization_member:delete", + "organization_member:read", + "organization_member:update", + 
"organization:read", + "organization:update", + "prebuilt_workspace:*", + "prebuilt_workspace:delete", + "prebuilt_workspace:update", + "provisioner_daemon:*", + "provisioner_daemon:create", + "provisioner_daemon:delete", + "provisioner_daemon:read", + "provisioner_daemon:update", + "provisioner_jobs:*", + "provisioner_jobs:create", + "provisioner_jobs:read", + "provisioner_jobs:update", + "replicas:*", + "replicas:read", + "system:*", + "system:create", + "system:delete", + "system:read", + "system:update", + "tailnet_coordinator:*", + "tailnet_coordinator:create", + "tailnet_coordinator:delete", + "tailnet_coordinator:read", + "tailnet_coordinator:update", + "task:*", + "task:create", + "task:delete", + "task:read", + "task:update", "template:*", "template:create", "template:delete", "template:read", "template:update", "template:use", + "template:view_insights", + "usage_event:*", + "usage_event:create", + "usage_event:read", + "usage_event:update", + "user:*", + "user:create", + "user:delete", + "user:read", "user:read_personal", "user_secret:*", "user_secret:create", "user_secret:delete", "user_secret:read", "user_secret:update", + "user:update", "user:update_personal", + "webpush_subscription:*", + "webpush_subscription:create", + "webpush_subscription:delete", + "webpush_subscription:read", + "workspace_agent_devcontainers:*", + "workspace_agent_devcontainers:create", + "workspace_agent_resource_monitor:*", + "workspace_agent_resource_monitor:create", + "workspace_agent_resource_monitor:read", + "workspace_agent_resource_monitor:update", "workspace:*", "workspace:application_connect", "workspace:create", + "workspace:create_agent", "workspace:delete", + "workspace:delete_agent", + "workspace_dormant:*", + "workspace_dormant:application_connect", + "workspace_dormant:create", + "workspace_dormant:create_agent", + "workspace_dormant:delete", + "workspace_dormant:delete_agent", + "workspace_dormant:read", + "workspace_dormant:share", + "workspace_dormant:ssh", + 
"workspace_dormant:start", + "workspace_dormant:stop", + "workspace_dormant:update", + "workspace_proxy:*", + "workspace_proxy:create", + "workspace_proxy:delete", + "workspace_proxy:read", + "workspace_proxy:update", "workspace:read", + "workspace:share", "workspace:ssh", "workspace:start", "workspace:stop", @@ -204,7 +559,13 @@ export interface APIKeyWithOwner extends APIKey { } // From healthsdk/healthsdk.go +/** + * AccessURLReport shows the results of performing a HTTP_GET to the /healthz endpoint through the configured access URL. + */ export interface AccessURLReport extends BaseReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. + */ readonly healthy: boolean; readonly access_url: string; readonly reachable: boolean; @@ -239,9 +600,19 @@ export interface AgentScriptTiming { } // From codersdk/templates.go +/** + * AgentStatsReportResponse is returned for each report + * request by the agent. + */ export interface AgentStatsReportResponse { readonly num_comms: number; + /** + * RxBytes is the number of received bytes. + */ readonly rx_bytes: number; + /** + * TxBytes is the number of transmitted bytes. + */ readonly tx_bytes: number; } @@ -256,6 +627,9 @@ export const AgentSubsystems: AgentSubsystem[] = [ // From codersdk/deployment.go export interface AppHostResponse { + /** + * Host is the externally accessible URL for the Coder instance. + */ readonly host: string; } @@ -264,6 +638,9 @@ export interface AppearanceConfig { readonly application_name: string; readonly logo_url: string; readonly docs_url: string; + /** + * Deprecated: ServiceBanner has been replaced by AnnouncementBanners. 
+ */ readonly service_banner: BannerConfig; readonly announcement_banners: readonly BannerConfig[]; readonly support_links?: readonly LinkConfig[]; @@ -271,6 +648,10 @@ export interface AppearanceConfig { // From codersdk/templates.go export interface ArchiveTemplateVersionsRequest { + /** + * By default, only failed versions are archived. Set this to true + * to archive all unused versions regardless of job status. + */ readonly all: boolean; } @@ -283,6 +664,9 @@ export interface ArchiveTemplateVersionsResponse { // From codersdk/roles.go export interface AssignableRoles extends Role { readonly assignable: boolean; + /** + * BuiltIn roles are immutable + */ readonly built_in: boolean; } @@ -339,6 +723,9 @@ export interface AuditLog { readonly user_agent: string; readonly resource_type: ResourceType; readonly resource_id: string; + /** + * ResourceTarget is the name of the resource. + */ readonly resource_target: string; readonly resource_icon: string; readonly action: AuditAction; @@ -348,6 +735,9 @@ export interface AuditLog { readonly description: string; readonly resource_link: string; readonly is_deleted: boolean; + /** + * Deprecated: Use 'organization.id' instead. + */ readonly organization_id: string; readonly organization?: MinimalOrganization; readonly user: User | null; @@ -370,6 +760,9 @@ export interface AuthMethod { } // From codersdk/users.go +/** + * AuthMethods contains authentication method information like whether they are enabled or not or custom text, etc. + */ export interface AuthMethods { readonly terms_of_service_url?: string; readonly password: AuthMethod; @@ -378,22 +771,76 @@ export interface AuthMethods { } // From codersdk/authorization.go +/** + * AuthorizationCheck is used to check if the currently authenticated user (or the specified user) can do a given action to a given set of objects. 
+ * + * @Description AuthorizationCheck is used to check if the currently authenticated user (or the specified user) can do a given action to a given set of objects. + */ export interface AuthorizationCheck { + /** + * Object can represent a "set" of objects, such as: all workspaces in an organization, all workspaces owned by me, and all workspaces across the entire product. + * When defining an object, use the most specific language when possible to + * produce the smallest set. Meaning to set as many fields on 'Object' as + * you can. Example, if you want to check if you can update all workspaces + * owned by 'me', try to also add an 'OrganizationID' to the settings. + * Omitting the 'OrganizationID' could produce the incorrect value, as + * workspaces have both `user` and `organization` owners. + */ readonly object: AuthorizationObject; readonly action: RBACAction; } // From codersdk/authorization.go +/** + * AuthorizationObject can represent a "set" of objects, such as: all workspaces in an organization, all workspaces owned by me, + * all workspaces across the entire product. + * + * @Description AuthorizationObject can represent a "set" of objects, such as: all workspaces in an organization, all workspaces owned by me, + * @Description all workspaces across the entire product. + */ export interface AuthorizationObject { + /** + * ResourceType is the name of the resource. + * `./coderd/rbac/object.go` has the list of valid resource types. + */ readonly resource_type: RBACResource; + /** + * OwnerID (optional) adds the set constraint to all resources owned by a given user. + */ readonly owner_id?: string; + /** + * OrganizationID (optional) adds the set constraint to all resources owned by a given organization. + */ readonly organization_id?: string; + /** + * ResourceID (optional) reduces the set to a singular resource. This assigns + * a resource ID to the resource type, eg: a single workspace. 
+ * The rbac library will not fetch the resource from the database, so if you + * are using this option, you should also set the owner ID and organization ID + * if possible. Be as specific as possible using all the fields relevant. + */ readonly resource_id?: string; + /** + * AnyOrgOwner (optional) will disregard the org_owner when checking for permissions. + * This cannot be set to true if the OrganizationID is set. + */ readonly any_org?: boolean; } // From codersdk/authorization.go +/** + * AuthorizationRequest is a structure instead of a map because + * go-playground/validate can only validate structs. If you attempt to pass + * a map into `httpapi.Read`, you will get an invalid type error. + */ export interface AuthorizationRequest { + /** + * Checks is a map keyed with an arbitrary string to a permission check. + * The key can be any string that is helpful to the caller, and allows + * multiple permission checks to be run in a single request. + * The key ensures that each permission check has the same key in the + * response. + */ readonly checks: Record; } @@ -406,6 +853,10 @@ export type AutomaticUpdates = "always" | "never"; export const AutomaticUpdateses: AutomaticUpdates[] = ["always", "never"]; // From codersdk/deployment.go +/** + * AvailableExperiments is an expandable type that returns all safe experiments + * available to be used with a deployment. + */ export interface AvailableExperiments { readonly safe: readonly Experiment[]; } @@ -418,6 +869,9 @@ export interface BannerConfig { } // From healthsdk/healthsdk.go +/** + * BaseReport holds fields common to various health reports. + */ export interface BaseReport { readonly error?: string; readonly severity: HealthSeverity; @@ -426,16 +880,51 @@ export interface BaseReport { } // From codersdk/deployment.go +/** + * BuildInfoResponse contains build information for this instance of Coder. + */ export interface BuildInfoResponse { + /** + * ExternalURL references the current Coder version. 
+ * For production builds, this will link directly to a release. For development builds, this will link to a commit. + */ readonly external_url: string; + /** + * Version returns the semantic version of the build. + */ readonly version: string; + /** + * DashboardURL is the URL to hit the deployment's dashboard. + * For external workspace proxies, this is the coderd they are connected + * to. + */ readonly dashboard_url: string; + /** + * Telemetry is a boolean that indicates whether telemetry is enabled. + */ readonly telemetry: boolean; readonly workspace_proxy: boolean; + /** + * AgentAPIVersion is the current version of the Agent API (back versions + * MAY still be supported). + */ readonly agent_api_version: string; + /** + * ProvisionerAPIVersion is the current version of the Provisioner API + */ readonly provisioner_api_version: string; + /** + * UpgradeMessage is the message displayed to users when an outdated client + * is detected. + */ readonly upgrade_message: string; + /** + * DeploymentID is the unique identifier for this deployment. + */ readonly deployment_id: string; + /** + * WebPushPublicKey is the public key for push notifications via Web Push. + */ readonly webpush_public_key?: string; } @@ -464,12 +953,25 @@ export const BuildReasons: BuildReason[] = [ ]; // From codersdk/client.go +/** + * BuildVersionHeader contains build information of Coder. + */ export const BuildVersionHeader = "X-Coder-Build-Version"; // From codersdk/client.go +/** + * BypassRatelimitHeader is the custom header to use to bypass ratelimits. + * Only owners can bypass rate limits. This is typically used for scale testing. + * nolint: gosec + */ export const BypassRatelimitHeader = "X-Coder-Bypass-Ratelimit"; // From codersdk/client.go +/** + * CLITelemetryHeader contains a base64-encoded representation of the CLI + * command that was invoked to produce the request. It is for internal use + * only. 
+ */ export const CLITelemetryHeader = "Coder-CLI-Telemetry"; // From codersdk/cors_behavior.go @@ -479,6 +981,9 @@ export const CORSBehaviors: CORSBehavior[] = ["passthru", "simple"]; // From codersdk/workspacebuilds.go export interface CancelWorkspaceBuildParams { + /** + * ExpectStatus ensures the build is in the expected status before canceling. + */ readonly expect_status?: CancelWorkspaceBuildStatus; } @@ -491,6 +996,9 @@ export const CancelWorkspaceBuildStatuses: CancelWorkspaceBuildStatus[] = [ ]; // From codersdk/users.go +/** + * ChangePasswordWithOneTimePasscodeRequest enables callers to change their password when they've forgotten it. + */ export interface ChangePasswordWithOneTimePasscodeRequest { readonly email: string; readonly password: string; @@ -498,9 +1006,16 @@ export interface ChangePasswordWithOneTimePasscodeRequest { } // From codersdk/client.go +/** + * CoderDesktopTelemetryHeader contains a JSON-encoded representation of Desktop telemetry + * fields, including device ID, OS, and Desktop version. + */ export const CoderDesktopTelemetryHeader = "Coder-Desktop-Telemetry"; // From codersdk/insights.go +/** + * ConnectionLatency shows the latency for a connection. 
+ */ export interface ConnectionLatency { readonly p50: number; readonly p95: number; @@ -518,7 +1033,19 @@ export interface ConnectionLog { readonly agent_name: string; readonly ip?: string; readonly type: ConnectionType; + /** + * WebInfo is only set when `type` is one of: + * - `ConnectionTypePortForwarding` + * - `ConnectionTypeWorkspaceApp` + */ readonly web_info?: ConnectionLogWebInfo; + /** + * SSHInfo is only set when `type` is one of: + * - `ConnectionTypeSSH` + * - `ConnectionTypeReconnectingPTY` + * - `ConnectionTypeVSCode` + * - `ConnectionTypeJetBrains` + */ readonly ssh_info?: ConnectionLogSSHInfo; } @@ -531,8 +1058,20 @@ export interface ConnectionLogResponse { // From codersdk/connectionlog.go export interface ConnectionLogSSHInfo { readonly connection_id: string; + /** + * DisconnectTime is omitted if a disconnect event with the same connection ID + * has not yet been seen. + */ readonly disconnect_time?: string; + /** + * DisconnectReason is omitted if a disconnect event with the same connection ID + * has not yet been seen. + */ readonly disconnect_reason?: string; + /** + * ExitCode is the exit code of the SSH session. It is omitted if a + * disconnect event with the same connection ID has not yet been seen. + */ readonly exit_code?: number; } @@ -547,8 +1086,14 @@ export const ConnectionLogStatuses: ConnectionLogStatus[] = [ // From codersdk/connectionlog.go export interface ConnectionLogWebInfo { readonly user_agent: string; + /** + * User is omitted if the connection event was from an unauthenticated user. + */ readonly user: User | null; readonly slug_or_port: string; + /** + * StatusCode is the HTTP status code of the request. + */ readonly status_code: number; } @@ -583,6 +1128,9 @@ export const ContentTypeZip = "application/zip"; // From codersdk/users.go export interface ConvertLoginRequest { + /** + * ToType is the login type to convert to. 
+ */ readonly to_type: LoginType; readonly password: string; } @@ -598,6 +1146,9 @@ export interface CreateFirstUserRequest { } // From codersdk/users.go +/** + * CreateFirstUserResponse contains IDs for newly created user info. + */ export interface CreateFirstUserResponse { readonly user_id: string; readonly organization_id: string; @@ -625,6 +1176,9 @@ export interface CreateGroupRequest { // From codersdk/organizations.go export interface CreateOrganizationRequest { readonly name: string; + /** + * DisplayName will default to the same value as `Name` if not provided. + */ readonly display_name?: string; readonly description?: string; readonly icon?: string; @@ -642,6 +1196,11 @@ export interface CreateProvisionerKeyResponse { } // From codersdk/aitasks.go +/** + * CreateTaskRequest represents the request to create a new task. + * + * Experimental: This type is experimental and may change in the future. + */ export interface CreateTaskRequest { readonly template_version_id: string; readonly template_version_preset_id?: string; @@ -650,30 +1209,127 @@ export interface CreateTaskRequest { } // From codersdk/organizations.go +/** + * CreateTemplateRequest provides options when creating a template. + */ export interface CreateTemplateRequest { + /** + * Name is the name of the template. + */ readonly name: string; + /** + * DisplayName is the displayed name of the template. + */ readonly display_name?: string; + /** + * Description is a description of what the template contains. It must be + * less than 128 bytes. + */ readonly description?: string; + /** + * Icon is a relative path or external URL that specifies + * an icon to be displayed in the dashboard. + */ readonly icon?: string; + /** + * VersionID is an in-progress or completed job to use as an initial version + * of the template. + * + * This is required on creation to enable a user-flow of validating a + * template works. 
There is no reason the data-model cannot support empty + * templates, but it doesn't make sense for users. + */ readonly template_version_id: string; + /** + * DefaultTTLMillis allows optionally specifying the default TTL + * for all workspaces created from this template. + */ readonly default_ttl_ms?: number; + /** + * ActivityBumpMillis allows optionally specifying the activity bump + * duration for all workspaces created from this template. Defaults to 1h + * but can be set to 0 to disable activity bumping. + */ readonly activity_bump_ms?: number; + /** + * AutostopRequirement allows optionally specifying the autostop requirement + * for workspaces created from this template. This is an enterprise feature. + */ readonly autostop_requirement?: TemplateAutostopRequirement; + /** + * AutostartRequirement allows optionally specifying the autostart allowed days + * for workspaces created from this template. This is an enterprise feature. + */ readonly autostart_requirement?: TemplateAutostartRequirement; + /** + * Allow users to cancel in-progress workspace jobs. + * *bool as the default value is "true". + */ readonly allow_user_cancel_workspace_jobs: boolean | null; + /** + * AllowUserAutostart allows users to set a schedule for autostarting their + * workspace. By default this is true. This can only be disabled when using + * an enterprise license. + */ readonly allow_user_autostart?: boolean; + /** + * AllowUserAutostop allows users to set a custom workspace TTL to use in + * place of the template's DefaultTTL field. By default this is true. If + * false, the DefaultTTL will always be used. This can only be disabled when + * using an enterprise license. + */ readonly allow_user_autostop?: boolean; + /** + * FailureTTLMillis allows optionally specifying the max lifetime before Coder + * stops all resources for failed workspaces created from this template. 
+ */ readonly failure_ttl_ms?: number; + /** + * TimeTilDormantMillis allows optionally specifying the max lifetime before Coder + * locks inactive workspaces created from this template. + */ readonly dormant_ttl_ms?: number; + /** + * TimeTilDormantAutoDeleteMillis allows optionally specifying the max lifetime before Coder + * permanently deletes dormant workspaces created from this template. + */ readonly delete_ttl_ms?: number; + /** + * DisableEveryoneGroupAccess allows optionally disabling the default + * behavior of granting the 'everyone' group access to use the template. + * If this is set to true, the template will not be available to all users, + * and must be explicitly granted to users or groups in the permissions settings + * of the template. + */ readonly disable_everyone_group_access: boolean; + /** + * RequireActiveVersion mandates that workspaces are built with the active + * template version. + */ readonly require_active_version: boolean; + /** + * MaxPortShareLevel allows optionally specifying the maximum port share level + * for workspaces created from the template. + */ readonly max_port_share_level: WorkspaceAgentPortShareLevel | null; + /** + * UseClassicParameterFlow allows optionally specifying whether + * the template should use the classic parameter flow. The default if unset is + * true, and is why `*bool` is used here. When dynamic parameters becomes + * the default, this will default to false. + */ readonly template_use_classic_parameter_flow?: boolean; + /** + * CORSBehavior allows optionally specifying the CORS behavior for all shared ports. + */ readonly cors_behavior: CORSBehavior | null; } // From codersdk/templateversions.go +/** + * CreateTemplateVersionDryRunRequest defines the request parameters for + * CreateTemplateVersionDryRun. 
+ */ export interface CreateTemplateVersionDryRunRequest { readonly workspace_name: string; readonly rich_parameter_values: readonly WorkspaceBuildParameter[]; @@ -681,9 +1337,15 @@ export interface CreateTemplateVersionDryRunRequest { } // From codersdk/organizations.go +/** + * CreateTemplateVersionRequest enables callers to create a new Template Version. + */ export interface CreateTemplateVersionRequest { readonly name?: string; readonly message?: string; + /** + * TemplateID optionally associates a version with a template. + */ readonly template_id?: string; readonly storage_method: ProvisionerStorageMethod; readonly file_id?: string; @@ -708,9 +1370,10 @@ export interface CreateTestAuditLogRequest { // From codersdk/apikey.go export interface CreateTokenRequest { readonly lifetime: number; - readonly scope?: APIKeyScope; + readonly scope?: APIKeyScope; // Deprecated: use Scopes instead. readonly scopes?: readonly APIKeyScope[]; readonly token_name: string; + readonly allow_list?: readonly APIAllowListTarget[]; } // From codersdk/users.go @@ -719,8 +1382,17 @@ export interface CreateUserRequestWithOrgs { readonly username: string; readonly name: string; readonly password: string; + /** + * UserLoginType defaults to LoginTypePassword. + */ readonly login_type: LoginType; + /** + * UserStatus defaults to UserStatusDormant. + */ readonly user_status: UserStatus | null; + /** + * OrganizationIDs is a list of organization IDs that the user should be a member of. + */ readonly organization_ids: readonly string[]; } @@ -741,15 +1413,35 @@ export const CreateWorkspaceBuildReasons: CreateWorkspaceBuildReason[] = [ ]; // From codersdk/workspaces.go +/** + * CreateWorkspaceBuildRequest provides options to update the latest workspace build. 
+ */ export interface CreateWorkspaceBuildRequest { readonly template_version_id?: string; readonly transition: WorkspaceTransition; readonly dry_run?: boolean; readonly state?: string; + /** + * Orphan may be set for the Destroy transition. + */ readonly orphan?: boolean; + /** + * ParameterValues are optional. It will write params to the 'workspace' scope. + * This will overwrite any existing parameters with the same name. + * This will not delete old params not included in this list. + */ readonly rich_parameter_values?: readonly WorkspaceBuildParameter[]; + /** + * Log level changes the default logging verbosity of a provider ("info" if empty). + */ readonly log_level?: ProvisionerLogLevel; + /** + * TemplateVersionPresetID is the ID of the template version preset to use for the build. + */ readonly template_version_preset_id?: string; + /** + * Reason sets the reason for the workspace build. + */ readonly reason?: CreateWorkspaceBuildReason; } @@ -761,12 +1453,36 @@ export interface CreateWorkspaceProxyRequest { } // From codersdk/organizations.go +/** + * CreateWorkspaceRequest provides options for creating a new workspace. + * Either TemplateID or TemplateVersionID must be specified. They cannot both be present. + * @Description CreateWorkspaceRequest provides options for creating a new workspace. + * @Description Only one of TemplateID or TemplateVersionID can be specified, not both. + * @Description If TemplateID is specified, the active version of the template will be used. 
+ * @Description Workspace names: + * @Description - Must start with a letter or number + * @Description - Can only contain letters, numbers, and hyphens + * @Description - Cannot contain spaces or special characters + * @Description - Cannot be named `new` or `create` + * @Description - Must be unique within your workspaces + * @Description - Maximum length of 32 characters + */ export interface CreateWorkspaceRequest { + /** + * TemplateID specifies which template should be used for creating the workspace. + */ readonly template_id?: string; + /** + * TemplateVersionID can be used to specify a specific version of a template for creating the workspace. + */ readonly template_version_id?: string; readonly name: string; readonly autostart_schedule?: string; readonly ttl_ms?: number; + /** + * RichParameterValues allows for additional parameters to be provided + * during the initial provision. + */ readonly rich_parameter_values?: readonly WorkspaceBuildParameter[]; readonly automatic_updates?: AutomaticUpdates; readonly template_version_preset_id?: string; @@ -807,16 +1523,30 @@ export interface CustomNotificationRequest { } // From codersdk/roles.go +/** + * CustomRoleRequest is used to edit custom roles. + */ export interface CustomRoleRequest { readonly name: string; readonly display_name: string; readonly site_permissions: readonly Permission[]; - readonly organization_permissions: readonly Permission[]; readonly user_permissions: readonly Permission[]; + /** + * OrganizationPermissions are specific to the organization the role belongs to. + */ + readonly organization_permissions: readonly Permission[]; + /** + * OrganizationMemberPermissions are specific to the organization the role belongs to. + */ + readonly organization_member_permissions: readonly Permission[]; } // From codersdk/deployment.go export interface DAUEntry { + /** + * Date is a string formatted as 2024-01-31. + * Timezone and time information is not included. 
+ */ readonly date: string; readonly amount: number; } @@ -847,7 +1577,13 @@ export interface DERPConfig { } // From healthsdk/healthsdk.go +/** + * DERPHealthReport includes health details of each configured DERP/STUN region. + */ export interface DERPHealthReport extends BaseReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. + */ readonly healthy: boolean; readonly regions: Record; readonly netcheck?: NetcheckReport; @@ -856,7 +1592,13 @@ export interface DERPHealthReport extends BaseReport { } // From healthsdk/healthsdk.go +/** + * DERPHealthReport includes health details of a single node in a single region. + */ export interface DERPNodeReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. + */ readonly healthy: boolean; readonly severity: HealthSeverity; readonly warnings: readonly HealthMessage[]; @@ -879,7 +1621,13 @@ export interface DERPRegion { } // From healthsdk/healthsdk.go +/** + * DERPHealthReport includes health details of each node in a single region. + */ export interface DERPRegionReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. + */ readonly healthy: boolean; readonly severity: HealthSeverity; readonly warnings: readonly HealthMessage[]; @@ -909,7 +1657,13 @@ export interface DangerousConfig { export const DatabaseNotReachable = "database not reachable"; // From healthsdk/healthsdk.go +/** + * DatabaseReport shows the results of pinging the configured database.Conn. + */ export interface DatabaseReport extends BaseReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. 
+ */ readonly healthy: boolean; readonly reachable: boolean; readonly latency: string; @@ -919,6 +1673,9 @@ export interface DatabaseReport extends BaseReport { // From codersdk/externalauth.go export interface DeleteExternalAuthByIDResponse { + /** + * TokenRevoked set to true if token revocation was attempted and was successful + */ readonly token_revoked: boolean; readonly token_revocation_error?: string; } @@ -935,6 +1692,9 @@ export interface DeleteWorkspaceAgentPortShareRequest { } // From codersdk/deployment.go +/** + * DeploymentConfig contains both the deployment values and how they're set. + */ export interface DeploymentConfig { readonly config?: DeploymentValues; readonly options?: SerpentOptionSet; @@ -942,20 +1702,37 @@ export interface DeploymentConfig { // From codersdk/deployment.go export interface DeploymentStats { + /** + * AggregatedFrom is the time in which stats are aggregated from. + * This might be back in time a specific duration or interval. + */ readonly aggregated_from: string; + /** + * CollectedAt is the time in which stats are collected at. + */ readonly collected_at: string; + /** + * NextUpdateAt is the time when the next batch of stats will + * be updated. + */ readonly next_update_at: string; readonly workspaces: WorkspaceDeploymentStats; readonly session_count: SessionCountDeploymentStats; } // From codersdk/deployment.go +/** + * DeploymentValues is the central configuration values the coder server. + */ export interface DeploymentValues { readonly verbose?: boolean; readonly access_url?: string; readonly wildcard_access_url?: string; readonly docs_url?: string; readonly redirect_to_access_url?: boolean; + /** + * HTTPAddress is a string because it may be set to zero to disable. 
+ */ readonly http_address?: string; readonly autobuild_poll_interval?: number; readonly job_hang_detector_interval?: number; @@ -994,6 +1771,7 @@ export interface DeploymentValues { readonly session_lifetime?: SessionLifetime; readonly disable_password_auth?: boolean; readonly support?: SupportConfig; + readonly enable_authz_recording?: boolean; readonly external_auth?: SerpentStruct; readonly config_ssh?: SSHConfig; readonly wgtunnel_host?: string; @@ -1014,6 +1792,9 @@ export interface DeploymentValues { readonly ai?: AIConfig; readonly config?: string; readonly write_config?: boolean; + /** + * Deprecated: Use HTTPAddress or TLS.Address instead. + */ readonly address?: string; } @@ -1048,8 +1829,15 @@ export const DisplayApps: DisplayApp[] = [ // From codersdk/parameters.go export interface DynamicParametersRequest { + /** + * ID identifies the request. The response contains the same + * ID so that the client can match it to the request. + */ readonly id: number; readonly inputs: Record; + /** + * OwnerID if uuid.Nil, it defaults to `codersdk.Me` + */ readonly owner_id?: string; } @@ -1099,11 +1887,13 @@ export interface Entitlements { } // From codersdk/client.go +/** + * EntitlementsWarnings contains active warnings for the user's entitlements. + */ export const EntitlementsWarningHeader = "X-Coder-Entitlements-Warning"; // From codersdk/deployment.go export type Experiment = - | "aibridge" | "auto-fill-parameters" | "example" | "mcp-server-http" @@ -1114,7 +1904,6 @@ export type Experiment = | "workspace-usage"; export const Experiments: Experiment[] = [ - "aibridge", "auto-fill-parameters", "example", "mcp-server-http", @@ -1131,6 +1920,9 @@ export interface ExternalAPIKeyScopes { } // From codersdk/workspaces.go +/** + * ExternalAgentCredentials contains the credentials needed for an external agent to connect to Coder. 
+ */ export interface ExternalAgentCredentials { readonly command: string; readonly agent_token: string; @@ -1142,9 +1934,21 @@ export interface ExternalAuth { readonly device: boolean; readonly display_name: string; readonly supports_revocation: boolean; + /** + * User is the user that authenticated with the provider. + */ readonly user: ExternalAuthUser | null; + /** + * AppInstallable is true if the request for app installs was successful. + */ readonly app_installable: boolean; + /** + * AppInstallations are the installations that the user has access to. + */ readonly installations: readonly ExternalAuthAppInstallation[]; + /** + * AppInstallURL is the URL to install the app. + */ readonly app_install_url: string; } @@ -1157,8 +1961,15 @@ export interface ExternalAuthAppInstallation { // From codersdk/deployment.go export interface ExternalAuthConfig { + /** + * Type is the type of external auth config. + */ readonly type: string; readonly client_id: string; + /** + * ID is a unique identifier for the auth config. + * It defaults to `type` when not provided. + */ readonly id: string; readonly auth_url: string; readonly token_url: string; @@ -1173,12 +1984,30 @@ export interface ExternalAuthConfig { readonly mcp_url: string; readonly mcp_tool_allow_regex: string; readonly mcp_tool_deny_regex: string; + /** + * Regex allows API requesters to match an auth config by + * a string (e.g. coder.com) instead of by it's type. + * + * Git clone makes use of this by parsing the URL from: + * 'Username for "https://github.com":' + * And sending it to the Coder server to match against the Regex. + */ readonly regex: string; + /** + * DisplayName is shown in the UI to identify the auth config. + */ readonly display_name: string; + /** + * DisplayIcon is a URL to an icon to display in the UI. + */ readonly display_icon: string; } // From codersdk/externalauth.go +/** + * ExternalAuthDevice is the response from the device authorization endpoint. 
+ * See: https://tools.ietf.org/html/rfc8628#section-3.2 + */ export interface ExternalAuthDevice { readonly device_code: string; readonly user_code: string; @@ -1193,6 +2022,11 @@ export interface ExternalAuthDeviceExchange { } // From codersdk/externalauth.go +/** + * ExternalAuthLink is a link between a user and an external auth provider. + * It excludes information that requires a token to access, so can be statically + * built from the database and configs. + */ export interface ExternalAuthLink { readonly provider_id: string; readonly created_at: string; @@ -1204,6 +2038,9 @@ export interface ExternalAuthLink { } // From codersdk/externalauth.go +/** + * ExternalAuthLinkProvider are the static details of a provider. + */ export interface ExternalAuthLinkProvider { readonly id: string; readonly type: string; @@ -1230,7 +2067,23 @@ export interface Feature { readonly enabled: boolean; readonly limit?: number; readonly actual?: number; + /** + * SoftLimit is the soft limit of the feature, and is only used for showing + * included limits in the dashboard. No license validation or warnings are + * generated from this value. + */ readonly soft_limit?: number; + /** + * UsagePeriod denotes that the usage is a counter that accumulates over + * this period (and most likely resets with the issuance of the next + * license). + * + * These dates are determined from the license that this entitlement comes + * from, see enterprise/coderd/license/license.go. 
+ * + * Only certain features set these fields: + * - FeatureManagedAgentLimit + */ readonly usage_period?: UsagePeriod; } @@ -1295,6 +2148,10 @@ export const FeatureSets: FeatureSet[] = ["enterprise", "", "premium"]; export const FormatZip = "zip"; // From codersdk/parameters.go +/** + * FriendlyDiagnostic == previewtypes.FriendlyDiagnostic + * Copied to avoid import deps + */ export interface FriendlyDiagnostic { readonly severity: DiagnosticSeverityString; readonly summary: string; @@ -1303,6 +2160,9 @@ export interface FriendlyDiagnostic { } // From codersdk/apikey.go +/** + * GenerateAPIKeyResponse contains an API key for a user. + */ export interface GenerateAPIKeyResponse { readonly key: string; } @@ -1334,6 +2194,11 @@ export interface GitSSHKey { readonly user_id: string; readonly created_at: string; readonly updated_at: string; + /** + * PublicKey is the SSH public key in OpenSSH format. + * Example: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAID3OmYJvT7q1cF1azbybYy0OZ9yrXfA+M6Lr4vzX5zlp\n" + * Note: The key includes a trailing newline (\n). + */ readonly public_key: string; } @@ -1350,6 +2215,11 @@ export interface Group { readonly display_name: string; readonly organization_id: string; readonly members: readonly ReducedUser[]; + /** + * How many members are in this group. Shows the total count, + * even if the user is not authorized to read group member details. + * May be greater than `len(Group.Members)`. + */ readonly total_member_count: number; readonly avatar_url: string; readonly quota_allowance: number; @@ -1360,8 +2230,18 @@ export interface Group { // From codersdk/groups.go export interface GroupArguments { + /** + * Organization can be an org UUID or name + */ readonly Organization: string; + /** + * HasMember can be a user uuid or username + */ readonly HasMember: string; + /** + * GroupIDs is a list of group UUIDs to filter by. + * If not set, all groups will be returned. 
+ */ readonly GroupIDs: readonly string[]; } @@ -1372,10 +2252,33 @@ export const GroupSources: GroupSource[] = ["oidc", "user"]; // From codersdk/idpsync.go export interface GroupSyncSettings { + /** + * Field is the name of the claim field that specifies what groups a user + * should be in. If empty, no groups will be synced. + */ readonly field: string; + /** + * Mapping is a map from OIDC groups to Coder group IDs + */ readonly mapping: Record; + /** + * RegexFilter is a regular expression that filters the groups returned by + * the OIDC provider. Any group not matched by this regex will be ignored. + * If the group filter is nil, then no group filtering will occur. + */ readonly regex_filter: string | null; + /** + * AutoCreateMissing controls whether groups returned by the OIDC provider + * are automatically created in Coder if they are missing. + */ readonly auto_create_missing_groups: boolean; + /** + * LegacyNameMapping is deprecated. It remaps an IDP group name to + * a Coder group name. Since configuration is now done at runtime, + * group IDs are used to account for group renames. + * For legacy configurations, this config option has to remain. + * Deprecated: Use Mapping instead. + */ readonly legacy_group_name_mapping?: Record; } @@ -1437,6 +2340,9 @@ export const HealthCodes: HealthCode[] = [ ]; // From health/model.go +/** + * @typescript-generate Message + */ export interface HealthMessage { readonly code: HealthCode; readonly message: string; @@ -1472,21 +2378,46 @@ export const HealthSeveritys: HealthSeverity[] = ["error", "ok", "warning"]; // From codersdk/workspaceapps.go export interface Healthcheck { + /** + * URL specifies the endpoint to check for the app health. + */ readonly url: string; + /** + * Interval specifies the seconds between each health check. + */ readonly interval: number; + /** + * Threshold specifies the number of consecutive failed health checks before returning "unhealthy". 
+ */ readonly threshold: number; } // From codersdk/deployment.go +/** + * HealthcheckConfig contains configuration for healthchecks. + */ export interface HealthcheckConfig { readonly refresh: number; readonly threshold_database: number; } // From healthsdk/healthsdk.go +/** + * HealthcheckReport contains information about the health status of a Coder deployment. + */ export interface HealthcheckReport { + /** + * Time is the time the report was generated at. + */ readonly time: string; + /** + * Healthy is true if the report returns no errors. + * Deprecated: use `Severity` instead + */ readonly healthy: boolean; + /** + * Severity indicates the status of Coder health. + */ readonly severity: HealthSeverity; readonly derp: DERPHealthReport; readonly access_url: AccessURLReport; @@ -1494,12 +2425,21 @@ export interface HealthcheckReport { readonly database: DatabaseReport; readonly workspace_proxy: WorkspaceProxyReport; readonly provisioner_daemons: ProvisionerDaemonsReport; + /** + * The Coder version of the server that the report was generated on. + */ readonly coder_version: string; } // From codersdk/idpsync.go -export interface IDPSyncMapping { +export interface IDPSyncMapping { + /** + * The IdP claim the user has + */ readonly Given: string; + /** + * The ID of the Coder resource the user should be added to + */ readonly Gets: ResourceIdType; } @@ -1545,6 +2485,9 @@ export const InsightsReportIntervals: InsightsReportInterval[] = [ // From codersdk/workspaceagents.go export interface IssueReconnectingPTYSignedTokenRequest { + /** + * URL is the URL of the reconnecting-pty endpoint you are connecting to. + */ readonly url: string; readonly agentID: string; } @@ -1564,6 +2507,12 @@ export interface License { readonly id: number; readonly uuid: string; readonly uploaded_at: string; + /** + * Claims are the JWT claims asserted by the license. 
Here we use + * a generic string map to ensure that all data from the server is + * parsed verbatim, not just the fields this version of Coder + * understands. + */ // empty interface{} type, falling back to unknown readonly claims: Record; } @@ -1580,6 +2529,7 @@ export interface LinkConfig { readonly name: string; readonly target: string; readonly icon: string; + readonly location?: string; } // From codersdk/inboxnotification.go @@ -1599,6 +2549,12 @@ export interface ListInboxNotificationsResponse { // From codersdk/externalauth.go export interface ListUserExternalAuthResponse { readonly providers: readonly ExternalAuthLinkProvider[]; + /** + * Links are all the authenticated links for the user. + * If a link has a provider ID that does not exist, then that provider + * is no longer configured, rendering it unusable. It is still valuable + * to include these links so that the user can unlink them. + */ readonly links: readonly ExternalAuthLink[]; } @@ -1639,20 +2595,45 @@ export const LoginTypes: LoginType[] = [ ]; // From codersdk/users.go +/** + * LoginWithPasswordRequest enables callers to authenticate with email and password. + */ export interface LoginWithPasswordRequest { readonly email: string; readonly password: string; } // From codersdk/users.go +/** + * LoginWithPasswordResponse contains a session token for the newly authenticated user. + */ export interface LoginWithPasswordResponse { readonly session_token: string; } // From codersdk/provisionerdaemons.go +/** + * MatchedProvisioners represents the number of provisioner daemons + * available to take a job at a specific point in time. + * Introduced in Coder version 2.18.0. + */ export interface MatchedProvisioners { + /** + * Count is the number of provisioner daemons that matched the given + * tags. If the count is 0, it means no provisioner daemons matched the + * requested tags. 
+ */ readonly count: number; + /** + * Available is the number of provisioner daemons that are available to + * take jobs. This may be less than the count if some provisioners are + * busy or have been stopped. + */ readonly available: number; + /** + * MostRecentlySeen is the most recently seen time of the set of matched + * provisioners. If no provisioners matched, this field will be null. + */ readonly most_recently_seen?: string; } @@ -1665,32 +2646,65 @@ export interface MinimalOrganization { } // From codersdk/users.go +/** + * MinimalUser is the minimal information needed to identify a user and show + * them on the UI. + */ export interface MinimalUser { readonly id: string; readonly username: string; + readonly name?: string; readonly avatar_url?: string; } // From netcheck/netcheck.go +/** + * Report contains the result of a single netcheck. + */ export interface NetcheckReport { - readonly UDP: boolean; - readonly IPv6: boolean; - readonly IPv4: boolean; - readonly IPv6CanSend: boolean; - readonly IPv4CanSend: boolean; - readonly OSHasIPv6: boolean; - readonly ICMPv4: boolean; + readonly UDP: boolean; // a UDP STUN round trip completed + readonly IPv6: boolean; // an IPv6 STUN round trip completed + readonly IPv4: boolean; // an IPv4 STUN round trip completed + readonly IPv6CanSend: boolean; // an IPv6 packet was able to be sent + readonly IPv4CanSend: boolean; // an IPv4 packet was able to be sent + readonly OSHasIPv6: boolean; // could bind a socket to ::1 + readonly ICMPv4: boolean; // an ICMPv4 round trip completed + /** + * MappingVariesByDestIP is whether STUN results depend which + * STUN server you're talking to (on IPv4). + */ readonly MappingVariesByDestIP: boolean | null; + /** + * HairPinning is whether the router supports communicating + * between two local devices through the NATted public IP address + * (on IPv4). + */ readonly HairPinning: boolean | null; + /** + * UPnP is whether UPnP appears present on the LAN. 
+ * Empty means not checked. + */ readonly UPnP: boolean | null; + /** + * PMP is whether NAT-PMP appears present on the LAN. + * Empty means not checked. + */ readonly PMP: boolean | null; + /** + * PCP is whether PCP appears present on the LAN. + * Empty means not checked. + */ readonly PCP: boolean | null; - readonly PreferredDERP: number; - readonly RegionLatency: Record; - readonly RegionV4Latency: Record; - readonly RegionV6Latency: Record; - readonly GlobalV4: string; - readonly GlobalV6: string; + readonly PreferredDERP: number; // or 0 for unknown + readonly RegionLatency: Record; // keyed by DERP Region ID + readonly RegionV4Latency: Record; // keyed by DERP Region ID + readonly RegionV6Latency: Record; // keyed by DERP Region ID + readonly GlobalV4: string; // ip:port of global IPv4 + readonly GlobalV6: string; // [ip]:port of global IPv6 + /** + * CaptivePortal is set when we think there's a captive portal that is + * intercepting HTTP traffic. + */ readonly CaptivePortal: boolean | null; } @@ -1722,45 +2736,139 @@ export interface NotificationTemplate { // From codersdk/deployment.go export interface NotificationsConfig { + /** + * The upper limit of attempts to send a notification. + */ readonly max_send_attempts: number; + /** + * The minimum time between retries. + */ readonly retry_interval: number; + /** + * The notifications system buffers message updates in memory to ease pressure on the database. + * This option controls how often it synchronizes its state with the database. The shorter this value the + * lower the change of state inconsistency in a non-graceful shutdown - but it also increases load on the + * database. It is recommended to keep this option at its default value. + */ readonly sync_interval: number; + /** + * The notifications system buffers message updates in memory to ease pressure on the database. + * This option controls how many updates are kept in memory. 
The lower this value the + * lower the change of state inconsistency in a non-graceful shutdown - but it also increases load on the + * database. It is recommended to keep this option at its default value. + */ readonly sync_buffer_size: number; + /** + * How long a notifier should lease a message. This is effectively how long a notification is 'owned' + * by a notifier, and once this period expires it will be available for lease by another notifier. Leasing + * is important in order for multiple running notifiers to not pick the same messages to deliver concurrently. + * This lease period will only expire if a notifier shuts down ungracefully; a dispatch of the notification + * releases the lease. + */ readonly lease_period: number; + /** + * How many notifications a notifier should lease per fetch interval. + */ readonly lease_count: number; + /** + * How often to query the database for queued notifications. + */ readonly fetch_interval: number; + /** + * Which delivery method to use (available options: 'smtp', 'webhook'). + */ readonly method: string; + /** + * How long to wait while a notification is being sent before giving up. + */ readonly dispatch_timeout: number; + /** + * SMTP settings. + */ readonly email: NotificationsEmailConfig; + /** + * Webhook settings. + */ readonly webhook: NotificationsWebhookConfig; + /** + * Inbox settings. + */ readonly inbox: NotificationsInboxConfig; } // From codersdk/deployment.go export interface NotificationsEmailAuthConfig { + /** + * Identity for PLAIN auth. + */ readonly identity: string; + /** + * Username for LOGIN/PLAIN auth. + */ readonly username: string; + /** + * Password for LOGIN/PLAIN auth. + */ readonly password: string; + /** + * File from which to load the password for LOGIN/PLAIN auth. + */ readonly password_file: string; } // From codersdk/deployment.go export interface NotificationsEmailConfig { + /** + * The sender's address. 
+ */ readonly from: string; + /** + * The intermediary SMTP host through which emails are sent (host:port). + */ readonly smarthost: string; + /** + * The hostname identifying the SMTP server. + */ readonly hello: string; + /** + * Authentication details. + */ readonly auth: NotificationsEmailAuthConfig; + /** + * TLS details. + */ readonly tls: NotificationsEmailTLSConfig; + /** + * ForceTLS causes a TLS connection to be attempted. + */ readonly force_tls: boolean; } // From codersdk/deployment.go export interface NotificationsEmailTLSConfig { + /** + * StartTLS attempts to upgrade plain connections to TLS. + */ readonly start_tls: boolean; + /** + * ServerName to verify the hostname for the targets. + */ readonly server_name: string; + /** + * InsecureSkipVerify skips target certificate validation. + */ readonly insecure_skip_verify: boolean; + /** + * CAFile specifies the location of the CA certificate to use. + */ readonly ca_file: string; + /** + * CertFile specifies the location of the certificate to use. + */ readonly cert_file: string; + /** + * KeyFile specifies the location of the key to use. + */ readonly key_file: string; } @@ -1776,10 +2884,16 @@ export interface NotificationsSettings { // From codersdk/deployment.go export interface NotificationsWebhookConfig { + /** + * The URL to which the payload will be sent with an HTTP POST request. + */ readonly endpoint: string; } // From codersdk/parameters.go +/** + * NullHCLString == `previewtypes.NullHCLString`. + */ export interface NullHCLString { readonly value: string; readonly valid: boolean; @@ -1789,10 +2903,17 @@ export interface NullHCLString { export interface OAuth2AppEndpoints { readonly authorization: string; readonly token: string; + readonly token_revoke: string; + /** + * DeviceAuth is optional. 
+ */ readonly device_authorization: string; } // From codersdk/oauth2.go +/** + * OAuth2AuthorizationServerMetadata represents RFC 8414 OAuth 2.0 Authorization Server Metadata + */ export interface OAuth2AuthorizationServerMetadata { readonly issuer: string; readonly authorization_endpoint: string; @@ -1806,6 +2927,10 @@ export interface OAuth2AuthorizationServerMetadata { } // From codersdk/oauth2.go +/** + * OAuth2ClientConfiguration represents RFC 7592 Client Configuration (for GET/PUT operations) + * Same as OAuth2ClientRegistrationResponse but without client_secret in GET responses + */ export interface OAuth2ClientConfiguration { readonly client_id: string; readonly client_id_issued_at: number; @@ -1830,6 +2955,9 @@ export interface OAuth2ClientConfiguration { } // From codersdk/oauth2.go +/** + * OAuth2ClientRegistrationRequest represents RFC 7591 Dynamic Client Registration Request + */ export interface OAuth2ClientRegistrationRequest { readonly redirect_uris?: readonly string[]; readonly client_name?: string; @@ -1850,6 +2978,9 @@ export interface OAuth2ClientRegistrationRequest { } // From codersdk/oauth2.go +/** + * OAuth2ClientRegistrationResponse represents RFC 7591 Dynamic Client Registration Response + */ export interface OAuth2ClientRegistrationResponse { readonly client_id: string; readonly client_secret?: string; @@ -1898,6 +3029,9 @@ export interface OAuth2GithubConfig { } // From codersdk/oauth2.go +/** + * OAuth2ProtectedResourceMetadata represents RFC 9728 OAuth 2.0 Protected Resource Metadata + */ export interface OAuth2ProtectedResourceMetadata { readonly resource: string; readonly authorization_servers: readonly string[]; @@ -1911,6 +3045,11 @@ export interface OAuth2ProviderApp { readonly name: string; readonly callback_url: string; readonly icon: string; + /** + * Endpoints are included in the app response for easier discovery. 
The OAuth2 + * spec does not have a defined place to find these (for comparison, OIDC has + * a '/.well-known/openid-configuration' endpoint). + */ readonly endpoints: OAuth2AppEndpoints; } @@ -1948,9 +3087,15 @@ export const OAuth2ProviderResponseTypes: OAuth2ProviderResponseType[] = [ ]; // From codersdk/client.go +/** + * OAuth2RedirectCookie is the name of the cookie that stores the oauth2 redirect. + */ export const OAuth2RedirectCookie = "oauth_redirect"; // From codersdk/client.go +/** + * OAuth2StateCookie is the name of the cookie that stores the oauth2 state. + */ export const OAuth2StateCookie = "oauth_state"; // From codersdk/users.go @@ -1972,6 +3117,9 @@ export interface OIDCConfig { readonly allow_signups: boolean; readonly client_id: string; readonly client_secret: string; + /** + * ClientKeyFile & ClientCertFile are used in place of ClientSecret for PKI auth. + */ readonly client_key_file: string; readonly client_cert_file: string; readonly email_domain: string; @@ -1982,7 +3130,20 @@ export interface OIDCConfig { readonly name_field: string; readonly email_field: string; readonly auth_url_params: SerpentStruct>; + /** + * IgnoreUserInfo & UserInfoFromAccessToken are mutually exclusive. Only 1 + * can be set to true. Ideally this would be an enum with 3 states, ['none', + * 'userinfo', 'access_token']. However, for backward compatibility, + * `ignore_user_info` must remain. And `access_token` is a niche, non-spec + * compliant edge case. So it's use is rare, and should not be advised. + */ readonly ignore_user_info: boolean; + /** + * UserInfoFromAccessToken as mentioned above is an edge case. This allows + * sourcing the user_info from the access token itself instead of a user_info + * endpoint. This assumes the access token is a valid JWT with a set of claims to + * be merged with the id_token. 
+ */ readonly source_user_info_from_access_token: boolean; readonly organization_field: string; readonly organization_mapping: SerpentStruct>; @@ -2012,6 +3173,9 @@ export const OptionTypes: OptionType[] = [ ]; // From codersdk/organizations.go +/** + * Organization is the JSON representation of a Coder organization. + */ export interface Organization extends MinimalOrganization { readonly description: string; readonly created_at: string; @@ -2060,12 +3224,25 @@ export interface OrganizationProvisionerJobsOptions { readonly IDs: readonly string[]; readonly Status: readonly ProvisionerJobStatus[]; readonly Tags: Record; + readonly Initiator: string; } // From codersdk/idpsync.go export interface OrganizationSyncSettings { + /** + * Field selects the claim field to be used as the created user's + * organizations. If the field is the empty string, then no organization + * updates will ever come from the OIDC provider. + */ readonly field: string; + /** + * Mapping maps from an OIDC claim --> Coder organization uuid + */ readonly mapping: Record; + /** + * AssignDefault will ensure the default org is always included + * for every user, regardless of their claims. This preserves legacy behavior. + */ readonly organization_assign_default: boolean; } @@ -2082,9 +3259,30 @@ export interface PaginatedMembersResponse { } // From codersdk/pagination.go +/** + * Pagination sets pagination options for the endpoints that support it. + */ export interface Pagination { + /** + * AfterID returns all or up to Limit results after the given + * UUID. This option can be used with or as an alternative to + * Offset for better performance. To use it as an alternative, + * set AfterID to the last UUID returned by the previous + * request. + */ readonly after_id?: string; + /** + * Limit sets the maximum number of users to be returned + * in a single page. If the limit is <= 0, there is no limit + * and all users are returned. 
+ */ readonly limit?: number; + /** + * Offset is used to indicate which page to return. An offset of 0 + * returns the first 'limit' number of users. + * To get the next page, use offset=*. + * Offset is 0 indexed, so the first record sits at offset 0. + */ readonly offset?: number; } @@ -2124,6 +3322,9 @@ export interface PatchGroupIDPSyncConfigRequest { } // From codersdk/idpsync.go +/** + * If the same mapping is present in both Add and Remove, Remove will take presidence. + */ export interface PatchGroupIDPSyncMappingRequest { readonly Add: readonly IDPSyncMapping[]; readonly Remove: readonly IDPSyncMapping[]; @@ -2146,6 +3347,9 @@ export interface PatchOrganizationIDPSyncConfigRequest { } // From codersdk/idpsync.go +/** + * If the same mapping is present in both Add and Remove, Remove will take presidence. + */ export interface PatchOrganizationIDPSyncMappingRequest { readonly Add: readonly IDPSyncMapping[]; readonly Remove: readonly IDPSyncMapping[]; @@ -2157,6 +3361,9 @@ export interface PatchRoleIDPSyncConfigRequest { } // From codersdk/idpsync.go +/** + * If the same mapping is present in both Add and Remove, Remove will take presidence. + */ export interface PatchRoleIDPSyncMappingRequest { readonly Add: readonly IDPSyncMapping[]; readonly Remove: readonly IDPSyncMapping[]; @@ -2178,10 +3385,21 @@ export interface PatchWorkspaceProxy { } // From codersdk/client.go +/** + * PathAppSessionTokenCookie is the name of the cookie that stores an + * application-scoped API token on workspace proxy path app domains. + *nolint:gosec + */ export const PathAppSessionTokenCookie = "coder_path_app_session_token"; // From codersdk/roles.go +/** + * Permission is the format passed into the rego. 
+ */ export interface Permission { + /** + * Negate makes this a negative permission + */ readonly negate: boolean; readonly resource_type: RBACResource; readonly action: RBACAction; @@ -2213,9 +3431,26 @@ export interface PprofConfig { // From codersdk/deployment.go export interface PrebuildsConfig { + /** + * ReconciliationInterval defines how often the workspace prebuilds state should be reconciled. + */ readonly reconciliation_interval: number; + /** + * ReconciliationBackoffInterval specifies the amount of time to increase the backoff interval + * when errors occur during reconciliation. + */ readonly reconciliation_backoff_interval: number; + /** + * ReconciliationBackoffLookback determines the time window to look back when calculating + * the number of failed prebuilds, which influences the backoff strategy. + */ readonly reconciliation_backoff_lookback: number; + /** + * FailureHardLimit defines the maximum number of consecutive failed prebuild attempts allowed + * before a preset is considered to be in a hard limit state. When a preset hits this limit, + * no new prebuilds will be created until the limit is reset. + * FailureHardLimit is disabled when set to zero. + */ readonly failure_hard_limit: number; } @@ -2261,6 +3496,9 @@ export interface PreviewParameterData { readonly options: readonly PreviewParameterOption[]; readonly validations: readonly PreviewParameterValidation[]; readonly required: boolean; + /** + * legacy_variable_name was removed (= 14) + */ readonly order: number; readonly ephemeral: boolean; } @@ -2284,6 +3522,9 @@ export interface PreviewParameterStyling { // From codersdk/parameters.go export interface PreviewParameterValidation { readonly validation_error: string; + /** + * All validation attributes are optional. 
+ */ readonly validation_regex: string | null; readonly validation_min: number | null; readonly validation_max: number | null; @@ -2301,6 +3542,9 @@ export interface PrometheusConfig { // From codersdk/deployment.go export interface ProvisionerConfig { + /** + * Daemons is the number of built-in terraform provisioners. + */ readonly daemons: number; readonly daemon_types: string; readonly daemon_poll_interval: number; @@ -2321,6 +3565,9 @@ export interface ProvisionerDaemon { readonly api_version: string; readonly provisioners: readonly ProvisionerType[]; readonly tags: Record; + /** + * Optional fields. + */ readonly key_name: string | null; readonly status: ProvisionerDaemonStatus | null; readonly current_job: ProvisionerDaemonJob | null; @@ -2337,9 +3584,15 @@ export interface ProvisionerDaemonJob { } // From codersdk/client.go +/** + * ProvisionerDaemonKey contains the authentication key for an external provisioner daemon + */ export const ProvisionerDaemonKey = "Coder-Provisioner-Daemon-Key"; // From codersdk/client.go +/** + * ProvisionerDaemonPSK contains the authentication pre-shared key for an external provisioner daemon + */ export const ProvisionerDaemonPSK = "Coder-Provisioner-Daemon-PSK"; // From codersdk/provisionerdaemons.go @@ -2352,6 +3605,9 @@ export const ProvisionerDaemonStatuses: ProvisionerDaemonStatus[] = [ ]; // From healthsdk/healthsdk.go +/** + * ProvisionerDaemonsReport includes health details of each connected provisioner daemon. + */ export interface ProvisionerDaemonsReport extends BaseReport { readonly items: readonly ProvisionerDaemonsReportItem[]; } @@ -2363,6 +3619,9 @@ export interface ProvisionerDaemonsReportItem { } // From codersdk/provisionerdaemons.go +/** + * ProvisionerJob describes the job executed by the provisioning daemon. 
+ */ export interface ProvisionerJob { readonly id: string; readonly created_at: string; @@ -2379,6 +3638,7 @@ export interface ProvisionerJob { readonly queue_position: number; readonly queue_size: number; readonly organization_id: string; + readonly initiator_id: string; readonly input: ProvisionerJobInput; readonly type: ProvisionerJobType; readonly available_workers?: readonly string[]; @@ -2387,6 +3647,9 @@ export interface ProvisionerJob { } // From codersdk/provisionerdaemons.go +/** + * ProvisionerJobInput represents the input for the job. + */ export interface ProvisionerJobInput { readonly template_version_id?: string; readonly workspace_build_id?: string; @@ -2394,6 +3657,9 @@ export interface ProvisionerJobInput { } // From codersdk/provisionerdaemons.go +/** + * ProvisionerJobLog represents the provisioner log entry annotated with source and level. + */ export interface ProvisionerJobLog { readonly id: number; readonly created_at: string; @@ -2404,6 +3670,9 @@ export interface ProvisionerJobLog { } // From codersdk/provisionerdaemons.go +/** + * ProvisionerJobMetadata contains metadata for the job. + */ export interface ProvisionerJobMetadata { readonly template_version_name: string; readonly template_id: string; @@ -2509,8 +3778,19 @@ export type ProvisionerType = "echo" | "terraform"; export const ProvisionerTypes: ProvisionerType[] = ["echo", "terraform"]; // From codersdk/workspaceproxy.go +/** + * ProxyHealthReport is a report of the health of the workspace proxy. + * A healthy report will have no errors. Warnings are not fatal. + */ export interface ProxyHealthReport { + /** + * Errors are problems that prevent the workspace proxy from being healthy + */ readonly errors: readonly string[]; + /** + * Warnings do not prevent the workspace proxy from being healthy, but + * should be addressed. 
+ */ readonly warnings: readonly string[]; } @@ -2529,6 +3809,10 @@ export const ProxyHealthStatuses: ProxyHealthStatus[] = [ ]; // From codersdk/workspaces.go +/** + * PutExtendWorkspaceRequest is a request to extend the deadline of + * the active workspace build. + */ export interface PutExtendWorkspaceRequest { readonly deadline: string; } @@ -2551,6 +3835,7 @@ export type RBACAction = | "read" | "read_personal" | "ssh" + | "share" | "unassign" | "update" | "update_personal" @@ -2569,6 +3854,7 @@ export const RBACActions: RBACAction[] = [ "read", "read_personal", "ssh", + "share", "unassign", "update", "update_personal", @@ -2610,6 +3896,7 @@ export type RBACResource = | "replicas" | "system" | "tailnet_coordinator" + | "task" | "template" | "usage_event" | "user" @@ -2653,6 +3940,7 @@ export const RBACResources: RBACResource[] = [ "replicas", "system", "tailnet_coordinator", + "task", "template", "usage_event", "user", @@ -2673,14 +3961,23 @@ export interface RateLimitConfig { } // From codersdk/users.go +/** + * ReducedUser omits role and organization information. Roles are deduced from + * the user's site and organization roles. This requires fetching the user's + * organizational memberships. Fetching that is more expensive, and not usually + * required by the frontend. + */ export interface ReducedUser extends MinimalUser { - readonly name?: string; readonly email: string; readonly created_at: string; readonly updated_at: string; readonly last_seen_at?: string; readonly status: UserStatus; readonly login_type: LoginType; + /** + * Deprecated: this value should be retrieved from + * `codersdk.UserPreferenceSettings` instead. + */ readonly theme_preference?: string; } @@ -2691,7 +3988,18 @@ export interface Region { readonly display_name: string; readonly icon_url: string; readonly healthy: boolean; + /** + * PathAppURL is the URL to the base path for path apps. Optional + * unless wildcard_hostname is set. + * E.g. 
https://us.example.com + */ readonly path_app_url: string; + /** + * WildcardHostname is the wildcard hostname for subdomain apps. + * E.g. *.us.example.com + * E.g. *--suffix.au.example.com + * Optional. Does not need to be on the same domain as PathAppURL. + */ readonly wildcard_hostname: string; } @@ -2705,16 +4013,40 @@ export interface RegionsResponse { // From codersdk/replicas.go export interface Replica { + /** + * ID is the unique identifier for the replica. + */ readonly id: string; + /** + * Hostname is the hostname of the replica. + */ readonly hostname: string; + /** + * CreatedAt is the timestamp when the replica was first seen. + */ readonly created_at: string; + /** + * RelayAddress is the accessible address to relay DERP connections. + */ readonly relay_address: string; + /** + * RegionID is the region of the replica. + */ readonly region_id: number; + /** + * Error is the replica error. + */ readonly error: string; + /** + * DatabaseLatency is the latency in microseconds to the database. + */ readonly database_latency: number; } // From codersdk/users.go +/** + * RequestOneTimePasscodeRequest enables callers to request a one-time-passcode to change their password. + */ export interface RequestOneTimePasscodeRequest { readonly email: string; } @@ -2743,6 +4075,7 @@ export type ResourceType = | "organization" | "organization_member" | "prebuilds_settings" + | "task" | "template" | "template_version" | "user" @@ -2770,6 +4103,7 @@ export const ResourceTypes: ResourceType[] = [ "organization", "organization_member", "prebuilds_settings", + "task", "template", "template_version", "user", @@ -2781,76 +4115,169 @@ export const ResourceTypes: ResourceType[] = [ ]; // From codersdk/client.go +/** + * Response represents a generic HTTP response. + */ export interface Response { + /** + * Message is an actionable message that depicts actions the request took. + * These messages should be fully formed sentences with proper punctuation. 
+ * Examples: + * - "A user has been created." + * - "Failed to create a user." + */ readonly message: string; + /** + * Detail is a debug message that provides further insight into why the + * action failed. This information can be technical and a regular golang + * err.Error() text. + * - "database: too many open connections" + * - "stat: too many open files" + */ readonly detail?: string; + /** + * Validations are form field-specific friendly error messages. They will be + * shown on a form field in the UI. These can also be used to add additional + * context if there is a set of errors in the primary 'Message'. + */ readonly validations?: readonly ValidationError[]; } // From codersdk/roles.go +/** + * Role is a longer form of SlimRole that includes permissions details. + */ export interface Role { readonly name: string; readonly organization_id?: string; readonly display_name: string; readonly site_permissions: readonly Permission[]; - readonly organization_permissions: readonly Permission[]; readonly user_permissions: readonly Permission[]; + /** + * OrganizationPermissions are specific for the organization in the field 'OrganizationID' above. + */ + readonly organization_permissions: readonly Permission[]; + /** + * OrganizationMemberPermissions are specific for the organization in the field 'OrganizationID' above. + */ + readonly organization_member_permissions: readonly Permission[]; } // From codersdk/rbacroles.go +/** + * Ideally this roles would be generated from the rbac/roles.go package. + */ export const RoleAuditor = "auditor"; // From codersdk/rbacroles.go +/** + * Ideally this roles would be generated from the rbac/roles.go package. + */ export const RoleMember = "member"; // From codersdk/rbacroles.go +/** + * Ideally this roles would be generated from the rbac/roles.go package. 
+ */ export const RoleOrganizationAdmin = "organization-admin"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOrganizationAuditor = "organization-auditor"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOrganizationMember = "organization-member"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOrganizationTemplateAdmin = "organization-template-admin"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOrganizationUserAdmin = "organization-user-admin"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOrganizationWorkspaceCreationBan = "organization-workspace-creation-ban"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleOwner = "owner"; // From codersdk/idpsync.go export interface RoleSyncSettings { + /** + * Field is the name of the claim field that specifies what organization roles + * a user should be given. If empty, no roles will be synced. + */ readonly field: string; + /** + * Mapping is a map from OIDC groups to Coder organization roles. + */ readonly mapping: Record; } // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleTemplateAdmin = "template-admin"; // From codersdk/rbacroles.go +/** + * Ideally these roles would be generated from the rbac/roles.go package. + */ export const RoleUserAdmin = "user-admin"; // From codersdk/deployment.go +/** + * SSHConfig is configuration the cli & vscode extension use for configuring + * ssh connections.
+ */ export interface SSHConfig { + /** + * DeploymentName is the config-ssh Hostname prefix + */ readonly DeploymentName: string; + /** + * SSHConfigOptions are additional options to add to the ssh config file. + * This will override defaults. + */ readonly SSHConfigOptions: string; } // From codersdk/deployment.go export interface SSHConfigResponse { + /** + * HostnamePrefix is the prefix we append to workspace names for SSH hostnames. + * Deprecated: use HostnameSuffix instead. + */ readonly hostname_prefix: string; + /** + * HostnameSuffix is the suffix to append to workspace names for SSH hostnames. + */ readonly hostname_suffix: string; readonly ssh_config_options: Record; } // From healthsdk/healthsdk.go +/** + * STUNReport contains information about a given node's STUN capabilities. + */ export interface STUNReport { readonly Enabled: boolean; readonly CanSTUN: boolean; @@ -2858,9 +4285,16 @@ export interface STUNReport { } // From serpent/serpent.go +/** + * Annotations is an arbitrary key-mapping used to extend the Option and Command types. + * Its methods won't panic if the map is nil. + */ export type SerpentAnnotations = Record; // From serpent/serpent.go +/** + * Group describes a hierarchy of groups that an option or command belongs to. + */ export interface SerpentGroup { readonly parent?: SerpentGroup; readonly name?: string; @@ -2869,26 +4303,71 @@ export interface SerpentGroup { } // From serpent/option.go +/** + * Option is a configuration option for a CLI application. + */ export interface SerpentOption { readonly name?: string; readonly description?: string; + /** + * Required means this value must be set by some means. It requires + * `ValueSource != ValueSourceNone` + * If `Default` is set, then `Required` is ignored. + */ readonly required?: boolean; + /** + * Flag is the long name of the flag used to configure this option. If unset, + * flag configuring is disabled. 
+ */ readonly flag?: string; + /** + * FlagShorthand is the one-character shorthand for the flag. If unset, no + * shorthand is used. + */ readonly flag_shorthand?: string; + /** + * Env is the environment variable used to configure this option. If unset, + * environment configuring is disabled. + */ readonly env?: string; + /** + * YAML is the YAML key used to configure this option. If unset, YAML + * configuring is disabled. + */ readonly yaml?: string; + /** + * Default is parsed into Value if set. + */ readonly default?: string; + /** + * Value includes the types listed in values.go. + */ // interface type, falling back to unknown // this is likely an enum in an external package "github.com/spf13/pflag.Value" readonly value?: unknown; + /** + * Annotations enable extensions to serpent higher up in the stack. It's useful for + * help formatting and documentation generation. + */ readonly annotations?: SerpentAnnotations; + /** + * Group is a group hierarchy that helps organize this option in help, configs + * and other documentation. + */ readonly group?: SerpentGroup; + /** + * UseInstead is a list of options that should be used instead of this one. + * The field is used to generate a deprecation warning. + */ readonly use_instead?: readonly SerpentOption[]; readonly hidden?: boolean; readonly value_source?: SerpentValueSource; } // From serpent/option.go +/** + * OptionSet is a group of options that can be applied to a command. + */ export type SerpentOptionSet = readonly SerpentOption[]; // From serpent/values.go @@ -2898,8 +4377,26 @@ export type SerpentStruct = T; export type SerpentValueSource = string; // From derp/derp_client.go +/** + * ServerInfoMessage is sent by the server upon first connect. + */ export interface ServerInfoMessage { + /** + * TokenBucketBytesPerSecond is how many bytes per second the + * server says it will accept, including all framing bytes. + * + * Zero means unspecified. 
There might be a limit, but the + * client need not try to respect it. + */ readonly TokenBucketBytesPerSecond: number; + /** + * TokenBucketBytesBurst is how many bytes the server will + * allow to burst, temporarily violating + * TokenBucketBytesPerSecond. + * + * Zero means unspecified. There might be a limit, but the + * client need not try to respect it. + */ readonly TokenBucketBytesBurst: number; } @@ -2920,6 +4417,9 @@ export const ServerSentEventTypes: ServerSentEventType[] = [ ]; // From codersdk/deployment.go +/** + * Deprecated: ServiceBannerConfig has been renamed to BannerConfig. + */ export interface ServiceBannerConfig { readonly enabled: boolean; readonly message?: string; @@ -2935,9 +4435,39 @@ export interface SessionCountDeploymentStats { } // From codersdk/deployment.go +/** + * SessionLifetime refers to "sessions" authenticating into Coderd. Coder has + * multiple different session types: api keys, tokens, workspace app tokens, + * agent tokens, etc. This configuration struct should be used to group all + * settings referring to any of these session lifetime controls. + * TODO: These config options were created back when coder only had api keys. + * Today, the config is ambiguously used for all of them. For example: + * - cli based api keys ignore all settings + * - login uses the default lifetime, not the MaximumTokenDuration + * - Tokens use the Default & MaximumTokenDuration + * - ... etc ... + * The rationale behind each decision is undocumented. The naming behind these + * config options is also confusing without any clear documentation. + * 'CreateAPIKey' is used to make all sessions, and its parameters are just + * 'LifetimeSeconds' and 'DefaultLifetime', which does not directly correlate to + * the config options here. + */ export interface SessionLifetime { + /** + * DisableExpiryRefresh will disable automatically refreshing api + * keys when they are used from the api.
This means the api key lifetime at + * creation is the lifetime of the api key. + */ readonly disable_expiry_refresh?: boolean; + /** + * DefaultDuration is only for browser, workspace app and oauth sessions. + */ readonly default_duration: number; + /** + * RefreshDefaultDuration is the default lifetime for OAuth2 refresh tokens. + * This should generally be longer than access token lifetimes to allow + * refreshing after access token expiry. + */ readonly refresh_default_duration?: number; readonly default_token_lifetime?: number; readonly max_token_lifetime?: number; @@ -2945,15 +4475,39 @@ export interface SessionLifetime { } // From codersdk/client.go +/** + * SessionTokenHeader is the custom header to use for authentication. + */ export const SessionTokenHeader = "Coder-Session-Token"; // From codersdk/client.go +/** + * SignedAppTokenCookie is the name of the cookie that stores a temporary + * JWT that can be used to authenticate instead of the app session token. + *nolint:gosec + */ export const SignedAppTokenCookie = "coder_signed_app_token"; // From codersdk/client.go +/** + * SignedAppTokenQueryParameter is the name of the query parameter that + * stores a temporary JWT that can be used to authenticate instead of the + * session token. This is only acceptable on reconnecting-pty requests, not + * apps. + * + * It has a random suffix to avoid conflict with user query parameters on + * apps. + *nolint:gosec + */ export const SignedAppTokenQueryParameter = "coder_signed_app_token_23db1dde"; // From codersdk/roles.go +/** + * SlimRole omits permission information from a role. + * At present, this is because our apis do not return permission information, + * and it would require extra db calls to fetch this information. The UI does + * not need it, so most api calls will use this structure that omits information. 
+ */ export interface SlimRole { readonly name: string; readonly display_name: string; @@ -2961,6 +4515,15 @@ export interface SlimRole { } // From codersdk/client.go +/** + * SubdomainAppSessionTokenCookie is the name of the cookie that stores an + * application-scoped API token on subdomain app domains (both the primary + * and proxies). + * + * To avoid conflicts between multiple proxies, we append an underscore and + * a hash suffix to the cookie name. + *nolint:gosec + */ export const SubdomainAppSessionTokenCookie = "coder_subdomain_app_session_token"; @@ -2991,55 +4554,198 @@ export interface TLSConfig { } // From tailcfg/derpmap.go +/** + * DERPNode describes a DERP packet relay node running within a DERPRegion. + */ export interface TailDERPNode { + /** + * Name is a unique node name (across all regions). + * It is not a host name. + * It's typically of the form "1b", "2a", "3b", etc. (region + * ID + suffix within that region) + */ readonly Name: string; + /** + * RegionID is the RegionID of the DERPRegion that this node + * is running in. + */ readonly RegionID: number; + /** + * HostName is the DERP node's hostname. + * + * It is required but need not be unique; multiple nodes may + * have the same HostName but vary in configuration otherwise. + */ readonly HostName: string; + /** + * CertName optionally specifies the expected TLS cert common + * name. If empty, HostName is used. If CertName is non-empty, + * HostName is only used for the TCP dial (if IPv4/IPv6 are + * not present) + TLS ClientHello. + */ readonly CertName?: string; + /** + * IPv4 optionally forces an IPv4 address to use, instead of using DNS. + * If empty, A record(s) from DNS lookups of HostName are used. + * If the string is not an IPv4 address, IPv4 is not used; the + * conventional string to disable IPv4 (and not use DNS) is + * "none". + */ readonly IPv4?: string; + /** + * IPv6 optionally forces an IPv6 address to use, instead of using DNS. 
+ * If empty, AAAA record(s) from DNS lookups of HostName are used. + * If the string is not an IPv6 address, IPv6 is not used; the + * conventional string to disable IPv6 (and not use DNS) is + * "none". + */ readonly IPv6?: string; + /** + * Port optionally specifies a STUN port to use. + * Zero means 3478. + * To disable STUN on this node, use -1. + */ readonly STUNPort?: number; + /** + * STUNOnly marks a node as only a STUN server and not a DERP + * server. + */ readonly STUNOnly?: boolean; + /** + * DERPPort optionally provides an alternate TLS port number + * for the DERP HTTPS server. + * + * If zero, 443 is used. + */ readonly DERPPort?: number; + /** + * InsecureForTests is used by unit tests to disable TLS verification. + * It should not be set by users. + */ readonly InsecureForTests?: boolean; + /** + * ForceHTTP is used by unit tests to force HTTP. + * It should not be set by users. + */ readonly ForceHTTP?: boolean; + /** + * STUNTestIP is used in tests to override the STUN server's IP. + * If empty, it's assumed to be the same as the DERP server. + */ readonly STUNTestIP?: string; + /** + * CanPort80 specifies whether this DERP node is accessible over HTTP + * on port 80 specifically. This is used for captive portal checks. + */ readonly CanPort80?: boolean; } // From tailcfg/derpmap.go +/** + * DERPRegion is a geographic region running DERP relay node(s). + * + * Client nodes discover which region they're closest to, advertise + * that "home" DERP region (previously called "home node", when there + * was only 1 node per region) and maintain a persistent connection + * that region as long as it's the closest. Client nodes will further + * connect to other regions as necessary to communicate with peers + * advertising other regions as their homes. + */ export interface TailDERPRegion { + /** + * EmbeddedRelay is true when the region is bundled with the Coder + * control plane. 
+ */ readonly EmbeddedRelay: boolean; + /** + * RegionID is a unique integer for a geographic region. + * + * It corresponds to the legacy derpN.tailscale.com hostnames + * used by older clients. (Older clients will continue to resolve + * derpN.tailscale.com when contacting peers, rather than use + * the server-provided DERPMap) + * + * RegionIDs must be non-zero, positive, and guaranteed to fit + * in a JavaScript number. + * + * RegionIDs in range 900-999 are reserved for end users to run their + * own DERP nodes. + */ readonly RegionID: number; + /** + * RegionCode is a short name for the region. It's usually a popular + * city or airport code in the region: "nyc", "sf", "sin", + * "fra", etc. + */ readonly RegionCode: string; + /** + * RegionName is a long English name for the region: "New York City", + * "San Francisco", "Singapore", "Frankfurt", etc. + */ readonly RegionName: string; + /** + * Avoid is whether the client should avoid picking this as its home + * region. The region should only be used if a peer is there. + * Clients already using this region as their home should migrate + * away to a new region without Avoid set. + */ readonly Avoid?: boolean; + /** + * Nodes are the DERP nodes running in this region, in + * priority order for the current client. Client TLS + * connections should ideally only go to the first entry + * (falling back to the second if necessary). STUN packets + * should go to the first 1 or 2. + * + * If nodes within a region route packets amongst themselves, + * but not to other regions. That said, each user/domain + * should get a the same preferred node order, so if all nodes + * for a user/network pick the first one (as they should, when + * things are healthy), the inter-cluster routing is minimal + * to zero. + */ readonly Nodes: readonly TailDERPNode[]; } // From codersdk/aitasks.go +/** + * Task represents a task. + * + * Experimental: This type is experimental and may change in the future. 
+ */ export interface Task { readonly id: string; readonly organization_id: string; readonly owner_id: string; readonly owner_name: string; + readonly owner_avatar_url?: string; readonly name: string; readonly template_id: string; + readonly template_version_id: string; readonly template_name: string; readonly template_display_name: string; readonly template_icon: string; readonly workspace_id: string | null; + readonly workspace_name: string; + readonly workspace_status?: WorkspaceStatus; + readonly workspace_build_number?: number; readonly workspace_agent_id: string | null; readonly workspace_agent_lifecycle: WorkspaceAgentLifecycle | null; readonly workspace_agent_health: WorkspaceAgentHealth | null; + readonly workspace_app_id: string | null; readonly initial_prompt: string; - readonly status: WorkspaceStatus; + readonly status: TaskStatus; readonly current_state: TaskStateEntry | null; readonly created_at: string; readonly updated_at: string; } // From codersdk/aitasks.go +/** + * TaskLogEntry represents a single log entry for a task. + * + * Experimental: This type is experimental and may change in the future. + */ export interface TaskLogEntry { readonly id: number; readonly content: string; @@ -3053,11 +4759,21 @@ export type TaskLogType = "input" | "output"; export const TaskLogTypes: TaskLogType[] = ["input", "output"]; // From codersdk/aitasks.go +/** + * TaskLogsResponse contains the logs for a task. + * + * Experimental: This type is experimental and may change in the future. + */ export interface TaskLogsResponse { readonly logs: readonly TaskLogEntry[]; } // From codersdk/aitasks.go +/** + * TaskSendRequest is used to send task input to the tasks sidebar app. + * + * Experimental: This type is experimental and may change in the future. 
+ */ export interface TaskSendRequest { readonly input: string; } @@ -3066,6 +4782,11 @@ export type TaskState = "complete" | "failed" | "idle" | "working"; // From codersdk/aitasks.go +/** + * TaskStateEntry represents a single entry in the task's state history. + * + * Experimental: This type is experimental and may change in the future. + */ export interface TaskStateEntry { readonly timestamp: string; readonly state: TaskState; @@ -3081,8 +4802,57 @@ export const TaskStates: TaskState[] = [ ]; // From codersdk/aitasks.go +export type TaskStatus = + | "active" + | "error" + | "initializing" + | "paused" + | "pending" + | "unknown"; + +export const TaskStatuses: TaskStatus[] = [ + "active", + "error", + "initializing", + "paused", + "pending", + "unknown", +]; + +// From codersdk/aitasks.go +/** + * TasksFilter filters the list of tasks. + * + * Experimental: This type is experimental and may change in the future. + */ export interface TasksFilter { + /** + * Owner can be a username, UUID, or "me". + */ readonly owner?: string; + /** + * Organization can be an organization name or UUID. + */ + readonly organization?: string; + /** + * Status filters the tasks by their task status. + */ + readonly status?: TaskStatus; + /** + * FilterQuery allows specifying a raw filter query. + */ + readonly filter_query?: string; +} + +// From codersdk/aitasks.go +/** + * TasksListResponse is the response shape for tasks list. + * + * Experimental response shape for tasks list (server returns []Task). + */ +export interface TasksListResponse { + readonly tasks: readonly Task[]; + readonly count: number; } // From codersdk/deployment.go @@ -3093,6 +4863,10 @@ export interface TelemetryConfig { } // From codersdk/templates.go +/** + * Template is the JSON representation of a Coder template. This type matches the + * database object for now, but is abstracted for ease of change later on.
+ */ export interface Template { readonly id: string; readonly created_at: string; @@ -3105,6 +4879,9 @@ export interface Template { readonly display_name: string; readonly provisioner: ProvisionerType; readonly active_version_id: string; + /** + * ActiveUserCount is set to -1 when loading. + */ readonly active_user_count: number; readonly build_time_stats: TemplateBuildTimeStats; readonly description: string; @@ -3113,16 +4890,35 @@ export interface Template { readonly icon: string; readonly default_ttl_ms: number; readonly activity_bump_ms: number; + /** + * AutostopRequirement and AutostartRequirement are enterprise features. Its + * value is only used if your license is entitled to use the advanced template + * scheduling feature. + */ readonly autostop_requirement: TemplateAutostopRequirement; readonly autostart_requirement: TemplateAutostartRequirement; readonly created_by_id: string; readonly created_by_name: string; + /** + * AllowUserAutostart and AllowUserAutostop are enterprise-only. Their + * values are only used if your license is entitled to use the advanced + * template scheduling feature. + */ readonly allow_user_autostart: boolean; readonly allow_user_autostop: boolean; readonly allow_user_cancel_workspace_jobs: boolean; + /** + * FailureTTLMillis, TimeTilDormantMillis, and TimeTilDormantAutoDeleteMillis are enterprise-only. Their + * values are used if your license is entitled to use the advanced + * template scheduling feature. + */ readonly failure_ttl_ms: number; readonly time_til_dormant_ms: number; readonly time_til_dormant_autodelete_ms: number; + /** + * RequireActiveVersion mandates that workspaces are built with the active + * template version. 
+ */ readonly require_active_version: boolean; readonly max_port_share_level: WorkspaceAgentPortShareLevel; readonly cors_behavior: CORSBehavior; @@ -3136,6 +4932,9 @@ export interface TemplateACL { } // From codersdk/insights.go +/** + * TemplateAppUsage shows the usage of an app for one or more templates. + */ export interface TemplateAppUsage { readonly template_ids: readonly string[]; readonly type: TemplateAppsType; @@ -3153,12 +4952,32 @@ export const TemplateAppsTypes: TemplateAppsType[] = ["app", "builtin"]; // From codersdk/templates.go export interface TemplateAutostartRequirement { + /** + * DaysOfWeek is a list of days of the week in which autostart is allowed + * to happen. If no days are specified, autostart is not allowed. + */ readonly days_of_week: readonly string[]; } // From codersdk/templates.go export interface TemplateAutostopRequirement { + /** + * DaysOfWeek is a list of days of the week on which restarts are required. + * Restarts happen within the user's quiet hours (in their configured + * timezone). If no days are specified, restarts are not required. Weekdays + * cannot be specified twice. + * + * Restarts will only happen on weekdays in this list on weeks which line up + * with Weeks. + */ readonly days_of_week: readonly string[]; + /** + * Weeks is the number of weeks between required restarts. Weeks are synced + * across all workspaces (and Coder deployments) using modulo math on a + * hardcoded epoch week of January 2nd, 2023 (the first Monday of 2023). + * Values of 0 or 1 indicate weekly restarts. Values of 2 indicate + * fortnightly restarts, etc. + */ readonly weeks: number; } @@ -3169,18 +4988,33 @@ export type TemplateBuildTimeStats = Record< >; // From codersdk/insights.go +/** + * Enums define the display name of the builtin app reported. + */ export const TemplateBuiltinAppDisplayNameJetBrains = "JetBrains"; // From codersdk/insights.go +/** + * Enums define the display name of the builtin app reported. 
+ */ export const TemplateBuiltinAppDisplayNameSFTP = "SFTP"; // From codersdk/insights.go +/** + * Enums define the display name of the builtin app reported. + */ export const TemplateBuiltinAppDisplayNameSSH = "SSH"; // From codersdk/insights.go +/** + * Enums define the display name of the builtin app reported. + */ export const TemplateBuiltinAppDisplayNameVSCode = "Visual Studio Code"; // From codersdk/insights.go +/** + * Enums define the display name of the builtin app reported. + */ export const TemplateBuiltinAppDisplayNameWebTerminal = "Web Terminal"; // From codersdk/templates.go @@ -3205,6 +5039,10 @@ export interface TemplateGroup extends Group { } // From codersdk/insights.go +/** + * TemplateInsightsIntervalReport is the report from the template insights + * endpoint for a specific interval. + */ export interface TemplateInsightsIntervalReport { readonly start_time: string; readonly end_time: string; @@ -3214,6 +5052,9 @@ export interface TemplateInsightsIntervalReport { } // From codersdk/insights.go +/** + * TemplateInsightsReport is the report from the template insights endpoint. + */ export interface TemplateInsightsReport { readonly start_time: string; readonly end_time: string; @@ -3233,6 +5074,9 @@ export interface TemplateInsightsRequest { } // From codersdk/insights.go +/** + * TemplateInsightsResponse is the response from the template insights endpoint. + */ export interface TemplateInsightsResponse { readonly report?: TemplateInsightsReport; readonly interval_reports?: readonly TemplateInsightsIntervalReport[]; @@ -3247,6 +5091,10 @@ export const TemplateInsightsSections: TemplateInsightsSection[] = [ ]; // From codersdk/insights.go +/** + * TemplateParameterUsage shows the usage of a parameter for one or more + * templates. 
+ */ export interface TemplateParameterUsage { readonly template_ids: readonly string[]; readonly display_name: string; @@ -3258,6 +5106,10 @@ export interface TemplateParameterUsage { } // From codersdk/insights.go +/** + * TemplateParameterValue shows the usage of a parameter value for one or more + * templates. + */ export interface TemplateParameterValue { readonly value: string; readonly count: number; @@ -3274,6 +5126,9 @@ export interface TemplateUser extends User { } // From codersdk/templateversions.go +/** + * TemplateVersion represents a single version of a template. + */ export interface TemplateVersion { readonly id: string; readonly template_id?: string; @@ -3303,12 +5158,19 @@ export interface TemplateVersionExternalAuth { } // From codersdk/templateversions.go +/** + * TemplateVersionParameter represents a parameter for a template version. + */ export interface TemplateVersionParameter { readonly name: string; readonly display_name?: string; readonly description: string; readonly description_plaintext: string; readonly type: string; + /** + * FormType has an enum value of empty string, `""`. + * Keep the leading comma in the enums struct tag. + */ readonly form_type: string; readonly mutable: boolean; readonly default_value: string; @@ -3324,6 +5186,9 @@ export interface TemplateVersionParameter { } // From codersdk/templateversions.go +/** + * TemplateVersionParameterOption represents a selectable option for a template parameter. + */ export interface TemplateVersionParameterOption { readonly name: string; readonly description: string; @@ -3332,6 +5197,9 @@ export interface TemplateVersionParameterOption { } // From codersdk/templateversions.go +/** + * TemplateVersionVariable represents a managed template variable. 
+ */ export interface TemplateVersionVariable { readonly name: string; readonly description: string; @@ -3350,6 +5218,10 @@ export const TemplateVersionWarnings: TemplateVersionWarning[] = [ ]; // From codersdk/templates.go +/** + * TemplateVersionsByTemplateRequest defines the request parameters for + * TemplateVersionsByTemplate. + */ export interface TemplateVersionsByTemplateRequest extends Pagination { readonly template_id: string; readonly include_archived: boolean; @@ -3426,14 +5298,29 @@ export interface UpdateActiveTemplateVersion { export interface UpdateAppearanceConfig { readonly application_name: string; readonly logo_url: string; + /** + * Deprecated: ServiceBanner has been replaced by AnnouncementBanners. + */ readonly service_banner: BannerConfig; readonly announcement_banners: readonly BannerConfig[]; } // From codersdk/updatecheck.go +/** + * UpdateCheckResponse contains information on the latest release of Coder. + */ export interface UpdateCheckResponse { + /** + * Current indicates whether the server version is the same as the latest. + */ readonly current: boolean; + /** + * Version is the semantic version for the latest release of Coder. + */ readonly version: string; + /** + * URL to download the latest release of Coder. + */ readonly url: string; } @@ -3473,7 +5360,17 @@ export interface UpdateRoles { // From codersdk/templates.go export interface UpdateTemplateACL { + /** + * UserPerms is a mapping from valid user UUIDs to the template role they + * should be granted. To remove a user from the template, use "" as the role + * (available as a constant named codersdk.TemplateRoleDeleted) + */ readonly user_perms?: Record; + /** + * GroupPerms is a mapping from valid group UUIDs to the template role they + * should be granted. 
To remove a group from the template, use "" as the role + * (available as a constant named codersdk.TemplateRoleDeleted) + */ readonly group_perms?: Record; } @@ -3484,7 +5381,17 @@ export interface UpdateTemplateMeta { readonly description?: string; readonly icon?: string; readonly default_ttl_ms?: number; + /** + * ActivityBumpMillis allows optionally specifying the activity bump + * duration for all workspaces created from this template. Defaults to 1h + * but can be set to 0 to disable activity bumping. + */ readonly activity_bump_ms?: number; + /** + * AutostopRequirement and AutostartRequirement can only be set if your license + * includes the advanced template scheduling feature. If you attempt to set this + * value while unlicensed, it will be ignored. + */ readonly autostop_requirement?: TemplateAutostopRequirement; readonly autostart_requirement?: TemplateAutostartRequirement; readonly allow_user_autostart?: boolean; @@ -3493,13 +5400,49 @@ export interface UpdateTemplateMeta { readonly failure_ttl_ms?: number; readonly time_til_dormant_ms?: number; readonly time_til_dormant_autodelete_ms?: number; + /** + * UpdateWorkspaceLastUsedAt updates the last_used_at field of workspaces + * spawned from the template. This is useful for preventing workspaces being + * immediately locked when updating the inactivity_ttl field to a new, shorter + * value. + */ readonly update_workspace_last_used_at: boolean; + /** + * UpdateWorkspaceDormant updates the dormant_at field of workspaces spawned + * from the template. This is useful for preventing dormant workspaces being immediately + * deleted when updating the dormant_ttl field to a new, shorter value. + */ readonly update_workspace_dormant_at: boolean; + /** + * RequireActiveVersion mandates workspaces built using this template + * use the active version of the template. This option has no + * effect on template admins. 
+ */ readonly require_active_version?: boolean; + /** + * DeprecationMessage if set, will mark the template as deprecated and block + * any new workspaces from using this template. + * If passed an empty string, will remove the deprecated message, making + * the template usable for new workspaces again. + */ readonly deprecation_message?: string; + /** + * DisableEveryoneGroupAccess allows optionally disabling the default + * behavior of granting the 'everyone' group access to use the template. + * If this is set to true, the template will not be available to all users, + * and must be explicitly granted to users or groups in the permissions settings + * of the template. + */ readonly disable_everyone_group_access: boolean; readonly max_port_share_level?: WorkspaceAgentPortShareLevel; readonly cors_behavior?: CORSBehavior; + /** + * UseClassicParameterFlow is a flag that switches the default behavior to use the classic + * parameter flow when creating a workspace. This only affects deployments with the experiment + * "dynamic-parameters" enabled. This setting will live for a period after the experiment is + * made the default. + * An "opt-out" is present in case the new feature breaks some existing templates. + */ readonly use_classic_parameter_flow?: boolean; } @@ -3528,26 +5471,64 @@ export interface UpdateUserProfileRequest { // From codersdk/users.go export interface UpdateUserQuietHoursScheduleRequest { + /** + * Schedule is a cron expression that defines when the user's quiet hours + * window is. Schedule must not be empty. For new users, the schedule is set + * to 2am in their browser or computer's timezone. The schedule denotes the + * beginning of a 4 hour window where the workspace is allowed to + * automatically stop or restart due to maintenance or template schedule. + * + * The schedule must be daily with a single time, and should have a timezone + * specified via a CRON_TZ prefix (otherwise UTC will be used). 
+ * + * If the schedule is empty, the user will be updated to use the default + * schedule. + */ readonly schedule: string; } // From codersdk/workspaces.go export interface UpdateWorkspaceACL { + /** + * UserRoles is a mapping from valid user UUIDs to the workspace role they + * should be granted. To remove a user from the workspace, use "" as the role + * (available as a constant named codersdk.WorkspaceRoleDeleted) + */ readonly user_roles?: Record; + /** + * GroupRoles is a mapping from valid group UUIDs to the workspace role they + * should be granted. To remove a group from the workspace, use "" as the role + * (available as a constant named codersdk.WorkspaceRoleDeleted) + */ readonly group_roles?: Record; } // From codersdk/workspaces.go +/** + * UpdateWorkspaceAutomaticUpdatesRequest is a request to updates a workspace's automatic updates setting. + */ export interface UpdateWorkspaceAutomaticUpdatesRequest { readonly automatic_updates: AutomaticUpdates; } // From codersdk/workspaces.go +/** + * UpdateWorkspaceAutostartRequest is a request to update a workspace's autostart schedule. + */ export interface UpdateWorkspaceAutostartRequest { + /** + * Schedule is expected to be of the form `CRON_TZ= * * ` + * Example: `CRON_TZ=US/Central 30 9 * * 1-5` represents 0930 in the timezone US/Central + * on weekdays (Mon-Fri). `CRON_TZ` defaults to UTC if not present. + */ readonly schedule?: string; } // From codersdk/workspaces.go +/** + * UpdateWorkspaceDormancy is a request to activate or make a workspace dormant. + * A value of false will activate a dormant workspace. + */ export interface UpdateWorkspaceDormancy { readonly dormant: boolean; } @@ -3564,11 +5545,17 @@ export interface UpdateWorkspaceRequest { } // From codersdk/workspaces.go +/** + * UpdateWorkspaceTTLRequest is a request to update a workspace's TTL. 
+ */ export interface UpdateWorkspaceTTLRequest { readonly ttl_ms: number | null; } // From codersdk/files.go +/** + * UploadResponse contains the hash to reference the uploaded file. + */ export interface UploadResponse { readonly hash: string; } @@ -3599,12 +5586,18 @@ export interface UsagePeriod { } // From codersdk/users.go +/** + * User represents a user in Coder. + */ export interface User extends ReducedUser { readonly organization_ids: readonly string[]; readonly roles: readonly SlimRole[]; } // From codersdk/insights.go +/** + * UserActivity shows the session time for a user. + */ export interface UserActivity { readonly template_ids: readonly string[]; readonly user_id: string; @@ -3614,6 +5607,10 @@ export interface UserActivity { } // From codersdk/insights.go +/** + * UserActivityInsightsReport is the report from the user activity insights + * endpoint. + */ export interface UserActivityInsightsReport { readonly start_time: string; readonly end_time: string; @@ -3629,6 +5626,10 @@ export interface UserActivityInsightsRequest { } // From codersdk/insights.go +/** + * UserActivityInsightsResponse is the response from the user activity insights + * endpoint. + */ export interface UserActivityInsightsResponse { readonly report: UserActivityInsightsReport; } @@ -3640,6 +5641,9 @@ export interface UserAppearanceSettings { } // From codersdk/insights.go +/** + * UserLatency shows the connection latency for a user. + */ export interface UserLatency { readonly template_ids: readonly string[]; readonly user_id: string; @@ -3649,6 +5653,10 @@ export interface UserLatency { } // From codersdk/insights.go +/** + * UserLatencyInsightsReport is the report from the user latency insights + * endpoint. 
+ */ export interface UserLatencyInsightsReport { readonly start_time: string; readonly end_time: string; @@ -3664,6 +5672,10 @@ export interface UserLatencyInsightsRequest { } // From codersdk/insights.go +/** + * UserLatencyInsightsResponse is the response from the user latency insights + * endpoint. + */ export interface UserLatencyInsightsResponse { readonly report: UserLatencyInsightsReport; } @@ -3688,10 +5700,26 @@ export interface UserQuietHoursScheduleConfig { // From codersdk/users.go export interface UserQuietHoursScheduleResponse { readonly raw_schedule: string; + /** + * UserSet is true if the user has set their own quiet hours schedule. If + * false, the user is using the default schedule. + */ readonly user_set: boolean; + /** + * UserCanSet is true if the user is allowed to set their own quiet hours + * schedule. If false, the user cannot set a custom schedule and the default + * schedule will always be used. + */ readonly user_can_set: boolean; - readonly time: string; - readonly timezone: string; + /** + * Time is the time of day that the quiet hours window starts in the given + * Timezone each day. + */ + readonly time: string; // HH:mm (24-hour) + readonly timezone: string; // raw format from the cron expression, UTC if unspecified + /** + * Next is the next time that the quiet hours window will start. + */ readonly next: string; } @@ -3729,6 +5757,9 @@ export interface ValidateUserPasswordResponse { } // From codersdk/client.go +/** + * ValidationError represents a scoped error to a user input. + */ export interface ValidationError { readonly field: string; readonly detail: string; @@ -3770,18 +5801,31 @@ export interface WebpushSubscription { } // From healthsdk/healthsdk.go +/** + * WebsocketReport shows if the configured access URL allows establishing WebSocket connections. + */ export interface WebsocketReport extends BaseReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. 
+ */ readonly healthy: boolean; readonly body: string; readonly code: number; } // From codersdk/workspaces.go +/** + * Workspace is a deployment of a template. It references a specific + * version and can be updated. + */ export interface Workspace { readonly id: string; readonly created_at: string; readonly updated_at: string; readonly owner_id: string; + /** + * OwnerName is the username of the owner of the workspace. + */ readonly owner_name: string; readonly owner_avatar_url: string; readonly organization_id: string; @@ -3801,14 +5845,40 @@ export interface Workspace { readonly autostart_schedule?: string; readonly ttl_ms?: number; readonly last_used_at: string; + /** + * DeletingAt indicates the time at which the workspace will be permanently deleted. + * A workspace is eligible for deletion if it is dormant (a non-nil dormant_at value) + * and a value has been specified for time_til_dormant_autodelete on its template. + */ readonly deleting_at: string | null; + /** + * DormantAt being non-nil indicates a workspace that is dormant. + * A dormant workspace is no longer accessible must be activated. + * It is subject to deletion if it breaches + * the duration of the time_til_ field on its template. + */ readonly dormant_at: string | null; + /** + * Health shows the health of the workspace and information about + * what is causing an unhealthy status. + */ readonly health: WorkspaceHealth; readonly automatic_updates: AutomaticUpdates; readonly allow_renames: boolean; readonly favorite: boolean; readonly next_start_at: string | null; + /** + * IsPrebuild indicates whether the workspace is a prebuilt workspace. + * Prebuilt workspaces are owned by the prebuilds system user and have specific behavior, + * such as being managed differently from regular workspaces. + * Once a prebuilt workspace is claimed by a user, it transitions to a regular workspace, + * and IsPrebuild returns false. 
+ */ readonly is_prebuild: boolean; + /** + * TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task. + */ + readonly task_id?: string; } // From codersdk/workspaces.go @@ -3843,44 +5913,111 @@ export interface WorkspaceAgent { readonly version: string; readonly api_version: string; readonly apps: readonly WorkspaceApp[]; + /** + * DERPLatency is mapped by region name (e.g. "New York City", "Seattle"). + */ readonly latency?: Record; readonly connection_timeout_seconds: number; readonly troubleshooting_url: string; readonly subsystems: readonly AgentSubsystem[]; - readonly health: WorkspaceAgentHealth; + readonly health: WorkspaceAgentHealth; // Health reports the health of the agent. readonly display_apps: readonly DisplayApp[]; readonly log_sources: readonly WorkspaceAgentLogSource[]; readonly scripts: readonly WorkspaceAgentScript[]; + /** + * StartupScriptBehavior is a legacy field that is deprecated in favor + * of the `coder_script` resource. It's only referenced by old clients. + * Deprecated: Remove in the future! + */ readonly startup_script_behavior: WorkspaceAgentStartupScriptBehavior; } // From codersdk/workspaceagents.go +/** + * WorkspaceAgentContainer describes a devcontainer of some sort + * that is visible to the workspace agent. This struct is an abstraction + * of potentially multiple implementations, and the fields will be + * somewhat implementation-dependent. + */ export interface WorkspaceAgentContainer { + /** + * CreatedAt is the time the container was created. + */ readonly created_at: string; + /** + * ID is the unique identifier of the container. + */ readonly id: string; + /** + * FriendlyName is the human-readable name of the container. + */ readonly name: string; + /** + * Image is the name of the container image. + */ readonly image: string; + /** + * Labels is a map of key-value pairs of container labels. + */ readonly labels: Record; + /** + * Running is true if the container is currently running. 
+ */ readonly running: boolean; + /** + * Ports includes ports exposed by the container. + */ readonly ports: readonly WorkspaceAgentContainerPort[]; + /** + * Status is the current status of the container. This is somewhat + * implementation-dependent, but should generally be a human-readable + * string. + */ readonly status: string; + /** + * Volumes is a map of "things" mounted into the container. Again, this + * is somewhat implementation-dependent. + */ readonly volumes: Record; } // From codersdk/workspaceagents.go +/** + * WorkspaceAgentContainerPort describes a port as exposed by a container. + */ export interface WorkspaceAgentContainerPort { + /** + * Port is the port number *inside* the container. + */ readonly port: number; + /** + * Network is the network protocol used by the port (tcp, udp, etc). + */ readonly network: string; + /** + * HostIP is the IP address of the host interface to which the port is + * bound. Note that this can be an IPv4 or IPv6 address. + */ readonly host_ip?: string; + /** + * HostPort is the port number *outside* the container. + */ readonly host_port?: number; } // From codersdk/workspaceagents.go +/** + * WorkspaceAgentDevcontainer defines the location of a devcontainer + * configuration in a workspace that is visible to the workspace agent. + */ export interface WorkspaceAgentDevcontainer { readonly id: string; readonly name: string; readonly workspace_folder: string; readonly config_path?: string; + /** + * Additional runtime fields. + */ readonly status: WorkspaceAgentDevcontainerStatus; readonly dirty: boolean; readonly container?: WorkspaceAgentContainer; @@ -3889,6 +6026,10 @@ export interface WorkspaceAgentDevcontainer { } // From codersdk/workspaceagents.go +/** + * WorkspaceAgentDevcontainerAgent represents the sub agent for a + * devcontainer. 
+ */ export interface WorkspaceAgentDevcontainerAgent { readonly id: string; readonly name: string; @@ -3907,8 +6048,8 @@ export const WorkspaceAgentDevcontainerStatuses: WorkspaceAgentDevcontainerStatu // From codersdk/workspaceagents.go export interface WorkspaceAgentHealth { - readonly healthy: boolean; - readonly reason?: string; + readonly healthy: boolean; // Healthy is true if the agent is healthy. + readonly reason?: string; // Reason is a human-readable explanation of the agent's health. It is empty if Healthy is true. } // From codersdk/workspaceagents.go @@ -3936,21 +6077,41 @@ export const WorkspaceAgentLifecycles: WorkspaceAgentLifecycle[] = [ ]; // From codersdk/workspaceagents.go +/** + * WorkspaceAgentListContainersResponse is the response to the list containers + * request. + */ export interface WorkspaceAgentListContainersResponse { + /** + * Devcontainers is a list of devcontainers visible to the workspace agent. + */ readonly devcontainers: readonly WorkspaceAgentDevcontainer[]; + /** + * Containers is a list of containers visible to the workspace agent. + */ readonly containers: readonly WorkspaceAgentContainer[]; + /** + * Warnings is a list of warnings that may have occurred during the + * process of listing containers. This should not include fatal errors. + */ readonly warnings?: readonly string[]; } // From codersdk/workspaceagents.go export interface WorkspaceAgentListeningPort { - readonly process_name: string; - readonly network: string; + readonly process_name: string; // may be empty + readonly network: string; // only "tcp" at the moment readonly port: number; } // From codersdk/workspaceagents.go export interface WorkspaceAgentListeningPortsResponse { + /** + * If there are no ports in the list, nothing should be displayed in the UI. + * There must not be a "no ports available" message or anything similar, as + * there will always be no ports displayed on platforms where our port + * detection logic is unsupported. 
+ */ readonly ports: readonly WorkspaceAgentListeningPort[]; } @@ -3979,6 +6140,11 @@ export interface WorkspaceAgentMetadata { } // From codersdk/workspaceagents.go +/** + * WorkspaceAgentMetadataDescription is a description of dynamic metadata the agent should report + * back to coderd. It is provided via the `metadata` list in the `coder_agent` + * block. + */ export interface WorkspaceAgentMetadataDescription { readonly display_name: string; readonly key: string; @@ -3990,6 +6156,10 @@ export interface WorkspaceAgentMetadataDescription { // From codersdk/workspaceagents.go export interface WorkspaceAgentMetadataResult { readonly collected_at: string; + /** + * Age is the number of seconds since the metadata was collected. + * It is provided in addition to CollectedAt to protect against clock skew. + */ readonly age: number; readonly value: string; readonly error: string; @@ -4066,21 +6236,58 @@ export const WorkspaceAgentStatuses: WorkspaceAgentStatus[] = [ // From codersdk/workspaceapps.go export interface WorkspaceApp { readonly id: string; + /** + * URL is the address being proxied to inside the workspace. + * If external is specified, this will be opened on the client. + */ readonly url?: string; + /** + * External specifies whether the URL should be opened externally on + * the client or not. + */ readonly external: boolean; + /** + * Slug is a unique identifier within the agent. + */ readonly slug: string; + /** + * DisplayName is a friendly name for the app. + */ readonly display_name?: string; readonly command?: string; + /** + * Icon is a relative path or external URL that specifies + * an icon to be displayed in the dashboard. + */ readonly icon?: string; + /** + * Subdomain denotes whether the app should be accessed via a path on the + * `coder server` or via a hostname-based dev URL. If this is set to true + * and there is no app wildcard configured on the server, the app will not + * be accessible in the UI. 
+ */ readonly subdomain: boolean; + /** + * SubdomainName is the application domain exposed on the `coder server`. + */ readonly subdomain_name?: string; readonly sharing_level: WorkspaceAppSharingLevel; + /** + * Healthcheck specifies the configuration for checking app health. + */ readonly healthcheck?: Healthcheck; readonly health: WorkspaceAppHealth; readonly group?: string; readonly hidden: boolean; readonly open_in: WorkspaceAppOpenIn; + /** + * Tooltip is an optional markdown supported field that is displayed + * when hovering over workspace apps in the UI. + */ readonly tooltip?: string; + /** + * Statuses is a list of statuses for the app. + */ readonly statuses: readonly WorkspaceAppStatus[]; } @@ -4126,8 +6333,21 @@ export interface WorkspaceAppStatus { readonly app_id: string; readonly state: WorkspaceAppStatusState; readonly message: string; + /** + * URI is the URI of the resource that the status is for. + * e.g. https://github.com/org/repo/pull/123 + * e.g. file:///path/to/file + */ readonly uri: string; + /** + * Deprecated: This field is unused and will be removed in a future version. + * Icon is an external URL to an icon that will be rendered in the UI. + */ readonly icon: string; + /** + * Deprecated: This field is unused and will be removed in a future version. + * NeedsUserAttention specifies whether the status needs user attention. + */ readonly needs_user_attention: boolean; } @@ -4146,6 +6366,10 @@ export const WorkspaceAppStatusStates: WorkspaceAppStatusState[] = [ ]; // From codersdk/workspacebuilds.go +/** + * WorkspaceBuild is an at-point representation of a workspace state. 
+ * BuildNumbers start at 1 and increase by 1 for each subsequent build + */ export interface WorkspaceBuild { readonly id: string; readonly created_at: string; @@ -4153,6 +6377,9 @@ export interface WorkspaceBuild { readonly workspace_id: string; readonly workspace_name: string; readonly workspace_owner_id: string; + /** + * WorkspaceOwnerName is the username of the owner of the workspace. + */ readonly workspace_owner_name: string; readonly workspace_owner_avatar_url?: string; readonly template_version_id: string; @@ -4171,11 +6398,17 @@ export interface WorkspaceBuild { readonly matched_provisioners?: MatchedProvisioners; readonly template_version_preset_id: string | null; readonly has_ai_task?: boolean; + /** + * Deprecated: This field has been replaced with `Task.WorkspaceAppID` + */ readonly ai_task_sidebar_app_id?: string; readonly has_external_agent?: boolean; } // From codersdk/workspacebuilds.go +/** + * WorkspaceBuildParameter represents a parameter specific for a workspace build. 
+ */ export interface WorkspaceBuildParameter { readonly name: string; readonly value: string; @@ -4184,6 +6417,10 @@ export interface WorkspaceBuildParameter { // From codersdk/workspacebuilds.go export interface WorkspaceBuildTimings { readonly provisioner_timings: readonly ProvisionerTiming[]; + /** + * TODO: Consolidate agent-related timing metrics into a single struct when + * updating the API version + */ readonly agent_script_timings: readonly AgentScriptTiming[]; readonly agent_connection_timings: readonly AgentConnectionTiming[]; } @@ -4213,6 +6450,9 @@ export interface WorkspaceDeploymentStats { // From codersdk/workspaces.go export interface WorkspaceFilter { + /** + * FilterQuery supports a raw filter query string + */ readonly q?: string; } @@ -4223,8 +6463,8 @@ export interface WorkspaceGroup extends Group { // From codersdk/workspaces.go export interface WorkspaceHealth { - readonly healthy: boolean; - readonly failing_agents: readonly string[]; + readonly healthy: boolean; // Healthy is true if the workspace is healthy. + readonly failing_agents: readonly string[]; // FailingAgents lists the IDs of the agents that are failing, if any. } // From codersdk/workspaces.go @@ -4236,6 +6476,11 @@ export interface WorkspaceOptions { export interface WorkspaceProxy extends Region { readonly derp_enabled: boolean; readonly derp_only: boolean; + /** + * Status is the latest status check of the proxy. This will be empty for deleted + * proxies. This value can be used to determine if a workspace proxy is healthy + * and ready to use. + */ readonly status?: WorkspaceProxyStatus; readonly created_at: string; readonly updated_at: string; @@ -4245,12 +6490,24 @@ export interface WorkspaceProxy extends Region { // From codersdk/deployment.go export interface WorkspaceProxyBuildInfo { + /** + * TODO: @emyrk what should we include here? + */ readonly workspace_proxy: boolean; + /** + * DashboardURL is the URL of the coderd this proxy is connected to. 
+ */ readonly dashboard_url: string; } // From healthsdk/healthsdk.go +/** + * WorkspaceProxyReport includes health details of each connected workspace proxy. + */ export interface WorkspaceProxyReport extends BaseReport { + /** + * Healthy is deprecated and left for backward compatibility purposes, use `Severity` instead. + */ readonly healthy: boolean; readonly workspace_proxies: RegionsResponse; } @@ -4258,6 +6515,9 @@ export interface WorkspaceProxyReport extends BaseReport { // From codersdk/workspaceproxy.go export interface WorkspaceProxyStatus { readonly status: ProxyHealthStatus; + /** + * Report provides more information about the health of the workspace proxy. + */ readonly report?: ProxyHealthReport; readonly checked_at: string; } @@ -4269,6 +6529,10 @@ export interface WorkspaceQuota { } // From codersdk/workspacebuilds.go +/** + * WorkspaceResource describes resources used to create a workspace, for instance: + * containers, images, volumes. + */ export interface WorkspaceResource { readonly id: string; readonly created_at: string; @@ -4284,6 +6548,9 @@ export interface WorkspaceResource { } // From codersdk/workspacebuilds.go +/** + * WorkspaceResourceMetadata annotates the workspace resource with custom key-value pairs. 
+ */ export interface WorkspaceResourceMetadata { readonly key: string; readonly value: string; @@ -4345,30 +6612,3 @@ export interface WorkspacesResponse { readonly workspaces: readonly Workspace[]; readonly count: number; } - -// From codersdk/deployment.go -export const annotationEnterpriseKey = "enterprise"; - -// From codersdk/deployment.go -export const annotationExternalProxies = "external_workspace_proxies"; - -// From codersdk/deployment.go -export const annotationFormatDuration = "format_duration"; - -// From codersdk/deployment.go -export const annotationSecretKey = "secret"; - -// From codersdk/insights.go -export const insightsTimeLayout = "2006-01-02T15:04:05Z07:00"; - -// From codersdk/notifications.go -export const maxCustomNotificationMessageLen = 2000; - -// From codersdk/notifications.go -export const maxCustomNotificationTitleLen = 120; - -// From healthsdk/interfaces.go -export const safeMTU = 1378; - -// From codersdk/workspacedisplaystatus.go -export const unknownStatus = "Unknown"; diff --git a/site/src/components/CopyButton/CopyButton.tsx b/site/src/components/CopyButton/CopyButton.tsx index 9110bb4cd68d0..c00b9360e4206 100644 --- a/site/src/components/CopyButton/CopyButton.tsx +++ b/site/src/components/CopyButton/CopyButton.tsx @@ -19,9 +19,7 @@ export const CopyButton: FC = ({ label, ...buttonProps }) => { - const { showCopiedSuccess, copyToClipboard } = useClipboard({ - textToCopy: text, - }); + const { showCopiedSuccess, copyToClipboard } = useClipboard(); return ( @@ -30,7 +28,7 @@ export const CopyButton: FC = ({ + ), diff --git a/site/src/components/Select/Select.tsx b/site/src/components/Select/Select.tsx index 3d2f8ffc3b706..f9261629e93c7 100644 --- a/site/src/components/Select/Select.tsx +++ b/site/src/components/Select/Select.tsx @@ -13,11 +13,13 @@ export const SelectGroup = SelectPrimitive.Group; export const SelectValue = SelectPrimitive.Value; +export type SelectTriggerProps = React.ComponentPropsWithoutRef< + typeof 
SelectPrimitive.Trigger +>; + export const SelectTrigger = React.forwardRef< React.ElementRef, - React.ComponentPropsWithoutRef & { - id?: string; - } + SelectTriggerProps >(({ className, children, id, ...props }, ref) => ( )); -const _TableFooter = React.forwardRef< +export const TableFooter = React.forwardRef< HTMLTableSectionElement, React.HTMLAttributes >(({ className, ...props }, ref) => ( @@ -82,22 +82,23 @@ const tableRowVariants = cva( }, ); -export const TableRow = React.forwardRef< - HTMLTableRowElement, - React.HTMLAttributes & - VariantProps ->(({ className, hover, ...props }, ref) => ( - -)); +export type TableRowProps = React.HTMLAttributes & + VariantProps; + +export const TableRow = React.forwardRef( + ({ className, hover, ...props }, ref) => ( + + ), +); export const TableHead = React.forwardRef< HTMLTableCellElement, diff --git a/site/src/components/TableEmpty/TableEmpty.stories.tsx b/site/src/components/TableEmpty/TableEmpty.stories.tsx index 3165ed5fe6f6f..c189e3bfec890 100644 --- a/site/src/components/TableEmpty/TableEmpty.stories.tsx +++ b/site/src/components/TableEmpty/TableEmpty.stories.tsx @@ -1,8 +1,6 @@ -import Table from "@mui/material/Table"; -import TableBody from "@mui/material/TableBody"; -import TableContainer from "@mui/material/TableContainer"; import type { Meta, StoryObj } from "@storybook/react-vite"; import { CodeExample } from "components/CodeExample/CodeExample"; +import { Table, TableBody } from "components/Table/Table"; import { TableEmpty } from "./TableEmpty"; const meta: Meta = { @@ -13,13 +11,11 @@ const meta: Meta = { }, decorators: [ (Story) => ( - - - - - -
-
+ + + + +
), ], }; diff --git a/site/src/components/TableEmpty/TableEmpty.tsx b/site/src/components/TableEmpty/TableEmpty.tsx index 4b1b72c5beff4..1dd2d08dbd469 100644 --- a/site/src/components/TableEmpty/TableEmpty.tsx +++ b/site/src/components/TableEmpty/TableEmpty.tsx @@ -1,9 +1,8 @@ -import TableCell from "@mui/material/TableCell"; -import TableRow from "@mui/material/TableRow"; import { EmptyState, type EmptyStateProps, } from "components/EmptyState/EmptyState"; +import { TableCell, TableRow } from "components/Table/Table"; import type { FC } from "react"; type TableEmptyProps = EmptyStateProps; diff --git a/site/src/components/TableLoader/TableLoader.stories.tsx b/site/src/components/TableLoader/TableLoader.stories.tsx index 645b4e343c493..c15b4eba4825e 100644 --- a/site/src/components/TableLoader/TableLoader.stories.tsx +++ b/site/src/components/TableLoader/TableLoader.stories.tsx @@ -1,7 +1,5 @@ -import Table from "@mui/material/Table"; -import TableBody from "@mui/material/TableBody"; -import TableContainer from "@mui/material/TableContainer"; import type { Meta, StoryObj } from "@storybook/react-vite"; +import { Table, TableBody } from "components/Table/Table"; import { TableLoader } from "./TableLoader"; const meta: Meta = { @@ -9,13 +7,11 @@ const meta: Meta = { component: TableLoader, decorators: [ (Story) => ( - - - - - -
-
+ + + + +
), ], }; diff --git a/site/src/components/TableLoader/TableLoader.tsx b/site/src/components/TableLoader/TableLoader.tsx index bdbb7efcd70ea..8b063b22a6dce 100644 --- a/site/src/components/TableLoader/TableLoader.tsx +++ b/site/src/components/TableLoader/TableLoader.tsx @@ -1,5 +1,8 @@ -import TableCell from "@mui/material/TableCell"; -import TableRow, { type TableRowProps } from "@mui/material/TableRow"; +import { + TableCell, + TableRow, + type TableRowProps, +} from "components/Table/Table"; import { cloneElement, type FC, isValidElement, type ReactNode } from "react"; import { Loader } from "../Loader/Loader"; diff --git a/site/src/components/Timeline/TimelineDateRow.tsx b/site/src/components/Timeline/TimelineDateRow.tsx index 3425b06abe05a..4f38f7b34a759 100644 --- a/site/src/components/Timeline/TimelineDateRow.tsx +++ b/site/src/components/Timeline/TimelineDateRow.tsx @@ -1,6 +1,5 @@ import { css, useTheme } from "@emotion/react"; -import TableCell from "@mui/material/TableCell"; -import TableRow from "@mui/material/TableRow"; +import { TableCell, TableRow } from "components/Table/Table"; import type { FC } from "react"; import { createDisplayDate } from "./utils"; diff --git a/site/src/components/Timeline/TimelineEntry.tsx b/site/src/components/Timeline/TimelineEntry.tsx index 727d5933c285d..2a9c15703267c 100644 --- a/site/src/components/Timeline/TimelineEntry.tsx +++ b/site/src/components/Timeline/TimelineEntry.tsx @@ -1,4 +1,4 @@ -import TableRow, { type TableRowProps } from "@mui/material/TableRow"; +import { TableRow, type TableRowProps } from "components/Table/Table"; import { forwardRef } from "react"; import { cn } from "utils/cn"; diff --git a/site/src/hooks/useClickable.ts b/site/src/hooks/useClickable.ts index 3f59851b8fc3b..6f5e0afff7ff7 100644 --- a/site/src/hooks/useClickable.ts +++ b/site/src/hooks/useClickable.ts @@ -4,6 +4,7 @@ import { type RefObject, useRef, } from "react"; +import { useEffectEvent } from "./hookPolyfills"; // Literally any 
object (ideally an HTMLElement) that has a .click method type ClickableElement = { @@ -43,10 +44,11 @@ export const useClickable = < role?: TRole, ): UseClickableResult => { const ref = useRef(null); + const stableOnClick = useEffectEvent(onClick); return { ref, - onClick, + onClick: stableOnClick, tabIndex: 0, role: (role ?? "button") as TRole, diff --git a/site/src/hooks/useClipboard.test.tsx b/site/src/hooks/useClipboard.test.tsx index 1d4d2eb702a81..156d70ab31593 100644 --- a/site/src/hooks/useClipboard.test.tsx +++ b/site/src/hooks/useClipboard.test.tsx @@ -9,9 +9,11 @@ * immediately pollutes the tests with false negatives. Even if something should * fail, it won't. */ -import { act, renderHook, screen } from "@testing-library/react"; + +import { renderHook, screen } from "@testing-library/react"; import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; import { ThemeOverride } from "contexts/ThemeProvider"; +import { act } from "react"; import themes, { DEFAULT_THEME } from "theme"; import { COPY_FAILED_MESSAGE, @@ -115,8 +117,8 @@ function setupMockClipboard(isSecure: boolean): SetupMockClipboardResult { }; } -function renderUseClipboard(inputs: TInput) { - return renderHook( +function renderUseClipboard(inputs?: UseClipboardInput) { + return renderHook( (props) => useClipboard(props), { initialProps: inputs, @@ -188,9 +190,9 @@ describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { const assertClipboardUpdateLifecycle = async ( result: RenderResult, - textToCheck: string, + textToCopy: string, ): Promise => { - await act(() => result.current.copyToClipboard()); + await act(() => result.current.copyToClipboard(textToCopy)); expect(result.current.showCopiedSuccess).toBe(true); // Because of timing trickery, any timeouts for flipping the copy status @@ -203,18 +205,18 @@ describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { await act(() => jest.runAllTimersAsync()); const clipboardText = 
getClipboardText(); - expect(clipboardText).toEqual(textToCheck); + expect(clipboardText).toEqual(textToCopy); }; it("Copies the current text to the user's clipboard", async () => { const textToCopy = "dogs"; - const { result } = renderUseClipboard({ textToCopy }); + const { result } = renderUseClipboard(); await assertClipboardUpdateLifecycle(result, textToCopy); }); it("Should indicate to components not to show successful copy after a set period of time", async () => { const textToCopy = "cats"; - const { result } = renderUseClipboard({ textToCopy }); + const { result } = renderUseClipboard(); await assertClipboardUpdateLifecycle(result, textToCopy); expect(result.current.showCopiedSuccess).toBe(false); }); @@ -222,16 +224,16 @@ describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { it("Should notify the user of an error using the provided callback", async () => { const textToCopy = "birds"; const onError = jest.fn(); - const { result } = renderUseClipboard({ textToCopy, onError }); + const { result } = renderUseClipboard({ onError }); setSimulateFailure(true); - await act(() => result.current.copyToClipboard()); + await act(() => result.current.copyToClipboard(textToCopy)); expect(onError).toBeCalled(); }); it("Should dispatch a new toast message to the global snackbar when errors happen while no error callback is provided to the hook", async () => { const textToCopy = "crow"; - const { result } = renderUseClipboard({ textToCopy }); + const { result } = renderUseClipboard(); /** * @todo Look into why deferring error-based state updates to the global @@ -241,7 +243,7 @@ describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { * flushed through the GlobalSnackbar component afterwards */ setSimulateFailure(true); - await act(() => result.current.copyToClipboard()); + await act(() => result.current.copyToClipboard(textToCopy)); const errorMessageNode = screen.queryByText(COPY_FAILED_MESSAGE); 
expect(errorMessageNode).not.toBeNull(); @@ -252,11 +254,91 @@ describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { // Snackbar state transitions that you might get if the hook uses the // default const textToCopy = "hamster"; - const { result } = renderUseClipboard({ textToCopy, onError: jest.fn() }); + const { result } = renderUseClipboard({ onError: jest.fn() }); + + setSimulateFailure(true); + await act(() => result.current.copyToClipboard(textToCopy)); + + expect(result.current.error).toBeInstanceOf(Error); + }); + it("Clears out existing errors if a new copy operation succeeds", async () => { + const text = "dummy-text"; + const { result } = renderUseClipboard(); setSimulateFailure(true); - await act(() => result.current.copyToClipboard()); + await act(() => result.current.copyToClipboard(text)); expect(result.current.error).toBeInstanceOf(Error); + + setSimulateFailure(false); + await assertClipboardUpdateLifecycle(result, text); + expect(result.current.error).toBeUndefined(); + }); + + // This test case is really important to ensure that it's easy to plop this + // inside of useEffect calls without having to think about dependencies too + // much + it("Ensures that the copyToClipboard function always maintains a stable reference across all re-renders", async () => { + const initialOnError = jest.fn(); + const { result, rerender } = renderUseClipboard({ + onError: initialOnError, + clearErrorOnSuccess: true, + }); + const initialCopy = result.current.copyToClipboard; + + // Re-render arbitrarily with no clipboard state transitions to make + // sure that a parent re-rendering doesn't break anything + rerender({ onError: initialOnError }); + expect(result.current.copyToClipboard).toBe(initialCopy); + + // Re-render with new onError prop and then swap back to simplify + // testing + rerender({ onError: jest.fn() }); + expect(result.current.copyToClipboard).toBe(initialCopy); + rerender({ onError: initialOnError }); + + // Re-render 
with a new clear value then swap back to simplify testing + rerender({ onError: initialOnError, clearErrorOnSuccess: false }); + expect(result.current.copyToClipboard).toBe(initialCopy); + rerender({ onError: initialOnError, clearErrorOnSuccess: true }); + + // Trigger a failed clipboard interaction + setSimulateFailure(true); + await act(() => result.current.copyToClipboard("dummy-text-2")); + expect(result.current.copyToClipboard).toBe(initialCopy); + + /** + * Trigger a successful clipboard interaction + * + * @todo For some reason, using the assertClipboardUpdateLifecycle + * helper triggers Jest errors with it thinking that values are being + * accessed after teardown, even though the problem doesn't exist for + * any other test case. + * + * It's not a huge deal, because we only need to inspect React after the + * interaction, instead of the full DOM, but for correctness, it would + * be nice if we could get this issue figured out. + */ + setSimulateFailure(false); + await act(() => result.current.copyToClipboard("dummy-text-2")); + expect(result.current.copyToClipboard).toBe(initialCopy); + }); + + it("Always uses the most up-to-date onError prop", async () => { + const initialOnError = jest.fn(); + const { result, rerender } = renderUseClipboard({ + onError: initialOnError, + }); + setSimulateFailure(true); + + const secondOnError = jest.fn(); + rerender({ onError: secondOnError }); + await act(() => result.current.copyToClipboard("dummy-text")); + + expect(initialOnError).not.toHaveBeenCalled(); + expect(secondOnError).toHaveBeenCalledTimes(1); + expect(secondOnError).toHaveBeenCalledWith( + "Failed to copy text to clipboard", + ); }); }); diff --git a/site/src/hooks/useClipboard.ts b/site/src/hooks/useClipboard.ts index 1eb91cc356155..88a57a61fc05a 100644 --- a/site/src/hooks/useClipboard.ts +++ b/site/src/hooks/useClipboard.ts @@ -1,22 +1,18 @@ import { displayError } from "components/GlobalSnackbar/utils"; -import { useEffect, useRef, useState } from 
"react"; +import { useCallback, useEffect, useRef, useState } from "react"; +import { useEffectEvent } from "./hookPolyfills"; const CLIPBOARD_TIMEOUT_MS = 1_000; export const COPY_FAILED_MESSAGE = "Failed to copy text to clipboard"; export const HTTP_FALLBACK_DATA_ID = "http-fallback"; export type UseClipboardInput = Readonly<{ - textToCopy: string; - - /** - * Optional callback to call when an error happens. If not specified, the hook - * will dispatch an error message to the GlobalSnackbar - */ onError?: (errorMessage: string) => void; + clearErrorOnSuccess?: boolean; }>; export type UseClipboardResult = Readonly<{ - copyToClipboard: () => Promise; + copyToClipboard: (textToCopy: string) => Promise; error: Error | undefined; /** @@ -40,47 +36,56 @@ export type UseClipboardResult = Readonly<{ showCopiedSuccess: boolean; }>; -export const useClipboard = (input: UseClipboardInput): UseClipboardResult => { - const { textToCopy, onError: errorCallback } = input; +export const useClipboard = (input?: UseClipboardInput): UseClipboardResult => { + const { onError = displayError, clearErrorOnSuccess = true } = input ?? 
{}; + const [showCopiedSuccess, setShowCopiedSuccess] = useState(false); const [error, setError] = useState(); const timeoutIdRef = useRef(undefined); useEffect(() => { - const clearIdOnUnmount = () => window.clearTimeout(timeoutIdRef.current); - return clearIdOnUnmount; + const clearTimeoutOnUnmount = () => { + window.clearTimeout(timeoutIdRef.current); + }; + return clearTimeoutOnUnmount; }, []); - const handleSuccessfulCopy = () => { + const stableOnError = useEffectEvent(() => onError(COPY_FAILED_MESSAGE)); + const handleSuccessfulCopy = useEffectEvent(() => { setShowCopiedSuccess(true); + if (clearErrorOnSuccess) { + setError(undefined); + } + timeoutIdRef.current = window.setTimeout(() => { setShowCopiedSuccess(false); }, CLIPBOARD_TIMEOUT_MS); - }; - - const copyToClipboard = async () => { - try { - await window.navigator.clipboard.writeText(textToCopy); - handleSuccessfulCopy(); - } catch (err) { - const fallbackCopySuccessful = simulateClipboardWrite(textToCopy); - if (fallbackCopySuccessful) { - handleSuccessfulCopy(); - return; - } + }); - const wrappedErr = new Error(COPY_FAILED_MESSAGE); - if (err instanceof Error) { - wrappedErr.stack = err.stack; + const copyToClipboard = useCallback( + async (textToCopy: string) => { + try { + await window.navigator.clipboard.writeText(textToCopy); + handleSuccessfulCopy(); + } catch (err) { + const fallbackCopySuccessful = simulateClipboardWrite(textToCopy); + if (fallbackCopySuccessful) { + handleSuccessfulCopy(); + return; + } + + const wrappedErr = new Error(COPY_FAILED_MESSAGE); + if (err instanceof Error) { + wrappedErr.stack = err.stack; + } + + console.error(wrappedErr); + setError(wrappedErr); + stableOnError(); } - - console.error(wrappedErr); - setError(wrappedErr); - - const notifyUser = errorCallback ?? 
displayError; - notifyUser(COPY_FAILED_MESSAGE); - } - }; + }, + [stableOnError, handleSuccessfulCopy], + ); return { showCopiedSuccess, error, copyToClipboard }; }; diff --git a/site/src/index.css b/site/src/index.css index 6486e162568ed..40adcb9fbab58 100644 --- a/site/src/index.css +++ b/site/src/index.css @@ -8,7 +8,8 @@ @tailwind utilities; @layer base { - :root { + :root, + .light { --content-primary: 240 10% 4%; --content-secondary: 240 5% 34%; --content-link: 221 83% 53%; diff --git a/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx index 84df0b97960f3..f26e85709da3b 100644 --- a/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx +++ b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx @@ -1,29 +1,24 @@ -import { css, type Interpolation, type Theme } from "@emotion/react"; import { InlineMarkdown } from "components/Markdown/Markdown"; import type { FC } from "react"; import { readableForegroundColor } from "utils/colors"; interface AnnouncementBannerViewProps { - message?: string; - backgroundColor?: string; + message: string; + backgroundColor: string; } export const AnnouncementBannerView: FC = ({ message, backgroundColor, }) => { - if (!message || !backgroundColor) { - return null; - } - return (
{message} @@ -31,20 +26,3 @@ export const AnnouncementBannerView: FC = ({
); }; - -const styles = { - banner: css` - padding: 12px; - display: flex; - align-items: center; - `, - wrapper: css` - margin-right: auto; - margin-left: auto; - font-weight: 400; - - & a { - color: inherit; - } - `, -} satisfies Record>; diff --git a/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx index a962c291d1a35..1e4411edf7811 100644 --- a/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx +++ b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx @@ -1,3 +1,4 @@ +import type { BannerConfig } from "api/typesGenerated"; import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; import { AnnouncementBannerView } from "./AnnouncementBannerView"; @@ -15,7 +16,12 @@ export const AnnouncementBanners: FC = () => { return ( <> {announcementBanners - .filter((banner) => banner.enabled) + .filter( + (banner): banner is Required => + banner.enabled && + Boolean(banner.message) && + Boolean(banner.background_color), + ) .map((banner) => ( = ({ fetchStats(); } }} - variant="text" + variant="subtle" > - + {timeUntilRefresh}s diff --git a/site/src/modules/dashboard/Navbar/Navbar.tsx b/site/src/modules/dashboard/Navbar/Navbar.tsx index 8db0252c0059d..012053341e976 100644 --- a/site/src/modules/dashboard/Navbar/Navbar.tsx +++ b/site/src/modules/dashboard/Navbar/Navbar.tsx @@ -1,4 +1,5 @@ import { buildInfo } from "api/queries/buildInfo"; +import type { LinkConfig } from "api/typesGenerated"; import { useProxy } from "contexts/ProxyContext"; import { useAuthenticated } from "hooks"; import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; @@ -25,12 +26,18 @@ export const Navbar: FC = () => { const canViewConnectionLog = featureVisibility.connection_log && permissions.viewAnyConnectionLog; + const uniqueLinks = new Map(); + for (const link of appearance.support_links ?? 
[]) { + if (!uniqueLinks.has(link.name)) { + uniqueLinks.set(link.name, link); + } + } return ( = { @@ -33,6 +29,7 @@ const meta: Meta = { canViewDeployment: true, canViewHealth: true, canViewOrganizations: true, + supportLinks: [], }, decorators: [withDashboardProvider], }; @@ -102,30 +99,69 @@ export const IdleTasks: Story = { queries: [ { key: ["tasks", tasksFilter], - data: [ - { - prompt: "Task 1", - workspace: { - ...MockWorkspace, - latest_app_status: { - ...MockWorkspaceAppStatus, - state: "idle", - }, - }, - }, - { - prompt: "Task 2", - workspace: MockWorkspace, - }, - { - prompt: "Task 3", - workspace: { - ...MockWorkspace, - latest_app_status: MockWorkspaceAppStatus, - }, - }, - ], + data: MockTasks, }, ], }, }; + +export const SupportLinks: Story = { + args: { + user: MockUserMember, + canViewAuditLog: false, + canViewDeployment: false, + canViewHealth: false, + canViewOrganizations: false, + supportLinks: [ + { + name: "This is a bug", + icon: "bug", + target: "#", + }, + { + name: "This is a star", + icon: "star", + target: "#", + location: "navbar", + }, + { + name: "This is a chat", + icon: "chat", + target: "#", + location: "navbar", + }, + { + name: "No icon here", + icon: "", + target: "#", + location: "navbar", + }, + { + name: "No icon here too", + icon: "", + target: "#", + }, + ], + }, +}; + +export const DefaultSupportLinks: Story = { + args: { + user: MockUserMember, + canViewAuditLog: false, + canViewDeployment: false, + canViewHealth: false, + canViewOrganizations: false, + supportLinks: [ + { icon: "docs", name: "Documentation", target: "" }, + { icon: "bug", name: "Report a bug", target: "" }, + { + icon: "chat", + name: "Join the Coder Discord", + target: "", + location: "navbar", + }, + { icon: "star", name: "Star the Repo", target: "" }, + ], + }, +}; diff --git a/site/src/modules/dashboard/Navbar/NavbarView.test.tsx b/site/src/modules/dashboard/Navbar/NavbarView.test.tsx index 9d011089ba6c5..f313b6aa2b33e 100644 --- 
a/site/src/modules/dashboard/Navbar/NavbarView.test.tsx +++ b/site/src/modules/dashboard/Navbar/NavbarView.test.tsx @@ -34,6 +34,7 @@ describe("NavbarView", () => { canViewHealth canViewAuditLog canViewConnectionLog + supportLinks={[]} />, ); const workspacesLink = @@ -52,6 +53,7 @@ describe("NavbarView", () => { canViewHealth canViewAuditLog canViewConnectionLog + supportLinks={[]} />, ); const templatesLink = @@ -70,6 +72,7 @@ describe("NavbarView", () => { canViewHealth canViewAuditLog canViewConnectionLog + supportLinks={[]} />, ); const deploymentMenu = await screen.findByText("Admin settings"); @@ -89,6 +92,7 @@ describe("NavbarView", () => { canViewHealth canViewAuditLog canViewConnectionLog + supportLinks={[]} />, ); const deploymentMenu = await screen.findByText("Admin settings"); diff --git a/site/src/modules/dashboard/Navbar/NavbarView.tsx b/site/src/modules/dashboard/Navbar/NavbarView.tsx index 0cafaa8fdd46f..eb7b8afe5c923 100644 --- a/site/src/modules/dashboard/Navbar/NavbarView.tsx +++ b/site/src/modules/dashboard/Navbar/NavbarView.tsx @@ -21,13 +21,14 @@ import { cn } from "utils/cn"; import { DeploymentDropdown } from "./DeploymentDropdown"; import { MobileMenu } from "./MobileMenu"; import { ProxyMenu } from "./ProxyMenu"; +import { SupportIcon } from "./SupportIcon"; import { UserDropdown } from "./UserDropdown/UserDropdown"; interface NavbarViewProps { logo_url?: string; user: TypesGen.User; buildInfo?: TypesGen.BuildInfoResponse; - supportLinks?: readonly TypesGen.LinkConfig[]; + supportLinks: readonly TypesGen.LinkConfig[]; onSignOut: () => void; canViewDeployment: boolean; canViewOrganizations: boolean; @@ -71,6 +72,16 @@ export const NavbarView: FC = ({
+ {supportLinks.filter(isNavbarLink).map((link) => ( +
+ +
+ ))} + {proxyContextValue && (
@@ -121,7 +132,7 @@ export const NavbarView: FC = ({ !isNavbarLink(link))} onSignOut={onSignOut} />
@@ -189,8 +200,8 @@ const TasksNavItem: FC = ({ user }) => { process.env.NODE_ENV === "development" || process.env.STORYBOOK, ); - const filter = { - username: user.username, + const filter: TypesGen.TasksFilter = { + owner: user.username, }; const { data: idleCount } = useQuery({ queryKey: ["tasks", filter], @@ -200,8 +211,7 @@ const TasksNavItem: FC = ({ user }) => { refetchOnWindowFocus: true, initialData: [], select: (data) => - data.filter((task) => task.workspace.latest_app_status?.state === "idle") - .length, + data.filter((task) => task.current_state?.state === "idle").length, }); if (!canSeeTasks) { @@ -240,3 +250,36 @@ const TasksNavItem: FC = ({ user }) => { function idleTasksLabel(count: number) { return `You have ${count} ${count === 1 ? "task" : "tasks"} waiting for input`; } + +function isNavbarLink(link: TypesGen.LinkConfig): boolean { + return link.location === "navbar"; +} + +interface SupportButtonProps { + name: string; + target: string; + icon: string; + location?: string; +} + +const SupportButton: FC = ({ name, target, icon }) => { + return ( + + ); +}; diff --git a/site/src/modules/dashboard/Navbar/SupportIcon.tsx b/site/src/modules/dashboard/Navbar/SupportIcon.tsx new file mode 100644 index 0000000000000..6c32f03aea67a --- /dev/null +++ b/site/src/modules/dashboard/Navbar/SupportIcon.tsx @@ -0,0 +1,39 @@ +import type { SvgIconProps } from "@mui/material/SvgIcon"; +import { ExternalImage } from "components/ExternalImage/ExternalImage"; +import { BookOpenTextIcon, BugIcon, MessageSquareIcon } from "lucide-react"; +import type { FC } from "react"; + +interface SupportIconProps { + icon: string; + className?: string; +} + +export const SupportIcon: FC = ({ icon, className }) => { + switch (icon) { + case "bug": + return ; + case "chat": + return ; + case "docs": + return ; + case "star": + return ; + default: + return ; + } +}; + +const GithubStar: FC = (props) => ( + +); diff --git 
a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx index 6240a68c9509e..4c796ee436d47 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx @@ -11,7 +11,7 @@ import { UserDropdownContent } from "./UserDropdownContent"; interface UserDropdownProps { user: TypesGen.User; buildInfo?: TypesGen.BuildInfoResponse; - supportLinks?: readonly TypesGen.LinkConfig[]; + supportLinks: readonly TypesGen.LinkConfig[]; onSignOut: () => void; } diff --git a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.test.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.test.tsx index 70e2f35c941e1..1d25d894eacb6 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.test.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.test.tsx @@ -8,7 +8,11 @@ describe("UserDropdownContent", () => { it("has the correct link for the account item", async () => { render( - + , ); await waitForLoaderToBeRemoved(); @@ -25,7 +29,11 @@ describe("UserDropdownContent", () => { const onSignOut = jest.fn(); render( - + , ); await waitForLoaderToBeRemoved(); diff --git a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx index fd0636da3d457..b56c1c67deadd 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx @@ -6,24 +6,20 @@ import { } from "@emotion/react"; import Divider from "@mui/material/Divider"; import MenuItem from "@mui/material/MenuItem"; -import type { SvgIconProps } from "@mui/material/SvgIcon"; import Tooltip from "@mui/material/Tooltip"; import { PopoverClose } from "@radix-ui/react-popover"; import type * as TypesGen from "api/typesGenerated"; 
import { CopyButton } from "components/CopyButton/CopyButton"; -import { ExternalImage } from "components/ExternalImage/ExternalImage"; import { Stack } from "components/Stack/Stack"; import { - BookOpenTextIcon, - BugIcon, CircleUserIcon, LogOutIcon, - MessageSquareIcon, MonitorDownIcon, SquareArrowOutUpRightIcon, } from "lucide-react"; -import type { FC, JSX } from "react"; +import type { FC } from "react"; import { Link } from "react-router"; +import { SupportIcon } from "../SupportIcon"; export const Language = { accountLabel: "Account", @@ -34,7 +30,7 @@ export const Language = { interface UserDropdownContentProps { user: TypesGen.User; buildInfo?: TypesGen.BuildInfoResponse; - supportLinks?: readonly TypesGen.LinkConfig[]; + supportLinks: readonly TypesGen.LinkConfig[]; onSignOut: () => void; } @@ -44,26 +40,6 @@ export const UserDropdownContent: FC = ({ supportLinks, onSignOut, }) => { - const renderMenuIcon = (icon: string): JSX.Element => { - switch (icon) { - case "bug": - return ; - case "chat": - return ; - case "docs": - return ; - case "star": - return ; - default: - return ( - - ); - } - }; - return (
@@ -76,7 +52,7 @@ export const UserDropdownContent: FC = ({ - + Install CLI @@ -85,14 +61,14 @@ export const UserDropdownContent: FC = ({ - + {Language.accountLabel} - + {Language.signOutLabel} @@ -109,7 +85,12 @@ export const UserDropdownContent: FC = ({ > - {renderMenuIcon(link.icon)} + {link.icon && ( + + )} {link.name} @@ -152,21 +133,6 @@ export const UserDropdownContent: FC = ({ ); }; -const GithubStar: FC = (props) => ( - -); - const styles = { info: (theme) => [ theme.typography.body2 as CSSObject, @@ -196,11 +162,6 @@ const styles = { transition: background-color 0.3s ease; } `, - menuItemIcon: (theme) => ({ - color: theme.palette.text.secondary, - width: 20, - height: 20, - }), menuItemText: { fontSize: 14, }, diff --git a/site/src/modules/resources/AppLink/ShareIcon.tsx b/site/src/modules/resources/AppLink/ShareIcon.tsx index 7e6660fe4b162..d9d536f999259 100644 --- a/site/src/modules/resources/AppLink/ShareIcon.tsx +++ b/site/src/modules/resources/AppLink/ShareIcon.tsx @@ -1,9 +1,11 @@ -import BusinessIcon from "@mui/icons-material/Business"; -import GroupOutlinedIcon from "@mui/icons-material/GroupOutlined"; -import PublicOutlinedIcon from "@mui/icons-material/PublicOutlined"; import Tooltip from "@mui/material/Tooltip"; import type * as TypesGen from "api/typesGenerated"; -import { SquareArrowOutUpRightIcon } from "lucide-react"; +import { + Building2Icon, + GlobeIcon, + SquareArrowOutUpRightIcon, + UsersIcon, +} from "lucide-react"; interface ShareIconProps { app: TypesGen.WorkspaceApp; @@ -20,21 +22,21 @@ export const ShareIcon = ({ app }: ShareIconProps) => { if (app.sharing_level === "authenticated") { return ( - + ); } if (app.sharing_level === "organization") { return ( - + ); } if (app.sharing_level === "public") { return ( - + ); } diff --git a/site/src/modules/resources/PortForwardButton.tsx b/site/src/modules/resources/PortForwardButton.tsx index 749ddfc32bc01..78d4e999b340c 100644 --- a/site/src/modules/resources/PortForwardButton.tsx +++ 
b/site/src/modules/resources/PortForwardButton.tsx @@ -1,8 +1,4 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; -import BusinessIcon from "@mui/icons-material/Business"; -import LockIcon from "@mui/icons-material/Lock"; -import LockOpenIcon from "@mui/icons-material/LockOpen"; -import SensorsIcon from "@mui/icons-material/Sensors"; import FormControl from "@mui/material/FormControl"; import Link from "@mui/material/Link"; import MenuItem from "@mui/material/MenuItem"; @@ -45,8 +41,12 @@ import { } from "components/Tooltip/Tooltip"; import { useFormik } from "formik"; import { + BuildingIcon, ChevronDownIcon, ExternalLinkIcon, + LockIcon, + LockOpenIcon, + RadioIcon, ShareIcon, X as XIcon, } from "lucide-react"; @@ -385,7 +385,7 @@ export const PortForwardPopoverView: FC = ({ target="_blank" rel="noreferrer" > - + {port.port} = ({ rel="noreferrer" > {share.share_level === "public" ? ( - + ) : share.share_level === "organization" ? ( - + ) : ( - + )} {label} diff --git a/site/src/modules/resources/Resources.tsx b/site/src/modules/resources/Resources.tsx index 094092619347b..13e1bbf835252 100644 --- a/site/src/modules/resources/Resources.tsx +++ b/site/src/modules/resources/Resources.tsx @@ -1,5 +1,5 @@ -import Button from "@mui/material/Button"; import type { WorkspaceAgent, WorkspaceResource } from "api/typesGenerated"; +import { Button } from "components/Button/Button"; import { DropdownArrow } from "components/DropdownArrow/DropdownArrow"; import { Stack } from "components/Stack/Stack"; import { type FC, type JSX, useState } from "react"; @@ -37,8 +37,9 @@ export const Resources: FC = ({ resources, agentRow }) => { {hasHideResources && (