diff --git a/.depcheckrc.json b/.depcheckrc.json deleted file mode 100644 index 221e718c..00000000 --- a/.depcheckrc.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "ignoreMatches": [ - "@types/*", - "eslint-*", - "prettier*", - "husky", - "rimraf", - "vitest", - "vite", - "typescript", - "wrangler", - "electron*" - ], - "ignoreDirs": ["dist", "build", "node_modules", ".git"], - "skipMissing": false, - "ignorePatterns": ["*.d.ts", "*.test.ts", "*.test.tsx", "*.spec.ts", "*.spec.tsx"] -} diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index 2f8f89bc..00000000 --- a/.dockerignore +++ /dev/null @@ -1,26 +0,0 @@ -# Ignore Git and GitHub files -.git -.github/ - -# Ignore Husky configuration files -.husky/ - -# Ignore documentation and metadata files -CONTRIBUTING.md -LICENSE -README.md - -# Ignore environment examples and sensitive info -.env -*.local -*.example - -# Ignore node modules, logs and cache files -**/*.log -**/node_modules -**/dist -**/build -**/.cache -logs -dist-ssr -.DS_Store diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index 5274ff01..00000000 --- a/.editorconfig +++ /dev/null @@ -1,13 +0,0 @@ -root = true - -[*] -indent_style = space -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 120 -indent_size = 2 - -[*.md] -trim_trailing_whitespace = false diff --git a/.env.example b/.env.example deleted file mode 100644 index 47fb7aaf..00000000 --- a/.env.example +++ /dev/null @@ -1,142 +0,0 @@ -# shellcheck disable=SC2034,SC2148 - -# Rename this file to .env once you have filled in the below environment variables! 
- -# Get your GROQ API Key here - -# https://console.groq.com/keys -# You only need this environment variable set if you want to use Groq models -GROQ_API_KEY= - -# Get your HuggingFace API Key here - -# https://huggingface.co/settings/tokens -# You only need this environment variable set if you want to use HuggingFace models -HuggingFace_API_KEY= - - -# Get your Open AI API Key by following these instructions - -# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key -# You only need this environment variable set if you want to use GPT models -OPENAI_API_KEY= - -# Get your Anthropic API Key in your account settings - -# https://console.anthropic.com/settings/keys -# You only need this environment variable set if you want to use Claude models -ANTHROPIC_API_KEY= - -# Get your OpenRouter API Key in your account settings - -# https://openrouter.ai/settings/keys -# You only need this environment variable set if you want to use OpenRouter models -OPEN_ROUTER_API_KEY= - -# Get your Google Generative AI API Key by following these instructions - -# https://console.cloud.google.com/apis/credentials -# You only need this environment variable set if you want to use Google Generative AI models -GOOGLE_GENERATIVE_AI_API_KEY= - -# You only need this environment variable set if you want to use oLLAMA models -# DONT USE http://localhost:11434 due to IPV6 issues -# USE EXAMPLE http://127.0.0.1:11434 -OLLAMA_API_BASE_URL= - -# You only need this environment variable set if you want to use OpenAI Like models -OPENAI_LIKE_API_BASE_URL= - -# You only need this environment variable set if you want to use Together AI models -TOGETHER_API_BASE_URL= - -# You only need this environment variable set if you want to use DeepSeek models through their API -DEEPSEEK_API_KEY= - -# Get your OpenAI Like API Key -OPENAI_LIKE_API_KEY= - -# Get your Together API Key -TOGETHER_API_KEY= - -# You only need this environment variable set if you want to use Hyperbolic models -#Get 
your Hyperbolics API Key at https://app.hyperbolic.xyz/settings -#baseURL="https://api.hyperbolic.xyz/v1/chat/completions" -HYPERBOLIC_API_KEY= -HYPERBOLIC_API_BASE_URL= - -# Get your Mistral API Key by following these instructions - -# https://console.mistral.ai/api-keys/ -# You only need this environment variable set if you want to use Mistral models -MISTRAL_API_KEY= - -# Get the Cohere Api key by following these instructions - -# https://dashboard.cohere.com/api-keys -# You only need this environment variable set if you want to use Cohere models -COHERE_API_KEY= - -# Get LMStudio Base URL from LM Studio Developer Console -# Make sure to enable CORS -# DONT USE http://localhost:1234 due to IPV6 issues -# Example: http://127.0.0.1:1234 -LMSTUDIO_API_BASE_URL= - -# Get your xAI API key -# https://x.ai/api -# You only need this environment variable set if you want to use xAI models -XAI_API_KEY= - -# Get your Perplexity API Key here - -# https://www.perplexity.ai/settings/api -# You only need this environment variable set if you want to use Perplexity models -PERPLEXITY_API_KEY= - -# Get your AWS configuration -# https://console.aws.amazon.com/iam/home -# The JSON should include the following keys: -# - region: The AWS region where Bedrock is available. -# - accessKeyId: Your AWS access key ID. -# - secretAccessKey: Your AWS secret access key. -# - sessionToken (optional): Temporary session token if using an IAM role or temporary credentials. -# Example JSON: -# {"region": "us-east-1", "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "sessionToken": "yourSessionToken"} -AWS_BEDROCK_CONFIG= - -# Include this environment variable if you want more logging for debugging locally -VITE_LOG_LEVEL=debug - -# Get your GitHub Personal Access Token here - -# https://github.com/settings/tokens -# This token is used for: -# 1. Importing/cloning GitHub repositories without rate limiting -# 2. Accessing private repositories -# 3. 
Automatic GitHub authentication (no need to manually connect in the UI) -# -# For classic tokens, ensure it has these scopes: repo, read:org, read:user -# For fine-grained tokens, ensure it has Repository and Organization access -VITE_GITHUB_ACCESS_TOKEN= - -# Specify the type of GitHub token you're using -# Can be 'classic' or 'fine-grained' -# Classic tokens are recommended for broader access -VITE_GITHUB_TOKEN_TYPE=classic - -# Example Context Values for qwen2.5-coder:32b -# -# DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM -# DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM -# DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM -# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM -DEFAULT_NUM_CTX= - -# LangSmith Tracing Configuration for Claude Code -# These variables enable OpenTelemetry tracing to LangSmith for monitoring Claude Code operations -# See langsmith-tracing.md for detailed setup instructions -# -# Get your LangSmith API Key from https://smith.langchain.com -# You only need these environment variables if you want to trace Claude Code operations -LANGSMITH_API_KEY= -LANGSMITH_PROJECT=claude-code-trace - -# Claude Code Telemetry (set to 1 to enable) -CLAUDE_CODE_ENABLE_TELEMETRY= - -# OpenTelemetry Configuration (uncomment to enable) -# OTEL_LOGS_EXPORTER=otlp -# OTEL_EXPORTER_OTLP_LOGS_PROTOCOL=http/json -# OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=https://api.smith.langchain.com/otel/v1/claude_code -# OTEL_LOG_USER_PROMPTS=1 diff --git a/.env.production b/.env.production deleted file mode 100644 index b6f9b4f8..00000000 --- a/.env.production +++ /dev/null @@ -1,117 +0,0 @@ -# shellcheck disable=SC2034,SC2148 - -# Rename this file to .env once you have filled in the below environment variables! 
- -# Get your GROQ API Key here - -# https://console.groq.com/keys -# You only need this environment variable set if you want to use Groq models -GROQ_API_KEY= - -# Get your HuggingFace API Key here - -# https://huggingface.co/settings/tokens -# You only need this environment variable set if you want to use HuggingFace models -HuggingFace_API_KEY= - -# Get your Open AI API Key by following these instructions - -# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key -# You only need this environment variable set if you want to use GPT models -OPENAI_API_KEY= - -# Get your Anthropic API Key in your account settings - -# https://console.anthropic.com/settings/keys -# You only need this environment variable set if you want to use Claude models -ANTHROPIC_API_KEY= - -# Get your OpenRouter API Key in your account settings - -# https://openrouter.ai/settings/keys -# You only need this environment variable set if you want to use OpenRouter models -OPEN_ROUTER_API_KEY= - -# Get your Google Generative AI API Key by following these instructions - -# https://console.cloud.google.com/apis/credentials -# You only need this environment variable set if you want to use Google Generative AI models -GOOGLE_GENERATIVE_AI_API_KEY= - -# You only need this environment variable set if you want to use oLLAMA models -# DONT USE http://localhost:11434 due to IPV6 issues -# USE EXAMPLE http://127.0.0.1:11434 -OLLAMA_API_BASE_URL= - -# You only need this environment variable set if you want to use OpenAI Like models -OPENAI_LIKE_API_BASE_URL= - -# You only need this environment variable set if you want to use Together AI models -TOGETHER_API_BASE_URL= - -# You only need this environment variable set if you want to use DeepSeek models through their API -DEEPSEEK_API_KEY= - -# Get your OpenAI Like API Key -OPENAI_LIKE_API_KEY= - -# Get your Together API Key -TOGETHER_API_KEY= - -# You only need this environment variable set if you want to use Hyperbolic models 
-HYPERBOLIC_API_KEY= -HYPERBOLIC_API_BASE_URL= - -# Get your Mistral API Key by following these instructions - -# https://console.mistral.ai/api-keys/ -# You only need this environment variable set if you want to use Mistral models -MISTRAL_API_KEY= - -# Get the Cohere Api key by following these instructions - -# https://dashboard.cohere.com/api-keys -# You only need this environment variable set if you want to use Cohere models -COHERE_API_KEY= - -# Get LMStudio Base URL from LM Studio Developer Console -# Make sure to enable CORS -# DONT USE http://localhost:1234 due to IPV6 issues -# Example: http://127.0.0.1:1234 -LMSTUDIO_API_BASE_URL= - -# Get your xAI API key -# https://x.ai/api -# You only need this environment variable set if you want to use xAI models -XAI_API_KEY= - -# Get your Perplexity API Key here - -# https://www.perplexity.ai/settings/api -# You only need this environment variable set if you want to use Perplexity models -PERPLEXITY_API_KEY= - -# Get your AWS configuration -# https://console.aws.amazon.com/iam/home -AWS_BEDROCK_CONFIG= - -# Include this environment variable if you want more logging for debugging locally -VITE_LOG_LEVEL= - -# Get your GitHub Personal Access Token here - -# https://github.com/settings/tokens -# This token is used for: -# 1. Importing/cloning GitHub repositories without rate limiting -# 2. Accessing private repositories -# 3. 
Automatic GitHub authentication (no need to manually connect in the UI) -# -# For classic tokens, ensure it has these scopes: repo, read:org, read:user -# For fine-grained tokens, ensure it has Repository and Organization access -VITE_GITHUB_ACCESS_TOKEN= - -# Specify the type of GitHub token you're using -# Can be 'classic' or 'fine-grained' -# Classic tokens are recommended for broader access -VITE_GITHUB_TOKEN_TYPE= - -# Netlify Authentication -VITE_NETLIFY_ACCESS_TOKEN= - -# Example Context Values for qwen2.5-coder:32b -# -# DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM -# DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM -# DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM -# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM -DEFAULT_NUM_CTX= \ No newline at end of file diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index c47f8528..00000000 --- a/.eslintrc.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "env": { - "browser": true, - "es2021": true - }, - "extends": ["eslint:recommended", "plugin:prettier/recommended"], - "rules": { - "no-console": "off" - } -} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index b1074c2d..00000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -@Gerome-Elassaad \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index ab1b5981..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -github: codinit-dev diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 4c129002..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: 'Bug report' -description: Help us fix an issue -body: - - type: markdown - attributes: - value: Thanks for reporting an issue with [codinit.dev](https://codinit.dev)! - - - type: textarea - id: description - attributes: - label: What happened? 
- placeholder: | - **Bug:** … - **Expected:** … - validations: - required: true - - - type: textarea - id: steps - attributes: - label: How to reproduce - placeholder: | - 1. … - 2. … - 3. … - validations: - required: true - - - type: textarea - id: environment - attributes: - label: Environment - placeholder: | - - Browser: - - OS: - - Provider/Model: - - - type: textarea - id: additional - attributes: - label: Extra info - placeholder: Screenshots, links, etc. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index c0d44d6f..00000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,5 +0,0 @@ -blank_issues_enabled: false -contact_links: - - name: codinit.dev related issues - url: https://github.com/codinit-dev/codinit-dev/issues/new/choose - about: Report issues related to codinit.dev diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md deleted file mode 100644 index 9d6719cf..00000000 --- a/.github/ISSUE_TEMPLATE/feature.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Issue -title: '' -labels: - - feature -assignees: '' ---- - -# Motivation - - - -# Proposed Solution - - - -# Additional Context - - \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 45afc677..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -name: Feature request -about: Submit a feature request that has been denied by other app dev platforms -title: '' -labels: '' -assignees: '' ---- - -**Describe the solution you'd like:** - - - -**Have you requested this from a different platform and been denied?:** - - - -**If you answered yes to the last question please write the platform that refused your suggestion.:** - - - -**Additional context:** - - diff --git a/.github/actions/setup-and-build/action.yaml b/.github/actions/setup-and-build/action.yaml deleted 
file mode 100644 index 8004384d..00000000 --- a/.github/actions/setup-and-build/action.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: Setup and Build -description: Generic setup action -inputs: - pnpm-version: - description: 'The version of pnpm to use' - required: false - default: '9.15.9' - node-version: - description: 'The version of Node.js to use' - required: false - default: '20.15.1' -runs: - using: composite - - steps: - - uses: pnpm/action-setup@v4 - with: - version: ${{ inputs.pnpm-version }} - run_install: false - - - name: Set Node.js version to ${{ inputs.node-version }} - uses: actions/setup-node@v4 - with: - node-version: ${{ inputs.node-version }} - cache: pnpm - - - name: Install dependencies and build project - shell: bash - run: | - pnpm install - pnpm run build \ No newline at end of file diff --git a/.github/actions/setup-and-build/security.yaml b/.github/actions/setup-and-build/security.yaml deleted file mode 100644 index 2f445234..00000000 --- a/.github/actions/setup-and-build/security.yaml +++ /dev/null @@ -1,120 +0,0 @@ -name: Security Analysis - -on: - push: - branches: [main, stable] - pull_request: - branches: [main] - schedule: - # Run weekly security scan on Sundays at 2 AM - - cron: '0 2 * * 0' - -permissions: - actions: read - contents: read - security-events: read - -jobs: - codeql: - name: CodeQL Analysis - runs-on: ubuntu-latest - timeout-minutes: 45 - - strategy: - fail-fast: false - matrix: - language: ['javascript', 'typescript'] - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - queries: security-extended,security-and-quality - - - name: Autobuild - uses: github/codeql-action/autobuild@v3 - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" - upload: false - output: "codeql-results" - - - name: Upload CodeQL results as artifact 
- uses: actions/upload-artifact@v4 - if: always() - with: - name: codeql-results-${{ matrix.language }} - path: codeql-results - - dependency-scan: - name: Dependency Vulnerability Scan - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: '20.18.0' - - - name: Install pnpm - uses: pnpm/action-setup@v4 - with: - version: '9.15.9' - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Run npm audit - run: pnpm audit --audit-level moderate - continue-on-error: true - - - name: Generate SBOM - uses: anchore/sbom-action@v0 - with: - path: ./ - format: spdx-json - artifact-name: sbom.spdx.json - - - name: Upload SBOM as artifact - uses: actions/upload-artifact@v4 - if: always() - with: - name: sbom-results - path: | - sbom.spdx.json - **/sbom.spdx.json - - secrets-scan: - name: Secrets Detection - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Run Trivy secrets scan - uses: aquasecurity/trivy-action@master - with: - scan-type: 'fs' - scan-ref: '.' 
- format: 'sarif' - output: 'trivy-secrets-results.sarif' - scanners: 'secret' - - - name: Upload Trivy secrets results as artifact - uses: actions/upload-artifact@v4 - if: always() - with: - name: trivy-secrets-results - path: trivy-secrets-results.sarif diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml deleted file mode 100644 index 8ab236d5..00000000 --- a/.github/workflows/ci.yaml +++ /dev/null @@ -1,27 +0,0 @@ -name: CI/CD - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup and Build - uses: ./.github/actions/setup-and-build - - - name: Run type check - run: pnpm run typecheck - - # - name: Run ESLint - # run: pnpm run lint - - - name: Run tests - run: pnpm run test diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml deleted file mode 100644 index f6f0e6d0..00000000 --- a/.github/workflows/docker.yaml +++ /dev/null @@ -1,63 +0,0 @@ -name: Docker Publish - -on: - push: - branches: [main, stable] - tags: ["v*", "*.*.*"] - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -permissions: - packages: write - contents: read - -env: - REGISTRY: ghcr.io - IMAGE_NAME: codinit-dev/codinit-dev - -jobs: - docker-build-publish: - runs-on: ubuntu-latest - # timeout-minutes: 30 - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Log in to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract metadata for Docker image - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }} - 
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/stable' }} - type=ref,event=tag - type=sha,format=short - type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/stable' }} - - name: Build and push Docker image - uses: docker/build-push-action@v6 - with: - context: . - platforms: linux/amd64,linux/arm64 - target: codinit-dev-production - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - - name: Check manifest - run: | - IMAGE_NAME_LOWER=$(echo "${{ env.IMAGE_NAME }}" | tr '[:upper:]' '[:lower:]') - docker buildx imagetools inspect ${{ env.REGISTRY }}/${IMAGE_NAME_LOWER}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/electron.yml b/.github/workflows/electron.yml deleted file mode 100644 index 63ef8902..00000000 --- a/.github/workflows/electron.yml +++ /dev/null @@ -1,128 +0,0 @@ -name: Electron Build and Release - -on: - workflow_dispatch: - inputs: - tag: - description: 'Tag for the release (e.g., v1.0.0). Leave empty if not applicable.' 
- required: false - os: - description: 'Operating system to build for (all, ubuntu-latest, windows-latest, macos-latest)' - required: false - default: 'all' - push: - branches: - - electron - tags: - - 'v*' - -permissions: - contents: write - -jobs: - build: - runs-on: ${{ matrix.os }} - - strategy: - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - node-version: [20.15.1] - fail-fast: false - - steps: - - name: Check out Git repository - uses: actions/checkout@v4 - - - name: Install Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install pnpm - uses: pnpm/action-setup@v2 - with: - version: 9.15.9 - run_install: false - - - name: Get pnpm store directory - id: get-pnpm-store-path - shell: bash - run: | - echo "store_path=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.get-pnpm-store-path.outputs.store_path }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') || 'no-lock' }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install - - - name: Install Linux dependencies - if: matrix.os == 'ubuntu-latest' - run: | - sudo apt-get update - sudo apt-get install -y rpm - - - name: Build Electron app - env: - GH_TOKEN: ${{ secrets.TOKEN }} - NODE_OPTIONS: "--max_old_space_size=4096" - run: | - if [ "$RUNNER_OS" == "Windows" ]; then - pnpm run electron:build:win - elif [ "$RUNNER_OS" == "macOS" ]; then - pnpm run electron:build:mac - else - pnpm run electron:build:linux - fi - shell: bash - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: electron-${{ runner.os }}-artifacts - path: | - dist/*.exe - dist/*.dmg - dist/*.deb - dist/*.AppImage - dist/*.zip - dist/*.blockmap - dist/*.yml - retention-days: 1 - if-no-files-found: warn - - release: - needs: build - runs-on: ubuntu-latest - if: github.event_name == 'push' && github.ref_type == 'tag' - 
- steps: - - name: Download all artifacts - uses: actions/download-artifact@v4 - with: - path: artifacts - - - name: Display structure of downloaded files - run: ls -R artifacts - - - name: Create Release - uses: softprops/action-gh-release@v2 - with: - tag_name: ${{ github.ref_name }} - draft: false - name: Release ${{ github.ref_name }} - files: | - artifacts/**/*.exe - artifacts/**/*.dmg - artifacts/**/*.deb - artifacts/**/*.AppImage - artifacts/**/*.zip - artifacts/**/*.blockmap - artifacts/**/*.yml - env: - GITHUB_TOKEN: ${{ secrets.TOKEN }} \ No newline at end of file diff --git a/.github/workflows/pr-release-validation.yaml b/.github/workflows/pr-release-validation.yaml deleted file mode 100644 index 9c5787e2..00000000 --- a/.github/workflows/pr-release-validation.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: PR Validation - -on: - pull_request: - types: [opened, synchronize, reopened, labeled, unlabeled] - branches: - - main - -jobs: - validate: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Validate PR Labels - run: | - if [[ "${{ contains(github.event.pull_request.labels.*.name, 'stable-release') }}" == "true" ]]; then - echo "✓ PR has stable-release label" - - # Check version bump labels - if [[ "${{ contains(github.event.pull_request.labels.*.name, 'major') }}" == "true" ]]; then - echo "✓ Major version bump requested" - elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'minor') }}" == "true" ]]; then - echo "✓ Minor version bump requested" - else - echo "✓ Patch version bump will be applied" - fi - else - echo "This PR doesn't have the stable-release label. No release will be created." 
- fi diff --git a/.github/workflows/update-stable.yml b/.github/workflows/update-stable.yml deleted file mode 100644 index c2df1b1b..00000000 --- a/.github/workflows/update-stable.yml +++ /dev/null @@ -1,102 +0,0 @@ -name: Update Stable Branch - -# DISABLED: Using manual tag workflow instead -# To re-enable, uncomment the 'on:' section below -on: - workflow_dispatch: - -# on: -# push: -# branches: -# - main - -permissions: - contents: write - -jobs: - prepare-release: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Configure Git - run: | - git config --global user.name 'github-actions[bot]' - git config --global user.email 'github-actions[bot]@users.noreply.github.com' - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: '20' - - - name: Install pnpm - uses: pnpm/action-setup@v2 - with: - version: latest - run_install: false - - - name: Get pnpm store directory - id: pnpm-cache - shell: bash - run: | - echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Get Current Version - id: current_version - run: | - CURRENT_VERSION=$(node -p "require('./package.json').version") - echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT - - - name: Install semver - run: pnpm add -g semver - - - name: Determine Version Bump - id: version_bump - run: | - # Default to patch since this workflow is now manual only - echo "bump=patch" >> $GITHUB_OUTPUT - - - name: Bump Version - id: bump_version - run: | - NEW_VERSION=$(semver -i ${{ steps.version_bump.outputs.bump }} ${{ steps.current_version.outputs.version }}) - echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT - - - name: Update Package.json - run: | - NEW_VERSION=${{ steps.bump_version.outputs.new_version }} 
- pnpm version $NEW_VERSION --no-git-tag-version --allow-same-version - - - name: Get the latest commit hash and version tag - run: | - echo "COMMIT_HASH=$(git rev-parse HEAD)" >> $GITHUB_ENV - echo "NEW_VERSION=${{ steps.bump_version.outputs.new_version }}" >> $GITHUB_ENV - - - name: Commit and Tag Release - run: | - git pull - git add package.json pnpm-lock.yaml - git commit -m "chore: release version ${{ steps.bump_version.outputs.new_version }}" - git tag "v${{ steps.bump_version.outputs.new_version }}" - git push - git push --tags - - - name: Update Stable Branch - run: | - if ! git checkout stable 2>/dev/null; then - echo "Creating new stable branch..." - git checkout -b stable - fi - git merge main --no-ff -m "chore: release version ${{ steps.bump_version.outputs.new_version }}" - git push --set-upstream origin stable --force diff --git a/.gitignore b/.gitignore deleted file mode 100644 index e2609f4e..00000000 --- a/.gitignore +++ /dev/null @@ -1,55 +0,0 @@ -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -node_modules -dist -dist-ssr -*.local - -.vscode/* -.vscode/launch.json -!.vscode/extensions.json -.idea -.DS_Store -*.suo -*.ntvs* -*.njsproj -*.sln -*.sw? - -/.history -/.cache -/build -functions/build/ -.env.local -.env -.dev.vars -*.vars -.wrangler -_worker.bundle - -Modelfile -modelfiles - -# docs ignore -site - -# commit file ignore -app/commit.json -changelogUI.md -docs/instructions/Roadmap.md -.cursorrules -*.md -.qodo -CLAUDE.md -.roo - -AGENTS.md* -.mcp.json -.claude -backend \ No newline at end of file diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 100644 index 5f5c2b9e..00000000 --- a/.husky/pre-commit +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh - -echo "🔍 Running pre-commit hook to check the code looks good... 🔍" - -# Load NVM if available (useful for managing Node.js versions) -export NVM_DIR="$HOME/.nvm" -[ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" - -# Ensure `pnpm` is available -echo "Checking if pnpm is available..." -if ! command -v pnpm >/dev/null 2>&1; then - echo "❌ pnpm not found! Please ensure pnpm is installed and available in PATH." - exit 1 -fi - -# Run typecheck -echo "Running typecheck..." -if ! pnpm typecheck; then - echo "❌ Type checking failed! Please review TypeScript types." - echo "Once you're done, don't forget to add your changes to the commit! 🚀" - exit 1 -fi - -# Run lint -echo "Running lint..." -if ! pnpm lint; then - echo "❌ Linting failed! Run 'pnpm lint:fix' to fix the easy issues." - echo "Once you're done, don't forget to add your beautification to the commit! 🤩" - exit 1 -fi - -echo "👍 All checks passed! Committing changes..." diff --git a/.lighthouserc.json b/.lighthouserc.json deleted file mode 100644 index af1207d8..00000000 --- a/.lighthouserc.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "ci": { - "collect": { - "url": ["http://localhost:5173/"], - "startServerCommand": "pnpm run start", - "numberOfRuns": 3 - }, - "assert": { - "assertions": { - "categories:performance": ["warn", { "minScore": 0.8 }], - "categories:accessibility": ["warn", { "minScore": 0.9 }], - "categories:best-practices": ["warn", { "minScore": 0.8 }], - "categories:seo": ["warn", { "minScore": 0.8 }] - } - }, - "upload": { - "target": "temporary-public-storage" - } - } -} diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/.npmrc b/.npmrc deleted file mode 100644 index bf2e7648..00000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -shamefully-hoist=true diff --git a/.prettierignore b/.prettierignore deleted file mode 100644 index bd5535a6..00000000 --- a/.prettierignore +++ /dev/null @@ -1 +0,0 @@ -pnpm-lock.yaml diff --git a/.prettierrc b/.prettierrc deleted file mode 100644 index 8d3dfb04..00000000 --- a/.prettierrc +++ /dev/null @@ -1,8 +0,0 @@ -{ - "printWidth": 120, - "singleQuote": true, - "useTabs": false, - "tabWidth": 2, - "semi": true, - 
"bracketSpacing": true -} diff --git a/404.html b/404.html new file mode 100644 index 00000000..aa259219 --- /dev/null +++ b/404.html @@ -0,0 +1,449 @@ + + + +
+ + + + + + + + + + + + + + +Welcome! This guide provides all the details you need to contribute effectively to the project. Thank you for helping us make codinit.dev a better tool for developers worldwide. 💡
+This project is governed by our Code of Conduct. By participating, you agree to uphold this code. Report unacceptable behavior to the project maintainers.
+Interested in maintaining and growing the project? Fill out our Contributor Application Form.
+.env.example to .env.local..env.local to .env:
+ # Option 1: Use the setup script
+./scripts/setup-env.sh
+
+# Option 2: Manual copy
+cp .env.local .env
+.env for variable substitution.VITE_LOG_LEVEL=debugDEFAULT_NUM_CTX=32768127.0.0.1 instead of localhostNote: Never commit your .env.local or .env files to version control. They're already in .gitignore.
Tip: Use Google Chrome Canary for local testing.
+Run the test suite with:
+ +Ensure you have required permissions and that Wrangler is configured.
+This section outlines the methods for deploying the application using Docker. The processes for Development and Production are provided separately for clarity.
+Option 1: Helper Scripts
+ +Option 2: Direct Docker Build Command
+ +Option 3: Docker Compose Profile
+ +Option 1: Helper Scripts
+ +Option 2: Direct Docker Build Command
+ +Option 3: Docker Compose Profile
+ +For an easy deployment process, use Coolify:
+The docker-compose.yaml configuration is compatible with VS Code Dev Containers, making it easy to set up a development environment directly in Visual Studio Code.
Open the Command Palette (Ctrl+Shift+P, or Cmd+Shift+P on macOS).Ensure .env.local is configured correctly with:
Example for the DEFAULT_NUM_CTX variable:
For the best experience with CodinIT.dev, we recommend using the following models from our 19+ supported providers:
+**Top Recommended Models:**
+- **Claude 3.5 Sonnet** (Anthropic): Best overall coder, excellent for complex applications
+- **GPT-4o** (OpenAI): Strong alternative with great performance across all use cases
+- **Claude 4 Opus** (Anthropic): Latest flagship model with enhanced capabilities
+- **Gemini 2.0 Flash** (Google): Exceptional speed for rapid development
+- **DeepSeekCoder V3** (DeepSeek): Best open-source model for coding tasks
+
+**Self-Hosting Options:**
+- **DeepSeekCoder V2 236b**: Powerful self-hosted option
+- **Qwen 2.5 Coder 32b**: Best for moderate hardware requirements
+- **Ollama models**: Local inference with various model sizes
+
+**Latest Specialized Models:**
+- **Moonshot AI (Kimi)**: Kimi K2 models with advanced reasoning capabilities
+- **xAI Grok 4**: Latest Grok model with 256K context window
+- **Anthropic Claude 4 Opus**: Latest flagship model from Anthropic
+
+!!! tip "Model Selection Tips"
+ - Use larger models (7B+ parameters) for complex applications
+ - Claude models excel at structured code generation
+ - GPT-4o provides excellent general-purpose coding assistance
+ - Gemini models offer the fastest response times
+You can configure API keys in two ways:
+**Option 1: Environment Variables (Recommended for production)**
+Create a `.env.local` file in your project root:
+```bash
+ANTHROPIC_API_KEY=your_anthropic_key_here
+OPENAI_API_KEY=your_openai_key_here
+GOOGLE_GENERATIVE_AI_API_KEY=your_google_key_here
+MOONSHOT_API_KEY=your_moonshot_key_here
+XAI_API_KEY=your_xai_key_here
+```
+
+**Option 2: In-App Configuration**
+- Click the settings icon (⚙️) in the sidebar
+- Navigate to the "Providers" tab
+- Switch between "Cloud Providers" and "Local Providers" tabs
+- Click on a provider card to expand its configuration
+- Click on the "API Key" field to enter edit mode
+- Paste your API key and press Enter to save
+- Look for the green checkmark to confirm proper configuration
+
+!!! note "Security Note"
+ Never commit API keys to version control. The `.env.local` file is already in `.gitignore`.
+CodinIT.dev uses a modular provider architecture. To add a new provider:
+1. **Create a Provider Class** in `app/lib/modules/llm/providers/your-provider.ts`
+2. **Implement the BaseProvider interface** with your provider's specific logic
+3. **Register the provider** in `app/lib/modules/llm/registry.ts`
+4. **The system automatically detects** and registers your new provider
+
+See the [Adding New LLMs](../#adding-new-llms) section for complete implementation details.
+Moonshot AI provides access to advanced Kimi models with excellent reasoning capabilities:
+**Setup Steps:**
+1. Visit [Moonshot AI Platform](https://platform.moonshot.ai/console/api-keys)
+2. Create an account and generate an API key
+3. Add `MOONSHOT_API_KEY=your_key_here` to your `.env.local` file
+4. Or configure it directly in Settings → Providers → Cloud Providers → Moonshot
+
+**Available Models:**
+- **Kimi K2 Preview**: Latest Kimi model with 128K context
+- **Kimi K2 Turbo**: Fast inference optimized version
+- **Kimi Thinking**: Specialized for complex reasoning tasks
+- **Moonshot v1 series**: Legacy models with vision capabilities
+
+!!! tip "Moonshot AI Features"
+ - Excellent for Chinese language tasks
+ - Strong reasoning capabilities
+ - Vision-enabled models available
+ - Competitive pricing
+xAI has released several new Grok models with enhanced capabilities:
+**Latest Models:**
+- **Grok 4**: Most advanced model with 256K context window
+- **Grok 4 (07-09)**: Specialized variant for specific tasks
+- **Grok 3 Beta**: Previous generation with 131K context
+- **Grok 3 Mini variants**: Optimized for speed and efficiency
+
+**Setup:**
+1. Get your API key from [xAI Platform](https://docs.x.ai/docs/quickstart#creating-an-api-key)
+2. Add `XAI_API_KEY=your_key_here` to your `.env.local` file
+3. Models will be available in the provider selection
+CodinIT.dev provides multiple ways to access help and documentation:
+**Help Icon in Sidebar:**
+- Look for the question mark (?) icon in the sidebar
+- Click it to open the full documentation in a new tab
+- Provides instant access to guides, troubleshooting, and FAQs
+
+**Documentation Resources:**
+- **Main Documentation**: Complete setup and feature guides
+- **FAQ Section**: Answers to common questions
+- **Troubleshooting**: Solutions for common issues
+- **Best Practices**: Tips for optimal usage
+
+**Community Support:**
+- **GitHub Issues**: Report bugs and request features
+Follow these proven strategies for optimal results:
+**Project Setup:**
+- **Be specific about your stack**: Mention frameworks/libraries (Astro, Tailwind, ShadCN, Next.js) in your initial prompt
+- **Choose appropriate templates**: Use our 15+ project templates for quick starts
+- **Configure providers properly**: Set up your preferred LLM providers before starting
+
+**Development Workflow:**
+- **Use the enhance prompt icon**: Click the enhance icon to let AI refine your prompts before submitting
+- **Scaffold basics first**: Build foundational structure before adding advanced features
+- **Batch simple instructions**: Combine tasks like *"Change colors, add mobile responsiveness, restart dev server"*
+
+**Advanced Features:**
+- **Leverage MCP tools**: Use Model Context Protocol for enhanced AI capabilities
+- **Connect databases**: Integrate Supabase for backend functionality
+- **Use Git integration**: Version control your projects with GitHub
+- **Deploy easily**: Use built-in Vercel, Netlify, or GitHub Pages deployment
+MCP (Model Context Protocol) is an open protocol that extends CodinIT.dev's AI capabilities by allowing it to interact with external tools and data sources:
+**What MCP Enables:**
+- **Database Access**: Query SQL databases, MongoDB, Redis, and more
+- **File Operations**: Read/write files with proper permissions
+- **API Integrations**: Connect to REST APIs, GraphQL endpoints
+- **Custom Tools**: Build domain-specific tools for your workflow
+- **Real-time Data**: Access live data during AI conversations
+
+**Why Use MCP:**
+- Makes the AI aware of your specific data and context
+- Automates complex workflows with multiple tool calls
+- Securely connects to enterprise systems
+- Standardized protocol supported by multiple AI platforms
+Setting up MCP servers is straightforward:
+**Step-by-Step Setup:**
+1. **Open Settings**: Click the settings icon (⚙️) in the sidebar
+2. **Navigate to MCP Tab**: Select "MCP" from the settings menu
+3. **Add Server**: Click "Add Server" or "Configure Server"
+4. **Choose Server Type**:
+ - **STDIO**: For local command-line tools
+ - **SSE**: For Server-Sent Events servers
+ - **HTTP**: For HTTP-based MCP servers
+5. **Configure Connection**:
+ - Enter server name and description
+ - Set command/URL based on server type
+ - Add required environment variables or headers
+6. **Save and Enable**: Save configuration and enable the server
+
+**Example STDIO Configuration (PostgreSQL)**:
+```json
+{
+ "name": "postgres-db",
+ "type": "stdio",
+ "command": "npx",
+ "args": ["-y", "@modelcontextprotocol/server-postgres"],
+ "env": {
+ "DATABASE_URL": "postgresql://user:pass@localhost/mydb"
+ }
+}
+```
+
+**Example SSE Configuration (Remote API)**:
+```json
+{
+ "name": "my-api",
+ "type": "sse",
+ "url": "https://api.example.com/mcp",
+ "headers": {
+ "Authorization": "Bearer YOUR_API_TOKEN"
+ }
+}
+```
+CodinIT.dev supports three types of MCP server connections:
+**1. STDIO Servers (Local Tools)**
+- Run as local command-line processes
+- Communicate via standard input/output
+- Best for: Local databases, file systems, CLI tools
+- Examples: PostgreSQL server, filesystem server, git tools
+
+**2. SSE Servers (Server-Sent Events)**
+- Connect to remote servers via HTTP
+- Real-time streaming with Server-Sent Events
+- Best for: Remote APIs, cloud services
+- Examples: Cloud database services, third-party APIs
+
+**3. Streamable HTTP Servers**
+- HTTP-based protocol with streaming support
+- Flexible connection options
+- Best for: Custom services, enterprise systems
+- Examples: Internal APIs, custom business logic
+
+Each type has different setup requirements and use cases. Choose based on your needs.
+When MCP servers are configured, their tools become available to the AI:
+**Tool Discovery:**
+- CodinIT.dev automatically detects all tools from connected servers
+- Tools appear in the AI's available tool list
+- Tool descriptions help the AI understand when to use them
+
+**Tool Execution Flow:**
+1. **AI Decides**: Based on your prompt, AI determines which tool to use
+2. **User Approval**: You review and approve the tool execution for security
+3. **Tool Runs**: The MCP server executes the tool with provided parameters
+4. **Results Return**: Tool output is sent back to the AI
+5. **AI Responds**: AI incorporates tool results into its response
+
+**Approval States:**
+- **APPROVE**: Allow the tool to execute
+- **REJECT**: Deny the tool execution
+- **ERROR**: Tool execution failed
+
+**Security Features:**
+- All tool executions require explicit user approval
+- Tool parameters are shown before execution
+- Failed executions are logged with error details
+MCP enables many powerful workflows:
+**Development & DevOps:**
+- Query database schemas and generate migrations
+- Read/write code files for automated refactoring
+- Execute git commands for version control
+- Deploy applications to cloud platforms
+- Run test suites and analyze results
+
+**Data Analysis:**
+- Query SQL databases for business intelligence
+- Process CSV/JSON files for data transformation
+- Generate charts and visualizations
+- Fetch real-time data from APIs
+- Aggregate data from multiple sources
+
+**Business Operations:**
+- Integrate with CRM systems (Salesforce, HubSpot)
+- Manage customer support tickets
+- Access inventory and order management systems
+- Generate reports from business data
+- Automate routine administrative tasks
+
+**Content Management:**
+- Read/write documentation files
+- Manage blog posts and articles
+- Update website content
+- Process and optimize images
+- Version control content changes
+Common MCP issues and solutions:
+**Server Won't Connect:**
+- Verify server endpoint/command is correct
+- Check authentication credentials (API keys, tokens)
+- Ensure network connectivity for remote servers
+- Review server logs for specific error messages
+- Confirm MCP protocol version compatibility
+
+**Tools Not Appearing:**
+- Restart the MCP server to refresh tool list
+- Check server configuration in Settings → MCP
+- Verify server is enabled (toggle switch on)
+- Look for errors in browser console (F12)
+- Confirm server implements tool discovery correctly
+
+**Tool Execution Failures:**
+- Check tool parameters are valid
+- Ensure required permissions are granted
+- Verify environment variables are set correctly
+- Review tool-specific error messages
+- Test the tool outside CodinIT.dev first
+
+**Performance Issues:**
+- Limit number of concurrent tool calls
+- Use smaller result sets when querying databases
+- Implement caching in your MCP server
+- Monitor server response times
+- Consider using local servers for better performance
+
+**Conflict Resolution:**
+- If multiple servers provide the same tool name, CodinIT.dev will detect the conflict
+- Rename tools in server configuration to avoid conflicts
+- Disable unused servers to reduce tool namespace pollution
+Yes! You can create custom MCP servers for your specific needs:
+**Building Custom Servers:**
+- Use the official MCP SDK: `@modelcontextprotocol/sdk`
+- Implement server in TypeScript, Python, or other languages
+- Define custom tools with input schemas and handlers
+- Deploy as STDIO, SSE, or HTTP server
+
+**Basic Server Example (TypeScript)**:
+```typescript
+import { Server } from '@modelcontextprotocol/sdk/server/index.js';
+import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
+
+const server = new Server({
+ name: 'my-custom-server',
+ version: '1.0.0',
+});
+
+// Register a custom tool
+server.setRequestHandler('tools/list', async () => ({
+ tools: [{
+ name: 'get_user_data',
+ description: 'Fetch user data from database',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ userId: { type: 'string' }
+ }
+ }
+ }]
+}));
+
+// Start the server
+const transport = new StdioServerTransport();
+await server.connect(transport);
+```
+
+**Resources:**
+- MCP Documentation: [modelcontextprotocol.io](https://modelcontextprotocol.io)
+- Example Servers: GitHub MCP organization
+- SDK Reference: @modelcontextprotocol/sdk package
+
+Custom servers allow you to integrate any system or data source with CodinIT.dev!
+CodinIT.dev supports one-click deployment to multiple platforms:
+**Supported Platforms:**
+- **Vercel**: Go to Settings → Connections → Vercel, then deploy with one click
+- **Netlify**: Connect your Netlify account and deploy instantly
+- **GitHub Pages**: Push to GitHub and enable Pages in repository settings
+
+**Deployment Features:**
+- Automatic build configuration for popular frameworks
+- Environment variable management
+- Custom domain support
+- Preview deployments for testing
+CodinIT.dev provides comprehensive Git integration with GitHub and GitLab:
+**Basic Git Operations:**
+- Import existing repositories by URL
+- Create new repositories on GitHub or GitLab
+- Automatic commits for major changes
+- Push/pull changes seamlessly
+
+**GitHub Integration:**
+- Connect GitHub account in Settings → Connections → GitHub
+- Import from your connected repositories
+- Create and manage branches
+- View repository statistics
+
+**GitLab Integration:**
+- Connect GitLab account in Settings → Connections → GitLab
+- Browse and import GitLab projects
+- Manage GitLab branches
+- Access project metadata
+
+**Advanced Features:**
+- Version control with diff visualization
+- Collaborative development support
+- Bug report generation with automatic system information
+Check out our Contribution Guide for more details on how to get involved!
+We're forming a team of maintainers to manage demand and streamline issue resolution. The maintainers are rockstars, and we're also exploring partnerships to help the project thrive.
+Recent major additions to CodinIT.dev include:
+**Advanced AI Capabilities:**
+- **19+ LLM Providers**: Support for Anthropic, OpenAI, Google, DeepSeek, Cohere, and more
+- **MCP Integration**: Model Context Protocol for enhanced AI tool calling
+- **Dynamic Model Loading**: Automatic model discovery from provider APIs
+
+**Development Tools:**
+- **WebContainer**: Secure sandboxed development environment
+- **Live Preview**: Real-time application previews without leaving the editor
+- **Project Templates**: 15+ starter templates for popular frameworks
+
+**Version Control & Collaboration:**
+- **Git Integration**: Import/export projects with GitHub and GitLab
+- **Automatic Commits**: Smart version control for project changes
+- **Diff Visualization**: See code changes clearly
+- **Bug Reporting**: Built-in bug report generation and tracking
+
+**Backend & Database:**
+- **Supabase Integration**: Built-in database and authentication
+- **API Integration**: Connect to external services and databases
+
+**Deployment & Production:**
+- **One-Click Deployment**: Vercel, Netlify, and GitHub Pages support
+- **Environment Management**: Production-ready configuration
+- **Build Optimization**: Automatic configuration for popular frameworks
+CodinIT.dev offers templates for popular frameworks and technologies:
+**Getting Started:**
+1. Start a new project in CodinIT.dev
+2. Browse available templates in the starter selection
+3. Choose your preferred technology stack
+4. The AI will scaffold your project with best practices
+
+**Available Templates:**
+- **Frontend**: React, Vue, Angular, Svelte, SolidJS
+- **Full-Stack**: Next.js, Astro, Qwik, Remix, Nuxt
+- **Mobile**: Expo, React Native
+- **Content**: Slidev presentations, Astro blogs
+- **Vanilla**: Vite with TypeScript/JavaScript
+
+Templates include pre-configured tooling, linting, and build processes.
+WebContainer provides a secure development environment:
+**Features:**
+- **Isolated Environment**: Secure sandbox for running code
+- **Full Node.js Support**: Run npm, build tools, and dev servers
+- **Live File System**: Direct manipulation of project files
+- **Terminal Integration**: Execute commands with real-time output
+
+**Supported Technologies:**
+- All major JavaScript frameworks (React, Vue, Angular, etc.)
+- Build tools (Vite, Webpack, Parcel)
+- Package managers (npm, pnpm, yarn)
+Use Supabase for backend database functionality:
+**Setup Process:**
+1. Create a Supabase project at supabase.com
+2. Get your project URL and API keys
+3. Configure the connection in your CodinIT.dev project
+4. Use Supabase tools to interact with your database
+
+**Available Features:**
+- Real-time subscriptions
+- Built-in authentication
+- Row Level Security (RLS)
+- Automatic API generation
+- Database migrations
+While local LLMs are improving rapidly, larger models still offer the best results for complex applications. Here's the current landscape:
+**Recommended for Production:**
+- **Claude 4 Opus**: Latest flagship model with enhanced reasoning (200K context)
+- **Claude 3.5 Sonnet**: Proven excellent performance across all tasks
+- **GPT-4o**: Strong general-purpose coding with great reliability
+- **xAI Grok 4**: Latest Grok with 256K context window
+
+**Fast & Efficient:**
+- **Gemini 2.0 Flash**: Exceptional speed for rapid development
+- **Claude 3 Haiku**: Cost-effective for simpler tasks
+- **xAI Grok 3 Mini Fast**: Optimized for speed and efficiency
+
+**Advanced Reasoning:**
+- **Moonshot AI Kimi K2**: Advanced reasoning with 128K context
+- **Moonshot AI Kimi Thinking**: Specialized for complex reasoning tasks
+
+**Open Source & Self-Hosting:**
+- **DeepSeekCoder V3**: Best open-source model available
+- **DeepSeekCoder V2 236b**: Powerful self-hosted option
+- **Qwen 2.5 Coder 32b**: Good balance of performance and resource usage
+
+**Local Models (Ollama):**
+- Best for privacy and offline development
+- Use 7B+ parameter models for reasonable performance
+- Still experimental for complex, large-scale applications
+
+!!! tip "Model Selection Guide"
+ - Use Claude/GPT-4o for complex applications
+ - Use Gemini for fast prototyping
+ - Use local models for privacy/offline development
+ - Always test with your specific use case
+This generic error message means something went wrong. Check these locations:
+- **Terminal output**: If you started with Docker or `pnpm`
+- **Browser developer console**: Press `F12` → Console tab
+- **Server logs**: Check for any backend errors
+- **Network tab**: Verify API calls are working
+This authentication error can be resolved by:
+- **Restarting the container**: `docker compose restart` (if using Docker)
+- **Switching run methods**: Try `pnpm` if using Docker, or vice versa
+- **Checking API keys**: Verify your API keys are properly configured
+- **Clearing browser cache**: Sometimes cached authentication causes issues
+Blank previews usually indicate code generation issues:
+- **Check developer console** for JavaScript errors
+- **Verify WebContainer is running** properly
+- **Try refreshing** the preview pane
+- **Check for hallucinated code** in the generated files
+- **Restart the development server** if issues persist
+If you're having trouble with MCP integrations:
+- **Verify server configuration** in Settings → MCP
+- **Check server endpoints** and authentication credentials
+- **Test server connectivity** outside of CodinIT.dev
+- **Review MCP server logs** for specific error messages
+- **Ensure server supports** the MCP protocol version
+Common Git-related issues and solutions:
+- **GitHub connection failed**: Verify your GitHub token has correct permissions
+- **Repository not found**: Check repository URL and access permissions
+- **Push/pull failed**: Ensure you have write access to the repository
+- **Merge conflicts**: Resolve conflicts manually or use the diff viewer
+- **Large files blocked**: Check GitHub's file size limits
+Deployment issues can be resolved by:
+- **Checking build logs** for specific error messages
+- **Verifying environment variables** are set correctly
+- **Testing locally** before deploying
+- **Checking platform-specific requirements** (Node version, build commands)
+- **Reviewing deployment configuration** in platform settings
+For suboptimal AI responses, try these solutions:
+- **Switch to a more capable model**: Use Claude 3.5 Sonnet, GPT-4o, or Claude 4 Opus
+- **Be more specific** in your prompts about requirements and technologies
+- **Use the enhance prompt feature** to refine your requests
+- **Break complex tasks** into smaller, focused prompts
+- **Provide context** about your project structure and goals
+If the live preview isn't working:
+- **Check WebContainer status** in the terminal
+- **Verify Node.js compatibility** with your project
+- **Restart the development environment**
+- **Clear browser cache** and reload
+- **Check for conflicting ports** (default is 5173)
+Windows-specific issue: Update the Visual C++ Redistributable
+Windows development environment: Install Visual Studio C++ (version 14.40.33816 or later). More details in GitHub Issues
+If your custom LLM provider isn't appearing:
+- **Restart the development server** to reload providers
+- **Check the provider registry** in `app/lib/modules/llm/registry.ts`
+- **Verify the provider class** extends `BaseProvider` correctly
+- **Check browser console** for provider loading errors
+- **Ensure proper TypeScript compilation** without errors
+Report Issues
+Open an Issue in our GitHub Repository
+ + + + + + + + + + + + + +
-
-
- Build, manage, and deploy intelligent applications faster — directly from your browser or desktop. -
- ---- - -## Overview - -CodinIT.dev is an open‑source, AI full‑stack development platform designed to help developers build modern Node.js applications with speed and precision. It combines code generation, project management, and deployment tools into a single workflow, powered by your choice of AI providers. - -Whether you are prototyping, scaling a SaaS product, or experimenting with local LLMs, CodinIT.dev adapts to your stack and workflow. - ---- - -## Quick Start - -### Run as a Desktop App - -Download the latest prebuilt release for macOS, Windows, and Linux. - -[Download Latest Release](https://github.com/codinit-dev/codinit-dev/releases/latest) - -Get up and running in minutes. - -### 1. Clone the Repository - -```bash -git clone https://github.com/codinit-dev/codinit-dev.git -cd codinit-dev - -``` - -### 2. Install Dependencies - -```bash -# npm -npm install - -# or pnpm -pnpm install - -``` - -### 3. Configure Environment - -Create a `.env` file and add your preferred AI provider keys. You can mix and match multiple providers depending on your requirements. - -### 4. Run the Development Server - -```bash -pnpm run dev - -``` - -The application will be available at: http://localhost:5173 - ---- - -## Core Capabilities - -- **Automated Full-Stack Engineering:** Streamline the creation and management of complex Node.js architectures using intelligent generation. -- **Universal Model Integration:** Seamlessly connect with over 19 cloud and local AI providers. -- **Hybrid Environment Support:** native compatibility for both Web browsers and Desktop (Electron) environments. -- **Production-Ready Containerization:** Fully Dockerized workflow with preset configurations for Vercel, Netlify, and GitHub Pages. -- **Integrated Development Suite:** Includes robust utilities such as semantic search, diff visualization, and concurrency file-locking. 
-- **Expanded Ecosystem Connectivity:** Native integration with Supabase, real-time data visualization tools, and voice-command interfaces. -- **Vendor-Neutral Infrastructure:** A flexible architecture designed to prevent vendor lock-in, allowing dynamic switching between backend providers. - ---- - -## Supported AI Providers - -CodinIT.dev allows you to use one provider or switch dynamically per task. - -### Cloud Providers - -OpenAI, Anthropic, Google, Groq, xAI, DeepSeek, Cohere, Mistral, Together, Perplexity, HuggingFace, OpenRouter, and more. - -### Local Providers - -Ollama, LM Studio, and OpenAI‑compatible local endpoints. - ---- - -## Deployment & Desktop Usage - -### Run with Docker - -```bash -npm run dockerbuild -docker compose --profile development up - -``` - - diff --git a/__tests__/config/setup.ts b/__tests__/config/setup.ts deleted file mode 100644 index a3ce7f82..00000000 --- a/__tests__/config/setup.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { expect, afterEach, beforeAll, vi } from 'vitest'; -import { cleanup } from '@testing-library/react'; -import * as matchers from '@testing-library/jest-dom/matchers'; - -// Extend expect with jest-dom matchers -expect.extend(matchers); - -// Clean up after each test -afterEach(() => { - cleanup(); -}); - -// Mock environment variables -beforeAll(() => { - // Set up test environment variables - process.env.NODE_ENV = 'test'; - process.env.VITE_APP_ENV = 'test'; - - // Mock console methods to reduce noise in tests - const originalConsoleError = console.error; - const originalConsoleWarn = console.warn; - - console.error = (...args: any[]) => { - // Only show errors that aren't from React testing library - if (!args[0]?.includes?.('Warning: ReactDOMTestUtils')) { - originalConsoleError(...args); - } - }; - - console.warn = (...args: any[]) => { - // Only show warnings that aren't from React testing library - if (!args[0]?.includes?.('Warning: ReactDOMTestUtils')) { - originalConsoleWarn(...args); - } - }; -}); - 
-// Mock fetch globally -(globalThis as any).fetch = vi.fn(); - -// Mock ResizeObserver -(globalThis as any).ResizeObserver = vi.fn().mockImplementation(() => ({ - observe: vi.fn(), - unobserve: vi.fn(), - disconnect: vi.fn(), -})); - -// Mock IntersectionObserver -(globalThis as any).IntersectionObserver = vi.fn().mockImplementation(() => ({ - observe: vi.fn(), - unobserve: vi.fn(), - disconnect: vi.fn(), -})); - -// Mock matchMedia -Object.defineProperty(window, 'matchMedia', { - writable: true, - value: vi.fn().mockImplementation((query: string) => ({ - matches: false, - media: query, - onchange: null, - addListener: vi.fn(), // deprecated - removeListener: vi.fn(), // deprecated - addEventListener: vi.fn(), - removeEventListener: vi.fn(), - dispatchEvent: vi.fn(), - })), -}); - -// Mock localStorage -const localStorageMock = { - getItem: vi.fn(), - setItem: vi.fn(), - removeItem: vi.fn(), - clear: vi.fn(), - length: 0, - key: vi.fn(), -} as Storage; -(globalThis as any).localStorage = localStorageMock; - -// Mock sessionStorage -const sessionStorageMock = { - getItem: vi.fn(), - setItem: vi.fn(), - removeItem: vi.fn(), - clear: vi.fn(), - length: 0, - key: vi.fn(), -} as Storage; -(globalThis as any).sessionStorage = sessionStorageMock; - -// Skip WebSocket mocking for now to avoid type issues diff --git a/__tests__/config/vitest.config.ts b/__tests__/config/vitest.config.ts deleted file mode 100644 index 12a3385b..00000000 --- a/__tests__/config/vitest.config.ts +++ /dev/null @@ -1,52 +0,0 @@ -///