Skip to content
@ai-codespark

codespark

AI code generation

codex

# Codex CLI via a LiteLLM proxy: the key itself lives in this env var; the
# config below names the variable via `env_key` rather than embedding the key.
export LITELLM_API_KEY="sk-1234"

# Install the native (Rust) build of the OpenAI Codex CLI.
npm install -g @openai/codex@native
# ~/.codex/config.toml — route Codex through a LiteLLM proxy.
model = "gemini-2.5-pro"
model_provider = "litellm"

[model_providers.litellm]
name = "LiteLLM"
# Base URL of the LiteLLM deployment.
base_url = "https://litellm.example.com"
# Name of the environment variable that holds the API key (not the key itself).
env_key = "LITELLM_API_KEY"
# Wire protocol Codex uses when talking to the provider
# ("responses" = OpenAI Responses API — confirm the proxy supports it).
wire_api = "responses"

$HOME/.codex/config.toml

gemini

# Point the Gemini CLI at a LiteLLM proxy instead of Google's endpoint.
export GOOGLE_GEMINI_BASE_URL="https://litellm.example.com"
export GEMINI_API_KEY="sk-1234"
# Unquoted value is safe here (no spaces), but quote if the model name changes.
export GEMINI_MODEL=gemini-2.5-pro

npm install -g @ai-codespark/gemini-cli@latest
{
  "selectedAuthType": "gemini-api-key",
  "theme": "ANSI"
}

$HOME/.gemini/settings.json

gen

# gen-cli pointed at a LiteLLM proxy via SiliconFlow-style variables.
export SILICONFLOW_BASE_URL="https://litellm.example.com"
export SILICONFLOW_API_KEY="sk-1234"
# NOTE(review): the model variable keeps the GEMINI_ prefix while the other
# two use SILICONFLOW_ — confirm gen-cli actually reads GEMINI_MODEL.
export GEMINI_MODEL=gemini-2.5-pro

npm install -g @gen-cli/gen-cli@latest
{
  "selectedAuthType": "siliconflow-api-key"
}

$HOME/.gen-cli/settings.json

kimi

# Install a pinned release of the Kimi CLI and point it at a LiteLLM proxy.
export KIMI_VERSION=1.14.0
export KIMI_BASE_URL="https://litellm.example.com"
export KIMI_API_KEY=sk-1234
export KIMI_MODEL_NAME=gemini-2.5-pro
# -f: fail on HTTP errors instead of saving the error page as the "tarball"
#     (without it, a 404 silently produces a corrupt file and tar fails later);
# -L: follow GitHub's release-asset redirects.
# URL quoted so the ${KIMI_VERSION} expansions can never word-split.
curl -fL "https://github.com/MoonshotAI/kimi-cli/releases/download/${KIMI_VERSION}/kimi-${KIMI_VERSION}-x86_64-unknown-linux-gnu.tar.gz" -o /tmp/kimi.tar.gz && \
    tar -xzf /tmp/kimi.tar.gz -C /tmp && \
    mv /tmp/kimi /usr/local/bin/kimi && \
    chown root:root /usr/local/bin/kimi && \
    chmod +x /usr/local/bin/kimi && \
    rm -f /tmp/kimi.tar.gz  # plain file: -f suffices, -r was unnecessary
# ~/.kimi/config.toml — default model/provider wiring for the Kimi CLI.
default_model = "kimi-for-coding"
# Presumably disables the model's "thinking" output by default — verify
# against kimi-cli docs.
default_thinking = false
# Presumably keeps action auto-approval ("YOLO" mode) off by default.
default_yolo = false

[providers.kimi-for-coding]
type = "kimi"
# NOTE(review): this targets Moonshot's hosted API with a placeholder key,
# while the KIMI_BASE_URL env var above targets the LiteLLM proxy — confirm
# which endpoint is actually intended to win.
base_url = "https://api.kimi.com/coding/v1"
api_key = "sk-xxx"

[models.kimi-for-coding]
provider = "kimi-for-coding"
model = "kimi-for-coding"
# 262144 = 256 * 1024 tokens of context.
max_context_size = 262144

opencode

# Bun + XDG environment for opencode.
# NOTE(review): these assignments are not exported; they only reach child
# processes for variables already exported by the environment (PATH usually is).
BUN_INSTALL="$HOME/.bun"
PATH="$BUN_INSTALL/bin:$PATH"
XDG_DATA_HOME="$HOME/.local/share"
XDG_CACHE_HOME="$HOME/.cache"
XDG_CONFIG_HOME="$HOME/.config"
XDG_STATE_HOME="$HOME/.local/state"

# Pre-create opencode's data/log/cache/config/state directories.
# Every $HOME expansion is quoted so a home path containing spaces
# cannot word-split into multiple mkdir arguments (ShellCheck SC2086).
mkdir -p \
    "$HOME/.local/share/opencode/bin" \
    "$HOME/.local/share/opencode/log" \
    "$HOME/.cache/opencode" \
    "$HOME/.config/opencode" \
    "$HOME/.local/state/opencode"

# Install the OpenAI-compatible provider package, then opencode itself.
bun install -g @ai-sdk/openai-compatible && \
bun install -g opencode-ai

# Smoke-test opencode inside a throwaway git repo, then clean up.
mkdir -p /tmp/test-opencode && \
    cd /tmp/test-opencode && \
    git init
# Background the pipeline directly — NOT inside a "( ... & )" subshell.
# $! only reports the current shell's most recent background job, so the
# original subshell form left OPENCODE_PID empty/stale and kill was a no-op.
echo "hello" | opencode "$PWD" > /tmp/opencode.log 2>&1 &
OPENCODE_PID=$!
sleep 5
kill "$OPENCODE_PID" 2>/dev/null || true
# Step out of the directory before deleting it (rm'ing the cwd is fragile).
cd / && rm -rf /tmp/test-opencode

echo "export TERM=xterm-256color" >> /home/$NAME/.bashrc
{
  "$schema": "https://opencode.ai/config.json",
  // Default model, as "<provider-id>/<model-id>" (must match a key under "models").
  "model": "litellm/ollama-kimi-k2.5",
  "provider": {
    "litellm": {
      // npm package implementing this provider's wire protocol.
      "npm": "@ai-sdk/openai-compatible",
      "name": "LiteLLM",
      "options": {
        "baseURL": "https://litellm.example.com",
        "apiKey": "sk-1234"
      },
      "models": {
        "ollama-kimi-k2.5": {
          // Display name only; the key above is the model id sent to the proxy.
          "name": "Ollama Kimi K2.5"
        }
      }
    }
  }
}

$HOME/.config/opencode/opencode.jsonc

qwen

# Qwen Code authenticates via the OpenAI-compatible protocol (see the
# "selectedAuthType": "openai" setting below), so the standard OPENAI_*
# variables are pointed at the LiteLLM proxy.
export OPENAI_BASE_URL="https://litellm.example.com"
export OPENAI_API_KEY="sk-1234"
export OPENAI_MODEL="gemini-2.5-pro"

npm install -g @qwen-code/qwen-code@latest
{
  "selectedAuthType": "openai"
}

$HOME/.qwen/settings.json

trae

# Set up trae-agent from a source checkout using uv.
uv venv                      # create the .venv virtual environment
uv sync --all-extras         # install the project with every optional extra
source .venv/bin/activate    # activate for the current shell
{
  "default_provider": "openai",
  "max_steps": 20,
  "enable_lakeview": false,
  "model_providers": {
    "openai": {
      "api_key": "sk-1234",
      "base_url": "https://litellm.example.com",
      "model": "gemini-2.5-pro",
      "max_tokens": 120000,
      "temperature": 0.5,
      "top_p": 1,
      "max_retries": 10
    }
  }
}

trae_config.json

Pinned Loading

  1. codewiki codewiki Public

    Forked from AsyncFuncAI/deepwiki-open

    Open Source DeepWiki: AI-Powered Wiki Generator for GitHub/Gitlab/Bitbucket Repositories. Join the discord: https://discord.gg/gMwThUMeme

    Python

  2. codex codex Public

    Forked from openai/codex

    Lightweight coding agent that runs in your terminal

    Rust

  3. continue continue Public

    Forked from continuedev/continue

    ⏩ Create, share, and use custom AI code assistants with our open-source IDE extensions and hub of models, rules, prompts, docs, and other building blocks

    TypeScript

  4. kimi-cli kimi-cli Public

    Forked from MoonshotAI/kimi-cli

    Kimi Code CLI is your next CLI agent.

    Python

  5. opencode opencode Public

    Forked from anomalyco/opencode

    The open source coding agent.

    TypeScript

  6. qwen-code qwen-code Public

    Forked from QwenLM/qwen-code

    Qwen Code is a coding agent that lives in the digital world.

    TypeScript

Repositories

Showing 10 of 21 repositories

Top languages

Loading…

Most used topics

Loading…