NextChat_Section_000_ApiKey


1 Claude

$env:ANTHROPIC_API_KEY = "YOUR_ANTHROPIC_KEY"
# Use the same key you set up earlier


$headers = @{
  "x-api-key"         = $env:ANTHROPIC_API_KEY
  "anthropic-version" = "2023-06-01"
}

# Call /v1/models to list the available models
$response = Invoke-RestMethod -Uri "https://api.anthropic.com/v1/models" `
                              -Method Get `
                              -Headers $headers

# Show only the model IDs and display names
$response.data | Select-Object id, display_name
id                         display_name
--                         ------------
claude-haiku-4-5-20251001  Claude Haiku 4.5
claude-sonnet-4-5-20250929 Claude Sonnet 4.5
claude-opus-4-1-20250805   Claude Opus 4.1
claude-opus-4-20250514     Claude Opus 4
claude-sonnet-4-20250514   Claude Sonnet 4
claude-3-5-haiku-20241022  Claude Haiku 3.5
claude-3-haiku-20240307    Claude Haiku 3
# Assumes $env:ANTHROPIC_API_KEY already holds your real key

$headers = @{
  "x-api-key"         = $env:ANTHROPIC_API_KEY
  "anthropic-version" = "2023-06-01"
  "content-type"      = "application/json"
}

$body = @{
  model      = "claude-sonnet-4-5-20250929"   # Claude Sonnet 4.5
  max_tokens = 50
  messages   = @(
    @{
      role    = "user"
      content = @(
        @{
          type = "text"
          text = "你好,这是一个使用 Claude Sonnet 4.5 的测试请求。"
        }
      )
    }
  )
} | ConvertTo-Json -Depth 6

$response = Invoke-RestMethod -Uri "https://api.anthropic.com/v1/messages" `
                              -Method Post `
                              -Headers $headers `
                              -Body $body

# Inspect the returned text
$response.content[0].text

# If the Chinese text comes back as mojibake, the response bytes were decoded as
# Latin-1/ISO-8859-1 instead of UTF-8. Round-trip the string to recover it:

# The raw, garbled text
$raw = $response.content[0].text

# Re-encode the mis-decoded string back into bytes as Latin-1/ISO-8859-1
$bytes = [System.Text.Encoding]::GetEncoding("ISO-8859-1").GetBytes($raw)

# Decode those bytes again, this time correctly as UTF-8
$fixed = [System.Text.Encoding]::UTF8.GetString($bytes)

# Check that the Chinese text now displays correctly
$fixed
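
The same /v1/messages check can also be run from the WSL/bash shell used in the later sections. A minimal curl sketch, assuming ANTHROPIC_API_KEY has been exported in that shell (the English prompt here is only an illustration):

# Same test request, sent with curl instead of PowerShell
curl https://api.anthropic.com/v1/messages \
  -H "x-api-key: $ANTHROPIC_API_KEY" \
  -H "anthropic-version: 2023-06-01" \
  -H "content-type: application/json" \
  -d '{
    "model": "claude-sonnet-4-5-20250929",
    "max_tokens": 50,
    "messages": [
      {"role": "user", "content": "Hello, this is a test request."}
    ]
  }'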

2 Gemini

# Replace YOUR_API_KEY on the next line with your real key
export GEMINI_API_KEY="YOUR_API_KEY"
curl "https://generativelanguage.googleapis.com/v1beta/models" \
  -H "x-goog-api-key: $GEMINI_API_KEY"
{
  "models": [
    {
      "name": "models/embedding-gecko-001",
      "version": "001",
      "displayName": "Embedding Gecko",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 1024,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedText",
        "countTextTokens"
      ]
    },
    {
      "name": "models/gemini-2.5-pro-preview-03-25",
      "version": "2.5-preview-03-25",
      "displayName": "Gemini 2.5 Pro Preview 03-25",
      "description": "Gemini 2.5 Pro Preview 03-25",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-flash",
      "version": "001",
      "displayName": "Gemini 2.5 Flash",
      "description": "Stable version of Gemini 2.5 Flash, our mid-size multimodal model that supports up to 1 million tokens, released in June of 2025.",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-pro-preview-05-06",
      "version": "2.5-preview-05-06",
      "displayName": "Gemini 2.5 Pro Preview 05-06",
      "description": "Preview release (May 6th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-pro-preview-06-05",
      "version": "2.5-preview-06-05",
      "displayName": "Gemini 2.5 Pro Preview",
      "description": "Preview release (June 5th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-pro",
      "version": "2.5",
      "displayName": "Gemini 2.5 Pro",
      "description": "Stable release (June 17th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.0-flash-exp",
      "version": "2.0",
      "displayName": "Gemini 2.0 Flash Experimental",
      "description": "Gemini 2.0 Flash Experimental",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "bidiGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash",
      "version": "2.0",
      "displayName": "Gemini 2.0 Flash",
      "description": "Gemini 2.0 Flash",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash-001",
      "version": "2.0",
      "displayName": "Gemini 2.0 Flash 001",
      "description": "Stable version of Gemini 2.0 Flash, our fast and versatile multimodal model for scaling across diverse tasks, released in January of 2025.",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash-lite-001",
      "version": "2.0",
      "displayName": "Gemini 2.0 Flash-Lite 001",
      "description": "Stable version of Gemini 2.0 Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash-lite",
      "version": "2.0",
      "displayName": "Gemini 2.0 Flash-Lite",
      "description": "Gemini 2.0 Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash-lite-preview-02-05",
      "version": "preview-02-05",
      "displayName": "Gemini 2.0 Flash-Lite Preview 02-05",
      "description": "Preview release (February 5th, 2025) of Gemini 2.0 Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-flash-lite-preview",
      "version": "preview-02-05",
      "displayName": "Gemini 2.0 Flash-Lite Preview",
      "description": "Preview release (February 5th, 2025) of Gemini 2.0 Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 40,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.0-pro-exp",
      "version": "2.5-exp-03-25",
      "displayName": "Gemini 2.0 Pro Experimental",
      "description": "Experimental release (March 25th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.0-pro-exp-02-05",
      "version": "2.5-exp-03-25",
      "displayName": "Gemini 2.0 Pro Experimental 02-05",
      "description": "Experimental release (March 25th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-exp-1206",
      "version": "2.5-exp-03-25",
      "displayName": "Gemini Experimental 1206",
      "description": "Experimental release (March 25th, 2025) of Gemini 2.5 Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.0-flash-thinking-exp-01-21",
      "version": "2.5-preview-05-20",
      "displayName": "Gemini 2.5 Flash Preview 05-20",
      "description": "Preview release (April 17th, 2025) of Gemini 2.5 Flash",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.0-flash-thinking-exp",
      "version": "2.5-preview-05-20",
      "displayName": "Gemini 2.5 Flash Preview 05-20",
      "description": "Preview release (April 17th, 2025) of Gemini 2.5 Flash",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.0-flash-thinking-exp-1219",
      "version": "2.5-preview-05-20",
      "displayName": "Gemini 2.5 Flash Preview 05-20",
      "description": "Preview release (April 17th, 2025) of Gemini 2.5 Flash",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-flash-preview-tts",
      "version": "gemini-2.5-flash-exp-tts-2025-05-19",
      "displayName": "Gemini 2.5 Flash Preview TTS",
      "description": "Gemini 2.5 Flash Preview TTS",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 16384,
      "supportedGenerationMethods": [
        "countTokens",
        "generateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2
    },
    {
      "name": "models/gemini-2.5-pro-preview-tts",
      "version": "gemini-2.5-pro-preview-tts-2025-05-19",
      "displayName": "Gemini 2.5 Pro Preview TTS",
      "description": "Gemini 2.5 Pro Preview TTS",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 16384,
      "supportedGenerationMethods": [
        "countTokens",
        "generateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2
    },
    {
      "name": "models/learnlm-2.0-flash-experimental",
      "version": "2.0",
      "displayName": "LearnLM 2.0 Flash Experimental",
      "description": "LearnLM 2.0 Flash Experimental",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 32768,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2
    },
    {
      "name": "models/gemma-3-1b-it",
      "version": "001",
      "displayName": "Gemma 3 1B",
      "inputTokenLimit": 32768,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemma-3-4b-it",
      "version": "001",
      "displayName": "Gemma 3 4B",
      "inputTokenLimit": 32768,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemma-3-12b-it",
      "version": "001",
      "displayName": "Gemma 3 12B",
      "inputTokenLimit": 32768,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemma-3-27b-it",
      "version": "001",
      "displayName": "Gemma 3 27B",
      "inputTokenLimit": 131072,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemma-3n-e4b-it",
      "version": "001",
      "displayName": "Gemma 3n E4B",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 2048,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemma-3n-e2b-it",
      "version": "001",
      "displayName": "Gemma 3n E2B",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 2048,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64
    },
    {
      "name": "models/gemini-flash-latest",
      "version": "Gemini Flash Latest",
      "displayName": "Gemini Flash Latest",
      "description": "Latest release of Gemini Flash",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-flash-lite-latest",
      "version": "Gemini Flash-Lite Latest",
      "displayName": "Gemini Flash-Lite Latest",
      "description": "Latest release of Gemini Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-pro-latest",
      "version": "Gemini Pro Latest",
      "displayName": "Gemini Pro Latest",
      "description": "Latest release of Gemini Pro",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-flash-lite",
      "version": "001",
      "displayName": "Gemini 2.5 Flash-Lite",
      "description": "Stable version of Gemini 2.5 Flash-Lite, released in July of 2025",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-flash-image-preview",
      "version": "2.0",
      "displayName": "Nano Banana",
      "description": "Gemini 2.5 Flash Preview Image",
      "inputTokenLimit": 32768,
      "outputTokenLimit": 32768,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 1
    },
    {
      "name": "models/gemini-2.5-flash-image",
      "version": "2.0",
      "displayName": "Nano Banana",
      "description": "Gemini 2.5 Flash Preview Image",
      "inputTokenLimit": 32768,
      "outputTokenLimit": 32768,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 1
    },
    {
      "name": "models/gemini-2.5-flash-preview-09-2025",
      "version": "Gemini 2.5 Flash Preview 09-2025",
      "displayName": "Gemini 2.5 Flash Preview Sep 2025",
      "description": "Gemini 2.5 Flash Preview Sep 2025",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-flash-lite-preview-09-2025",
      "version": "2.5-preview-09-25",
      "displayName": "Gemini 2.5 Flash-Lite Preview Sep 2025",
      "description": "Preview release (Septempber 25th, 2025) of Gemini 2.5 Flash-Lite",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-3-pro-preview",
      "version": "3-pro-preview-11-2025",
      "displayName": "Gemini 3 Pro Preview",
      "description": "Gemini 3 Pro Preview",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "createCachedContent",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-3-pro-image-preview",
      "version": "3.0",
      "displayName": "Nano Banana Pro",
      "description": "Gemini 3 Pro Image Preview",
      "inputTokenLimit": 131072,
      "outputTokenLimit": 32768,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 1,
      "thinking": true
    },
    {
      "name": "models/nano-banana-pro-preview",
      "version": "3.0",
      "displayName": "Nano Banana Pro",
      "description": "Gemini 3 Pro Image Preview",
      "inputTokenLimit": 131072,
      "outputTokenLimit": 32768,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens",
        "batchGenerateContent"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 1,
      "thinking": true
    },
    {
      "name": "models/gemini-robotics-er-1.5-preview",
      "version": "1.5-preview",
      "displayName": "Gemini Robotics-ER 1.5 Preview",
      "description": "Gemini Robotics-ER 1.5 Preview",
      "inputTokenLimit": 1048576,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/gemini-2.5-computer-use-preview-10-2025",
      "version": "Gemini 2.5 Computer Use Preview 10-2025",
      "displayName": "Gemini 2.5 Computer Use Preview 10-2025",
      "description": "Gemini 2.5 Computer Use Preview 10-2025",
      "inputTokenLimit": 131072,
      "outputTokenLimit": 65536,
      "supportedGenerationMethods": [
        "generateContent",
        "countTokens"
      ],
      "temperature": 1,
      "topP": 0.95,
      "topK": 64,
      "maxTemperature": 2,
      "thinking": true
    },
    {
      "name": "models/embedding-001",
      "version": "001",
      "displayName": "Embedding 001",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 2048,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedContent"
      ]
    },
    {
      "name": "models/text-embedding-004",
      "version": "004",
      "displayName": "Text Embedding 004",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 2048,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedContent"
      ]
    },
    {
      "name": "models/gemini-embedding-exp-03-07",
      "version": "exp-03-07",
      "displayName": "Gemini Embedding Experimental 03-07",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedContent",
        "countTextTokens",
        "countTokens"
      ]
    },
    {
      "name": "models/gemini-embedding-exp",
      "version": "exp-03-07",
      "displayName": "Gemini Embedding Experimental",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 8192,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedContent",
        "countTextTokens",
        "countTokens"
      ]
    },
    {
      "name": "models/gemini-embedding-001",
      "version": "001",
      "displayName": "Gemini Embedding 001",
      "description": "Obtain a distributed representation of a text.",
      "inputTokenLimit": 2048,
      "outputTokenLimit": 1,
      "supportedGenerationMethods": [
        "embedContent",
        "countTextTokens",
        "countTokens",
        "asyncBatchEmbedContent"
      ]
    },
    {
      "name": "models/aqa",
      "version": "001",
      "displayName": "Model that performs Attributed Question Answering.",
      "description": "Model trained to return answers to questions that are grounded in provided sources, along with estimating answerable probability.",
      "inputTokenLimit": 7168,
      "outputTokenLimit": 1024,
      "supportedGenerationMethods": [
        "generateAnswer"
      ],
      "temperature": 0.2,
      "topP": 1,
      "topK": 40
    },
    {
      "name": "models/imagen-4.0-generate-preview-06-06",
      "version": "01",
      "displayName": "Imagen 4 (Preview)",
      "description": "Vertex served Imagen 4.0 model",
      "inputTokenLimit": 480,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "predict"
      ]
    },
    {
      "name": "models/imagen-4.0-ultra-generate-preview-06-06",
      "version": "01",
      "displayName": "Imagen 4 Ultra (Preview)",
      "description": "Vertex served Imagen 4.0 ultra model",
      "inputTokenLimit": 480,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "predict"
      ]
    },
    {
      "name": "models/imagen-4.0-generate-001",
      "version": "001",
      "displayName": "Imagen 4",
      "description": "Vertex served Imagen 4.0 model",
      "inputTokenLimit": 480,
      "outputTokenLimit": 8192,
      "supportedGenerationMethods": [
        "predict"
      ]
    }
  ],
  "nextPageToken": "Ch5tb2RlbHMvaW1hZ2VuLTQuMC1nZW5lcmF0ZS0wMDE="
}
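
The full model list is long (and paginated via nextPageToken, so this covers only the first page). To see just the model names that support generateContent, the output can be piped through jq; a minimal sketch, assuming jq is installed:

# List only models whose supportedGenerationMethods include generateContent
curl -s "https://generativelanguage.googleapis.com/v1beta/models" \
  -H "x-goog-api-key: $GEMINI_API_KEY" \
  | jq -r '.models[] | select(.supportedGenerationMethods | index("generateContent")) | .name'
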
curl "https://generativelanguage.googleapis.com/v1beta/models/gemini-3-pro-preview:generateContent" \
  -H "Content-Type: application/json" \
  -H "x-goog-api-key: $GEMINI_API_KEY" \
  -X POST \
  -d '{
    "contents": [
      {
        "parts": [
          {
            "text": "如果你是gemini3模型,请只返回数字1;如果你不是gemini3模型,请只返回数字0。不要输出其他任何文字、标点、空格或解释。"
          }
        ]
      }
    ]
  }'
{
  "candidates": [
    {
      "content": {
        "parts": [
          {
            "text": "0",
            "thoughtSignature": "Ev0LCvoLAdHtim8oaGXwKrSZ5he48PgQlsvnS1VzgqRUMtpZjkv99N9IRWZBEtcWyG//BOUJV1GU5jjpjnPryppEXd8I8441oHcsWrYUPh2VLRO02GqlnaxmpzB2CSrN5jx9lDhGqnkjLemX6Zswss+y/ovbTtSFL49Zi0fHHmlvlFSpzXeshGer3hve5rWQKOrW3JTdZOSIw2ipM6unE4/A2AsRfNsMV5gOjCOvzF8o+vkcX3khsmEUDHoFP1oSM0GqJ9vBNyToTqFe7yycuYZtAOut8AhdKUwK0HqK8bVcHIwk9IJpR1oMgpYEkNY2j6WSbCRVECdN24cw7GWQ5LtjCakdKNjz03CyNyL4XuwqkdswbitF74xEkQNw5yebHF8nK6+livxqEiNfRjcA1ITtLuSbPokhI1uOn5VL2jX6sawA70PSvyqpwVgmGED/sYAeKVvsIJY1UVlHPnNAx6rMIvokZuvvSPz6EgyITc9hhV2odlV/QbPC0ulhgrExYQfrQ9pwtUZkanhT2WQ5iu84ZiA/mbIcAkz1SSjg1vmj7Sb8U3WT/zX+Kjgz3H3Gm2wR4I2RBUSeH58aH1cuI7EVQTFOd7jlY4VXBiwXG4eMiIUzVgnvuyjuQzpMJG57LAfx1pDdLNGuoGijc7Ud5N0bN3veHUBaZ36I+MjgP3sORCCmDXO0a15w8bvWIRulUHvq+qyV495Jr62llN3ibNbVPdQ79Lw5Fdc8zBYQYqYyBFQv3uzLEyAPwu0s1NKoEV+Mx+3W3WLjEkGlib7uDn92/NZ8oAVUM3hjSG2gC8CCEt5J8TV1HR6dpjPZLJPVxAJNU5I/PeRhVdj+HKXsakv4N+cieI6WTHALqCE+nE2vKQQG3mtVzVg1pBRFfciNRoQw3wY/acDTcspLKeyDkyswGhkb1KUCbk2gedtp52x5K1CAoVz/UZv5X/FzcZlQjW3ykSG/OOoeTQjChgbOtvJ+waQyRg5W4bjMjJ+Xd5P6YrzqMfeDIpFE9+xVsL0F5y4XsTgnC9VCco09+YNm1eumbpDsc+SvOaVjtvYMZupPjH6igxLhYTMa+RwiMu8qAa0Mut8tUHvCNQKea5gt/Qo6DCv3vBhAFFNtCkQzbLXspdQYVXD7hHIb4Yjrlwixfdnowi19X0+2bS7P3n6OMsxBDxygK6YiJjAzZz2ysuquKChTtnV9tpXSKFc7L6JSrCNy4/0RB/E2Oa+4/RBRvVNQ9JvzSCK5xbYVo7pJhiPVKuQD9NhzajpGwcXLqrYrdCFAnKo3uiEc8ioHIsnhCA+Nlpley8ncfz8PFFWBHrqWuMLKo/9edpUmlQKawebNONfbQv+l4FXBgntOySkwMcOBHXISysawO78bibSjd0jGxiNa6OQvRtQ6QpdxcrzuKYw8BGEVgJHLGdP7IxIWaPzVKfRDqWSaFk62xrc2ukBcBWExfpxVnsxDBUjo/yvtG+6++vmrC9BxB9sKmE5JPORKrNxwZXmBfg0Lzi1r7I8VwNCvGP9aQcd7CKg56candUyOjN7FDzktCBkGnY2YhGlwq+8ztvGcQeNpSg23Q0xSEsDZICRRjDMdjRYS+xiCyifoCU1abviE4pfdPVr1vcTzTLs62d75k2/A6uiT1y1Uu2gjwrX04ataWah03KP6Dr2fKX2nzUdWl2GvuUxGUHjV3i338ltQof5w7S0+Fenb9w39dvbSJSItpB5s6o1FveKIpLEOzy7Qt6+KzE4BRxFEQ0ObH1IJvS/HCjM9mXknpuwQhCG5xrOyGrMtVnfzYSBDN5NpR5ho/1r5HiOts3MTZzO6lKm8/f8wYpNeqA3FMLK6xrfV+g6G+VRh1wHV5SGB3e9vrfB41jrm4kfKMwPbmUfkI4t9pNAC/x+18+RYHU3z5J4np7+p0+PdxtKrwLK1OK0b6afEOuZ+LxrmZCv11GOvs4TDdAfFJieTne/Jfk7Or6xC0Mwtt7F9jRINQR8vMKGcGPcFB/6OY6pO3ONdLJhdp6RsDO2O5UjHknD7rMB198zUZDMIqJ+1j2iI"
          }
        ],
        "role": "model"
      },
      "finishReason": "STOP",
      "index": 0
    }
  ],
  "usageMetadata": {
    "promptTokenCount": 39,
    "candidatesTokenCount": 1,
    "totalTokenCount": 424,
    "promptTokensDetails": [
      {
        "modality": "TEXT",
        "tokenCount": 39
      }
    ],
    "thoughtsTokenCount": 384
  },
  "modelVersion": "gemini-3-pro-preview",
  "responseId": "jgImae3EFdW1nsEPtIvgOQ"
}
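
To read only the model's answer instead of the whole response, the JSON can be filtered with jq. A minimal sketch, assuming jq is installed and the response above was saved to a hypothetical response.json (the same filter can equally be appended to the curl command with a pipe):

# Print only the answer text from the first candidate
jq -r '.candidates[0].content.parts[0].text' response.json
# For the response above this prints: 0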

3 GPT-5

curl https://api.openai.com/v1/responses \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "gpt-5",
    "input": "ping"
  }'
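
For a quick pass/fail check on this ping, the response can be narrowed with jq. A minimal sketch, assuming jq is installed and the usual Responses API fields (model, status, error are assumptions here, not shown above); on success error should be null, while a rejected key returns only an error object:

# Show only the fields relevant for a key check
curl -s https://api.openai.com/v1/responses \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model": "gpt-5", "input": "ping"}' \
  | jq '{model, status, error}'
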
curl https://api.openai.com/v1/models \
  -H "Authorization: Bearer sk-...RCYA"
{
  "object": "list",
  "data": [
    {
      "id": "gpt-4-0613",
      "object": "model",
      "created": 1686588896,
      "owned_by": "openai"
    },
    {
      "id": "gpt-4",
      "object": "model",
      "created": 1687882411,
      "owned_by": "openai"
    },
    {
      "id": "gpt-3.5-turbo",
      "object": "model",
      "created": 1677610602,
      "owned_by": "openai"
    },
    {
      "id": "gpt-5.1-codex-mini",
      "object": "model",
      "created": 1763007109,
      "owned_by": "system"
    },
    {
      "id": "gpt-5.1-chat-latest",
      "object": "model",
      "created": 1762547951,
      "owned_by": "system"
    },
    {
      "id": "gpt-5.1-2025-11-13",
      "object": "model",
      "created": 1762800353,
      "owned_by": "system"
    },
    {
      "id": "gpt-5.1",
      "object": "model",
      "created": 1762800673,
      "owned_by": "system"
    },
    {
      "id": "gpt-5.1-codex",
      "object": "model",
      "created": 1762988221,
      "owned_by": "system"
    },
    {
      "id": "davinci-002",
      "object": "model",
      "created": 1692634301,
      "owned_by": "system"
    },
    {
      "id": "babbage-002",
      "object": "model",
      "created": 1692634615,
      "owned_by": "system"
    },
    {
      "id": "gpt-3.5-turbo-instruct",
      "object": "model",
      "created": 1692901427,
      "owned_by": "system"
    },
    {
      "id": "gpt-3.5-turbo-instruct-0914",
      "object": "model",
      "created": 1694122472,
      "owned_by": "system"
    },
    {
      "id": "dall-e-3",
      "object": "model",
      "created": 1698785189,
      "owned_by": "system"
    },
    {
      "id": "dall-e-2",
      "object": "model",
      "created": 1698798177,
      "owned_by": "system"
    },
    {
      "id": "gpt-4-1106-preview",
      "object": "model",
      "created": 1698957206,
      "owned_by": "system"
    },
    {
      "id": "gpt-3.5-turbo-1106",
      "object": "model",
      "created": 1698959748,
      "owned_by": "system"
    },
    {
      "id": "tts-1-hd",
      "object": "model",
      "created": 1699046015,
      "owned_by": "system"
    },
    {
      "id": "tts-1-1106",
      "object": "model",
      "created": 1699053241,
      "owned_by": "system"
    },
    {
      "id": "tts-1-hd-1106",
      "object": "model",
      "created": 1699053533,
      "owned_by": "system"
    },
    {
      "id": "text-embedding-3-small",
      "object": "model",
      "created": 1705948997,
      "owned_by": "system"
    },
    {
      "id": "text-embedding-3-large",
      "object": "model",
      "created": 1705953180,
      "owned_by": "system"
    },
    {
      "id": "gpt-4-0125-preview",
      "object": "model",
      "created": 1706037612,
      "owned_by": "system"
    },
    {
      "id": "gpt-4-turbo-preview",
      "object": "model",
      "created": 1706037777,
      "owned_by": "system"
    },
    {
      "id": "gpt-3.5-turbo-0125",
      "object": "model",
      "created": 1706048358,
      "owned_by": "system"
    },
    {
      "id": "gpt-4-turbo",
      "object": "model",
      "created": 1712361441,
      "owned_by": "system"
    },
    {
      "id": "gpt-4-turbo-2024-04-09",
      "object": "model",
      "created": 1712601677,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o",
      "object": "model",
      "created": 1715367049,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-2024-05-13",
      "object": "model",
      "created": 1715368132,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-2024-07-18",
      "object": "model",
      "created": 1721172717,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini",
      "object": "model",
      "created": 1721172741,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-2024-08-06",
      "object": "model",
      "created": 1722814719,
      "owned_by": "system"
    },
    {
      "id": "chatgpt-4o-latest",
      "object": "model",
      "created": 1723515131,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-realtime-preview-2024-10-01",
      "object": "model",
      "created": 1727131766,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-audio-preview-2024-10-01",
      "object": "model",
      "created": 1727389042,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-audio-preview",
      "object": "model",
      "created": 1727460443,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-realtime-preview",
      "object": "model",
      "created": 1727659998,
      "owned_by": "system"
    },
    {
      "id": "omni-moderation-latest",
      "object": "model",
      "created": 1731689265,
      "owned_by": "system"
    },
    {
      "id": "omni-moderation-2024-09-26",
      "object": "model",
      "created": 1732734466,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-realtime-preview-2024-12-17",
      "object": "model",
      "created": 1733945430,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-audio-preview-2024-12-17",
      "object": "model",
      "created": 1734034239,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-realtime-preview-2024-12-17",
      "object": "model",
      "created": 1734112601,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-audio-preview-2024-12-17",
      "object": "model",
      "created": 1734115920,
      "owned_by": "system"
    },
    {
      "id": "o1-2024-12-17",
      "object": "model",
      "created": 1734326976,
      "owned_by": "system"
    },
    {
      "id": "o1",
      "object": "model",
      "created": 1734375816,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-realtime-preview",
      "object": "model",
      "created": 1734387380,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-audio-preview",
      "object": "model",
      "created": 1734387424,
      "owned_by": "system"
    },
    {
      "id": "o3-mini",
      "object": "model",
      "created": 1737146383,
      "owned_by": "system"
    },
    {
      "id": "o3-mini-2025-01-31",
      "object": "model",
      "created": 1738010200,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-2024-11-20",
      "object": "model",
      "created": 1739331543,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-search-preview-2025-03-11",
      "object": "model",
      "created": 1741388170,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-search-preview",
      "object": "model",
      "created": 1741388720,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-search-preview-2025-03-11",
      "object": "model",
      "created": 1741390858,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-search-preview",
      "object": "model",
      "created": 1741391161,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-transcribe",
      "object": "model",
      "created": 1742068463,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-transcribe",
      "object": "model",
      "created": 1742068596,
      "owned_by": "system"
    },
    {
      "id": "o1-pro-2025-03-19",
      "object": "model",
      "created": 1742251504,
      "owned_by": "system"
    },
    {
      "id": "o1-pro",
      "object": "model",
      "created": 1742251791,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-mini-tts",
      "object": "model",
      "created": 1742403959,
      "owned_by": "system"
    },
    {
      "id": "o3-2025-04-16",
      "object": "model",
      "created": 1744133301,
      "owned_by": "system"
    },
    {
      "id": "o4-mini-2025-04-16",
      "object": "model",
      "created": 1744133506,
      "owned_by": "system"
    },
    {
      "id": "o3",
      "object": "model",
      "created": 1744225308,
      "owned_by": "system"
    },
    {
      "id": "o4-mini",
      "object": "model",
      "created": 1744225351,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1-2025-04-14",
      "object": "model",
      "created": 1744315746,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1",
      "object": "model",
      "created": 1744316542,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1-mini-2025-04-14",
      "object": "model",
      "created": 1744317547,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1-mini",
      "object": "model",
      "created": 1744318173,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1-nano-2025-04-14",
      "object": "model",
      "created": 1744321025,
      "owned_by": "system"
    },
    {
      "id": "gpt-4.1-nano",
      "object": "model",
      "created": 1744321707,
      "owned_by": "system"
    },
    {
      "id": "gpt-image-1",
      "object": "model",
      "created": 1745517030,
      "owned_by": "system"
    },
    {
      "id": "codex-mini-latest",
      "object": "model",
      "created": 1746673257,
      "owned_by": "system"
    },
    {
      "id": "o3-pro",
      "object": "model",
      "created": 1748475349,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-realtime-preview-2025-06-03",
      "object": "model",
      "created": 1748907838,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-audio-preview-2025-06-03",
      "object": "model",
      "created": 1748908498,
      "owned_by": "system"
    },
    {
      "id": "o3-pro-2025-06-10",
      "object": "model",
      "created": 1749166761,
      "owned_by": "system"
    },
    {
      "id": "o4-mini-deep-research",
      "object": "model",
      "created": 1749685485,
      "owned_by": "system"
    },
    {
      "id": "o3-deep-research",
      "object": "model",
      "created": 1749840121,
      "owned_by": "system"
    },
    {
      "id": "gpt-4o-transcribe-diarize",
      "object": "model",
      "created": 1750798887,
      "owned_by": "system"
    },
    {
      "id": "o3-deep-research-2025-06-26",
      "object": "model",
      "created": 1750865219,
      "owned_by": "system"
    },
    {
      "id": "o4-mini-deep-research-2025-06-26",
      "object": "model",
      "created": 1750866121,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-chat-latest",
      "object": "model",
      "created": 1754073306,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-2025-08-07",
      "object": "model",
      "created": 1754075360,
      "owned_by": "system"
    },
    {
      "id": "gpt-5",
      "object": "model",
      "created": 1754425777,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-mini-2025-08-07",
      "object": "model",
      "created": 1754425867,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-mini",
      "object": "model",
      "created": 1754425928,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-nano-2025-08-07",
      "object": "model",
      "created": 1754426303,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-nano",
      "object": "model",
      "created": 1754426384,
      "owned_by": "system"
    },
    {
      "id": "gpt-audio-2025-08-28",
      "object": "model",
      "created": 1756256146,
      "owned_by": "system"
    },
    {
      "id": "gpt-realtime",
      "object": "model",
      "created": 1756271701,
      "owned_by": "system"
    },
    {
      "id": "gpt-realtime-2025-08-28",
      "object": "model",
      "created": 1756271773,
      "owned_by": "system"
    },
    {
      "id": "gpt-audio",
      "object": "model",
      "created": 1756339249,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-codex",
      "object": "model",
      "created": 1757527818,
      "owned_by": "system"
    },
    {
      "id": "gpt-image-1-mini",
      "object": "model",
      "created": 1758845821,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-pro-2025-10-06",
      "object": "model",
      "created": 1759469707,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-pro",
      "object": "model",
      "created": 1759469822,
      "owned_by": "system"
    },
    {
      "id": "gpt-audio-mini",
      "object": "model",
      "created": 1759512027,
      "owned_by": "system"
    },
    {
      "id": "gpt-audio-mini-2025-10-06",
      "object": "model",
      "created": 1759512137,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-search-api",
      "object": "model",
      "created": 1759514629,
      "owned_by": "system"
    },
    {
      "id": "gpt-realtime-mini",
      "object": "model",
      "created": 1759517133,
      "owned_by": "system"
    },
    {
      "id": "gpt-realtime-mini-2025-10-06",
      "object": "model",
      "created": 1759517175,
      "owned_by": "system"
    },
    {
      "id": "sora-2",
      "object": "model",
      "created": 1759708615,
      "owned_by": "system"
    },
    {
      "id": "sora-2-pro",
      "object": "model",
      "created": 1759708663,
      "owned_by": "system"
    },
    {
      "id": "gpt-5-search-api-2025-10-14",
      "object": "model",
      "created": 1760043960,
      "owned_by": "system"
    },
    {
      "id": "gpt-3.5-turbo-16k",
      "object": "model",
      "created": 1683758102,
      "owned_by": "openai-internal"
    },
    {
      "id": "tts-1",
      "object": "model",
      "created": 1681940951,
      "owned_by": "openai-internal"
    },
    {
      "id": "whisper-1",
      "object": "model",
      "created": 1677532384,
      "owned_by": "openai-internal"
    },
    {
      "id": "text-embedding-ada-002",
      "object": "model",
      "created": 1671217299,
      "owned_by": "openai-internal"
    }
  ]
}
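
Since the point of listing models is to confirm the key can see the GPT-5 family, the list can be filtered directly. A minimal sketch, assuming jq is installed:

# Print only the model IDs that start with gpt-5
curl -s https://api.openai.com/v1/models \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  | jq -r '.data[].id' | grep '^gpt-5'
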
# Replace with your real key
export OPENAI_API_KEY="YOUR_API_KEY"

curl https://api.openai.com/v1/responses \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "gpt-5.1",
    "input": "你是gpt5或更高版本吗"
  }'
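
The Responses API returns the generated text inside the output array rather than in choices. To print just the model's reply, the response can be piped through jq; a minimal sketch, assuming jq is installed and the usual response shape (a message item whose content parts are of type output_text):

# Print only the assistant's reply text
curl -s https://api.openai.com/v1/responses \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model": "gpt-5.1", "input": "Are you GPT-5 or a later version?"}' \
  | jq -r '.output[] | select(.type == "message") | .content[] | select(.type == "output_text") | .text'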

5 Router

curl "https://sh...in/v1/models" \
  -H "Authorization: Bearer sk-xxxxxxxxxxxxxxxx" \
  -H "Content-Type: application/json"
