gemini-integration
Total installs: 2
Weekly installs: 2
Site rank: #68933
Install command
npx skills add https://github.com/dseirz-rgb/worker --skill gemini-integration
Installs by agent
opencode: 2
gemini-cli: 2
antigravity: 2
claude-code: 2
github-copilot: 2
codex: 2
Skill Documentation
Gemini Integration (Google Gemini API Integration Skill)
🤖 Core concept: standardize the Gemini API integration workflow to ensure the reliability, performance, and user experience of AI features.
When to Use This Skill
Use this skill when you need to:
- Integrate the Google Gemini API for AI analysis
- Write and optimize prompts (prompt engineering)
- Implement streaming responses
- Handle multimodal input (text, images, documents)
- Build financial-analysis AI features
- Implement multi-turn conversation systems
Not For / Boundaries
This skill is not for:
- Other LLM providers (OpenAI, Claude, etc.)
- Local model deployment
- Model fine-tuning
Quick Reference
🎯 Standard Gemini integration workflow
Requirements analysis → Model selection → Prompt design → API integration → Stream handling → Testing & validation
Step focus, in order: scenario definition, performance/cost, structured output, error handling, user experience, quality evaluation.
📋 Pre-integration checklist
| Question | Purpose |
|---|---|
| 1. Which model? | gemini-2.0-flash / gemini-1.5-pro |
| 2. Is streaming needed? | Recommended for long-form generation |
| 3. What is the input type? | Plain text / images / documents |
| 4. What output format is required? | Free text / JSON / structured |
| 5. What context length is needed? | Affects model choice and cost |
| 6. What safety-filter level? | Financial scenarios need appropriate configuration |
📊 Model selection guide
| Model | Best for | Characteristics |
|---|---|---|
| gemini-2.0-flash | General tasks, fast responses | Fast, low cost |
| gemini-1.5-pro | Complex analysis, long context | Most capable, longest context |
| gemini-1.5-flash | Balancing performance and cost | Moderate speed and capability |
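Where it helps, the table above can be encoded as a small helper so call sites stay declarative. This is an illustrative sketch; the task categories are assumptions for this example, not an official taxonomy.
// src/services/ai/select-model.ts (illustrative sketch)
type TaskKind = 'general' | 'complex-analysis' | 'cost-sensitive';
const MODEL_BY_TASK: Record<TaskKind, string> = {
'general': 'gemini-2.0-flash',        // fast, low cost
'complex-analysis': 'gemini-1.5-pro', // long context, strongest reasoning
'cost-sensitive': 'gemini-1.5-flash', // middle ground
};
export function selectModel(task: TaskKind): string {
return MODEL_BY_TASK[task];
}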
API Integration Basics
Environment configuration
# .env.local
GEMINI_API_KEY=your_api_key_here
# Sync the environment variable to Vercel
vercel env add GEMINI_API_KEY production
vercel env add GEMINI_API_KEY preview
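A missing key otherwise surfaces as an opaque SDK error at call time, so failing fast at startup can help. A minimal sketch (the helper name is ours, not part of the SDK):
// src/services/ai/env.ts (optional sketch: fail fast on a missing key)
export function requireGeminiKey(): string {
const key = process.env.GEMINI_API_KEY;
if (!key) {
throw new Error('GEMINI_API_KEY is not set; add it to .env.local or your deployment env');
}
return key;
}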
Basic client configuration
// src/services/ai/gemini-client.ts
import { GoogleGenerativeAI, HarmCategory, HarmBlockThreshold } from '@google/generative-ai';
// Initialize the client
const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY!);
// Safety settings (recommended for financial scenarios)
const safetySettings = [
{
category: HarmCategory.HARM_CATEGORY_HARASSMENT,
threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
},
{
category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,
threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
},
{
category: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
},
{
category: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
},
];
// Generation config
const generationConfig = {
temperature: 0.7, // creativity (0-1)
topP: 0.95, // nucleus sampling
topK: 40, // top-K sampling
maxOutputTokens: 8192, // max output length
};
// Get a model instance
export function getModel(modelName = 'gemini-2.0-flash') {
return genAI.getGenerativeModel({
model: modelName,
safetySettings,
generationConfig,
});
}
// The streaming model uses the same configuration; streaming is selected at
// call time via generateContentStream() instead of generateContent().
export const getStreamingModel = getModel;
Basic text generation
// src/services/ai/generate.ts
import { getModel } from './gemini-client';
import { GeminiError } from './errors'; // defined in the Error Handling section below
export async function generateText(prompt: string): Promise<string> {
const model = getModel();
try {
const result = await model.generateContent(prompt);
const response = result.response;
return response.text();
} catch (error) {
console.error('[Gemini] Generation failed:', error);
throw new GeminiError('Text generation failed', error);
}
}
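A quick usage sketch showing how callers can branch on the typed error (the prompt is illustrative):
// Usage sketch
import { generateText } from '@/services/ai/generate';
import { GeminiError } from '@/services/ai/errors';
try {
const summary = await generateText('Summarize the Q3 earnings highlights for AAPL.');
console.log(summary);
} catch (error) {
if (error instanceof GeminiError) {
// Surface a friendly message; the underlying failure is on error.cause
console.error('AI generation unavailable:', error.message, error.cause);
} else {
throw error;
}
}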
Streaming Response Handling
Server-side streaming generation
// src/services/ai/streaming.ts
import { getStreamingModel } from './gemini-client';
import { GeminiError } from './errors';
/**
* Stream-generate text
* @param prompt the prompt
* @param onChunk callback invoked for each chunk
*/
export async function generateStream(
prompt: string,
onChunk: (chunk: string) => void
): Promise<string> {
const model = getStreamingModel();
try {
const result = await model.generateContentStream(prompt);
let fullText = '';
for await (const chunk of result.stream) {
const chunkText = chunk.text();
fullText += chunkText;
onChunk(chunkText);
}
return fullText;
} catch (error) {
console.error('[Gemini] Streaming generation failed:', error);
throw new GeminiError('Streaming generation failed', error);
}
}
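A usage sketch for a Node.js script, printing chunks as they arrive (the prompt is illustrative):
// Usage sketch: stream chunks to stdout
import { generateStream } from '@/services/ai/streaming';
const full = await generateStream(
'Explain dollar-cost averaging in two paragraphs.',
(chunk) => process.stdout.write(chunk)
);
console.log(`\nDone: ${full.length} characters`);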
Streaming responses from an API Route
// app/api/ai/stream/route.ts
import { getStreamingModel } from '@/services/ai/gemini-client';
export async function POST(request: Request) {
const { prompt, systemPrompt } = await request.json();
const model = getStreamingModel();
// Create a ReadableStream
const stream = new ReadableStream({
async start(controller) {
try {
const fullPrompt = systemPrompt
? `${systemPrompt}\n\n${prompt}`
: prompt;
const result = await model.generateContentStream(fullPrompt);
for await (const chunk of result.stream) {
const text = chunk.text();
// Send SSE-formatted data
controller.enqueue(
new TextEncoder().encode(`data: ${JSON.stringify({ text })}\n\n`)
);
}
// Send the end-of-stream signal
controller.enqueue(
new TextEncoder().encode('data: [DONE]\n\n')
);
controller.close();
} catch (error) {
controller.enqueue(
new TextEncoder().encode(
`data: ${JSON.stringify({ error: 'Generation failed' })}\n\n`
)
);
controller.close();
}
},
});
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
},
});
}
Consuming the stream on the frontend
// src/hooks/useStreamingChat.ts
import { useState, useCallback } from 'react';
interface UseStreamingChatOptions {
onChunk?: (chunk: string) => void;
onComplete?: (fullText: string) => void;
onError?: (error: Error) => void;
}
export function useStreamingChat(options: UseStreamingChatOptions = {}) {
const [isStreaming, setIsStreaming] = useState(false);
const [streamedText, setStreamedText] = useState('');
const sendMessage = useCallback(async (prompt: string, systemPrompt?: string) => {
setIsStreaming(true);
setStreamedText('');
try {
const response = await fetch('/api/ai/stream', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt, systemPrompt }),
});
if (!response.ok) {
throw new Error('Request failed');
}
const reader = response.body?.getReader();
if (!reader) throw new Error('Unable to read response body');
const decoder = new TextDecoder();
let buffer = '';
let fullText = '';
while (true) {
const { done, value } = await reader.read();
if (done) break;
// SSE events can be split across reads, so buffer partial lines
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split('\n');
buffer = lines.pop() ?? '';
for (const line of lines) {
if (!line.startsWith('data: ')) continue;
const data = line.slice(6);
if (data === '[DONE]') continue;
let parsed: { text?: string; error?: string };
try {
parsed = JSON.parse(data);
} catch {
continue; // ignore malformed fragments
}
// Re-raise server-side errors instead of swallowing them
if (parsed.error) throw new Error(parsed.error);
if (parsed.text) {
fullText += parsed.text;
setStreamedText(fullText);
options.onChunk?.(parsed.text);
}
}
}
options.onComplete?.(fullText);
return fullText;
} catch (error) {
const err = error instanceof Error ? error : new Error('Unknown error');
options.onError?.(err);
throw err;
} finally {
setIsStreaming(false);
}
}, [options]);
return {
sendMessage,
isStreaming,
streamedText,
};
}
Streaming response UI component
// src/components/StreamingMessage.tsx
'use client';
import { useStreamingChat } from '@/hooks/useStreamingChat';
import { useState } from 'react';
export function StreamingMessage() {
const [input, setInput] = useState('');
const { sendMessage, isStreaming, streamedText } = useStreamingChat({
onComplete: (text) => {
console.log('Generation complete:', text.length, 'characters');
},
});
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!input.trim() || isStreaming) return;
await sendMessage(input);
setInput('');
};
return (
<div className="space-y-4">
<form onSubmit={handleSubmit} className="flex gap-2">
<input
type="text"
value={input}
onChange={(e) => setInput(e.target.value)}
placeholder="è¾å
¥é®é¢..."
className="flex-1 px-4 py-2 border rounded-lg"
disabled={isStreaming}
/>
<button
type="submit"
disabled={isStreaming}
className="px-4 py-2 bg-blue-500 text-white rounded-lg disabled:opacity-50"
>
{isStreaming ? 'Generating…' : 'Send'}
</button>
</form>
{streamedText && (
<div className="p-4 bg-gray-50 rounded-lg whitespace-pre-wrap">
{streamedText}
{isStreaming && <span className="animate-pulse">▋</span>}
</div>
)}
</div>
);
}
夿¨¡æè¾å ¥å¤ç
å¾çåæ
// src/services/ai/vision.ts
import { getModel } from './gemini-client';
import { GeminiError } from './errors';
/**
* Analyze image content
*/
export async function analyzeImage(
imageBase64: string,
mimeType: string,
prompt: string
): Promise<string> {
const model = getModel('gemini-1.5-pro'); // the Pro model is recommended for vision tasks
const imagePart = {
inlineData: {
data: imageBase64,
mimeType,
},
};
try {
const result = await model.generateContent([prompt, imagePart]);
return result.response.text();
} catch (error) {
console.error('[Gemini] Image analysis failed:', error);
throw new GeminiError('Image analysis failed', error);
}
}
/**
* Analyze an image from a URL
*/
export async function analyzeImageFromUrl(
imageUrl: string,
prompt: string
): Promise<string> {
// Fetch the image data
const response = await fetch(imageUrl);
if (!response.ok) throw new Error(`Failed to fetch image: ${response.status}`);
const arrayBuffer = await response.arrayBuffer();
const base64 = Buffer.from(arrayBuffer).toString('base64');
const mimeType = response.headers.get('content-type') || 'image/jpeg';
return analyzeImage(base64, mimeType, prompt);
}
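Usage sketch (the URL and prompt are illustrative):
// Usage sketch
const description = await analyzeImageFromUrl(
'https://example.com/chart.png',
'Describe the trend shown in this price chart.'
);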
Document analysis
// src/services/ai/document.ts
import { getModel } from './gemini-client';
import { GeminiError } from './errors';
/**
* Analyze a PDF document
*/
export async function analyzePDF(
pdfBase64: string,
prompt: string
): Promise<string> {
const model = getModel('gemini-1.5-pro');
const pdfPart = {
inlineData: {
data: pdfBase64,
mimeType: 'application/pdf',
},
};
try {
const result = await model.generateContent([prompt, pdfPart]);
return result.response.text();
} catch (error) {
console.error('[Gemini] PDF analysis failed:', error);
throw new GeminiError('PDF analysis failed', error);
}
}
/**
* Analyze multiple files
*/
export async function analyzeMultipleFiles(
files: Array<{ data: string; mimeType: string }>,
prompt: string
): Promise<string> {
const model = getModel('gemini-1.5-pro');
const parts = files.map(file => ({
inlineData: {
data: file.data,
mimeType: file.mimeType,
},
}));
try {
const result = await model.generateContent([prompt, ...parts]);
return result.response.text();
} catch (error) {
console.error('[Gemini] Multi-file analysis failed:', error);
throw new GeminiError('Multi-file analysis failed', error);
}
}
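A usage sketch assuming a Node.js runtime; the file path is illustrative:
// Usage sketch: load a local PDF and analyze it
import { readFile } from 'node:fs/promises';
import { analyzePDF } from '@/services/ai/document';
const pdfBase64 = (await readFile('./reports/q3-earnings.pdf')).toString('base64');
const summary = await analyzePDF(pdfBase64, 'Summarize the key financial figures.');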
Multi-turn Conversations
Conversation management
// src/services/ai/chat.ts
import { getModel } from './gemini-client';
import type { Content } from '@google/generative-ai';
export interface ChatMessage {
role: 'user' | 'model';
content: string;
}
/**
* Create a chat session
*/
export function createChatSession(systemPrompt?: string) {
const model = getModel();
const history: Content[] = [];
// 妿æç³»ç»æç¤ºè¯ï¼ä½ä¸ºç¬¬ä¸æ¡æ¶æ¯
if (systemPrompt) {
history.push({
role: 'user',
parts: [{ text: `System instructions: ${systemPrompt}` }],
});
history.push({
role: 'model',
parts: [{ text: 'Understood. I will follow the system instructions.' }],
});
}
const chat = model.startChat({ history });
return {
/**
* Send a message
*/
async sendMessage(message: string): Promise<string> {
const result = await chat.sendMessage(message);
return result.response.text();
},
/**
* Send a message, streaming the reply
*/
async sendMessageStream(
message: string,
onChunk: (chunk: string) => void
): Promise<string> {
const result = await chat.sendMessageStream(message);
let fullText = '';
for await (const chunk of result.stream) {
const text = chunk.text();
fullText += text;
onChunk(text);
}
return fullText;
},
/**
* Get the conversation history
*/
async getHistory(): Promise<ChatMessage[]> {
// getHistory() on the SDK's ChatSession is async
const history = await chat.getHistory();
return history.map(msg => ({
role: msg.role as 'user' | 'model',
content: msg.parts.map(p => (p as { text: string }).text).join(''),
}));
},
};
}
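A quick usage sketch (the prompts are illustrative); each sendMessage call automatically carries the accumulated history:
// Usage sketch
const session = createChatSession('You are a cautious financial analyst.');
const first = await session.sendMessage('What does a rising P/E ratio indicate?');
const followUp = await session.sendMessage('How does that change in a rate-hike cycle?');
console.log((await session.getHistory()).length, 'messages so far');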
Chat Hook
// src/hooks/useChat.ts
import { useState, useCallback, useRef } from 'react';
import { createChatSession, ChatMessage } from '@/services/ai/chat';
export function useChat(systemPrompt?: string) {
const [messages, setMessages] = useState<ChatMessage[]>([]);
const [isLoading, setIsLoading] = useState(false);
const chatRef = useRef(createChatSession(systemPrompt));
const sendMessage = useCallback(async (content: string) => {
setIsLoading(true);
// Append the user message
setMessages(prev => [...prev, { role: 'user', content }]);
try {
// Append an empty model message that will be filled in as chunks stream
setMessages(prev => [...prev, { role: 'model', content: '' }]);
await chatRef.current.sendMessageStream(content, (chunk) => {
setMessages(prev => {
const newMessages = [...prev];
const last = newMessages[newMessages.length - 1];
// Replace rather than mutate the last message so React sees a new object
if (last.role === 'model') {
newMessages[newMessages.length - 1] = { ...last, content: last.content + chunk };
}
return newMessages;
});
});
} catch (error) {
// Remove the empty model message on failure
setMessages(prev => prev.slice(0, -1));
throw error;
} finally {
setIsLoading(false);
}
}, []);
const reset = useCallback(() => {
chatRef.current = createChatSession(systemPrompt);
setMessages([]);
}, [systemPrompt]);
return {
messages,
sendMessage,
isLoading,
reset,
};
}
Structured Output
JSON output
// src/services/ai/structured.ts
import { getModel } from './gemini-client';
import { GeminiError } from './errors';
/**
* Generate structured JSON output
*/
export async function generateJSON<T>(
prompt: string,
schema: string
): Promise<T> {
const model = getModel();
const structuredPrompt = `
${prompt}
Respond strictly in the following JSON schema format, with no other text:
${schema}
Output JSON only, with no explanation and no markdown formatting.
`;
try {
const result = await model.generateContent(structuredPrompt);
const text = result.response.text();
// Strip any markdown code fences the model may have added
const cleanedText = text
.replace(/```json\n?/g, '')
.replace(/```\n?/g, '')
.trim();
return JSON.parse(cleanedText) as T;
} catch (error) {
console.error('[Gemini] JSON generation failed:', error);
throw new GeminiError('JSON generation failed', error);
}
}
// Usage example
interface StockAnalysis {
symbol: string;
recommendation: 'buy' | 'hold' | 'sell';
targetPrice: number;
riskLevel: 'low' | 'medium' | 'high';
reasons: string[];
}
const analysis = await generateJSON<StockAnalysis>(
'Analyze Apple Inc. (AAPL) stock',
`{
"symbol": "string",
"recommendation": "buy | hold | sell",
"targetPrice": "number",
"riskLevel": "low | medium | high",
"reasons": ["string"]
}`
);
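JSON.parse alone does not guarantee the object actually matches the interface. If a schema validator such as zod is available in the project (an assumption; it is not part of this skill), the parsed result can be verified before use:
// Optional sketch, assuming the zod package is installed
import { z } from 'zod';
const StockAnalysisSchema = z.object({
symbol: z.string(),
recommendation: z.enum(['buy', 'hold', 'sell']),
targetPrice: z.number(),
riskLevel: z.enum(['low', 'medium', 'high']),
reasons: z.array(z.string()),
});
// Throws with a descriptive error if the model returned a malformed object
const validated = StockAnalysisSchema.parse(analysis);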
Error Handling
Gemini-specific errors
// src/services/ai/errors.ts
export class GeminiError extends Error {
constructor(
message: string,
public readonly cause?: unknown,
public readonly code?: string
) {
super(message);
this.name = 'GeminiError';
}
}
export class GeminiRateLimitError extends GeminiError {
constructor(retryAfter?: number) {
super(`Rate limited; retry after ${retryAfter || 60} seconds`);
this.name = 'GeminiRateLimitError';
}
}
export class GeminiSafetyError extends GeminiError {
constructor(public readonly blockedCategories: string[]) {
super('Content blocked by safety filters');
this.name = 'GeminiSafetyError';
}
}
export class GeminiQuotaError extends GeminiError {
constructor() {
super('API quota exhausted');
this.name = 'GeminiQuotaError';
}
}
/**
* Normalize unknown errors into typed Gemini errors
*/
export function handleGeminiError(error: unknown): never {
if (error instanceof GeminiError) {
throw error;
}
const errorMessage = error instanceof Error ? error.message : String(error);
// Rate limiting
if (errorMessage.includes('429') || errorMessage.includes('rate limit')) {
throw new GeminiRateLimitError();
}
// Quota exhausted
if (errorMessage.includes('quota') || errorMessage.includes('billing')) {
throw new GeminiQuotaError();
}
// Safety filtering
if (errorMessage.includes('safety') || errorMessage.includes('blocked')) {
throw new GeminiSafetyError([]);
}
throw new GeminiError('Gemini API call failed', error);
}
Retry wrapper
// src/services/ai/retry.ts
import { GeminiQuotaError, GeminiSafetyError, handleGeminiError } from './errors';
interface RetryOptions {
maxRetries?: number;
baseDelay?: number;
maxDelay?: number;
}
export async function withGeminiRetry<T>(
fn: () => Promise<T>,
options: RetryOptions = {}
): Promise<T> {
const { maxRetries = 3, baseDelay = 1000, maxDelay = 30000 } = options;
let lastError: Error | undefined;
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
return await fn();
} catch (error) {
try {
handleGeminiError(error);
} catch (geminiError) {
lastError = geminiError as Error;
// Non-retryable errors: fail fast instead of burning retries
if (
geminiError instanceof GeminiSafetyError ||
geminiError instanceof GeminiQuotaError
) {
throw geminiError;
}
// Out of attempts
if (attempt === maxRetries) {
throw geminiError;
}
// Exponential backoff, capped at maxDelay
const delay = Math.min(baseDelay * Math.pow(2, attempt), maxDelay);
console.log(`[Gemini] Retry ${attempt + 1}/${maxRetries}, waiting ${delay}ms`);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
}
throw lastError;
}
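Usage sketch, wrapping the earlier generateText helper with retries (the prompt is illustrative):
// Usage sketch
import { withGeminiRetry } from '@/services/ai/retry';
import { generateText } from '@/services/ai/generate';
const text = await withGeminiRetry(
() => generateText('List three risks of concentrated portfolios.'),
{ maxRetries: 2 }
);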
Performance Optimization
Request caching
// src/services/ai/cache.ts
const cache = new Map<string, { result: string; timestamp: number }>();
const CACHE_TTL = 5 * 60 * 1000; // 5 minutes
/**
* Generate with caching
*/
export async function generateWithCache(
prompt: string,
generator: (prompt: string) => Promise<string>
): Promise<string> {
const cacheKey = hashPrompt(prompt);
const cached = cache.get(cacheKey);
if (cached && Date.now() - cached.timestamp < CACHE_TTL) {
console.log('[Gemini] Cache hit');
return cached.result;
}
const result = await generator(prompt);
cache.set(cacheKey, { result, timestamp: Date.now() });
return result;
}
function hashPrompt(prompt: string): string {
// Simple string hash; production code should prefer a stronger algorithm (see the sketch below)
let hash = 0;
for (let i = 0; i < prompt.length; i++) {
const char = prompt.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash;
}
return hash.toString(36);
}
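For server-side code, a sketch of a stronger cache key using Node's built-in crypto module (assumes a Node.js runtime):
// Sketch: SHA-256 cache key
import { createHash } from 'node:crypto';
function hashPromptSha256(prompt: string): string {
return createHash('sha256').update(prompt).digest('hex');
}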
Concurrency control
// src/services/ai/concurrency.ts
class ConcurrencyLimiter {
private running = 0;
private queue: Array<() => void> = [];
constructor(private maxConcurrent: number) {}
async run<T>(fn: () => Promise<T>): Promise<T> {
if (this.running >= this.maxConcurrent) {
await new Promise<void>(resolve => this.queue.push(resolve));
}
this.running++;
try {
return await fn();
} finally {
this.running--;
const next = this.queue.shift();
if (next) next();
}
}
}
// Limit the number of concurrent requests
export const geminiLimiter = new ConcurrencyLimiter(5);
// Usage
const result = await geminiLimiter.run(() => generateText(prompt));
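The pieces above compose naturally; one possible stacking, using only the helpers defined earlier in this document:
// Sketch: cache -> limit -> retry, composed around generateText
import { generateWithCache } from '@/services/ai/cache';
import { geminiLimiter } from '@/services/ai/concurrency';
import { withGeminiRetry } from '@/services/ai/retry';
import { generateText } from '@/services/ai/generate';
export function generateReliably(prompt: string): Promise<string> {
return generateWithCache(prompt, (p) =>
geminiLimiter.run(() => withGeminiRetry(() => generateText(p)))
);
}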
Best Practices
✅ Recommended
| Practice | Why |
|---|---|
| Use streaming responses | Better UX for long-form generation |
| Structured prompts | Templates keep output consistent |
| Retry on error | Handles transient API failures |
| Request caching | Avoids duplicate calls for identical requests |
| Concurrency control | Avoids triggering rate limits |
| Safety filtering | Configure appropriate safety levels |
❌ Avoid
| Practice | Problem |
|---|---|
| Calling the API directly from the frontend | Exposes the API key |
| Ignoring error handling | Poor user experience |
| Unbounded concurrency | Triggers rate limits |
| Hard-coded prompts | Hard to maintain and optimize |
| Ignoring token limits | Failed or truncated requests |
References
references/prompt-templates.md: financial-analysis prompt templates, multi-turn conversation templates
Maintenance
- Sources: official Google Gemini API documentation, project experience
- Last Updated: 2025-01-01
- Known Limits:
- API rate limits vary by account tier and need tuning accordingly
- Multimodal features require the Pro model
- Long-context scenarios carry higher cost