先放链接:Join Codebuff with a referral bonus! | Codebuff AFF (这个链接你我各自获取500积分)
GitHub登录,然后获取 API Key,用下面的代码自建代理,配合你的 Key 就可以用了。
Cloudflare Workers
/**
 * Freebuff OpenAI-compatible proxy for Cloudflare Workers.
 * Translates /v1/models and /v1/chat/completions requests into calls
 * against the Codebuff agent-run API.
 */
const API_BASE = 'https://www.codebuff.com';
// Maps client-facing OpenAI model ids to Codebuff agent ids.
// Unknown models fall back to 'base2-free' in handleChatCompletion.
const MODEL_TO_AGENT = {
'minimax/minimax-m2.7': 'base2-free',
'z-ai/glm-5.1': 'base2-free',
'google/gemini-2.5-flash-lite': 'file-picker',
'google/gemini-3.1-flash-lite-preview': 'file-picker-max',
'google/gemini-3.1-pro-preview': 'thinker-with-files-gemini',
};
// Model assumed when the request body omits "model".
const defaultModel = 'minimax/minimax-m2.7';
// Cloudflare Workers are stateless, but globals survive for the lifetime of
// the same isolate, so these act as a best-effort cache of the last agent run.
let cachedRunId = null;
let cachedAgentId = null;
// CORS headers attached to every response so browser-based clients can call the proxy.
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
};
/**
 * Worker entry point: routes OpenAI-compatible endpoints to their handlers.
 *
 * Routes:
 *   OPTIONS *                     -> CORS preflight response
 *   GET     /v1/models            -> handleModels()
 *   POST    /v1/chat/completions  -> handleChatCompletion()
 *   GET     / or /health          -> JSON status (includes cache state)
 *   anything else                 -> 404
 */
export default {
  async fetch(request, env, ctx) {
    // 1. Answer CORS preflight requests immediately.
    if (request.method === 'OPTIONS') {
      return new Response(null, { headers: corsHeaders });
    }
    const url = new URL(request.url);
    try {
      // 2. Route dispatch.
      if (url.pathname === '/v1/models' && request.method === 'GET') {
        return handleModels();
      }
      if (url.pathname === '/v1/chat/completions' && request.method === 'POST') {
        return await handleChatCompletion(request, env);
      }
      if (url.pathname === '/' || url.pathname === '/health') {
        return new Response(JSON.stringify({ status: 'ok', cachedRunId, cachedAgentId }), {
          headers: { 'Content-Type': 'application/json', ...corsHeaders },
        });
      }
      return errorResponse(404, 'Not found');
    } catch (e) {
      // Guard against non-Error throws (strings, plain objects), which would
      // otherwise produce a body of {"error":{"message":undefined}}.
      return errorResponse(500, e instanceof Error ? e.message : String(e));
    }
  },
};
/**
 * GET /v1/models — list every supported model id in the OpenAI list format.
 * @returns {Response} JSON body `{ object: 'list', data: [...] }` with CORS headers.
 */
function handleModels() {
  const data = [];
  for (const id of Object.keys(MODEL_TO_AGENT)) {
    data.push({
      id,
      object: 'model',
      created: 1700000000,
      owned_by: 'freebuff',
    });
  }
  const payload = { object: 'list', data };
  return new Response(JSON.stringify(payload), {
    headers: { 'Content-Type': 'application/json', ...corsHeaders },
  });
}
/**
 * Build a JSON error Response in OpenAI's `{ error: { message } }` shape.
 * @param {number} status - HTTP status code for the response.
 * @param {string} message - Human-readable error description.
 * @returns {Response} JSON response carrying CORS headers.
 */
function errorResponse(status, message) {
  const payload = { error: { message } };
  const headers = { 'Content-Type': 'application/json', ...corsHeaders };
  return new Response(JSON.stringify(payload), { status, headers });
}
// Core request handler for POST /v1/chat/completions.
/**
 * Translate an OpenAI chat-completion request into a Codebuff upstream call,
 * retrying once with a fresh agent run if the cached run id has expired.
 *
 * @param {Request} request - Incoming OpenAI-style request.
 * @param {object} env - Worker environment (may carry FREEBUFF_TOKEN).
 * @returns {Promise<Response>} Streaming SSE or JSON chat.completion response.
 */
async function handleChatCompletion(request, env) {
  // Token precedence: Authorization header (sent by clients such as
  // NextChat/OpenCat), falling back to the FREEBUFF_TOKEN env variable.
  // Case-insensitive prefix strip so "bearer xxx" also works.
  const authHeader = request.headers.get('Authorization');
  const token = authHeader ? authHeader.replace(/^Bearer\s+/i, '').trim() : env.FREEBUFF_TOKEN;
  if (!token) {
    return errorResponse(401, 'Unauthorized: Please provide Authorization header or FREEBUFF_TOKEN environment variable');
  }

  let body;
  try {
    body = await request.json();
  } catch {
    // Malformed JSON is a client error, not a 500 from the generic catch-all.
    return errorResponse(400, 'Invalid JSON request body');
  }

  const model = body.model || defaultModel;
  const agentId = MODEL_TO_AGENT[model] || 'base2-free';

  // Obtain (or reuse) an agent run id for this agent.
  let runId = await getOrCreateAgentRun(token, agentId);
  const freebuffBody = openaiToFreebuffBody(body, runId);

  // Single definition of the upstream call (previously duplicated verbatim
  // for the retry path).
  const callUpstream = (payload) =>
    fetch(`${API_BASE}/api/v1/chat/completions`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`,
        'Accept': body.stream ? 'text/event-stream' : 'application/json',
        'User-Agent': 'freebuff-cf-worker/1.0',
      },
      body: JSON.stringify(payload),
    });

  // First attempt.
  let llmRes = await callUpstream(freebuffBody);

  // A 400/404 usually means the cached run id went stale: recreate once and retry.
  if (llmRes.status === 400 || llmRes.status === 404) {
    cachedRunId = null;
    runId = await getOrCreateAgentRun(token, agentId);
    freebuffBody.codebuff_metadata.run_id = runId;
    llmRes = await callUpstream(freebuffBody);
  }

  if (!llmRes.ok) {
    const errorText = await llmRes.text();
    return errorResponse(llmRes.status, `Upstream API Error: ${errorText}`);
  }

  // Streaming: re-emit upstream SSE as OpenAI chat.completion.chunk events.
  if (body.stream) {
    const { readable, writable } = new TransformStream();
    // Intentionally not awaited: the conversion pumps while the client reads.
    streamToOpenAIFormat(llmRes.body, writable, model);
    return new Response(readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        ...corsHeaders,
      },
    });
  }

  // Non-streaming: reshape the upstream JSON into an OpenAI chat.completion.
  const llmData = await llmRes.json();
  const choice = llmData.choices?.[0] || {};
  const message = choice.message || {};
  const openaiResponse = {
    id: `freebuff-${runId}`,
    object: 'chat.completion',
    created: Math.floor(Date.now() / 1000),
    model: model,
    choices: [{
      index: 0,
      message: {
        role: 'assistant',
        content: message.content || '',
        ...(message.tool_calls && { tool_calls: message.tool_calls }),
      },
      finish_reason: choice.finish_reason || 'stop',
    }],
    usage: llmData.usage || { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
  };
  return new Response(JSON.stringify(openaiResponse), {
    headers: { 'Content-Type': 'application/json', ...corsHeaders },
  });
}
// Streaming conversion: upstream SSE -> OpenAI chat.completion.chunk events.
/**
 * Pump the upstream SSE body, converting each delta into an OpenAI-format
 * chunk, then emit a single finish chunk followed by exactly one [DONE].
 *
 * Fixes over the previous version:
 *  - an upstream `data: [DONE]` is no longer forwarded, which previously
 *    produced chunks *after* [DONE] plus a duplicate [DONE] terminator;
 *  - trailing buffered data without a final newline is now processed;
 *  - read/write errors abort the writable instead of surfacing as an
 *    unhandled rejection with a misleading clean close.
 *
 * @param {ReadableStream} readableStream - Upstream SSE body.
 * @param {WritableStream} writableStream - Client-facing side of a TransformStream.
 * @param {string} model - Model id echoed into every chunk.
 */
async function streamToOpenAIFormat(readableStream, writableStream, model) {
  const reader = readableStream.getReader();
  const writer = writableStream.getWriter();
  const decoder = new TextDecoder();
  const encoder = new TextEncoder();
  let buffer = '';
  let finishReason = 'stop';
  const responseId = `freebuff-${Date.now()}`;

  // Parse one "data: ..." SSE line and forward it as an OpenAI chunk when it
  // carries delta content. Upstream [DONE] markers are swallowed here; the
  // single terminator is emitted after the final finish_reason chunk.
  const processLine = async (line) => {
    const trimmed = line.trim();
    if (!trimmed || !trimmed.startsWith('data: ')) return;
    const jsonStr = trimmed.slice(6).trim();
    if (jsonStr === '[DONE]') return;
    try {
      const parsed = JSON.parse(jsonStr);
      const delta = parsed.choices?.[0]?.delta || {};
      const choiceFinishReason = parsed.choices?.[0]?.finish_reason;
      if (choiceFinishReason) finishReason = choiceFinishReason;
      const deltaObj = {};
      if (delta.content) deltaObj.content = delta.content;
      if (delta.tool_calls) deltaObj.tool_calls = delta.tool_calls;
      if (delta.role) deltaObj.role = delta.role;
      if (Object.keys(deltaObj).length > 0) {
        const openaiChunk = {
          id: responseId,
          object: 'chat.completion.chunk',
          created: Math.floor(Date.now() / 1000),
          model: model,
          choices: [{ index: 0, delta: deltaObj, finish_reason: null }],
        };
        await writer.write(encoder.encode(`data: ${JSON.stringify(openaiChunk)}\n\n`));
      }
    } catch (e) {
      // Ignore malformed JSON lines (keep-alives, partial frames).
    }
  };

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() || '';
      for (const line of lines) {
        await processLine(line);
      }
    }
    // Flush the decoder and any final line that lacked a trailing newline.
    buffer += decoder.decode();
    if (buffer) {
      await processLine(buffer);
    }
    // Emit the final finish chunk, then exactly one [DONE] terminator.
    const finalChunk = {
      id: responseId,
      object: 'chat.completion.chunk',
      created: Math.floor(Date.now() / 1000),
      model: model,
      choices: [{ index: 0, delta: {}, finish_reason: finishReason }],
    };
    await writer.write(encoder.encode(`data: ${JSON.stringify(finalChunk)}\n\n`));
    await writer.write(encoder.encode('data: [DONE]\n\n'));
    await writer.close();
  } catch (e) {
    // Propagate the failure to the client instead of a clean close.
    await writer.abort(e).catch(() => {});
  }
}
// Obtain an agent-run id, reusing the isolate-level cache when possible.
/**
 * Return the cached run id for `agentId`, or start a new Codebuff agent run.
 * Switching to a different agent invalidates the cached run.
 *
 * @param {string} token - Bearer token for the Codebuff API.
 * @param {string} agentId - Codebuff agent identifier.
 * @returns {Promise<string>} The run id.
 * @throws {Error} When the upstream call fails or returns no runId.
 */
async function getOrCreateAgentRun(token, agentId) {
  if (cachedAgentId !== agentId) {
    cachedAgentId = agentId;
    cachedRunId = null;
  }
  if (cachedRunId) {
    return cachedRunId;
  }
  const response = await fetch(`${API_BASE}/api/v1/agent-runs`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${token}`,
    },
    body: JSON.stringify({ action: 'START', agentId }),
  });
  if (!response.ok) {
    const detail = await response.text();
    throw new Error(`Failed to create Agent Run: ${detail}`);
  }
  const payload = await response.json();
  if (!payload.runId) {
    throw new Error(`Failed to create Agent Run: No runId in response`);
  }
  cachedRunId = payload.runId;
  return cachedRunId;
}
/**
 * Wrap an OpenAI-style request body in the Codebuff metadata envelope.
 * The original body is not mutated; a shallow copy is returned with a
 * `codebuff_metadata` field carrying the run id and a random client id.
 *
 * @param {object} openaiBody - Parsed OpenAI chat-completion request.
 * @param {string} runId - Agent run id to attach.
 * @returns {object} Upstream-ready request body.
 */
function openaiToFreebuffBody(openaiBody, runId) {
  const randomSuffix = Math.random().toString(36).substring(2, 10);
  const codebuff_metadata = {
    run_id: runId,
    client_id: `freebuff-cf-${randomSuffix}`,
    cost_mode: 'free',
  };
  return Object.assign({}, openaiBody, { codebuff_metadata });
}