太酷了!
1 Like
大佬牛的
厉害呀,支持~~
哇,终于等到你,还好我没放弃~
这速度快的离谱,感谢大佬
佬太强了!
强啊在!
强啊在!
2API大王!!king!
让gpt翻译成 cloudflare 的 worker了,部署简单点
// Upstream chat endpoint this worker proxies all completions to.
const UNLIMITED_AI_URL = "https://app.unlimitedai.chat/api/chat";
// Maximum number of retry attempts for upstream requests (see handleChatCompletions's retryCount).
const MAX_RETRIES = 3;
// 将OpenAI消息转换为UnlimitedAI消息
function convertOpenAIToUnlimitedMessages(messages) {
const systemMessages = messages.filter((msg) => msg.role === "system");
const nonSystemMessages = messages.filter((msg) => msg.role !== "system");
const result = [];
if (systemMessages.length > 0) {
const systemContent = systemMessages.map((msg) => msg.content).join("\n\n");
result.push({
id: crypto.randomUUID(),
createdAt: new Date().toISOString(),
role: "user",
content: systemContent,
parts: [{ type: "text", text: systemContent }],
});
result.push({
id: crypto.randomUUID(),
createdAt: new Date().toISOString(),
role: "assistant",
content: "Ok, I got it, I'll remember it and do it.",
parts: [{ type: "text", text: "Ok, I got it, I'll remember it and do it." }],
});
}
nonSystemMessages.forEach((msg) => {
result.push({
id: crypto.randomUUID(),
createdAt: new Date().toISOString(),
role: msg.role,
content: msg.content,
parts: [{ type: "text", text: msg.content }],
});
});
return result;
}
// Convert an OpenAI-compatible request body into the UnlimitedAI request shape.
// Missing/falsy id and model fall back to a random UUID and the default model.
function convertOpenAIToUnlimitedBody(openaiBody) {
  const chatId = openaiBody.id || crypto.randomUUID();
  const selectedChatModel = openaiBody.model || "chat-model-reasoning";
  const messages = convertOpenAIToUnlimitedMessages(openaiBody.messages);
  return { id: chatId, messages, selectedChatModel };
}
// Transform the upstream "key:value"-per-line stream into OpenAI-style SSE
// chat.completion.chunk events.
//
// Upstream line keys (as handled by this code):
//   f   - JSON metadata object carrying the message id
//   g   - reasoning text, JSON-encoded string
//   0   - answer text, JSON-encoded string
//   e/d - end-of-message markers
//
// reader: a ReadableStreamDefaultReader over the upstream byte stream.
// Yields SSE-framed strings ("data: {...}\n\n"), ending with "data: [DONE]\n\n".
async function* transformStreamResponse(reader) {
  let buffer = "";
  const decoder = new TextDecoder();
  let messageId = "";
  let firstChunk = true;

  // Decode a JSON-quoted payload. JSON.parse handles every escape
  // (\", \\, \n, \uXXXX); the old strip-quotes + replace(/\\n/) approach
  // mangled escaped quotes and backslashes. Fall back to the lenient legacy
  // handling if the payload is not valid JSON.
  const decodeText = (val) => {
    if (val.startsWith('"') && val.endsWith('"')) {
      try {
        return JSON.parse(val);
      } catch {
        return val.slice(1, -1).replace(/\\n/g, "\n");
      }
    }
    return val.replace(/\\n/g, "\n");
  };

  // Wrap a delta object in an OpenAI chat.completion.chunk envelope.
  const makeChunk = (delta) => ({
    id: messageId || crypto.randomUUID(),
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: "chat-model-reasoning",
    choices: [{ delta, index: 0, finish_reason: null }],
  });

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        yield "data: [DONE]\n\n";
        break;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() || ""; // keep the partial trailing line for the next read
      for (const line of lines) {
        if (!line.trim()) continue;
        const idx = line.indexOf(":");
        if (idx === -1) continue;
        const key = line.slice(0, idx);
        const val = line.slice(idx + 1).trim();
        if (key === "f") {
          try {
            messageId = JSON.parse(val).messageId || "";
          } catch (error) {
            console.error("Error parsing messageId:", error);
          }
        } else if (key === "g" || key === "0") {
          const text = decodeText(val);
          const delta = firstChunk ? { role: "assistant" } : {};
          // "g" lines are chain-of-thought, "0" lines are the visible answer.
          if (key === "g") {
            delta.reasoning_content = text;
          } else {
            delta.content = text;
          }
          yield `data: ${JSON.stringify(makeChunk(delta))}\n\n`;
          // Fix: role is emitted exactly once, on the very first delta of any
          // kind (previously it was repeated on every reasoning delta and
          // never sent at all for reasoning-free responses).
          firstChunk = false;
        } else if (key === "e" || key === "d") {
          yield "data: [DONE]\n\n";
        }
      }
    }
  } catch (error) {
    console.error("Stream transformation error:", error);
    yield "data: [DONE]\n\n";
  } finally {
    reader.releaseLock();
  }
}
// 转换非流式响应
async function transformNonStreamResponse(text) {
const lines = text.split("\n");
const data = {};
for (const line of lines) {
if (!line.trim()) continue;
const idx = line.indexOf(":");
if (idx === -1) continue;
const key = line.slice(0, idx);
let val = line.slice(idx + 1).trim();
try {
val = JSON.parse(val);
} catch (error) {
// 如果解析失败,保持原始字符串
}
data[key] = val;
}
const content = data["0"];
const reasoning_content = data.g;
return {
id: data.f?.messageId || crypto.randomUUID(),
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model: "chat-model-reasoning",
choices: [
{
index: 0,
message: {
role: "assistant",
reasoning_content,
content,
},
finish_reason: "stop",
},
],
usage: {
prompt_tokens: 0,
completion_tokens: 0,
total_tokens: 0,
},
};
}
// Proxy an OpenAI-style /v1/chat/completions request to UnlimitedAI and adapt
// the response back to the OpenAI format (SSE stream or single JSON body).
//
// openaiBody - parsed OpenAI-compatible request body
// isStream   - whether the client requested a streaming response
// retryCount - internal recursion counter; fix: it is now actually used to
//              retry transient upstream failures up to MAX_RETRIES times
//              (previously MAX_RETRIES/retryCount were dead).
// Returns a Response; errors are reported as a 500 JSON body, never thrown.
async function handleChatCompletions(openaiBody, isStream, retryCount = 0) {
  try {
    const unlimitedBody = convertOpenAIToUnlimitedBody(openaiBody);
    const upstreamRes = await fetch(UNLIMITED_AI_URL, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify(unlimitedBody),
    });
    if (!upstreamRes.ok) {
      // Retry only transient failures (server errors / rate limiting);
      // client errors (4xx other than 429) will not improve on retry.
      const transient = upstreamRes.status >= 500 || upstreamRes.status === 429;
      if (transient && retryCount < MAX_RETRIES) {
        return handleChatCompletions(openaiBody, isStream, retryCount + 1);
      }
      throw new Error(`Chat completion failed: ${upstreamRes.status}`);
    }
    if (!isStream) {
      const text = await upstreamRes.text();
      const transformedResponse = await transformNonStreamResponse(text);
      return new Response(JSON.stringify(transformedResponse), {
        status: 200,
        headers: {
          "Content-Type": "application/json",
          "Access-Control-Allow-Origin": "*",
        },
      });
    }
    const reader = upstreamRes.body?.getReader();
    if (!reader) {
      throw new Error("Failed to get response body reader");
    }
    const encoder = new TextEncoder(); // hoisted: one encoder for the whole stream
    const transformedStream = new ReadableStream({
      async start(controller) {
        try {
          for await (const chunk of transformStreamResponse(reader)) {
            controller.enqueue(encoder.encode(chunk));
          }
          controller.close();
        } catch (error) {
          console.error("Stream transformation error:", error);
          controller.error(error);
        }
      },
    });
    return new Response(transformedStream, {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Access-Control-Allow-Origin": "*",
      },
    });
  } catch (error) {
    console.error("Request handling error:", error);
    return new Response(
      JSON.stringify({ error: "Internal server error", message: error.message }),
      {
        status: 500,
        headers: {
          "Content-Type": "application/json",
          "Access-Control-Allow-Origin": "*",
        }
      }
    );
  }
}
// Worker entry point (Cloudflare Workers service-worker syntax): routes
// incoming requests to the OpenAI-compatible endpoints.
addEventListener("fetch", (event) => {
  event.respondWith(
    (async () => {
      const req = event.request;
      const path = new URL(req.url).pathname;

      // Answer CORS preflight requests immediately.
      if (req.method === "OPTIONS") {
        return new Response(null, {
          status: 204,
          headers: {
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
            "Access-Control-Allow-Headers": "Content-Type, Authorization",
            "Access-Control-Max-Age": "86400",
          },
        });
      }
      console.log(path);
      try {
        // Chat completions: forward to the upstream proxy.
        if (path === "/v1/chat/completions" && req.method === "POST") {
          const openaiBody = await req.json();
          return await handleChatCompletions(openaiBody, openaiBody.stream === true);
        }
        // Model listing: a single static OpenAI-style model entry.
        if (path === "/v1/models" && req.method === "GET") {
          const model = {
            id: "chat-model-reasoning",
            object: "model",
            created: 0,
            owned_by: "unlimitedai",
            permission: [
              {
                id: "modelperm-chat-model-reasoning",
                object: "model_permission",
                created: 0,
                allow_create_engine: false,
                allow_sampling: true,
                allow_logprobs: false,
                allow_search_indices: false,
                allow_view: true,
                allow_fine_tuning: false,
                organization: "*",
                group: null,
                is_blocking: false,
              },
            ],
            root: "chat-model-reasoning",
            parent: null,
          };
          return new Response(JSON.stringify({ object: "list", data: [model] }), {
            status: 200,
            headers: {
              "Content-Type": "application/json",
              "Access-Control-Allow-Origin": "*",
            },
          });
        }
        // Anything else is unsupported.
        return new Response(
          JSON.stringify({
            error: "Not found",
            message: "Endpoint not supported",
          }),
          {
            status: 404,
            headers: {
              "Content-Type": "application/json",
              "Access-Control-Allow-Origin": "*",
            },
          }
        );
      } catch (error) {
        console.error("Request handling error:", error);
        return new Response(
          JSON.stringify({
            error: "Internal server error",
            message: error.message,
          }),
          {
            status: 500,
            headers: {
              "Content-Type": "application/json",
              "Access-Control-Allow-Origin": "*",
            }
          }
        );
      }
    })()
  );
});
4 Likes
第一个文档链接有说明。
这个是 API 的 BASE_URL,未设置密钥又或者密钥可以随便填写。
1 Like
就喜欢这种简单粗暴的
很黄很暴力啊
Nb 速度飞快
好像 Cherry 不能用,是不是不支持流式输出的原因?
应该是Grok3-mini
感谢大佬
N佬牛逼!
大佬太强了,我立刻使用!