// Supports streaming and the OpenAI request format: call the provider's API directly, or use the request below.
// HuggingChat or https://playground.ai.cloudflare.com/ can be used the same way.
import fetch from 'node-fetch';
/**
 * Call the Fireworks AI chat-completions endpoint (OpenAI-compatible).
 *
 * @param {string} model - Model identifier, e.g. "accounts/fireworks/models/llama-v3p1-405b-instruct".
 * @param {Array<{role: string, content: string}>} messages - Chat messages in OpenAI format.
 * @param {string} authorization - Bearer token (obtained from browser DevTools after login).
 * @param {boolean} [stream=false] - When true, consume the SSE response body chunk by
 *   chunk (echoed to stdout) and return the concatenated raw text; otherwise parse JSON.
 * @returns {Promise<object|string|null>} Parsed JSON body (non-stream), raw SSE text
 *   (stream), or null on HTTP/network failure.
 */
async function getChatCompletion(model, messages, authorization, stream = false) {
  const url = "https://api.fireworks.ai/inference/v1/chat/completions";
  const headers = {
    "accept": "text/event-stream",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "authorization": `Bearer ${authorization}`,
    "content-type": "application/json",
    "priority": "u=1, i",
    "sec-ch-ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Microsoft Edge\";v=\"126\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-site"
  };
  const body = {
    model,
    messages,
    stream,
    max_tokens: 16384,
    top_p: 1,
    top_k: 40,
    presence_penalty: 0,
    frequency_penalty: 0,
    temperature: 0.6,
    n: 1,
    logprobs: 1
  };
  try {
    const response = await fetch(url, {
      method: 'POST',
      headers,
      body: JSON.stringify(body),
      referrerPolicy: 'no-referrer'
    });
    if (!response.ok) {
      // BUG FIX: surface the failure instead of returning null with no diagnostics.
      console.error(`Request failed: ${response.status} ${response.statusText}`);
      return null;
    }
    if (stream) {
      // BUG FIX: the original called response.json() even when stream=true, which
      // throws on an SSE body. node-fetch's response.body is an async-iterable
      // Node stream, so consume it chunk by chunk instead.
      let text = '';
      for await (const chunk of response.body) {
        const part = chunk.toString();
        process.stdout.write(part);
        text += part;
      }
      return text;
    }
    const data = await response.json();
    console.log(data);
    // BUG FIX: the original logged the payload but never returned it, so callers
    // always received undefined even on success.
    return data;
  } catch (error) {
    console.error(error);
    return null;
  }
}
// --- Example usage ---
const authorization = 'xxx'; // obtain from browser DevTools (F12) after logging in
const model = "accounts/fireworks/models/llama-v3p1-405b-instruct";
const messages = [{ role: "user", content: "你好" }];
// BUG FIX: the original left the async call floating (unobserved promise).
// The file is an ES module (it uses `import`), so top-level await is available.
await getChatCompletion(model, messages, authorization);