Poe上所有的机器人 转 OpenAI API (新增非流式输出、支持NextChat等平台)

@0v0 大佬的代码上作了简单修改

  • 在“流式”的基础上添加了“非流式”,完全兼容了 OpenAI 格式的文本 API 调用
  • 已测试支持在Dify等平台使用,稳定且快速
  • 尚未支持 Vision 之类功能,有待大佬们接着糊一下
  • 接入 @0v0 大佬的更改,实现 NextChat 的兼容

依赖安装:pip install --break-system-packages openai fastapi uvicorn uvloop fastapi_poe

用法(也就是测试方法):

from openai import OpenAI

API_ENDPOINT = 'http://127.0.0.1:10000/v1'
API_KEY = 'your api key from Poe' # 从 https://poe.com/api_key 获取
MODEL_NAME = 'GPT-4o' # 按照 Poe 中的命名选择模型

client = OpenAI(base_url=API_ENDPOINT, api_key=API_KEY)

print("Testing stream completion")

completion = client.chat.completions.create(
    model=MODEL_NAME,
    messages=[{"role": "user", "content": "Say this is a test"}],
    stream=True,
)

for chunk in completion:
    if chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
      
print("\n\nTesting non-stream completion")
        
completion = client.chat.completions.create(
    model=MODEL_NAME,
    messages=[{"role": "user", "content": "Say this is a test"}],
    stream=False,
)
print(completion.choices[0].message.content)

代码如下:

# Poe Documentation: https://creator.poe.com/docs/server-bots-functional-guides
# OpenAI Documentation: https://platform.openai.com/docs/api-reference/chat/create

# Get Environment Variables
import os

# Fallback bot name used when the request body omits "model".
DEFAULT_MODEL = os.getenv("BOT", default="GPT-4o")
# TCP port the proxy listens on (see uvicorn.run at the bottom of the file).
LISTEN_PORT = int(os.getenv("PORT", default=10000))
# Poe server-bot API endpoint; overridable e.g. to point at a mirror/proxy.
BASE_URL = os.getenv("BASE", default="https://api.poe.com/bot/")

# Proxy Server
import json
from typing import Any, AsyncGenerator

import uvicorn
from fastapi import FastAPI, Header, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi_poe.client import get_bot_response
from fastapi_poe.types import ProtocolMessage

app = FastAPI()

# Permissive CORS so browser-based clients (e.g. NextChat) can pass the
# OPTIONS preflight; without this middleware the endpoint answers
# "405 Method Not Allowed" to preflight requests (reported in the thread).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


def openai_format_messages_to_poe_format(openai_format_messages: list) -> list:
    """Translate OpenAI-style chat messages into Poe ``ProtocolMessage``s.

    Poe names the assistant role "bot", so "assistant" is rewritten; all
    other roles (user/system) pass through lowercased and unchanged.
    """
    poe_format_messages: list = []
    for msg in openai_format_messages:
        # Convert 'assistant' to 'bot' or the Poe client rejects the role.
        role = msg["role"].lower().replace("assistant", "bot")
        poe_format_messages.append(
            ProtocolMessage(
                role=role,
                content=msg["content"],
                # Per-message temperature is not part of the OpenAI message
                # schema; 0.5 is the default when absent.
                temperature=msg.get("temperature", 0.5),
            )
        )
    return poe_format_messages


async def get_poe_bot_stream_partials(
    api_key: str, poe_format_messages: list, bot_name: str
) -> AsyncGenerator[str, None]:
    """Stream a Poe bot's reply, yielding only the text of each partial.

    Thin wrapper over ``fastapi_poe.client.get_bot_response`` that drops
    everything but ``partial.text`` from each streamed event.
    """
    async for partial in get_bot_response(
        messages=poe_format_messages,
        bot_name=bot_name,
        api_key=api_key,
        base_url=BASE_URL,
        # NOTE(review): passing skip_system_prompt=False explicitly — confirm
        # intended semantics against the fastapi_poe documentation.
        skip_system_prompt=False,
    ):
        yield partial.text


# by @OvO
async def adaptive_streamer(
    poe_bot_stream_partials_generator: AsyncGenerator[str, None],
    is_sse_enabled: bool = False,
) -> AsyncGenerator[str, Any]:
    """Wrap a stream of text partials as an OpenAI-compatible response body.

    With ``is_sse_enabled`` true, each partial becomes one
    ``chat.completion.chunk`` SSE event, terminated by a stop chunk and a
    ``data: [DONE]`` event.  Otherwise a single ``chat.completion`` JSON
    document is emitted incrementally: each partial is JSON-escaped and
    spliced inside the already-open ``"content":"…"`` string, so the
    concatenated output is valid JSON without buffering the whole reply.
    """

    STREAM_PREFIX = 'data:{"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"a","choices":[{"index":0,"delta":{"content":'
    STREAM_SUFFIX = "}}]}\n\n"

    ENDING_CHUNK = 'data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-4","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}\n\ndata: [DONE]\n\n'

    NON_STREAM_PREFIX = '{"id":"chatcmpl-123","object":"chat.completion","created":1694268190,"model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"'
    NON_STREAM_SUFFIX = '"},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0},"system_fingerprint":"abc"}\n\n'

    if is_sse_enabled:
        chat_prefix, chat_suffix = STREAM_PREFIX, STREAM_SUFFIX

        def _encode(text: str) -> str:
            # Full JSON string literal, quotes included: it completes the
            # open "content": field in STREAM_PREFIX.
            return json.dumps(text)
    else:
        chat_prefix, chat_suffix = "", ""

        def _encode(text: str) -> str:
            # Strip the surrounding quotes: only the escaped character data
            # is spliced into the already-open "content":"…" string.
            return json.dumps(text)[1:-1]

        yield NON_STREAM_PREFIX

    # BUGFIX: the original wrapped these yields in a bare `except: continue`.
    # json.dumps on a str cannot fail, but the bare except also swallowed
    # GeneratorExit on client disconnect, making the async generator ignore
    # the close request and crash with RuntimeError.  No guard is needed.
    async for partial in poe_bot_stream_partials_generator:
        yield chat_prefix
        yield _encode(partial)
        yield chat_suffix

    if is_sse_enabled:
        yield ENDING_CHUNK
    else:
        yield NON_STREAM_SUFFIX


@app.post("/v1/chat/completions")
async def chat_completions(
    request: Request, authorization: str = Header(None)
) -> StreamingResponse:
    """Proxy an OpenAI ``/v1/chat/completions`` request to a Poe bot.

    The Poe API key is read from ``Authorization: Bearer <key>``; the request
    body's ``model`` selects the Poe bot and ``stream`` selects SSE vs. a
    single JSON document.

    Raises:
        HTTPException: 401 when the Authorization header is missing or not
            in the expected ``Bearer <key>`` shape.
    """
    # BUGFIX: the original `authorization.split(" ")[1]` raised
    # AttributeError (header absent) or IndexError (no space) -> HTTP 500.
    # Reject malformed auth cleanly instead.
    if authorization is None or " " not in authorization:
        raise HTTPException(
            status_code=401,
            detail="Expected 'Authorization: Bearer <Poe API key>' header",
        )
    # maxsplit=1 keeps keys that happen to contain spaces intact.
    api_key = authorization.split(" ", 1)[1]

    body = await request.json()

    # Extract bot_name (model), messages, and transfer mode from the body.
    bot_name = body.get("model", DEFAULT_MODEL)
    openai_format_messages = body.get("messages", [])
    is_stream = body.get("stream", False)

    # Convert OpenAI formatted messages to POE formatted messages.
    poe_format_messages = openai_format_messages_to_poe_format(openai_format_messages)

    # Lazily stream the Poe bot response through the OpenAI adapter.
    poe_bot_stream_partials_generator = get_poe_bot_stream_partials(
        api_key, poe_format_messages, bot_name
    )

    return StreamingResponse(
        adaptive_streamer(poe_bot_stream_partials_generator, is_stream),
        media_type=(
            ("text/event-stream" if is_stream else "application/json")
            + ";charset=UTF-8"
        ),
    )


if __name__ == "__main__":
    # uvloop is an optional event-loop speed-up; silently fall back to the
    # default asyncio loop when it is not installed.
    try:
        import uvloop

        uvloop.install()
    except ImportError:
        pass
    uvicorn.run(app, host="0.0.0.0", port=LISTEN_PORT)
44 个赞

我就知道关注对人了!

5 个赞

感谢分享
就是我没订阅

2 个赞

顶,支持大佬,另外需要poe拼车的可以dd

2 个赞

蹲一个低价Poe指路 :face_holding_back_tears:

1 个赞

哈哈哈,可以看看我,780年会员

大佬

1 个赞

能使用在chatgpt next web吗?

2 个赞

测了一下,还得糊几行,明天搞一下

"OPTIONS /v1/chat/completions HTTP/1.1" 405 Method Not Allowed
1 个赞
from fastapi.middleware.cors import CORSMiddleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"], # Allow all origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

稍微配置下,加在FastAPI后面就行啦

2 个赞

流和非流有更简单且高性能的处理。

STREAM_PREFIX = 'data:{"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"a","choices":[{"index":0,"delta":{"content":'
STREAM_SUFFIX = '}}]}\n\n'

ENDING_CHUNK = 'data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-4","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}\n\ndata: [DONE]\n\n'

NON_STREAM_PREFIX = '{"id":"chatcmpl-123","object":"chat.completion","created":1694268190,"model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"'
NON_STREAM_SUFFIX = '"},"logprobs":null,"finish_reason":"stop"}],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0},"system_fingerprint":"abc"}\n\n'

# 主函数
@app.post("/v1/chat/completions")
async def chat_completions(request: Request) -> StreamingResponse:
    request_body = await request.json()
    is_sse_enabled = request_body.get("stream", True)

    '''
    other code here
    '''

    return StreamingResponse(
        adaptive_streamer(response, is_sse_enabled),
        status_code=response.status_code,
        media_type=\
            (("text/event-stream" if is_sse_enabled else "application/json") + ";charset=UTF-8"),
        )


# 无状态的流式处理器(兼容非流;低内存占用)
async def adaptive_streamer(response, is_sse_enabled=True) -> AsyncGenerator[str, Any]:
    if is_sse_enabled:
        chat_prefix, chat_suffix = STREAM_PREFIX, STREAM_SUFFIX
        _json_dumps = lambda data: json.dumps(data)
    else:
        chat_prefix, chat_suffix = "", ""
        _json_dumps = lambda data: json.dumps(data)[1:-1]
        yield NON_STREAM_PREFIX

    async for partial in get_bot_response(
        messages=poe_format_messages,
        bot_name=bot_name,
        api_key=api_key,
        base_url=BASE_URL,
        skip_system_prompt=False,
    ):
        try:
            yield chat_prefix
            yield _json_dumps(partial.text)
            yield chat_suffix
        except:
            continue
    
    if is_sse_enabled:
        yield ENDING_CHUNK
    else:
        yield NON_STREAM_SUFFIX
    
    return
6 个赞

非常感谢

2 个赞

感谢大佬分享

2 个赞

太强了!!

2 个赞

大佬牛逼,学习了,0V0大佬也牛逼,流浪的心大佬也牛逼,大帅哥大佬也牛逼,三lue大佬也牛逼,这个帖子里全是牛逼大佬啊

2 个赞

大佬牛逼,学习了,0V0大佬也牛逼,流浪的心大佬也牛逼,大帅哥大佬也牛逼,三lue大佬也牛逼,linzong也牛逼 这个帖子里全是牛逼大佬啊

2 个赞

代码党福音

2 个赞

大佬牛逼,学习了,0V0大佬也牛逼,流浪的心大佬也牛逼,大帅哥大佬也牛逼,三lue大佬也牛逼,linzong也牛逼 这个帖子里全是牛逼大佬啊

3 个赞

牛哇牛哇,加上之后居然把 NextChat 连通了

2 个赞

现在可以了,刚刚测了下

1 个赞