// cursor2a / app.js
const express = require('express');
const { v4: uuidv4 } = require('uuid');
const zlib = require('zlib');
const protobuf = require('protobufjs');
// Build the proto definition string
const protoDefinition = `
syntax = "proto3";

message ChatMessage {
  message FileContent {
    message Position {
      int32 line = 1;
      int32 column = 2;
    }
    message Range {
      Position start = 1;
      Position end = 2;
    }
    string filename = 1;
    string content = 2;
    Position position = 3;
    string language = 5;
    Range range = 6;
    int32 length = 8;
    int32 type = 9;
    int32 error_code = 11;
  }

  message UserMessage {
    string content = 1;
    int32 role = 2;
    string message_id = 13;
  }

  message Instructions {
    string instruction = 1;
  }

  message Model {
    string name = 1;
    string empty = 4;
  }

  repeated UserMessage messages = 2;
  Instructions instructions = 4;
  string projectPath = 5;
  Model model = 7;
  string requestId = 9;
  string summary = 11;
  string conversationId = 15;
}

message ResMessage {
  string msg = 1;
}
`;
// Parse the proto definition with protobuf.parse
const root = protobuf.parse(protoDefinition).root;
// Look up the message types
const ChatMessage = root.lookupType("ChatMessage");
const ResMessage = root.lookupType("ResMessage");
// Create the $root object
const $root = {
  ChatMessage,
  ResMessage
};
// Regex matching responses that echo the system/user prompt markers
const regex = /<\|BEGIN_SYSTEM\|>.*?<\|END_SYSTEM\|>.*?<\|BEGIN_USER\|>.*?<\|END_USER\|>/s;
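
// Encodes the chat messages as a length-prefixed protobuf frame: a 5-byte
// big-endian length header (10 hex characters) followed by the serialized
// ChatMessage. This appears to correspond to the connect+proto envelope the
// upstream endpoint expects; the framing details are inferred from this code
// rather than from an official specification.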
async function stringToHex(messages, modelName) {
  const formattedMessages = messages.map((msg) => ({
    ...msg,
    role: msg.role === 'user' ? 1 : 2,
    message_id: uuidv4(),
  }));

  const message = {
    messages: formattedMessages,
    instructions: {
      instruction: 'Always respond in 中文',
    },
    projectPath: '/path/to/project',
    model: {
      name: modelName,
      empty: '',
    },
    requestId: uuidv4(),
    summary: '',
    conversationId: uuidv4(),
  };

  const errMsg = $root.ChatMessage.verify(message);
  if (errMsg) throw Error(errMsg);

  const messageInstance = $root.ChatMessage.create(message);
  const buffer = $root.ChatMessage.encode(messageInstance).finish();

  const hexString = (buffer.length.toString(16).padStart(10, '0') + buffer.toString('hex')).toUpperCase();
  return Buffer.from(hexString, 'hex');
}
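
// Decodes a response chunk using the same 5-byte length-prefixed framing:
// each frame is protobuf-decoded as a ResMessage and the message texts are
// concatenated. If no frame can be parsed, the chunk is assumed to be a
// gzipped payload and is handed to gunzip() instead.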
async function chunkToUtf8String(chunk) {
  try {
    let hex = Buffer.from(chunk).toString('hex');
    let offset = 0;
    let results = [];

    while (offset < hex.length) {
      if (offset + 10 > hex.length) break;
      const dataLength = parseInt(hex.slice(offset, offset + 10), 16);
      offset += 10;

      if (offset + dataLength * 2 > hex.length) break;
      const messageHex = hex.slice(offset, offset + dataLength * 2);
      offset += dataLength * 2;

      const messageBuffer = Buffer.from(messageHex, 'hex');
      const message = $root.ResMessage.decode(messageBuffer);
      results.push(message.msg);
    }

    if (results.length === 0) {
      return gunzip(chunk);
    }
    return results.join('');
  } catch (err) {
    return gunzip(chunk);
  }
}
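
// Decompresses a gzipped chunk after skipping the 5-byte frame header.
// Decompressed text that still contains the <|BEGIN_SYSTEM|>/<|BEGIN_USER|>
// markers is treated as an echoed prompt and resolved as an empty string.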
function gunzip(chunk) {
  return new Promise((resolve, reject) => {
    zlib.gunzip(chunk.slice(5), (err, decompressed) => {
      if (err) {
        resolve('');
      } else {
        const text = decompressed.toString('utf-8');
        if (regex.test(text)) {
          resolve('');
        } else {
          resolve(text);
        }
      }
    });
  });
}
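
// Returns a random string of `size` characters drawn from a dictionary:
// 'alphabet' (letters only), 'max' (digits, letters, '_' and '-'),
// digits by default, or a caller-supplied customDict.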
function getRandomIDPro({ size, dictType, customDict }) {
  let random = '';
  if (!customDict) {
    switch (dictType) {
      case 'alphabet':
        customDict = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
        break;
      case 'max':
        customDict = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-';
        break;
      default:
        customDict = '0123456789';
    }
  }
  for (; size--; ) random += customDict[(Math.random() * customDict.length) | 0];
  return random;
}
// Express application setup
const app = express();
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.get('/ai/v1/models', async (req, res) => {
  const models_list = {
    object: 'list',
    data: [
      { id: 'cursor-small', object: 'model', created: 1610076050, owned_by: 'cursor' },
      { id: 'gpt-4', object: 'model', created: 1610076051, owned_by: 'openai' },
      { id: 'claude-3.5-sonnet', object: 'model', created: 1610076052, owned_by: 'anthropic' },
      { id: 'gpt-4o', object: 'model', created: 1610076053, owned_by: 'openai' },
      { id: 'claude-3-opus', object: 'model', created: 1610076054, owned_by: 'anthropic' },
      { id: 'gpt-3.5-turbo', object: 'model', created: 1610076055, owned_by: 'openai' },
      { id: 'gpt-4-turbo-2024-04-09', object: 'model', created: 1610076056, owned_by: 'openai' },
      { id: 'gpt-4o-128k', object: 'model', created: 1610076057, owned_by: 'openai' },
      { id: 'gemini-1.5-flash-500k', object: 'model', created: 1610076058, owned_by: 'google' },
      { id: 'claude-3-haiku-200k', object: 'model', created: 1610076059, owned_by: 'anthropic' },
      { id: 'claude-3-5-sonnet-200k', object: 'model', created: 1610076060, owned_by: 'anthropic' },
      { id: 'claude-3-5-sonnet-20241022', object: 'model', created: 1610076061, owned_by: 'anthropic' },
      { id: 'gpt-4o-mini', object: 'model', created: 1610076062, owned_by: 'openai' },
      { id: 'o1-mini', object: 'model', created: 1610076063, owned_by: 'openai' },
      { id: 'o1-preview', object: 'model', created: 1610076064, owned_by: 'openai' },
      { id: 'claude-3.5-haiku', object: 'model', created: 1610076065, owned_by: 'anthropic' },
      { id: 'gemini-exp-1206', object: 'model', created: 1610076066, owned_by: 'google' },
    ],
  };
  return res.json(models_list);
});
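
// OpenAI-compatible chat completions endpoint. The incoming messages are
// re-encoded as a ChatMessage protobuf frame, forwarded to Cursor's
// StreamChat endpoint with the caller's session token, and the framed (or
// gzipped) response is converted back into OpenAI-style streaming or
// non-streaming JSON. The header values below (client version, checksum
// format, timezone) are reproduced as found in this file; they are not taken
// from any documented API.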
// Main route handler
app.post('/ai/v1/chat/completions', async (req, res) => {
  // Models whose names start with "o1-" do not support streaming output
  if (req.body.model?.startsWith('o1-') && req.body.stream) {
    return res.status(400).json({
      error: 'This model does not support streaming',
    });
  }

  let currentKeyIndex = 0;
  try {
    const { model, messages, stream = false } = req.body;
    let authToken = req.headers.authorization?.replace('Bearer ', '');

    // Handle the authentication token (multiple comma-separated keys are allowed)
    const keys = (authToken ?? '').split(',').map((key) => key.trim());
    if (keys.length > 0) {
      if (currentKeyIndex >= keys.length) {
        currentKeyIndex = 0;
      }
      authToken = keys[currentKeyIndex];
    }
    if (authToken && authToken.includes('%3A%3A')) {
      authToken = authToken.split('%3A%3A')[1];
    }

    // Validate the request
    if (!messages || !Array.isArray(messages) || messages.length === 0 || !authToken) {
      return res.status(400).json({
        error: 'Invalid request. Messages should be a non-empty array and authorization is required',
      });
    }
    const hexData = await stringToHex(messages, model);

    // Get the checksum (from the request header, the environment, or a random fallback)
    const checksum =
      req.headers['x-cursor-checksum'] ??
      process.env['x-cursor-checksum'] ??
      `zo${getRandomIDPro({ dictType: 'max', size: 6 })}${getRandomIDPro({ dictType: 'max', size: 64 })}/${getRandomIDPro({ dictType: 'max', size: 64 })}`;

    // Forward the request to Cursor's StreamChat endpoint
    const response = await fetch('https://api2.cursor.sh/aiserver.v1.AiService/StreamChat', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/connect+proto',
        'authorization': `Bearer ${authToken}`,
        'connect-accept-encoding': 'gzip,br',
        'connect-protocol-version': '1',
        'user-agent': 'connect-es/1.4.0',
        'x-amzn-trace-id': `Root=${uuidv4()}`,
        'x-cursor-checksum': checksum,
        'x-cursor-client-version': '0.42.3',
        'x-cursor-timezone': 'Asia/Shanghai',
        'x-ghost-mode': 'false',
        'x-request-id': uuidv4(),
        'host': 'api2.cursor.sh',
      },
      body: hexData,
    });
    // Handle a streaming response
    if (stream) {
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');

      const responseId = `chatcmpl-${uuidv4()}`;

      for await (const chunk of response.body) {
        const text = await chunkToUtf8String(chunk);
        if (text.length > 0) {
          res.write(
            `data: ${JSON.stringify({
              id: responseId,
              object: 'chat.completion.chunk',
              created: Math.floor(Date.now() / 1000),
              model,
              choices: [
                {
                  index: 0,
                  delta: {
                    content: text,
                  },
                },
              ],
            })}\n\n`,
          );
        }
      }

      res.write('data: [DONE]\n\n');
      return res.end();
    } else {
      // Handle a non-streaming response
      let text = '';
      for await (const chunk of response.body) {
        text += await chunkToUtf8String(chunk);
      }
      // Strip any echoed prompt prefix and leading artifact characters
      text = text.replace(/^.*<\|END_USER\|>/s, '');
      text = text.replace(/^\n[a-zA-Z]?/, '').trim();

      return res.json({
        id: `chatcmpl-${uuidv4()}`,
        object: 'chat.completion',
        created: Math.floor(Date.now() / 1000),
        model,
        choices: [
          {
            index: 0,
            message: {
              role: 'assistant',
              content: text,
            },
            finish_reason: 'stop',
          },
        ],
        usage: {
          prompt_tokens: 0,
          completion_tokens: 0,
          total_tokens: 0,
        },
      });
    }
  } catch (error) {
    console.error('Error:', error);
    if (!res.headersSent) {
      if (req.body.stream) {
        res.write(`data: ${JSON.stringify({ error: 'Internal server error' })}\n\n`);
        return res.end();
      } else {
        return res.status(500).json({ error: 'Internal server error' });
      }
    }
  }
});
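
// Illustrative request (assumes the server is running locally on the default
// port and that <CURSOR_SESSION_TOKEN> is a valid Cursor session token; both
// values are placeholders, not defined in this file):
//
//   curl http://localhost:3000/ai/v1/chat/completions \
//     -H "Content-Type: application/json" \
//     -H "Authorization: Bearer <CURSOR_SESSION_TOKEN>" \
//     -d '{"model": "claude-3.5-sonnet", "messages": [{"role": "user", "content": "Hello"}], "stream": false}'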
// Start the server
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`Server listening on port ${PORT}`);
});
module.exports = app;