// server.js
  1. import express from "express";
  2. import OpenAI from "openai";
  3. import cors from "cors";
  4. import logger from "./logger";
  5. import config from "./config";
const app = express();
// Middleware: allow cross-origin requests and parse JSON request bodies.
app.use(cors());
app.use(express.json());
// Base URL of the internal API server, read from the "api-url" config key.
const api_url = config["api-url"];
  11. // POST 路由处理OpenAI请求
  12. app.post("/api/openai", async (req, res) => {
  13. try {
  14. const { model_id, open_ai_url, api_key, payload } = req.body;
  15. let requestUrl = open_ai_url;
  16. let apiKey = api_key;
  17. let aiBody = payload;
  18. // 验证必需的参数
  19. if (!model_id) {
  20. if (!open_ai_url || !api_key || !payload) {
  21. return res.status(400).json({
  22. error:
  23. "Missing required parameters: open_ai_url, api_key, or payload",
  24. });
  25. }
  26. } else {
  27. //get model info from api server
  28. try {
  29. const url = api_url + `/v2/ai-model/${model_id}`;
  30. logger.info("get model info from api server " + url);
  31. const response = await fetch(url, {
  32. method: "GET",
  33. headers: {
  34. "Content-Type": "application/json",
  35. },
  36. });
  37. // 获取响应数据
  38. const model = await response.json();
  39. if (model.ok) {
  40. requestUrl = model.data.url;
  41. apiKey = model.data.key;
  42. aiBody.model = model.data.model;
  43. } else {
  44. return res.status(400).json({
  45. error:
  46. "Missing required parameters: open_ai_url, api_key, or payload",
  47. });
  48. }
  49. } catch (error) {
  50. logger.error(error.message);
  51. if (!res.headersSent) {
  52. res.status(500).json({
  53. error: "Proxy server error",
  54. message: error.message,
  55. type: "proxy_error",
  56. });
  57. }
  58. }
  59. }
  60. // 检测不同的 AI 服务提供商
  61. const isClaudeAPI =
  62. requestUrl.includes("anthropic.com") || requestUrl.includes("claude");
  63. const isStreaming = aiBody.stream === true;
  64. // 构建请求URL和headers
  65. let headers = {
  66. "Content-Type": "application/json",
  67. Authorization: `Bearer ${apiKey}`,
  68. };
  69. if (isClaudeAPI) {
  70. // Claude API使用特殊的header格式
  71. headers["x-api-key"] = apiKey;
  72. headers["anthropic-version"] = "2023-06-01";
  73. }
  74. logger.info("request " + requestUrl);
  75. if (isStreaming) {
  76. // 流式响应处理
  77. res.setHeader("Content-Type", "text/event-stream");
  78. res.setHeader("Cache-Control", "no-cache");
  79. res.setHeader("Connection", "keep-alive");
  80. res.setHeader("Access-Control-Allow-Origin", "*");
  81. try {
  82. const response = await fetch(requestUrl, {
  83. method: "POST",
  84. headers: headers,
  85. body: JSON.stringify(aiBody),
  86. });
  87. // 复制响应头到客户端
  88. response.headers.forEach((value, key) => {
  89. // 跳过一些不需要的头部
  90. if (
  91. ![
  92. "content-encoding",
  93. "content-length",
  94. "transfer-encoding",
  95. ].includes(key.toLowerCase())
  96. ) {
  97. res.setHeader(key, value);
  98. }
  99. });
  100. // 设置响应状态码(直接使用大模型返回的状态码)
  101. res.status(response.status);
  102. logger.info(response.status);
  103. if (!response.ok) {
  104. // 对于错误响应,也要透传原始数据
  105. const reader = response.body.getReader();
  106. const decoder = new TextDecoder();
  107. while (true) {
  108. const { done, value } = await reader.read();
  109. if (done) break;
  110. const chunk = decoder.decode(value, { stream: true });
  111. res.write(chunk);
  112. }
  113. res.end();
  114. return;
  115. }
  116. // 处理成功的流式响应
  117. const reader = response.body.getReader();
  118. const decoder = new TextDecoder();
  119. while (true) {
  120. const { done, value } = await reader.read();
  121. if (done) break;
  122. const chunk = decoder.decode(value, { stream: true });
  123. res.write(chunk);
  124. }
  125. res.end();
  126. } catch (streamError) {
  127. logger.error("Streaming error:" + streamError.message);
  128. // 网络错误或其他系统错误
  129. res.status(500);
  130. res.setHeader("Content-Type", "application/json");
  131. res.json({
  132. error: "Proxy server error",
  133. message: streamError.message,
  134. type: "proxy_error",
  135. });
  136. }
  137. } else {
  138. // 非流式响应处理
  139. const response = await fetch(requestUrl, {
  140. method: "POST",
  141. headers: headers,
  142. body: JSON.stringify(aiBody),
  143. });
  144. // 复制响应头到客户端
  145. response.headers.forEach((value, key) => {
  146. // 跳过一些不需要的头部
  147. if (
  148. !["content-encoding", "content-length", "transfer-encoding"].includes(
  149. key.toLowerCase()
  150. )
  151. ) {
  152. res.setHeader(key, value);
  153. }
  154. });
  155. // 设置响应状态码(直接使用大模型返回的状态码)
  156. res.status(response.status);
  157. // 获取响应数据
  158. const responseData = await response.json();
  159. // 直接返回原始响应数据,不进行任何修改
  160. res.json(responseData);
  161. }
  162. } catch (error) {
  163. logger.error("Proxy Error:" + error.message);
  164. // 只有在系统级错误时才返回代理服务器的错误信息
  165. // 比如网络错误、JSON解析错误等
  166. if (!res.headersSent) {
  167. res.status(500).json({
  168. error: "Proxy server error",
  169. message: error.message,
  170. type: "proxy_error",
  171. });
  172. }
  173. }
  174. });
  175. // 健康检查端点
  176. app.get("/health", (req, res) => {
  177. res.json({ status: "OK", timestamp: new Date().toISOString() });
  178. });
  179. export default app;