// openai.ts — OpenAI streaming chat adapter
  1. // dashboard-v4/dashboard/src/services/modelAdapters/openai.ts
  2. import { BaseModelAdapter } from "./base";
  3. import type {
  4. OpenAIMessage,
  5. SendOptions,
  6. ParsedChunk,
  7. ChatCompletionChunk,
  8. } from "../../types/chat";
  9. import type { IAiModel } from "../../api/ai";
  10. import { tools } from "../agentApi";
  11. export class OpenAIAdapter extends BaseModelAdapter {
  12. model: IAiModel | undefined;
  13. name = "gpt-4";
  14. supportsFunctionCall = true;
  15. protected buildRequestPayload(
  16. messages: OpenAIMessage[],
  17. options: SendOptions
  18. ) {
  19. return {
  20. model: this.model?.name,
  21. messages,
  22. stream: true,
  23. temperature: options.temperature || 0.7,
  24. max_completion_tokens: options.max_tokens || 2048,
  25. top_p: options.top_p || 1,
  26. tools: tools,
  27. tool_choice: "auto",
  28. };
  29. }
  30. // 修改这个方法
  31. async sendMessage(
  32. messages: OpenAIMessage[],
  33. options: SendOptions
  34. ): Promise<AsyncIterable<string>> {
  35. const payload = this.buildRequestPayload(messages, options);
  36. return this.createStreamIterable(payload);
  37. }
  38. private async *createStreamIterable(payload: any): AsyncIterable<string> {
  39. console.log("ai chat send message", payload);
  40. const response = await fetch(import.meta.env.VITE_REACT_APP_OPENAI_PROXY!, {
  41. method: "POST",
  42. headers: {
  43. "Content-Type": "application/json",
  44. Authorization: `Bearer ${import.meta.env.VITE_REACT_APP_OPENAI_KEY}`,
  45. },
  46. body: JSON.stringify({
  47. model_id: this.model?.uid,
  48. payload,
  49. }),
  50. });
  51. if (!response.ok) {
  52. throw new Error(`HTTP error! status: ${response.status}`);
  53. }
  54. const reader = response.body?.getReader();
  55. if (!reader) {
  56. throw new Error("无法获取响应流");
  57. }
  58. const decoder = new TextDecoder();
  59. let buffer = "";
  60. try {
  61. while (true) {
  62. const { done, value } = await reader.read();
  63. if (done) break;
  64. buffer += decoder.decode(value, { stream: true });
  65. const lines = buffer.split("\n");
  66. buffer = lines.pop() || "";
  67. for (const line of lines) {
  68. if (!line.trim() || !line.startsWith("data: ")) continue;
  69. const data = line.slice(6);
  70. if (data === "[DONE]") return;
  71. yield data;
  72. }
  73. }
  74. } finally {
  75. reader.releaseLock();
  76. }
  77. }
  78. // 其他方法保持不变
  79. parseStreamChunk(chunk: string): ParsedChunk | null {
  80. try {
  81. const parsed: ChatCompletionChunk = JSON.parse(chunk);
  82. const delta = parsed.choices?.[0]?.delta;
  83. const finishReason = parsed.choices?.[0]?.finish_reason;
  84. return {
  85. content: delta?.content,
  86. tool_calls: delta?.tool_calls,
  87. finish_reason: finishReason,
  88. };
  89. } catch {
  90. return null;
  91. }
  92. }
  93. }