@@ -9,8 +9,11 @@
                     throw new Error(_('modai.cmp.failed_request'));
                 }
 
+                const id = data.id;
+
                 return {
-                    content
+                    id,
+                    content,
                 }
             },
             image: (data) => {

@@ -33,8 +36,11 @@
                     throw new Error(_('modai.cmp.failed_request'));
                 }
 
+                const id = data.id;
+
                 return {
-                    content
+                    id,
+                    content,
                 }
             }
         },

@@ -47,7 +53,8 @@
                 }
 
                 return {
-                    content
+                    id: `gemini-${Date.now()}-${Math.round(Math.random()*1000)}`,
+                    content,
                 }
             },
             image: (data) => {

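Note: each buffered parser now returns an `id` alongside `content`. ChatGPT and Anthropic responses carry their own id, while Gemini's do not, so the gemini branch synthesizes one client-side. A minimal standalone sketch of that fallback pattern (the `ensureId` helper and the payloads are illustrative, not part of the patch):

    // Prefer the id supplied by the API; otherwise synthesize a unique one,
    // mirroring the gemini branch above.
    const ensureId = (service, data) =>
        data?.id ?? `${service}-${Date.now()}-${Math.round(Math.random() * 1000)}`;

    console.log(ensureId('chatgpt', { id: 'chatcmpl-123' })); // "chatcmpl-123"
    console.log(ensureId('gemini', {}));                      // e.g. "gemini-<timestamp>-<rand>"
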
@@ -65,12 +72,13 @@
     },
     stream: {
         chatgpt: {
-            content: (newData, currentData = undefined) => {
+            content: (newData, currentData = {}) => {
                 const currentContent = currentData?.content ?? '';
-
                 const content = newData.choices[0]?.delta?.content || '';
 
                 return {
+                    ...currentData,
+                    id: newData.id,
                     content: `${currentContent}${content}`
                 };
             }

@@ -82,17 +90,19 @@
                 const content = newData.delta?.text || '';
 
                 return {
+                    ...currentData,
                     content: `${currentContent}${content}`
                 };
             }
         },
         gemini: {
-            content: (newData, currentData = undefined) => {
+            content: (newData, currentData = {}) => {
                 const currentContent = currentData?.content ?? '';
 
                 const content = newData.candidates[0]?.content?.parts[0]?.text || '';
 
                 return {
+                    ...currentData,
                     content: `${currentContent}${content}`
                 };
             }

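Note: the stream parsers now spread `...currentData` into each result, so fields captured earlier in the stream (such as `id`) survive later chunks while `content` keeps accumulating. A self-contained sketch of the pattern (the chunk shape is a simplified stand-in, not a real API payload):

    // Merge each chunk into the accumulator: the spread preserves prior
    // fields, the template literal appends the new text.
    const accumulate = (chunk, current = {}) => ({
        ...current,
        content: `${current.content ?? ''}${chunk.text ?? ''}`
    });

    let state = { id: 'demo-1' };
    state = accumulate({ text: 'Hel' }, state);
    state = accumulate({ text: 'lo' }, state);
    console.log(state); // { id: 'demo-1', content: 'Hello' }
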
@@ -111,28 +121,36 @@
     }
 }
 
-const handleStream = async (res, service, parser, onChunkStream) => {
+const handleStream = async (res, service, parser, onChunkStream, signal) => {
     const reader = res.body.getReader();
     const decoder = new TextDecoder('utf-8');
     let buffer = '';
-    let currentData = undefined;
+    let currentData = {
+        id: `${service}-${Date.now()}-${Math.round(Math.random()*1000)}`
+    };
+
 
     while (true) {
-        const { done, value } = await reader.read();
+        if (signal && signal.aborted) {
+            break;
+        }
+
+        const {done, value} = await reader.read();
         if (done) break;
 
-        const chunk = decoder.decode(value, { stream: true });
+        const chunk = decoder.decode(value, {stream: true});
 
         if (service === 'gemini') {
            const jsonLines = chunk.trim().split(",\r\n").map((line) => line.replace(/^\[|\]$/g, '')).filter(line => line.trim() !== '');
            for (const line of jsonLines) {
                try {
                    const parsedData = JSON.parse(line);
                    currentData = services.stream[service][parser](parsedData, currentData);
-                    if(onChunkStream) {
+                    if (onChunkStream) {
                        onChunkStream(currentData);
                    }
-                } catch {}
+                } catch {
+                }
            }
        }
 

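Note: `handleStream` now seeds `currentData` with a synthesized per-service id (so streamed results always carry one, even before a provider reports its own) and checks `signal.aborted` between reads, letting a caller stop consumption mid-stream. A minimal sketch of that polling pattern over a generic `ReadableStream` (the `drain` helper is hypothetical):

    // Poll the abort signal between reads so a consumer can bail out
    // without waiting for the stream to finish.
    async function drain(stream, signal) {
        const reader = stream.getReader();
        while (true) {
            if (signal && signal.aborted) break; // caller asked to stop
            const {done, value} = await reader.read();
            if (done) break;
            // ... decode and parse `value` here
        }
    }

Since the same signal is also handed to `fetch`, one abort both stops this loop and cancels the underlying request.
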
@@ -156,10 +174,11 @@
                try {
                    const parsedData = JSON.parse(data);
                    currentData = services.stream[service][parser](parsedData, currentData);
-                    if(onChunkStream) {
+                    if (onChunkStream) {
                        onChunkStream(currentData);
                    }
-                } catch {}
+                } catch {
+                }
            }
        }
 

@@ -181,12 +200,17 @@
 
                try {
                    const parsedData = JSON.parse(data);
+                    if (parsedData.type === 'message_start') {
+                        currentData.id = parsedData.message.id;
+                        continue;
+                    }
+
                    if (parsedData.type !== 'content_block_delta') {
                        continue;
                    }
 
                    currentData = services.stream[service][parser](parsedData, currentData);
-                    if(onChunkStream) {
+                    if (onChunkStream) {
                        onChunkStream(currentData);
                    }
                } catch {}

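Note: in Anthropic's SSE stream the message id arrives once, in a `message_start` event, before any `content_block_delta` events; the handler captures it there and skips normal parsing for that event. An abridged shape of such an event, as this code expects it (the id value is illustrative):

    const event = {
        type: 'message_start',
        message: { id: 'msg_0123456789' /* plus role, model, usage, ... */ }
    };
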
@@ -198,17 +222,22 @@
     }
 
     return currentData;
+
 }
 
-const serviceExecutor = async (details, onChunkStream = undefined) => {
+const serviceExecutor = async (details, onChunkStream = undefined, controller = undefined) => {
     if (!details.forExecutor) {
         return details;
     }
 
     const executorDetails = details.forExecutor;
 
+    controller = !controller ? new AbortController() : controller;
+    const signal = controller.signal;
+
     const callService = async (details) => {
         const res = await fetch(details.url, {
+            signal,
             method: 'POST',
             body: details.body,
             headers: details.headers

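Note: `serviceExecutor` now accepts an optional `AbortController`, creating its own only when the caller supplies none, and threads the controller's signal into both `fetch` calls so buffered and streamed provider requests become cancellable. A standalone sketch of cancelling an in-flight `fetch` this way (the URL is a placeholder):

    const controller = new AbortController();
    const pending = fetch('https://example.com/api', { signal: controller.signal })
        .catch((err) => {
            if (err.name === 'AbortError') return null; // cancelled by caller
            throw err;
        });
    controller.abort(); // rejects the fetch with AbortError
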
@@ -227,6 +256,7 @@
 
     const callStreamService = async (details) => {
         const res = await fetch(details.url, {
+            signal,
             method: 'POST',
             body: details.body,
             headers: details.headers

@@ -274,8 +304,8 @@
     return res.json();
 }
 
-const aiFetch = async (action, params, onChunkStream) => {
-    const controller = new AbortController();
+const aiFetch = async (action, params, onChunkStream = undefined, controller = undefined) => {
+    controller = !controller ? new AbortController() : controller;
     const signal = controller.signal;
 
     const res = await fetch(`${modAI.apiURL}?action=${action}`, {

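Note: `aiFetch` used to create a fresh `AbortController` unconditionally; it now reuses a caller-supplied one, so a single controller can cancel both the initial request to the modAI backend and any follow-up provider call made through `serviceExecutor` or `handleStream`. Sketch of that sharing pattern (the endpoints are placeholders):

    async function twoStep(controller = new AbortController()) {
        // Both requests listen to the same signal; one abort() cancels
        // whichever is currently in flight.
        await fetch('/api/step-one', { signal: controller.signal });
        await fetch('/api/step-two', { signal: controller.signal });
    }
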
@@ -303,7 +333,7 @@
 
     if (!proxy) {
         const data = await res.json();
-        return serviceExecutor(data, onChunkStream);
+        return serviceExecutor(data, onChunkStream, controller);
     }
 
     if (!service || !parser) {

@@ -321,7 +351,7 @@
         return services['buffered'][service][parser](data);
     }
 
-    return handleStream(res, service, parser, onChunkStream);
+    return handleStream(res, service, parser, onChunkStream, signal);
 }
 
 modAI.executor = {

@@ -332,17 +362,17 @@
         }
     },
     prompt: {
-        freeText: async (params, onChunkStream) => {
-            return aiFetch('Prompt\\FreeText', params, onChunkStream);
+        freeText: async (params, onChunkStream, controller = undefined) => {
+            return aiFetch('Prompt\\FreeText', params, onChunkStream, controller);
         },
-        text: async (params, onChunkStream) => {
-            return aiFetch('Prompt\\Text', params, onChunkStream);
+        text: async (params, onChunkStream, controller = undefined) => {
+            return aiFetch('Prompt\\Text', params, onChunkStream, controller);
         },
-        vision: async (params, onChunkStream) => {
-            return aiFetch('Prompt\\Vision', params, onChunkStream);
+        vision: async (params, onChunkStream, controller = undefined) => {
+            return aiFetch('Prompt\\Vision', params, onChunkStream, controller);
         },
-        image: async (params) => {
-            return aiFetch('Prompt\\Image', params);
+        image: async (params, controller = undefined) => {
+            return aiFetch('Prompt\\Image', params, undefined, controller);
         }
     }
 }

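Note: every `modAI.executor.prompt` method now takes an optional controller as its final argument; `image`, which never streams, passes `undefined` for `onChunkStream` to keep the `aiFetch` signature aligned. A hypothetical usage, wiring a Stop button to a streamed prompt (`promptParams`, `output`, and `stopButton` are assumed to exist in the caller's UI):

    async function runPrompt(promptParams, output, stopButton) {
        const controller = new AbortController();
        stopButton.addEventListener('click', () => controller.abort());

        return modAI.executor.prompt.freeText(
            promptParams,
            (chunk) => { output.textContent = chunk.content; },
            controller
        );
    }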