Skip to content

Commit 9b3dfe7

Browse files
committed
feat!: replace ext.js free text prompt with a custom ui
1 parent 62307df commit 9b3dfe7

File tree

11 files changed

+1090
-258
lines changed

11 files changed

+1090
-258
lines changed

assets/components/modai/js/mgr/autosummary.js

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -104,14 +104,10 @@ Ext.onReady(function() {
104104
const createFreeTextPrompt = (fieldName) => {
105105
const wandEl = createWandEl();
106106
wandEl.addEventListener('click', () => {
107-
const win = MODx.load({
108-
xtype: 'modai-window-text_prompt',
109-
title: 'Text',
107+
modAI.ui.freePrompt({
108+
key: fieldName,
110109
field: fieldName,
111-
cacheKey: fieldName
112110
});
113-
114-
win.show();
115111
});
116112

117113
return wandEl;
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
(() => {
  // Per-key message stores, so several independent chat UIs can coexist on
  // one page without sharing history.
  const _namespace = {};

  const ROLES = {
    user: 'user',
    assistant: 'assistant',
  };

  /**
   * Append a message to the history of the given namespace.
   *
   * @param {string} key - Namespace key previously registered via init().
   * @param {string} content - Message text.
   * @param {string} role - One of ROLES.user / ROLES.assistant.
   * @param {string} [id] - Optional message id; indexed for later updates.
   * @param {boolean} [hidden=false] - Whether the UI should hide the message.
   */
  const addMessage = (key, content, role, id, hidden = false) => {
    const namespace = _namespace[key];
    if (!namespace) {
      return;
    }

    const msgObject = { content, role, id, hidden };

    // The render callback returns the message's DOM element; it is kept on
    // the message object so streamed updates can re-render in place.
    msgObject.el = namespace.onAddMessage(msgObject);

    const index = namespace.history.push(msgObject) - 1;
    if (id) {
      // NOTE(review): a falsy id ('' or 0) is deliberately not indexed —
      // assumes ids are always non-empty strings; confirm with callers.
      namespace.idRef[id] = namespace.history[index];
    }
  };

  /**
   * Replace the content of an existing assistant message, or create it when
   * the id is unknown (first chunk of a streamed response).
   *
   * @param {string} key - Namespace key.
   * @param {string} id - Message id to update.
   * @param {string} content - New full content (not a delta).
   */
  const updateMessage = (key, id, content) => {
    const namespace = _namespace[key];
    if (!namespace) {
      return;
    }

    if (!namespace.idRef[id]) {
      addMessage(key, content, ROLES.assistant, id);
      return;
    }

    namespace.idRef[id].content = content;
    namespace.onUpdateMessage(namespace.idRef[id]);
  };

  /**
   * Look up a message by id within a namespace.
   *
   * @param {string} key - Namespace key.
   * @param {string} id - Message id.
   * @returns {object|undefined} The stored message object, if any.
   */
  const getMessage = (key, id) => {
    const namespace = _namespace[key];
    if (!namespace) {
      return;
    }

    return namespace.idRef[id];
  };

  modAI.chatHistory = {
    /**
     * Register (or re-bind) a chat history namespace and return its API.
     *
     * @param {string} key - Namespace key.
     * @param {Function} onAddMessage - Renders a new message; returns its element.
     * @param {Function} onUpdateMessage - Re-renders an updated message.
     * @returns {object} History API bound to `key`.
     */
    init: (key, onAddMessage, onUpdateMessage) => {
      if (!_namespace[key]) {
        _namespace[key] = {
          history: [],
          idRef: {},
        };
      }

      // Always (re)bind the render callbacks so a re-init — e.g. a reopened
      // UI — attaches the existing history to the new DOM. (Previously the
      // callbacks were also set in the literal above, a redundant dead write.)
      _namespace[key].onAddMessage = onAddMessage;
      _namespace[key].onUpdateMessage = onUpdateMessage;

      return {
        addUserMessage: (content, hidden = false) => {
          addMessage(key, content, ROLES.user, undefined, hidden);
        },
        addAssistantMessage: (content, id) => {
          addMessage(key, content, ROLES.assistant, id);
        },
        updateAssistantMessage: (id, content) => {
          updateMessage(key, id, content);
        },
        getAssistantMessage: (id) => {
          return getMessage(key, id);
        },
        getMessages: () => {
          return _namespace[key].history;
        },
        // History projected to the {role, content} shape AI service APIs expect.
        getMessagesHistory: () => {
          return _namespace[key].history.map((m) => ({
            role: m.role,
            content: m.content,
          }));
        },
      };
    },
  };
})();

assets/components/modai/js/mgr/executor.js

Lines changed: 58 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,11 @@
99
throw new Error(_('modai.cmp.failed_request'));
1010
}
1111

12+
const id = data.id;
13+
1214
return {
13-
content
15+
id,
16+
content,
1417
}
1518
},
1619
image: (data) => {
@@ -33,8 +36,11 @@
3336
throw new Error(_('modai.cmp.failed_request'));
3437
}
3538

39+
const id = data.id;
40+
3641
return {
37-
content
42+
id,
43+
content,
3844
}
3945
}
4046
},
@@ -47,7 +53,8 @@
4753
}
4854

4955
return {
50-
content
56+
id: `gemini-${Date.now()}-${Math.round(Math.random()*1000)}`,
57+
content,
5158
}
5259
},
5360
image: (data) => {
@@ -65,12 +72,13 @@
6572
},
6673
stream: {
6774
chatgpt: {
68-
content: (newData, currentData = undefined) => {
75+
content: (newData, currentData = {}) => {
6976
const currentContent = currentData?.content ?? '';
70-
7177
const content = newData.choices[0]?.delta?.content || '';
7278

7379
return {
80+
...currentData,
81+
id: newData.id,
7482
content: `${currentContent}${content}`
7583
};
7684
}
@@ -82,17 +90,19 @@
8290
const content = newData.delta?.text || '';
8391

8492
return {
93+
...currentData,
8594
content: `${currentContent}${content}`
8695
};
8796
}
8897
},
8998
gemini: {
90-
content: (newData, currentData = undefined) => {
99+
content: (newData, currentData = {}) => {
91100
const currentContent = currentData?.content ?? '';
92101

93102
const content = newData.candidates[0]?.content?.parts[0]?.text || '';
94103

95104
return {
105+
...currentData,
96106
content: `${currentContent}${content}`
97107
};
98108
}
@@ -111,28 +121,36 @@
111121
}
112122
}
113123

114-
const handleStream = async (res, service, parser, onChunkStream) => {
124+
const handleStream = async (res, service, parser, onChunkStream, signal) => {
115125
const reader = res.body.getReader();
116126
const decoder = new TextDecoder('utf-8');
117127
let buffer = '';
118-
let currentData = undefined;
128+
let currentData = {
129+
id: `${service}-${Date.now()}-${Math.round(Math.random()*1000)}`
130+
};
131+
119132

120133
while (true) {
121-
const { done, value } = await reader.read();
134+
if (signal && signal.aborted) {
135+
break;
136+
}
137+
138+
const {done, value} = await reader.read();
122139
if (done) break;
123140

124-
const chunk = decoder.decode(value, { stream: true });
141+
const chunk = decoder.decode(value, {stream: true});
125142

126143
if (service === 'gemini') {
127144
const jsonLines = chunk.trim().split(",\r\n").map((line) => line.replace(/^\[|\]$/g, '')).filter(line => line.trim() !== '');
128145
for (const line of jsonLines) {
129146
try {
130147
const parsedData = JSON.parse(line);
131148
currentData = services.stream[service][parser](parsedData, currentData);
132-
if(onChunkStream) {
149+
if (onChunkStream) {
133150
onChunkStream(currentData);
134151
}
135-
} catch {}
152+
} catch {
153+
}
136154
}
137155
}
138156

@@ -156,10 +174,11 @@
156174
try {
157175
const parsedData = JSON.parse(data);
158176
currentData = services.stream[service][parser](parsedData, currentData);
159-
if(onChunkStream) {
177+
if (onChunkStream) {
160178
onChunkStream(currentData);
161179
}
162-
} catch {}
180+
} catch {
181+
}
163182
}
164183
}
165184

@@ -181,12 +200,17 @@
181200

182201
try {
183202
const parsedData = JSON.parse(data);
203+
if (parsedData.type === 'message_start') {
204+
currentData.id = parsedData.message.id;
205+
continue;
206+
}
207+
184208
if (parsedData.type !== 'content_block_delta') {
185209
continue;
186210
}
187211

188212
currentData = services.stream[service][parser](parsedData, currentData);
189-
if(onChunkStream) {
213+
if (onChunkStream) {
190214
onChunkStream(currentData);
191215
}
192216
} catch {}
@@ -198,17 +222,22 @@
198222
}
199223

200224
return currentData;
225+
201226
}
202227

203-
const serviceExecutor = async (details, onChunkStream = undefined) => {
228+
const serviceExecutor = async (details, onChunkStream = undefined, controller = undefined) => {
204229
if (!details.forExecutor) {
205230
return details;
206231
}
207232

208233
const executorDetails = details.forExecutor;
209234

235+
controller = !controller ? new AbortController() : controller;
236+
const signal = controller.signal;
237+
210238
const callService = async (details) => {
211239
const res = await fetch(details.url, {
240+
signal,
212241
method: 'POST',
213242
body: details.body,
214243
headers: details.headers
@@ -227,6 +256,7 @@
227256

228257
const callStreamService = async (details) => {
229258
const res = await fetch(details.url, {
259+
signal,
230260
method: 'POST',
231261
body: details.body,
232262
headers: details.headers
@@ -274,8 +304,8 @@
274304
return res.json();
275305
}
276306

277-
const aiFetch = async (action, params, onChunkStream) => {
278-
const controller = new AbortController();
307+
const aiFetch = async (action, params, onChunkStream = undefined, controller = undefined) => {
308+
controller = !controller ? new AbortController() : controller;
279309
const signal = controller.signal;
280310

281311
const res = await fetch(`${modAI.apiURL}?action=${action}`, {
@@ -303,7 +333,7 @@
303333

304334
if (!proxy) {
305335
const data = await res.json();
306-
return serviceExecutor(data, onChunkStream);
336+
return serviceExecutor(data, onChunkStream, controller);
307337
}
308338

309339
if (!service || !parser) {
@@ -321,7 +351,7 @@
321351
return services['buffered'][service][parser](data);
322352
}
323353

324-
return handleStream(res, service, parser, onChunkStream);
354+
return handleStream(res, service, parser, onChunkStream, signal);
325355
}
326356

327357
modAI.executor = {
@@ -332,17 +362,17 @@
332362
}
333363
},
334364
prompt: {
335-
freeText: async (params, onChunkStream) => {
336-
return aiFetch('Prompt\\FreeText', params, onChunkStream);
365+
freeText: async (params, onChunkStream, controller = undefined) => {
366+
return aiFetch('Prompt\\FreeText', params, onChunkStream, controller);
337367
},
338-
text: async (params, onChunkStream) => {
339-
return aiFetch('Prompt\\Text', params, onChunkStream);
368+
text: async (params, onChunkStream, controller = undefined) => {
369+
return aiFetch('Prompt\\Text', params, onChunkStream, controller);
340370
},
341-
vision: async (params, onChunkStream) => {
342-
return aiFetch('Prompt\\Vision', params, onChunkStream);
371+
vision: async (params, onChunkStream, controller = undefined) => {
372+
return aiFetch('Prompt\\Vision', params, onChunkStream, controller);
343373
},
344-
image: async (params) => {
345-
return aiFetch('Prompt\\Image', params);
374+
image: async (params, controller = undefined) => {
375+
return aiFetch('Prompt\\Image', params, undefined, controller);
346376
}
347377
}
348378
}

0 commit comments

Comments (0)