Skip to content

Commit 0edacec

Browse files
authored
Merge pull request #7 from golivecosmos/examples-text-loader
Support for file uploads + memory examples
2 parents 7be9113 + a4197d6 commit 0edacec

File tree

17 files changed

+1417
-59
lines changed

17 files changed

+1417
-59
lines changed

client/Chat.jsx

Lines changed: 68 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1,69 +1,104 @@
11
import React, { useEffect, useState } from 'react';
22
import { chatServices } from './services/chat-services';
3-
import { Grid, CircularProgress, Typography } from '@mui/material';
3+
import { Grid, CircularProgress, Typography, Button } from '@mui/material';
44
import { KeyboardReturn } from '@mui/icons-material';
55

6+
// Shared inline-style objects for the Chat component's layout and input box.
const styles = {
  // Flex column that centers its children on both axes.
  grid: {
    alignItems: 'center',
    display: 'flex',
    flexDirection: 'column',
    justifyContent: 'center',
  },
  // Fixed-height text input with a drop shadow and a sane minimum width.
  input: {
    boxShadow: 24,
    height: '25px',
    width: '300px',
    minWidth: '100px',
  },
};
20+
621
const Chat = () => {
722
const [userInput, setUserInput] = useState('');
823
const [loading, setLoading] = useState(false);
924
const [answer, setAnswer] = useState('');
1025
const [error, setError] = useState('');
26+
const [selectedFile, setSelectedFile] = useState(null);
1127

1228
const handleInputChange = (event) => {
1329
setError('');
1430
setUserInput(event.target.value);
1531
};
1632

1733
const handlSendUserInput = async (event) => {
18-
event.persist();
19-
if (event.key !== "Enter") {
20-
return;
34+
event.persist();
35+
if (event.key !== "Enter") {
36+
return;
37+
}
38+
39+
try {
40+
setLoading(true);
41+
const { response } = await chatServices.chatWithLLM({ userInput });
42+
setAnswer(response);
43+
} catch (err) {
44+
setError(err);
45+
return;
46+
} finally {
47+
setLoading(false);
2148
}
49+
};
2250

51+
const handleFileChange = (event) => {
52+
setSelectedFile(event.target.files[0]);
53+
}
54+
55+
const handleFileUpload = async () => {
56+
if (selectedFile) {
2357
try {
24-
setLoading(true);
25-
const { response } = await chatServices.create({ userInput });
26-
setAnswer(response);
27-
} catch (err) {
28-
setError(err);
29-
return;
30-
} finally {
31-
setLoading(false);
58+
setLoading(true);
59+
const form = new FormData();
60+
form.append('chat-file', selectedFile);
61+
62+
const { success } = await chatServices.ingestFile({ fileInput: form })
63+
if (success) {
64+
setAnswer('Successfully ingested. Ask me anything.');
3265
}
66+
} catch (err) {
67+
setSelectedFile(null);
68+
setError(err);
69+
} finally {
70+
setLoading(false);
71+
}
72+
}
3373
}
3474

3575
useEffect(() => {
3676
if (userInput != null && userInput.trim() === "") {
3777
setAnswer('');
3878
}
3979
}, [userInput]);
40-
41-
const gridStyle = {
42-
alignItems: 'center',
43-
display: 'flex',
44-
flexDirection: 'column',
45-
justifyContent: 'center',
46-
};
47-
48-
const inputStyle = {
49-
boxShadow: 24,
50-
height: '25px',
51-
width: '300px',
52-
minWidth: '100px',
53-
}
5480

5581
return (
56-
<Grid container spacing={2} style={gridStyle}>
57-
<Grid item xs={8} style={{ display: 'flex', flexDirection: 'row' }}>
58-
<input style={inputStyle}
82+
<Grid container spacing={2} style={styles.grid}>
83+
<Grid item xs={8} style={{ display: 'flex', flexDirection: 'column' }}>
84+
<div style={{ display: 'flex' }}>
85+
<input style={styles.input}
5986
value={userInput}
6087
onChange={handleInputChange}
6188
onKeyDown={handlSendUserInput}
6289
disabled={loading}
63-
>
64-
</input>
65-
<div style={{ marginLeft: '5px', marginTop: '5px' }}>
66-
<KeyboardReturn />
90+
/>
91+
92+
<KeyboardReturn style={{ marginLeft: '5px', marginTop: '5px' }} />
93+
</div>
94+
95+
<div style={{ marginTop: '2rem', display: 'flex', flexDirection: 'column', alignItems: 'center' }}>
96+
<input accept=".pdf,.txt,.csv" id="file-input" type="file" onChange={handleFileChange}/>
97+
{selectedFile && (
98+
<Button onClick={handleFileUpload}>
99+
Upload
100+
</Button>
101+
)}
67102
</div>
68103
</Grid>
69104
<Grid item xs={8}>

client/services/chat-services.js

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,24 @@
11
import { requestClient } from './request-client';
22

3-
// POST the user's prompt to the chat endpoint and unwrap the response payload.
// NOTE(review): the third argument is a bare number (120000) here, while
// ingestFile passes a config object with `timeout` — confirm which shape
// requestClient.post actually expects; these two call sites disagree.
const chatWithLLM = async ({ userInput }) => {
  const result = await requestClient.post('/chat', { userInput }, 120000);
  return result.data;
};
77

8+
// POST a multipart form (`fileInput` is expected to be a FormData with the
// file appended under 'chat-file') to the ingest endpoint and unwrap the
// response payload.
const ingestFile = async ({ fileInput }) => {
  const { data } = await requestClient.post('/chat/ingest', fileInput,
    {
      timeout: 120000,
      headers: {
        // Fixed: media-type parameters are separated by ';', not ':' —
        // `multipart/form-data: boundary=...` is a malformed Content-Type.
        // NOTE(review): browser-native FormData has no `_boundary` property
        // (that belongs to Node's `form-data` package). If this runs in the
        // browser, `_boundary` is undefined; prefer omitting this header and
        // letting the HTTP client generate the boundary — confirm the runtime.
        'Content-Type': `multipart/form-data; boundary=${fileInput._boundary}`
      }
    });
  return data;
};
18+
819
// Public surface of the chat service layer.
const chatServices = {
  chatWithLLM,
  ingestFile,
};

export { chatServices };

examples/memory/bufferMemory.js

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { BufferMemory } from 'langchain/memory';

// Fixed: only fall back to the placeholder when no key is already set,
// instead of unconditionally clobbering the environment.
if (!process.env.OPENAI_API_KEY) {
  process.env.OPENAI_API_KEY = 'YOUR API KEY';
}

/**
 * Run a single conversation turn through a ConversationChain backed by
 * BufferMemory (the full, unwindowed chat history).
 * @returns {Promise<object>} the chain's response for the first user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new BufferMemory({ returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Fixed: run() returned a floating promise, so any rejection (e.g. a bad API
// key) surfaced as an unhandled rejection. Report failures explicitly.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

examples/memory/bufferWindowMemory.js

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { BufferWindowMemory } from 'langchain/memory';

// Fixed: only fall back to the placeholder when no key is already set,
// instead of unconditionally clobbering the environment.
if (!process.env.OPENAI_API_KEY) {
  process.env.OPENAI_API_KEY = 'YOUR API KEY';
}

/**
 * Run a single conversation turn through a ConversationChain backed by
 * BufferWindowMemory (only the last k = 5 exchanges are kept).
 * @returns {Promise<object>} the chain's response for the first user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new BufferWindowMemory({ returnMessages: true, k: 5 });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Fixed: run() returned a floating promise, so any rejection (e.g. a bad API
// key) surfaced as an unhandled rejection. Report failures explicitly.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { ConversationSummaryMemory } from 'langchain/memory';

// Fixed: only fall back to the placeholder when no key is already set,
// instead of unconditionally clobbering the environment.
if (!process.env.OPENAI_API_KEY) {
  process.env.OPENAI_API_KEY = 'YOUR API KEY';
}

/**
 * Run a single conversation turn through a ConversationChain backed by
 * ConversationSummaryMemory (history is compressed into a running summary
 * produced by the same LLM).
 * @returns {Promise<object>} the chain's response for the first user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new ConversationSummaryMemory({ llm, returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Fixed: run() returned a floating promise, so any rejection (e.g. a bad API
// key) surfaced as an unhandled rejection. Report failures explicitly.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

examples/memory/entityMemory.js

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { EntityMemory } from 'langchain/memory';

// Fixed: only fall back to the placeholder when no key is already set,
// instead of unconditionally clobbering the environment.
if (!process.env.OPENAI_API_KEY) {
  process.env.OPENAI_API_KEY = 'YOUR API KEY';
}

/**
 * Run a single conversation turn through a ConversationChain backed by
 * EntityMemory (the LLM extracts and tracks entities from the dialogue).
 * Marked async for consistency with the sibling memory examples; it still
 * resolves to the chain's call result either way.
 * @returns {Promise<object>} the chain's response for the first user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new EntityMemory({ llm, returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Fixed: run() returned a floating promise, so any rejection (e.g. a bad API
// key) surfaced as an unhandled rejection. Report failures explicitly.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

0 commit comments

Comments
 (0)