-
Notifications
You must be signed in to change notification settings - Fork 16
/
Copy pathbot.js
443 lines (383 loc) · 15.7 KB
/
bot.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
import path from 'path'
import fs from 'fs/promises'
import OpenAI from 'openai'
import config from './config.js'
import { state, stats } from './store.js'
import * as actions from './actions.js'
// Shared OpenAI API client, authenticated with the API key from config.js
const ai = new OpenAI({ apiKey: config.openaiKey })
// Decide whether an inbound message is eligible for an automated AI reply.
// Rejects chats owned by a human agent, self-sent messages, non-1:1 chats,
// label/number based filters, banned or blocked chats and, when configured,
// archived chats.
function canReply ({ data, device }) {
  const { chat } = data
  const sender = chat.fromNumber

  // Never reply when a human agent already owns the conversation
  if (chat.owner?.agent) {
    return false
  }
  // Prevent self-reply loops with the device's own number
  if (sender === device.phone) {
    console.log('[debug] Skip message: cannot chat with your own WhatsApp number:', device.phone)
    return false
  }
  // Only direct 1:1 chats are supported (no groups or channels)
  if (chat.type !== 'chat') {
    return false
  }
  // Honour the configured label-based exclusion list, when applicable
  const skipLabels = config.skipChatWithLabels || []
  const chatLabels = chat.labels || []
  if (skipLabels.length && chatLabels.length && skipLabels.some(label => chatLabels.includes(label))) {
    return false
  }
  // Matches a number against a list, with or without the leading '+'
  const matchesNumber = (list) => list.some(number => number === sender || sender.slice(1) === number)
  // When a whitelist is configured it is authoritative: matching numbers are
  // allowed, everything else is rejected.
  // NOTE(review): the early return skips the banned/blocked/archived checks
  // below for whitelisted numbers — confirm this is intended.
  if (config.numbersWhitelist && config.numbersWhitelist.length && sender) {
    return matchesNumber(config.numbersWhitelist)
  }
  // Otherwise skip numbers present in the blacklist, when applicable
  if (config.numbersBlacklist && config.numbersBlacklist.length && sender && matchesNumber(config.numbersBlacklist)) {
    return false
  }
  // Skip banned chats and blocked contacts
  if (chat.status === 'banned' || chat.waStatus === 'banned' || chat.contact?.status === 'blocked') {
    return false
  }
  // Optionally skip archived chats
  if (config.skipArchivedChats && (chat.status === 'archived' || chat.waStatus === 'archived')) {
    return false
  }
  return true
}
// Send message back to the user and perform post-message required actions like
// adding labels to the chat or updating the chat's contact metadata.
// Returns an async closure: ({ message, ...params }, { text } = {}) => Promise
// - message: text to send (may be converted to a voice note in audio mode)
// - params: extra payload fields forwarded to the messaging API
// - text: when truthy, force a plain text reply even in audio mode
function replyMessage ({ data, device, useAudio }) {
  return async ({ message, ...params }, { text } = {}) => {
    const { phone } = data.chat.contact
    // If audio mode, create a new voice message (the TTS API rejects long
    // inputs, so cap at 4096 characters)
    let fileId = null
    if (config.features.audioOutput && !text && message.length <= 4096 && (useAudio || config.features.audioOnly)) {
      console.log('[info] generating audio response for chat:', data.fromNumber, message)
      const audio = await ai.audio.speech.create({
        input: message,
        model: 'tts-1',
        voice: config.features.voice,
        response_format: 'mp3',
        speed: config.features.voiceSpeed
      })
      // Pseudo-unique file id: hex timestamp + random suffix (not cryptographically secure)
      const timestamp = Date.now().toString(16)
      const random = Math.floor(Math.random() * 0xfffff).toString(16)
      fileId = `${timestamp}${random}`
      const filepath = path.join(`${config.tempPath}`, `${fileId}.mp3`)
      const buffer = Buffer.from(await audio.arrayBuffer())
      await fs.writeFile(filepath, buffer)
    }
    const payload = {
      phone,
      device: device.id,
      message,
      reference: 'bot:chatgpt',
      ...params
    }
    if (fileId) {
      payload.message = undefined
      // Build the public media URL from the webhook base URL.
      // Fix: use posix join so nested webhook paths keep the '/' separator
      // (previously '/a/b' produced '/afiles/<id>').
      const schema = new URL(config.webhookUrl)
      const mediaPath = path.posix.join(path.dirname(schema.pathname), 'files', fileId)
      const url = `${schema.protocol}//${schema.host}${mediaPath}${schema.search}`
      payload.media = { url, format: 'ptt' }
    }
    const msg = await actions.sendMessage(payload)
    // Store sent message in the in-memory chat history
    state[data.chat.id] = state[data.chat.id] || {}
    state[data.chat.id][msg.waId] = {
      id: msg.waId,
      flow: 'outbound',
      date: msg.createdAt,
      body: message
    }
    // Increase chat messages quota counter
    stats[data.chat.id] = stats[data.chat.id] || { messages: 0, time: Date.now() }
    stats[data.chat.id].messages += 1
    // Add bot-managed chat labels, if required.
    // Fix: only push labels the chat does not have yet — the previous filter
    // kept only labels already present, so configured labels were never added.
    if (config.setLabelsOnBotChats.length) {
      const labels = config.setLabelsOnBotChats.filter(label => !(data.chat.labels || []).includes(label))
      if (labels.length) {
        await actions.updateChatLabels({ data, device, labels })
      }
    }
    // Add bot-managed chat metadata, if required
    if (config.setMetadataOnBotChats.length) {
      const metadata = config.setMetadataOnBotChats
        .filter(entry => entry && entry.key && entry.value)
        .map(({ key, value }) => ({ key, value }))
      await actions.updateChatMetadata({ data, device, metadata })
    }
  }
}
// Safely parse a JSON string of tool-call arguments.
// Returns an empty object when the input is missing or is not valid JSON.
function parseArguments (json) {
  if (!json) {
    return {}
  }
  try {
    return JSON.parse(json)
  } catch {
    return {}
  }
}
// Check whether the chat was previously flagged as over quota via the
// 'bot:chatgpt:status' entry in the contact metadata.
function hasChatMetadataQuotaExceeded (chat) {
  const entries = chat.contact?.metadata || []
  const status = entries.find(entry => entry.key === 'bot:chatgpt:status')
  return status?.value === 'too_many_messages'
}
// Messages quota per chat: returns true while the chat still has quota left.
// Lazily initializes the per-chat stats entry. Once the quota is consumed,
// the counter is reset (and quota granted again) only after the configured
// time window has elapsed since the entry's timestamp.
function hasChatMessagesQuota (chat) {
  const entry = stats[chat.id] || { messages: 0, time: Date.now() }
  stats[chat.id] = entry
  if (entry.messages < config.limits.maxMessagesPerChat) {
    return true
  }
  const windowMs = config.limits.maxMessagesPerChatTime * 1000
  if ((Date.now() - entry.time) >= windowMs) {
    // Time window elapsed: reset the counter and allow replies again
    entry.messages = 0
    entry.time = Date.now()
    return true
  }
  return false
}
// When the messages quota is exceeded, hand the chat over to a human agent
// and flag it via contact metadata so it is not processed again until the
// status is cleared. No-op (returns false) if the chat is already flagged.
async function updateChatOnMessagesQuota ({ data, device }) {
  const { chat } = data
  if (hasChatMetadataQuotaExceeded(chat)) {
    return false
  }
  // Assign chat to an agent
  const assignAgent = actions.assignChatToAgent({ data, device, force: true })
  // Update metadata status to 'too_many_messages'
  const flagChat = actions.updateChatMetadata({
    data,
    device,
    metadata: [{ key: 'bot:chatgpt:status', value: 'too_many_messages' }]
  })
  await Promise.all([assignAgent, flagChat])
}
// Process a message received from the user on every new inbound webhook event:
// applies eligibility filters and quotas, normalizes the input per message
// type, generates an AI response (resolving tool calls when requested by the
// model) and replies to the chat.
export async function processMessage ({ data, device } = {}) {
  // Can reply to this message?
  if (!canReply({ data, device })) {
    return console.log('[info] Skip message - chat is not eligible to reply due to active filters:', data.fromNumber, data.date, data.body)
  }
  const { chat } = data
  // Enforce the per-chat messages quota
  if (!hasChatMessagesQuota(chat)) {
    console.log('[info] Skip message - chat has reached the maximum messages quota:', data.fromNumber)
    return await updateChatOnMessagesQuota({ data, device })
  }
  // Chat was previously flagged as over quota but has quota again: re-activate
  // it (best-effort, failure only logged)
  if (hasChatMetadataQuotaExceeded(chat)) {
    actions.updateChatMetadata({ data, device, metadata: [{ key: 'bot:chatgpt:status', value: 'active' }] })
      .catch(err => console.error('[error] failed to update chat metadata:', data.chat.id, err.message))
  }
  // If audio message, transcribe it to text
  if (data.type === 'audio') {
    const noAudioMessage = config.templateMessages.noAudioAccepted || 'Audio messages are not supported: gently ask the user to send text messages only.'
    if (config.features.audioInput && +data.media.meta?.duration <= config.limits.maxAudioDuration) {
      const transcription = await actions.transcribeAudio({ message: data, device })
      if (transcription) {
        data.body = transcription
      } else {
        console.error('[error] failed to transcribe audio message:', data.fromNumber, data.date, data.media.id)
        data.body = noAudioMessage
      }
    } else {
      data.body = noAudioMessage
    }
  }
  // Extract input body per message type
  if (data.type === 'video' && !data.body) {
    data.body = 'Video message cannot be processed. Send a text message.'
  }
  if (data.type === 'document' && !data.body) {
    data.body = 'Document message cannot be processed. Send a text message.'
  }
  if (data.type === 'location' && !data.body) {
    data.body = `Location: ${data.location.name || ''} ${data.location.address || ''}`
  }
  if (data.type === 'poll' && !data.body) {
    data.body = `Poll: ${data.poll.name || 'unnamed'}\n${data.poll.options.map(x => '-' + x.name).join('\n')}`
  }
  if (data.type === 'event' && !data.body) {
    // Fix: the original juxtaposed two template literals (parsed as a tagged
    // template), which threw a TypeError at runtime for event messages — the
    // call link is now a proper list entry.
    data.body = [
      `Meeting event: ${data.event.name || 'unnamed'}`,
      `Description: ${data.event.description || 'no description'}`,
      `Date: ${data.event.date || 'no date'}`,
      `Location: ${data.event.location || 'undefined location'}`,
      `Call link: ${data.event.link || 'no call link'}`
    ].join('\n')
  }
  if (data.type === 'contacts') {
    // Fix: join phone numbers explicitly and drop the stray '}' that leaked
    // into the generated text
    data.body = data.contacts.map(contact => {
      const phones = (contact.phones || []).map(phone => phone.number || phone.waid).join(', ')
      return `- Contact card: ${contact.formattedName || contact.name || contact.firstName || ''} - Phone: ${phones}`
    }).join('\n')
  }
  // User message input, truncated to the configured maximum length
  const body = data?.body?.trim().slice(0, Math.min(config.limits.maxInputCharacters, 10000))
  console.log('[info] New inbound message received:', chat.id, data.type, body || '<empty message>')
  // If input message is audio, reply with an audio message, unless features.audioOutput is false
  const useAudio = data.type === 'audio'
  // Create partial function to reply the chat
  const reply = replyMessage({ data, device, useAudio })
  if (!body) {
    // Empty input is only acceptable when it is a processable image;
    // otherwise reply with the default unknown command response.
    const isProcessableImage = data.type === 'image' && config.features.imageInput && data.media.size <= config.limits.maxImageSize
    if (!isProcessableImage) {
      const unknownCommand = `${config.unknownCommandMessage}\n\n${config.defaultMessage}`
      // Fix: return here — the original fell through and kept processing the
      // empty message after sending the unknown command reply
      return await reply({ message: unknownCommand }, { text: true })
    }
  }
  // Assign the chat to an agent when the user asks for a human.
  // Fix: the original regex /^human|person|help|stop$/i left the middle
  // alternatives unanchored, so any message containing 'person' or 'help'
  // (or ending in 'stop') triggered the handover.
  if (/^(human|person|help|stop)$/i.test(body) || /^human/i.test(body)) {
    actions.assignChatToAgent({ data, device, force: true }).catch(err => {
      console.error('[error] failed to assign chat to user:', data.chat.id, err.message)
    })
    const message = config.templateMessages.chatAssigned || 'You will be contacted shortly by someone from our team. Thank you for your patience.'
    return await reply({ message }, { text: true })
  }
  // Load previous messages history for context-aware responses
  if (!state[data.chat.id]) {
    console.log('[info] fetch previous messages history for chat:', data.chat.id)
    await actions.pullChatMessages({ data, device })
  }
  // Chat messages history
  const chatMessages = state[data.chat.id] = state[data.chat.id] || {}
  // Chat configuration
  const { apiBaseUrl } = config
  // Compose the previous messages (newest 40, in chronological order) to give
  // the model conversation context
  const previousMessages = Object.values(chatMessages)
    .sort((a, b) => +new Date(b.date) - +new Date(a.date))
    .slice(0, 40)
    .reverse()
    .map(message => {
      if (message.flow === 'inbound' && !message.body && message.type === 'image' && config.features.imageInput && message.media.size <= config.limits.maxImageSize) {
        // Inbound image without text: pass the downloadable image URL (plus
        // optional caption) to the vision-capable model
        const url = apiBaseUrl + message.media.links.download.slice(3) + '?token=' + config.apiKey
        return {
          role: 'user',
          content: [{
            type: 'image_url',
            image_url: { url }
          }, message.media.caption ? { type: 'text', text: message.media.caption } : null].filter(x => x)
        }
      } else {
        return {
          role: message.flow === 'inbound' ? 'user' : (message.role || 'assistant'),
          content: message.body
        }
      }
    })
    .filter(message => message.content).slice(-(+config.limits.chatHistoryLimit || 20))
  const messages = [
    { role: 'system', content: config.botInstructions },
    ...previousMessages
  ]
  // Ensure the current inbound message is the last user message in the prompt
  const lastMessage = messages[messages.length - 1]
  if (lastMessage.role !== 'user' || lastMessage.content !== body) {
    if (config.features.imageInput && data.type === 'image' && !data.body && data.media.size <= config.limits.maxImageSize) {
      const url = apiBaseUrl + data.media.links.download.slice(3) + '?token=' + config.apiKey
      messages.push({
        role: 'user',
        content: [
          {
            type: 'image_url',
            image_url: { url }
          },
          data.media.caption ? { type: 'text', text: data.media.caption } : null
        ].filter(x => x)
      })
    } else {
      messages.push({ role: 'user', content: body })
    }
  }
  // Expose the configured tool functions to the AI model, if any
  const tools = (config.functions || []).filter(x => x && x.name).map(({ name, description, parameters, strict }) => (
    { type: 'function', function: { name, description, parameters, strict } }
  ))
  // Generate response using AI
  let completion = await ai.chat.completions.create({
    tools,
    messages,
    model: config.openaiModel,
    max_completion_tokens: config.limits.maxOutputTokens,
    temperature: config.inferenceParams.temperature,
    user: `${device.id}_${chat.id}`
  })
  // Reply with unknown / default response on invalid/error
  if (!completion.choices?.length) {
    const unknownCommand = `${config.unknownCommandMessage}\n\n${config.defaultMessage}`
    return await reply({ message: unknownCommand })
  }
  // Resolve tool function calls requested by the model, bounded to avoid
  // infinite call loops
  const maxCalls = 10
  let [response] = completion.choices
  let count = 0
  while (response?.message?.tool_calls?.length && count < maxCalls) {
    count += 1
    const responses = []
    // Store the tool calls in the prompt history
    messages.push({ role: 'assistant', tool_calls: response.message.tool_calls })
    // Execute the tool functions triggered by the AI
    const calls = response.message.tool_calls.filter(x => x.id && x.type === 'function')
    for (const call of calls) {
      const func = config.functions.find(x => x.name === call.function.name)
      if (func && typeof func.run === 'function') {
        const parameters = parseArguments(call.function.arguments)
        console.log('[info] run function:', call.function.name, parameters)
        // Run the function and get the response message
        const message = await func.run({ parameters, response, data, device, messages })
        if (message) {
          responses.push({ role: 'tool', content: message, tool_call_id: call.id })
        }
      } else if (!func) {
        console.error('[warning] missing function call in config.functions', call.function.name)
      }
    }
    if (!responses.length) {
      break
    }
    // Feed the tool results back and ask for a new completion
    messages.push(...responses)
    completion = await ai.chat.completions.create({
      tools,
      messages,
      temperature: 0.2,
      model: config.openaiModel,
      user: `${device.id}_${chat.id}`
    })
    // Stop on invalid/error completion
    if (!completion.choices?.length) {
      break
    }
    response = completion.choices[0]
    if (!response || response.finish_reason === 'stop') {
      break
    }
  }
  // Reply with the AI generated response
  if (completion.choices?.length) {
    return await reply({ message: response?.message?.content || config.unknownCommandMessage })
  }
  // Fallback to the default unknown response
  const unknownCommand = `${config.unknownCommandMessage}\n\n${config.defaultMessage}`
  await reply({ message: unknownCommand })
}