-
Notifications
You must be signed in to change notification settings - Fork 531
Expand file tree
/
Copy pathcopilot.ts
More file actions
244 lines (224 loc) · 8.03 KB
/
copilot.ts
File metadata and controls
244 lines (224 loc) · 8.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
import { AIModel, AImodelsOptionsModel } from '@/types/copilot'
import { createOpenAI } from '@ai-sdk/openai'
import { createDeepSeek } from '@ai-sdk/deepseek'
import { createAnthropic } from '@ai-sdk/anthropic'
import { createXai } from '@ai-sdk/xai'
import basePrompt from './prompts/base.txt'
import clientCodeGen from './prompts/clientCodeGen.txt'
import payloadGen from './prompts/payloadGen.txt'
import emqxGuide from './prompts/emqxGuide.txt'
import mqttFaq from './prompts/mqttFaq.txt'
import funcGen from './prompts/funcGen.txt'
import protobufSchemaGen from './prompts/protobufSchemaGen.txt'
import avroSchemaGen from './prompts/avroSchemaGen.txt'
import mcpPrompt from './prompts/mcp.txt'
import mcpResultAnalysis from './prompts/mcpResultAnalysis.txt'
import { MCPPromptData } from '@/types/mcp'
import {
ALL_CODE_GENERATION_COMMAND_VALUES,
PAYLOAD_GENERATION_COMMAND_VALUES,
EMQX_COMMAND_VALUES,
MQTT_FAQ_COMMAND_VALUES,
CUSTOM_FUNCTION_COMMAND_VALUES,
ALL_SCHEMA_COMMAND_VALUES,
PROTOBUF_SCHEMA_COMMAND_VALUES,
AVRO_SCHEMA_COMMAND_VALUES,
} from './preset'
import { getCopilotMessageId } from '../idGenerator'
// Per-locale instruction appended to the end of every system prompt so the
// model answers in the user's UI language. Keys must match the app's
// `Language` codes; values intentionally repeat the language name in English
// (in parentheses) as a redundant hint for the model.
export const LANGUAGE_MAP = {
  zh: '请使用中文回答(简体中文)',
  en: 'Please answer in English(English)',
  tr: 'Lütfen Türkçe cevap verin(Turkish)',
  ja: '日本語で回答してください(Japanese)',
  hu: 'Kérjük, magyarul válaszoljon(Hungarian)',
}
export const loadSystemPrompt = (lang: Language, command?: string, mcpData?: MCPPromptData) => {
let _basePrompt = basePrompt
// Add MCP system prompt if MCP is enabled and available
if (mcpData && mcpData.hasMCP) {
let mcpSystemPrompt = mcpPrompt
.replace('{{SERVERS_SECTION}}', mcpData.serversSection)
.replace('{{TOOLS_SECTION}}', mcpData.toolsSection)
_basePrompt = `${_basePrompt}\n\n${mcpSystemPrompt}`
}
// Check if the command is related to code generation
if (command && ALL_CODE_GENERATION_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${clientCodeGen}`
}
// Check if the command is related to payload generation
if (command && PAYLOAD_GENERATION_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${payloadGen}`
}
// Check if the command is related to EMQX
if (command && EMQX_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${emqxGuide}`
}
// Check if the command is related to MQTT FAQ
if (command && MQTT_FAQ_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${mqttFaq}`
}
// Check if the command is related to custom function
if (command && CUSTOM_FUNCTION_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${funcGen}`
}
// Check if the command is related to schema generation
if (command && ALL_SCHEMA_COMMAND_VALUES.includes(command)) {
// Use the specific schema prompt based on the command (Protobuf or Avro)
if (command && PROTOBUF_SCHEMA_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${protobufSchemaGen}`
} else if (command && AVRO_SCHEMA_COMMAND_VALUES.includes(command)) {
_basePrompt = `${_basePrompt}\n\n${avroSchemaGen}`
}
}
return `${_basePrompt}\n\n${LANGUAGE_MAP[lang]}`
}
/**
 * Configuration for available AI models across different providers.
 *
 * This array defines the supported AI models for MQTTX Copilot, organized by provider.
 * Each provider entry includes:
 * - value: The provider name identifier
 * - children: Array of available models for that provider
 * - providerCreator: Function to create the appropriate provider instance
 *
 * The structure follows the AImodelsOptionsModel type defined in types/copilot.ts
 */
export const AImodelsOptions: AImodelsOptionsModel = [
  {
    value: 'OpenAI' as const,
    children: [
      { value: 'gpt-4o' },
      { value: 'gpt-4o-mini' },
      { value: 'o1' },
      { value: 'o1-mini' },
      { value: 'o1-preview' },
      { value: 'o3-mini' },
    ],
    providerCreator: createOpenAI,
  },
  {
    value: 'DeepSeek' as const,
    children: [{ value: 'deepseek-chat' }, { value: 'deepseek-reasoner' }],
    providerCreator: createDeepSeek,
  },
  {
    // `as const` added for consistency with the other provider entries
    value: 'Anthropic' as const,
    children: [
      { value: 'claude-3-7-sonnet-20250219' },
      { value: 'claude-3-7-sonnet-20250219-thinking' },
      { value: 'claude-3-5-sonnet-latest' },
      { value: 'claude-3-5-haiku-latest' },
      { value: 'claude-3-opus-latest' },
      { value: 'claude-3-haiku-20240307' },
    ],
    providerCreator: createAnthropic,
  },
  {
    value: 'xAI' as const,
    children: [
      { value: 'grok-2-1212' },
      { value: 'grok-3-mini-fast-beta' },
      { value: 'grok-3-mini-beta' },
      { value: 'grok-3-fast-beta' },
      { value: 'grok-3-beta' },
    ],
    providerCreator: createXai,
  },
  {
    // SiliconFlow exposes an OpenAI-compatible API, hence createOpenAI
    value: 'SiliconFlow' as const,
    children: [
      { value: 'deepseek-ai/DeepSeek-V3' },
      { value: 'deepseek-ai/DeepSeek-R1' },
      { value: 'Qwen/Qwen2-VL-72B-Instruct' },
      { value: 'Qwen/Qwen2.5-72B-Instruct' },
    ],
    providerCreator: createOpenAI,
  },
]
/**
 * Default API host options for different AI providers.
 *
 * This array contains the base URLs for various AI service providers:
 * - OpenAI API endpoint
 * - DeepSeek API endpoint
 * - Anthropic API endpoint
 * - xAI API endpoint
 * - SiliconFlow API endpoint
 *
 * These URLs are used when configuring the API client for each provider.
 */
export const AIAPIHostOptions = [
  {
    // OpenAI
    value: 'https://api.openai.com/v1',
  },
  {
    // DeepSeek
    value: 'https://api.deepseek.com/v1',
  },
  {
    // Anthropic
    value: 'https://api.anthropic.com/v1',
  },
  {
    // xAI
    value: 'https://api.x.ai/v1',
  },
  {
    // SiliconFlow
    value: 'https://api.siliconflow.cn/v1',
  },
]
// Matches model identifiers that expose reasoning output: "thinking" variants,
// DeepSeek's reasoner, and R1-family models (case-insensitive substring match).
export const REASONING_MODEL_REGEX = /thinking|reasoner|r1/i
/**
 * Checks whether the given model emits reasoning content.
 *
 * @param model - The AI model identifier to inspect
 * @returns Whether the model name indicates reasoning support
 */
export const isReasoningModel = (model: AIModel): boolean => REASONING_MODEL_REGEX.test(model)
/**
 * Creates a configured model instance for the requested model, endpoint, and key.
 *
 * Looks the model up in AImodelsOptions to select the matching provider
 * factory; unknown models fall back to the OpenAI-compatible provider.
 *
 * @param opts - The options for creating the model provider
 * @param opts.model - The AI model to use
 * @param opts.baseURL - The base URL for the API
 * @param opts.apiKey - The API key for authentication
 * @returns A configured provider for the specified model
 */
export const getModelProvider = (opts: { model: AIModel; baseURL: string; apiKey: string }) => {
  const { model, baseURL, apiKey } = opts
  const matchedProvider = AImodelsOptions.find(({ children }) => children.some(({ value }) => value === model))
  const createProvider = matchedProvider?.providerCreator || createOpenAI
  return createProvider({ baseURL, apiKey })(model)
}
/**
 * Builds the four-message conversation used to analyze an MCP tool-call result:
 * a system prompt (analysis instructions + language directive), the original
 * user request, the assistant's tool output, and the follow-up user prompt.
 *
 * @param currentLang - UI language; selects the answer-language instruction
 * @param firstUserMessage - The user's original request
 * @param assistantContent - The assistant/tool output to analyze
 * @param userPrompt - The follow-up prompt driving the analysis
 * @returns The ordered message list, each with a fresh Copilot message id
 */
export const buildMCPAnalysisMessages = (
  currentLang: Language,
  firstUserMessage: string,
  assistantContent: string,
  userPrompt: string,
): CopilotMessage[] => {
  const makeMessage = (role: 'system' | 'user' | 'assistant', content: string): CopilotMessage => ({
    id: getCopilotMessageId(),
    role,
    content,
  })
  return [
    makeMessage('system', `${mcpResultAnalysis}\n\n${LANGUAGE_MAP[currentLang]}`),
    makeMessage('user', firstUserMessage),
    makeMessage('assistant', assistantContent),
    makeMessage('user', userPrompt),
  ]
}
/**
 * Returns the conditions for determining when to continue or stop MCP analysis.
 *
 * Analysis continues while the content still contains an `mcp-result` block
 * and no completion marker has been emitted.
 *
 * @returns An object containing two functions:
 *  - shouldContinue: returns true if analysis should continue
 *  - stopCondition: returns true if analysis should stop (the exact negation
 *    of shouldContinue)
 */
export const getMCPAnalysisConditions = () => {
  const COMPLETION_MARKERS = ['[DONE]']
  const hasCompletionMarker = (content: string) => COMPLETION_MARKERS.some((marker) => content.includes(marker))
  const shouldContinue = (content: string) => content.includes('mcp-result') && !hasCompletionMarker(content)
  // Simplified: the original `hasMarker || !shouldContinue` was redundant,
  // since `!shouldContinue` already covers the marker case.
  const stopCondition = (content: string) => !shouldContinue(content)
  return {
    shouldContinue,
    stopCondition,
  }
}