Gavin-Group / Pangea-Agent

Commit b35d4689
authored Dec 16, 2025 by ligaowei
Add handling for LLM 401 errors: when a 401 Unauthorized error is received, prompt the user to configure the API key.
parent 9e67a5c4
Showing 5 changed files with 2656 additions and 0 deletions (+2656 -0)
AgentChatService.java       .../src/main/java/pangea/hiagent/agent/AgentChatService.java    +832  -0
ReActService.java           backend/src/main/java/pangea/hiagent/agent/ReActService.java    +445  -0
LlmConfigController.java    ...n/java/pangea/hiagent/controller/LlmConfigController.java    +218  -0
DefaultReactExecutor.java   ...c/main/java/pangea/hiagent/tool/DefaultReactExecutor.java    +569  -0
ChatArea.vue                frontend/src/components/ChatArea.vue                            +592  -0
backend/src/main/java/pangea/hiagent/agent/AgentChatService.java  0 → 100644
This diff is collapsed.
backend/src/main/java/pangea/hiagent/agent/ReActService.java  0 → 100644
This diff is collapsed.
backend/src/main/java/pangea/hiagent/controller/LlmConfigController.java  0 → 100644
package pangea.hiagent.controller;

import com.baomidou.mybatisplus.core.metadata.IPage;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import pangea.hiagent.dto.ApiResponse;
import pangea.hiagent.dto.PageData;
import pangea.hiagent.model.LlmConfig;
import pangea.hiagent.service.LlmConfigService;
import pangea.hiagent.llm.LlmModelFactory;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import java.util.List;
import java.util.stream.Collectors;

/**
 * LLM configuration API controller
 */
@Slf4j
@RestController
@RequestMapping("/api/v1/llm-config")
public class LlmConfigController {

    private final LlmConfigService llmConfigService;
    // LlmModelFactory is injected so configurations can be tested
    private final LlmModelFactory llmModelFactory;

    public LlmConfigController(LlmConfigService llmConfigService, LlmModelFactory llmModelFactory) {
        this.llmConfigService = llmConfigService;
        this.llmModelFactory = llmModelFactory;
    }

    /**
     * Create an LLM configuration
     */
    @PostMapping
    public ApiResponse<LlmConfig> createLlmConfig(@RequestBody LlmConfig config) {
        try {
            LlmConfig created = llmConfigService.createLlmConfig(config);
            return ApiResponse.success(created, "创建LLM配置成功");
        } catch (Exception e) {
            log.error("创建LLM配置失败", e);
            return ApiResponse.error(5001, "创建LLM配置失败: " + e.getMessage());
        }
    }

    /**
     * Update an LLM configuration
     */
    @PutMapping("/{id}")
    public ApiResponse<LlmConfig> updateLlmConfig(@PathVariable String id, @RequestBody LlmConfig config) {
        try {
            config.setId(id);
            LlmConfig updated = llmConfigService.updateLlmConfig(config);
            return ApiResponse.success(updated, "更新LLM配置成功");
        } catch (Exception e) {
            log.error("更新LLM配置失败", e);
            return ApiResponse.error(5001, "更新LLM配置失败: " + e.getMessage());
        }
    }

    /**
     * Delete an LLM configuration
     */
    @DeleteMapping("/{id}")
    public ApiResponse<Void> deleteLlmConfig(@PathVariable String id) {
        try {
            llmConfigService.deleteLlmConfig(id);
            return ApiResponse.success(null, "删除LLM配置成功");
        } catch (Exception e) {
            log.error("删除LLM配置失败", e);
            return ApiResponse.error(5001, "删除LLM配置失败: " + e.getMessage());
        }
    }

    /**
     * Get the details of an LLM configuration
     */
    @GetMapping("/{id}")
    public ApiResponse<LlmConfig> getLlmConfig(@PathVariable String id) {
        try {
            LlmConfig config = llmConfigService.getLlmConfig(id);
            if (config == null) {
                return ApiResponse.error(4001, "LLM配置不存在");
            }
            return ApiResponse.success(config);
        } catch (Exception e) {
            log.error("获取LLM配置详情失败", e);
            return ApiResponse.error(5001, "获取LLM配置详情失败: " + e.getMessage());
        }
    }

    /**
     * List LLM configurations with pagination
     */
    @GetMapping("/list")
    public ApiResponse<PageData<LlmConfig>> listLlmConfigs(
            @RequestParam(defaultValue = "1") Long current,
            @RequestParam(defaultValue = "10") Long size,
            @RequestParam(required = false) String name,
            @RequestParam(required = false) String provider) {
        try {
            IPage<LlmConfig> page = llmConfigService.pageLlmConfigs(current, size, name, provider);
            return ApiResponse.success(PageData.from(page));
        } catch (Exception e) {
            log.error("获取LLM配置列表失败", e);
            return ApiResponse.error(5001, "获取LLM配置列表失败: " + e.getMessage());
        }
    }

    /**
     * Get the list of enabled LLM configurations
     */
    @GetMapping("/enabled")
    public ApiResponse<PageData<LlmConfig>> getEnabledLlmConfigs() {
        try {
            // Fetch all enabled configurations
            var configs = llmConfigService.getEnabledLlmConfigs();
            // Wrap them in the page-data format (simplified here; real pagination should be applied)
            PageData<LlmConfig> pageData = new PageData<>();
            pageData.setRecords(configs);
            pageData.setTotal((long) configs.size());
            return ApiResponse.success(pageData);
        } catch (Exception e) {
            log.error("获取启用的LLM配置列表失败", e);
            return ApiResponse.error(5001, "获取启用的LLM配置列表失败: " + e.getMessage());
        }
    }

    /**
     * Test whether an LLM configuration is valid
     */
    @PostMapping("/{id}/test")
    public ApiResponse<String> testLlmConfig(@PathVariable String id) {
        try {
            log.info("开始测试LLM配置,ID: {}", id);
            // Load the LLM configuration
            LlmConfig config = llmConfigService.getLlmConfig(id);
            if (config == null) {
                log.warn("LLM配置不存在,ID: {}", id);
                return ApiResponse.error(4001, "LLM配置不存在");
            }
            log.info("获取到LLM配置: {}", config);
            if (!config.getEnabled()) {
                log.warn("LLM配置未启用,ID: {}", id);
                return ApiResponse.error(4001, "LLM配置未启用");
            }
            // Create a ChatModel instance through LlmModelFactory to run the test
            log.info("开始创建ChatModel实例");
            ChatModel chatModel = llmModelFactory.createChatModel(config);
            log.info("成功创建ChatModel实例");
            // Build a simple test prompt
            PromptTemplate promptTemplate = new PromptTemplate("请用中文回答:你好世界");
            Prompt prompt = promptTemplate.create();
            // Try to call the model
            log.info("开始调用模型");
            org.springframework.ai.chat.model.ChatResponse response = chatModel.call(prompt);
            String result = response.getResult().getOutput().toString();
            log.info("模型调用成功,结果: {}", result);
            return ApiResponse.success("测试成功:" + result, "LLM配置测试成功");
        } catch (Exception e) {
            log.error("测试LLM配置失败", e);
            // Check whether this is a 401 Unauthorized error
            if (isUnauthorizedError(e)) {
                log.error("LLM返回401未授权错误: {}", e.getMessage());
                return ApiResponse.error(5001, " 请配置API密钥");
            } else {
                return ApiResponse.error(5001, "测试LLM配置失败: " + e.getMessage());
            }
        }
    }

    /**
     * Get the names of all available LLM providers
     */
    @GetMapping("/providers")
    public ApiResponse<List<String>> getAvailableProviders() {
        try {
            List<String> providers = llmModelFactory.getModelAdapterManager().getAdapters().keySet().stream()
                    .map(String::toUpperCase)
                    .sorted()
                    .collect(Collectors.toList());
            return ApiResponse.success(providers);
        } catch (Exception e) {
            log.error("获取LLM提供商列表失败", e);
            return ApiResponse.error(5001, "获取LLM提供商列表失败: " + e.getMessage());
        }
    }

    /**
     * Determine whether an exception is a 401 Unauthorized error
     * @param e the exception
     * @return whether it is a 401 error
     */
    private boolean isUnauthorizedError(Throwable e) {
        if (e == null) {
            return false;
        }
        // Check whether the exception message contains 401 Unauthorized
        String message = e.getMessage();
        if (message != null && (message.contains("401 Unauthorized") || message.contains("Unauthorized"))) {
            return true;
        }
        // Recursively check the cause chain
        return isUnauthorizedError(e.getCause());
    }
}
\ No newline at end of file
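The new 401 handling matches on the exception message and walks the cause chain, so a 401 that the HTTP client wraps in another exception is still detected and mapped to the "请配置API密钥" hint. Below is a minimal standalone sketch of that behavior; the helper is copied only for illustration, and the wrapping exceptions and message texts are invented examples, not the actual client errors.

public class UnauthorizedCheckDemo {

    // Same message-plus-cause walk as LlmConfigController#isUnauthorizedError.
    static boolean isUnauthorizedError(Throwable e) {
        if (e == null) {
            return false;
        }
        String message = e.getMessage();
        if (message != null && message.contains("Unauthorized")) {
            return true;
        }
        return isUnauthorizedError(e.getCause());
    }

    public static void main(String[] args) {
        // Hypothetical wrapping: the provider's 401 is nested two causes deep.
        Throwable providerError = new RuntimeException("401 Unauthorized - invalid API key");
        Throwable wrapped = new IllegalStateException("LLM call failed",
                new RuntimeException("request aborted", providerError));

        System.out.println(isUnauthorizedError(wrapped));                         // prints true
        System.out.println(isUnauthorizedError(new RuntimeException("timeout"))); // prints false
    }
}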
backend/src/main/java/pangea/hiagent/tool/DefaultReactExecutor.java  0 → 100644
This diff is collapsed.
frontend/src/components/ChatArea.vue  0 → 100644
This diff is collapsed.