Commit
fix: resolve issue where the ERNIE Bot (文心一言) model access_token could not be refreshed after expiry
Fleurxxx committed Jul 8, 2024
1 parent aab88ea commit 1665c01
Showing 3 changed files with 15 additions and 8 deletions.
11 changes: 9 additions & 2 deletions app/controller/app-center/aiChat.ts
@@ -13,14 +13,21 @@ import { Controller } from 'egg';
 import { E_FOUNDATION_MODEL } from '../../lib/enum';
 
 export default class AiChatController extends Controller {
+
+
+  /**
+   * @router post /api/ai/chat path
+   * @summary Chat with the AI large model
+   * @description Returns the AI reply based on the role and the question messages
+   */
   public async aiChat() {
     const { ctx } = this;
-    const { foundationModel, messages } = ctx.request.body;
+    const { foundationModel, messages, accessToken } = ctx.request.body;
     this.ctx.logger.info('AI API request parameters, selected model:', foundationModel);
     if (!messages || !Array.isArray(messages)) {
       return this.ctx.helper.getResponseData('Not passing the correct message parameter');
     }
     const model = foundationModel?.model ?? E_FOUNDATION_MODEL.GPT_35_TURBO;
-    ctx.body = await ctx.service.appCenter.aiChat.getAnswerFromAi(messages, { model });
+    ctx.body = await ctx.service.appCenter.aiChat.getAnswerFromAi(messages, { model }, accessToken);
   }
 }
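
After this change, callers of the endpoint supply the token themselves instead of relying on a value baked into the server environment. A minimal sketch of such a request follows; the field names foundationModel, messages and accessToken come from the diff above, while the model string and the response handling are illustrative assumptions, not part of the commit.

// Hypothetical client-side call to POST /api/ai/chat after this commit.
// 'ERNIE-Bot-turbo' is an assumed value for E_FOUNDATION_MODEL.ERNIE_BOT_TURBO.
async function askErnieBot(question: string, accessToken: string): Promise<unknown> {
  const response = await fetch('/api/ai/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      foundationModel: { model: 'ERNIE-Bot-turbo' },
      messages: [{ role: 'user', content: question }],
      accessToken, // freshly refreshed token, no longer read from WENXIN_ACCESS_TOKEN on the server
    }),
  });
  return response.json();
}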
8 changes: 4 additions & 4 deletions app/service/app-center/aiChat.ts
@@ -30,8 +30,8 @@ export default class AiChat extends Service {
    * @return
    */
 
-  async getAnswerFromAi(messages: Array<AiMessage>, chatConfig: any) {
-    const answer = await this.requestAnswerFromAi(messages, chatConfig);
+  async getAnswerFromAi(messages: Array<AiMessage>, chatConfig: any, accessToken: string) {
+    const answer = await this.requestAnswerFromAi(messages, chatConfig, accessToken);
     const answerContent = answer.choices[0]?.message.content;
     // Extract the page code from the AI reply
     const codes = this.extractCode(answerContent);
@@ -45,13 +45,13 @@ });
     });
   }
 
-  async requestAnswerFromAi(messages: Array<AiMessage>, chatConfig: any) {
+  async requestAnswerFromAi(messages: Array<AiMessage>, chatConfig: any, accessToken: string) {
     const { ctx } = this;
     this.formatMessage(messages);
     let res: any = null;
     try {
       // Pick the configuration that matches the selected large model
-      const aiChatConfig = this.config.aiChat(messages);
+      const aiChatConfig = this.config.aiChat(messages, accessToken);
       const { httpRequestUrl, httpRequestOption } = aiChatConfig[chatConfig.model];
       this.ctx.logger.debug(httpRequestOption)
       res = await ctx.curl(httpRequestUrl, httpRequestOption);
4 changes: 2 additions & 2 deletions config/config.default.ts
@@ -247,7 +247,7 @@ export default (appInfo) => {
   };
 
   // AI large-model configuration; replace with your own service settings
-  config.aiChat = (messages = []) => {
+  config.aiChat = (messages = [], accessToken: string) => {
     return {
       [E_FOUNDATION_MODEL.GPT_35_TURBO]: {
         httpRequestUrl: (process.env.OPENAI_API_URL || 'https://api.openai.com')+'/v1/chat/completions',
@@ -279,7 +279,7 @@
         manufacturer: '!openai',
       },
       [E_FOUNDATION_MODEL.ERNIE_BOT_TURBO]: {
-        httpRequestUrl: `https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant?access_token=${process.env.WENXIN_ACCESS_TOKEN}`,
+        httpRequestUrl: `https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant?access_token=${accessToken}`,
         httpRequestOption: {
           ...commonRequestOption,
           data: {
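
The commit leaves obtaining a fresh token to the caller: Baidu access_tokens are issued by its OAuth endpoint at aip.baidubce.com and expire, which is why a static WENXIN_ACCESS_TOKEN environment variable eventually stops working. Below is a sketch of a refresh helper a caller might use before hitting /api/ai/chat; the WENXIN_API_KEY and WENXIN_SECRET_KEY variable names are assumptions, not defined by this repository.

// Hypothetical helper that requests a fresh ERNIE Bot access_token from Baidu's OAuth endpoint.
// WENXIN_API_KEY / WENXIN_SECRET_KEY are assumed env var names for the application's API key and secret key.
interface BaiduTokenResponse {
  access_token: string;
  expires_in: number; // token lifetime in seconds
}

async function refreshWenxinAccessToken(): Promise<string> {
  const url =
    'https://aip.baidubce.com/oauth/2.0/token' +
    '?grant_type=client_credentials' +
    `&client_id=${process.env.WENXIN_API_KEY}` +
    `&client_secret=${process.env.WENXIN_SECRET_KEY}`;
  const res = await fetch(url, { method: 'POST' });
  if (!res.ok) {
    throw new Error(`Token refresh failed with status ${res.status}`);
  }
  const data = (await res.json()) as BaiduTokenResponse;
  return data.access_token;
}

The returned token can then be passed as accessToken in the chat request body, or cached and reused until shortly before expires_in elapses to avoid refreshing on every call.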
