Mirror of https://github.com/siyuan-note/siyuan.git, synced 2025-12-17 15:10:12 +01:00.
✨ 桌面端接入 OpenAI Chat API https://github.com/siyuan-note/siyuan/issues/7560
This commit is contained in:
parent
7ae19716b6
commit
80a7ebc409
2 changed files with 32 additions and 10 deletions
|
|
@ -32,7 +32,6 @@ import {processRender} from "../util/processCode";
|
|||
import {getEventName} from "../util/compatibility";
|
||||
import {Dialog} from "../../dialog";
|
||||
import {isMobile} from "../../util/functions";
|
||||
import {progressLoading} from "../../dialog/processSystem";
|
||||
|
||||
export class Hint {
|
||||
public timeId: number;
|
||||
|
|
@ -565,15 +564,12 @@ ${unicode2Emoji(emoji.unicode, true)}</button>`;
|
|||
dialog.destroy();
|
||||
});
|
||||
btnsElement[1].addEventListener("click", () => {
|
||||
progressLoading({code: 1, cmd:"", data:"", msg:"", sid:""});
|
||||
fetchPost("/api/ai/chatGPT", {
|
||||
msg: inputElement.value,
|
||||
}, (response) => {
|
||||
progressLoading({code: 2, cmd:"", data:"", msg:"", sid:""});
|
||||
dialog.destroy();
|
||||
focusByRange(protyle.toolbar.range);
|
||||
insertHTML(`${inputElement.value}
|
||||
${response.data}`, protyle, true);
|
||||
insertHTML(`${inputElement.value}\n\n${response.data}`, protyle, true);
|
||||
});
|
||||
});
|
||||
return;
|
||||
|
|
|
|||
|
|
@ -17,7 +17,11 @@
|
|||
package util
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"github.com/88250/lute/html"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
|
|
@ -41,6 +45,9 @@ func ChatGPT(msg string) (ret string) {
|
|||
return
|
||||
}
|
||||
|
||||
PushEndlessProgress("Requesting...")
|
||||
defer ClearPushProgress(100)
|
||||
|
||||
config := gogpt.DefaultConfig(OpenAIAPIKey)
|
||||
if "" != OpenAIAPIProxy {
|
||||
proxyUrl, err := url.Parse(OpenAIAPIProxy)
|
||||
|
|
@ -64,16 +71,35 @@ func ChatGPT(msg string) (ret string) {
|
|||
},
|
||||
},
|
||||
}
|
||||
resp, err := c.CreateChatCompletion(ctx, req)
|
||||
|
||||
stream, err := c.CreateChatCompletionStream(ctx, req)
|
||||
if nil != err {
|
||||
logging.LogErrorf("create chat completion failed: %s", err)
|
||||
logging.LogErrorf("create chat completion stream failed: %s", err)
|
||||
return
|
||||
}
|
||||
defer stream.Close()
|
||||
|
||||
if 0 < len(resp.Choices) {
|
||||
ret = resp.Choices[0].Message.Content
|
||||
ret = strings.TrimSpace(ret)
|
||||
buf := bytes.Buffer{}
|
||||
for {
|
||||
resp, recvErr := stream.Recv()
|
||||
if errors.Is(recvErr, io.EOF) {
|
||||
break
|
||||
}
|
||||
|
||||
if nil != recvErr {
|
||||
logging.LogErrorf("create chat completion stream recv failed: %s", recvErr)
|
||||
break
|
||||
}
|
||||
|
||||
for _, choice := range resp.Choices {
|
||||
content := choice.Delta.Content
|
||||
buf.WriteString(content)
|
||||
PushEndlessProgress(html.EscapeHTMLStr(buf.String()))
|
||||
}
|
||||
}
|
||||
|
||||
ret = buf.String()
|
||||
ret = strings.TrimSpace(ret)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue