to:sync

commit 342fbf9a61 (parent 305718c382)
@@ -49,7 +49,7 @@ func InitAiGroup(g *echo.Group) {
 			return c.JSON(500, err)
 		}
 
-		result, err := remote_http.AiOpenRouter.Completions(context.Background(), req.Prompt, "")
+		result, err := remote_http.AiOllama.Completions(context.Background(), req.Prompt, "")
 		if err != nil || result == nil {
 			return c.JSON(http.StatusOK, utils.Failed(err.Error()))
 		}
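Note: the handler now routes prompts to the local Ollama backend instead of OpenRouter; both clients sit behind the same aiApi abstraction (see the var AiOllama hunk further down), so only this one call site changes. For reference, a minimal sketch of that interface, with the method signature reconstructed from the Completions implementations in this commit (the real declaration in infrastructure/remote_http is not shown here and may list more methods):

package remote_http

import (
	"context"

	"mylomen_server/common/dto"
)

// aiApi is the abstraction the echo handler codes against; swapping
// OpenRouter for Ollama only changes which implementation is called.
// The signature below is reconstructed from the Completions
// implementations in this commit and is a sketch, not the canonical file.
type aiApi interface {
	Completions(ctx context.Context, prompt, groupId string) (*dto.AiRes, error)
}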
@@ -12,6 +12,8 @@ fbConsul:
 # rpc
 rpc:
   api-golang: http://api-golang.consul.service:8080
+  wss: http://127.0.0.1:8080
+  ollama: https://localaiapi.mylomen.com
 
 # pgSql
 pgSql:
@@ -12,6 +12,8 @@ fbConsul:
 # rpc
 rpc:
   api-golang: http://api-golang.consul.service:8080
+  wss: http://127.0.0.1:8080
+  ollama: http://10.0.12.8:11434
 
 # pgSql
 pgSql:
@@ -21,6 +21,7 @@ type FbConsul struct {
 type Rpc struct {
 	ApiGolangAddress string `mapstructure:"api-golang" json:"api-golang" yaml:"api-golang"`
 	WssAddress       string `mapstructure:"wss" json:"wss" yaml:"wss"`
+	OllamaAddress    string `mapstructure:"ollama" json:"ollama" yaml:"ollama"`
 }
 
 // PgSql mysql config
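Note: the new ollama key added to both config files above reaches this struct through its mapstructure:"ollama" tag and is read by the new client as config.Instance.Rpc.OllamaAddress. A rough sketch of that plumbing, assuming the project decodes its YAML with viper (suggested by the mapstructure tags but not shown in this diff); the serverConfig wrapper and loadConfig helper are hypothetical names used only for illustration:

package config

import (
	"log"

	"github.com/spf13/viper"
)

// serverConfig is a hypothetical wrapper for illustration; the real struct
// that embeds Rpc is not part of this diff.
type serverConfig struct {
	Rpc Rpc `mapstructure:"rpc" json:"rpc" yaml:"rpc"`
}

var Instance serverConfig

// loadConfig sketches how the `ollama:` key would land in Rpc.OllamaAddress
// when the YAML is read with viper and decoded via the mapstructure tags.
func loadConfig(path string) {
	v := viper.New()
	v.SetConfigFile(path) // path to one of the YAML files edited above (assumed)
	if err := v.ReadInConfig(); err != nil {
		log.Fatalf("read config: %v", err)
	}
	if err := v.Unmarshal(&Instance); err != nil {
		log.Fatalf("unmarshal config: %v", err)
	}
	// After this, Instance.Rpc.OllamaAddress holds e.g. "http://10.0.12.8:11434".
}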
@@ -11,3 +11,4 @@ type aiApi interface {
 
 var AiApi aiApi = new(aiApiImpl)
 var AiOpenRouter aiApi = new(aiRouterImpl)
+var AiOllama aiApi = new(aiOllamaImpl)
infrastructure/remote_http/ai_ollama_imp.go (new file, +77)
@@ -0,0 +1,77 @@
+package remote_http
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"mylomen_server/common/config"
+	"mylomen_server/common/dto"
+	"mylomen_server/common/utils"
+	"time"
+)
+
+type aiOllamaImpl struct {
+}
+
+func (aiOllamaImpl) Completions(ctx context.Context, prompt, groupId string) (*dto.AiRes, error) {
+	logger := utils.NewLog("")
+
+	url := config.Instance.Rpc.OllamaAddress + "/api/chat"
+
+	var aiResult Result[dto.AiRes]
+
+	var result chatVO
+	resp, err := httpClient.R().
+		SetBody(map[string]interface{}{
+			"model":    "qwen2.5:0.5b",
+			"stream":   false,
+			"messages": []map[string]interface{}{{"role": "user", "content": prompt}},
+		}).SetHeaders(map[string]string{
+			"Content-Type": "application/json",
+		}).SetSuccessResult(&result).Post(url)
+	if err != nil {
+		logger.Error(fmt.Sprintf("remote_http_wx_SendMsg error: %s", err.Error()))
+		return nil, err
+	}
+
+	if !resp.IsSuccessState() {
+		logger.Error(fmt.Sprintf("remote_http_wx_SendMsg resp:%+v", resp))
+		return nil, errors.New("接口异常")
+	}
+
+	aiResult.Data.Completions = result.Message.Content
+
+	return &aiResult.Data, nil
+}
+
+type generateVO struct {
+	Model              string    `json:"model"`
+	CreatedAt          time.Time `json:"created_at"`
+	Response           string    `json:"response"`
+	Done               bool      `json:"done"`
+	DoneReason         string    `json:"done_reason"`
+	Context            []int     `json:"context"`
+	TotalDuration      int64     `json:"total_duration"`
+	LoadDuration       int64     `json:"load_duration"`
+	PromptEvalCount    int       `json:"prompt_eval_count"`
+	PromptEvalDuration int       `json:"prompt_eval_duration"`
+	EvalCount          int       `json:"eval_count"`
+	EvalDuration       int64     `json:"eval_duration"`
+}
+
+type chatVO struct {
+	Model     string    `json:"model"`
+	CreatedAt time.Time `json:"created_at"`
+	Message   struct {
+		Role    string `json:"role"`
+		Content string `json:"content"`
+	} `json:"message"`
+	DoneReason         string `json:"done_reason"`
+	Done               bool   `json:"done"`
+	TotalDuration      int64  `json:"total_duration"`
+	LoadDuration       int    `json:"load_duration"`
+	PromptEvalCount    int    `json:"prompt_eval_count"`
+	PromptEvalDuration int    `json:"prompt_eval_duration"`
+	EvalCount          int    `json:"eval_count"`
+	EvalDuration       int    `json:"eval_duration"`
+}
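Note: the new client posts to Ollama's /api/chat endpoint with "stream": false, and chatVO mirrors the shape of Ollama's non-streaming chat response (generateVO matches the /api/generate response and is not used elsewhere in this file). For reference, a standalone sketch of the same call using only the standard library, independent of the project's httpClient wrapper; the base URL and model name are copied from this commit and assume that model is already pulled on that instance:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// chatReply mirrors the fields of Ollama's non-streaming /api/chat response
// that this sketch cares about (the full payload has more fields, as in chatVO).
type chatReply struct {
	Message struct {
		Role    string `json:"role"`
		Content string `json:"content"`
	} `json:"message"`
	Done bool `json:"done"`
}

func main() {
	// Base URL and model taken from the commit; adjust to your own instance.
	base := "http://10.0.12.8:11434"
	body, _ := json.Marshal(map[string]interface{}{
		"model":    "qwen2.5:0.5b",
		"stream":   false,
		"messages": []map[string]string{{"role": "user", "content": "你好"}},
	})

	resp, err := http.Post(base+"/api/chat", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var reply chatReply
	if err := json.NewDecoder(resp.Body).Decode(&reply); err != nil {
		log.Fatal(err)
	}
	fmt.Println(reply.Message.Content)
}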
@@ -10,3 +10,8 @@ func TestOpenRouter(t *testing.T) {
 	res, err := AiOpenRouter.Completions(context.Background(), "你好", "test")
 	fmt.Println(res, err)
 }
+
+func TestOllamaRouter(t *testing.T) {
+	res, err := AiOllama.Completions(context.Background(), "红烧肉怎么做", "test")
+	fmt.Println(res, err)
+}
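Note: both tests call live backends, so TestOllamaRouter fails wherever the configured Ollama instance is unreachable. A possible variant (a sketch only; no such guard exists in this commit) skips instead of failing when no address is configured, assuming the config package is initialized before the tests run:

package remote_http

import (
	"context"
	"fmt"
	"testing"

	"mylomen_server/common/config"
)

// TestOllamaRouterGuarded is a hypothetical variant, not part of the commit:
// it skips the live integration call when no Ollama address is configured,
// so environments without network access do not report a failure.
func TestOllamaRouterGuarded(t *testing.T) {
	if config.Instance.Rpc.OllamaAddress == "" {
		t.Skip("no ollama address configured; skipping live Ollama test")
	}
	res, err := AiOllama.Completions(context.Background(), "红烧肉怎么做", "test")
	fmt.Println(res, err)
}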
@@ -23,8 +23,8 @@
   height: 100%;
   width: 100%;
-  top: 0px;
-  left: 0px;
+  top: 0;
+  left: 0;
 }
 
 .myPopWindow4Login {