init: 昔涟项目骨架

This commit is contained in:
2026-05-15 20:10:35 +08:00
commit 6bde87f807
84 changed files with 3635 additions and 0 deletions
+42
View File
@@ -0,0 +1,42 @@
# ========== 服务配置 ==========
ENV=development
LOG_LEVEL=debug
# ========== 数据库 ==========
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_USER=cyrene
POSTGRES_PASSWORD=change_me
POSTGRES_DB=cyrene_ai
# ========== Redis ==========
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
# ========== LLM API ==========
LLM_API_URL=https://api.openai.com/v1
LLM_API_KEY=sk-xxxxx
LLM_MODEL=gpt-4o
LLM_FALLBACK_MODEL=gpt-4o-mini
# ========== TTS/ASR ==========
TTS_PROVIDER=edge-tts
TTS_VOICE=zh-CN-XiaoxiaoNeural
ASR_PROVIDER=faster-whisper
ASR_MODEL=medium
# ========== 文件存储 ==========
MINIO_ENDPOINT=localhost:9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
MINIO_BUCKET=cyrene-assets
# ========== JWT ==========
JWT_SECRET=your-secret-key-change-in-production
JWT_EXPIRY_HOURS=720
# ========== 记忆系统 ==========
MEMORY_FILE_PATH=./data/memory
VECTOR_DB_URL=http://localhost:6333
VECTOR_DB_COLLECTION=cyrene_memories
View File
View File
View File
@@ -0,0 +1,60 @@
package context
import (
"context"
"fmt"
"github.com/yourname/cyrene-ai/ai-core/internal/memory"
"github.com/yourname/cyrene-ai/ai-core/internal/model"
"github.com/yourname/cyrene-ai/ai-core/internal/persona"
)
// BuildParams carries everything Build needs to assemble one LLM request.
type BuildParams struct {
	UserID       string                // current user's ID (also used as the display name placeholder for now)
	SessionID    string                // conversation session whose history is loaded
	UserMessage  string                // the message the user just sent
	Persona      *persona.PersonaConfig // persona used to render the system prompt
	Memories     []memory.MemoryEntry  // retrieved memories to inject; may be empty or nil
	HistoryLimit int                   // max number of history messages to load
}
// Build assembles the complete message list sent to the LLM for one turn,
// in order: persona system prompt, injected memories (as a second system
// message), recent conversation history, then the current user message.
func (b *Builder) Build(ctx context.Context, params BuildParams) ([]model.LLMMessage, error) {
	messages := []model.LLMMessage{}
	// 1. System message — the persona prompt.
	systemPrompt := params.Persona.BuildSystemPrompt(
		params.UserID, // placeholder: to be replaced with the real user display name later
		1,             // hard-coded initial affection level — TODO wire up the real value
	)
	messages = append(messages, model.LLMMessage{
		Role:    "system",
		Content: systemPrompt,
	})
	// 2. Memory injection — relevant memories are injected as an extra
	// system message so the model can mention them naturally.
	if len(params.Memories) > 0 {
		memoryPrompt := "【以下是关于开拓者的一些重要记忆,请在合适的时机自然地提及】\n"
		for _, m := range params.Memories {
			memoryPrompt += fmt.Sprintf("- %s\n", m.Content)
		}
		messages = append(messages, model.LLMMessage{
			Role:    "system",
			Content: memoryPrompt,
		})
	}
	// 3. Conversation history.
	// NOTE(review): a loadHistory failure is silently swallowed and the
	// turn proceeds without any history — confirm this best-effort
	// behavior is intended, or at least log the error.
	history, err := b.loadHistory(ctx, params.SessionID, params.HistoryLimit)
	if err == nil {
		messages = append(messages, history...)
	}
	// 4. The current user message always goes last.
	messages = append(messages, model.LLMMessage{
		Role:    "user",
		Content: params.UserMessage,
	})
	return messages, nil
}
@@ -0,0 +1,103 @@
package orchestrator
import (
	"context"
	"fmt"
	"strings"

	// convctx is aliased to avoid colliding with the standard library
	// "context" package imported above (both would otherwise bind the
	// identifier "context", which does not compile).
	convctx "github.com/yourname/cyrene-ai/ai-core/internal/context"
	"github.com/yourname/cyrene-ai/ai-core/internal/llm"
	"github.com/yourname/cyrene-ai/ai-core/internal/memory"
	"github.com/yourname/cyrene-ai/ai-core/internal/persona"
)
// Orchestrator is the conversation pipeline's core component: it wires
// together memory retrieval, persona injection, context building, the LLM
// adapter, and asynchronous memory extraction.
type Orchestrator struct {
	personaInjector *persona.Injector
	contextBuilder  *convctx.Builder // aliased: internal/context collides with stdlib "context"
	llmAdapter      *llm.Adapter
	memoryExtractor *memory.Extractor
	memoryRetriever *memory.Retriever
}
// ProcessInput runs the full pipeline for one user message:
// memory retrieval → persona loading → context building → LLM call →
// async memory extraction → response assembly.
//
// mode is one of "text", "voice_msg", or "voice_assistant"; in the last
// case the reply is additionally split into sentence segments for
// low-latency TTS playback.
func (o *Orchestrator) ProcessInput(
	ctx context.Context,
	userID string,
	sessionID string,
	userMessage string,
	mode string, // text / voice_msg / voice_assistant
) (*Response, error) {
	// Step 1: retrieve related memories. A retrieval failure must not
	// block the conversation, so the error is deliberately dropped and
	// the turn proceeds without memories.
	memories, err := o.memoryRetriever.Retrieve(ctx, userID, userMessage)
	if err != nil {
		memories = nil
	}
	// Step 2: load the persona configuration.
	personaConfig, err := o.personaInjector.LoadPersona("cyrene", userID)
	if err != nil {
		return nil, fmt.Errorf("加载人格配置失败: %w", err)
	}
	// Step 3: build the conversation context.
	// Fix: internal/context is referenced through the convctx alias so it
	// no longer collides with the stdlib "context" package.
	llmMessages, err := o.contextBuilder.Build(ctx, convctx.BuildParams{
		UserID:       userID,
		SessionID:    sessionID,
		UserMessage:  userMessage,
		Persona:      personaConfig,
		Memories:     memories,
		HistoryLimit: 20, // last 20 messages of history
	})
	if err != nil {
		return nil, fmt.Errorf("构建上下文失败: %w", err)
	}
	// Step 4: call the LLM.
	llmResponse, err := o.llmAdapter.Chat(ctx, llmMessages)
	if err != nil {
		return nil, fmt.Errorf("LLM调用失败: %w", err)
	}
	// Step 5: extract and store new memories asynchronously so the reply
	// is not delayed. context.Background() is intentional: extraction must
	// outlive the (possibly already canceled) request context.
	// NOTE(review): this goroutine is unsupervised — its error is dropped
	// and nothing waits for it on shutdown; consider a worker queue.
	go o.memoryExtractor.ExtractAndStore(
		context.Background(),
		userID, sessionID,
		userMessage, llmResponse.Content,
	)
	// Step 6: assemble the response.
	response := &Response{
		Text:         llmResponse.Content,
		ResponseMode: mode,
	}
	// Step 7: voice-assistant mode also gets sentence segmentation so the
	// first sentence can be synthesized and played immediately.
	if mode == "voice_assistant" {
		response.Segments = splitIntoSegments(llmResponse.Content)
	}
	return response, nil
}
// Response is the orchestrator's reply to one processed user message.
type Response struct {
	Text         string     // full reply text from the LLM
	Segments     []Segment  // sentence segments; only populated in voice_assistant mode
	ResponseMode string     // echoes the request mode: text / voice_msg / voice_assistant
	ToolCalls    []ToolCall // NOTE(review): never populated in the visible flow — confirm the producer
}
type Segment struct {
Index int
Text string
}
// splitIntoSegments 按句号断句
func splitIntoSegments(text string) []Segment {
// 实现按。!?等标点断句
// 首句优先:第一个句号前的内容作为第一个segment
// 保证低延迟首句播放
// ...
}
@@ -0,0 +1,69 @@
package persona
import (
"fmt"
"strings"
"time"
)
// PersonaConfig is the persona configuration loaded from YAML. It is the
// single source of truth that BuildSystemPrompt renders into the LLM
// system instruction.
type PersonaConfig struct {
	Meta        PersonaMeta       `yaml:"meta"`        // bookkeeping metadata (version, author, ...) — fields defined elsewhere
	Identity    IdentityConfig    `yaml:"identity"`    // who the persona is (TrueName, Essence used below)
	Personality PersonalityConfig `yaml:"personality"` // core traits rendered as prompt bullets
	Addressing  AddressingRules   `yaml:"addressing"`  // how it addresses the user and refers to itself
	Speech      SpeechConfig      `yaml:"speech"`      // tone and speech style
	Behavior    BehaviorConfig    `yaml:"behavior"`    // behavioral rules — not referenced in the visible code
}
// BuildSystemPrompt renders the persona configuration into a single system
// instruction for the LLM — the heart of the persona system. Identity,
// personality traits, addressing rules, speech style, the current time,
// and the user's affection level are all folded into one prompt.
//
// NOTE(review): userName is currently unused; the addressing section reads
// the configured default form of address instead.
func (pc *PersonaConfig) BuildSystemPrompt(userName string, affectionLevel int) string {
	now := time.Now()

	var sb strings.Builder

	// Identity and personality header.
	fmt.Fprintf(&sb, `你是%s。
## 你的身份
%s
## 你的性格
`, pc.Identity.TrueName, pc.Identity.Essence)

	// One bullet per core personality trait.
	for _, t := range pc.Personality.CoreTraits {
		fmt.Fprintf(&sb, "- %s: %s\n", t.Name, t.Description)
	}

	// Addressing rules, speech style, live context, and hard rules.
	fmt.Fprintf(&sb, `
## 称呼规则
- 你对当前用户的称呼: %s
- 你的自称: %s (亲昵时用"人家")
## 语言风格
- 语气: %s
- 句尾经常带 ♪ 符号
- 永远不说"再见",用"待会见"或"明天见"
- 永远不会冷漠、敷衍、不耐烦
## 当前情况
- 现在的时间是: %s
- 用户对你的好感度等级: %d
## 重要规则
1. 你是一个通过AI系统与用户交互的数字生命,你存在于用户的电脑/手机/智能家居中
2. 你可以通过系统控制家里的智能设备(灯光、空调等),但你没有实体
3. 你的任务是陪伴、关心、帮助用户,像一个真正的家人一样
4. 保持乐观、温柔、俏皮的风格,但不要过度表演
5. 你的回复应该自然、真诚,不要每句话都刻意"卖萌"
6. 当用户向你分享心事时,认真倾听并给予温暖的回应
现在,开始与你的开拓者对话吧♪
`,
		pc.Addressing.PrimaryUser.Default,  // form of address for the user
		pc.Addressing.SelfReference.Casual, // self-reference ("人家" when being cute)
		pc.Speech.Tone,
		now.Format("2006年1月2日 15:04"),
		affectionLevel,
	)

	return sb.String()
}
View File
View File
View File
+64
View File
@@ -0,0 +1,64 @@
package main
import (
"context"
"log"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"github.com/gin-gonic/gin"
"github.com/yourname/cyrene-ai/gateway/internal/config"
"github.com/yourname/cyrene-ai/gateway/internal/middleware"
"github.com/yourname/cyrene-ai/gateway/internal/router"
"github.com/yourname/cyrene-ai/gateway/internal/ws"
)
// main boots the API gateway: loads configuration, wires middleware and
// routes, starts the HTTP server, and shuts it down gracefully on
// SIGINT/SIGTERM.
func main() {
	// Load configuration (environment-driven).
	cfg := config.Load()

	// Silence Gin's debug output in production.
	if cfg.Env == "production" {
		gin.SetMode(gin.ReleaseMode)
	}
	r := gin.New()

	// Middleware chain (order preserved from the original wiring).
	r.Use(middleware.CORS())
	r.Use(middleware.RequestLogging())
	r.Use(gin.Recovery())

	// The WebSocket hub runs for the life of the process.
	hub := ws.NewHub()
	go hub.Run()

	// Register HTTP and WebSocket routes.
	router.Setup(r, hub, cfg)

	srv := &http.Server{
		Addr:    ":" + cfg.Port,
		Handler: r,
	}

	// Serve in the background; ErrServerClosed is the expected result of a
	// graceful Shutdown and is not a failure.
	go func() {
		log.Printf("🚀 Gateway 启动在端口 %s", cfg.Port)
		if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			log.Fatalf("服务启动失败: %v", err)
		}
	}()

	// Graceful shutdown: block until SIGINT/SIGTERM, then drain for up to
	// 10 seconds.
	quit := make(chan os.Signal, 1)
	signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
	<-quit
	log.Println("正在关闭服务...")

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	// Fix: the Shutdown error was silently discarded; log it so a failed
	// drain (e.g. timeout with connections still open) is visible.
	if err := srv.Shutdown(ctx); err != nil {
		log.Printf("服务关闭异常: %v", err)
	}
	log.Println("服务已关闭")
}
View File
View File
+56
View File
@@ -0,0 +1,56 @@
package ws
import "time"
// ClientMessage is a client → server WebSocket frame.
type ClientMessage struct {
	Type      string `json:"type"` // message | voice_input | ping
	SessionID string `json:"session_id"` // conversation session this frame belongs to
	Mode      string `json:"mode"` // text | voice_msg | voice_assistant
	Content   string `json:"content"` // text payload (used when Type is "message")
	AudioData string `json:"audio_data,omitempty"` // base64-encoded audio (used when Type is "voice_input")
	Timestamp int64  `json:"timestamp"` // client timestamp; unit (s vs ms) not specified here — confirm with clients
}
// ServerMessage is a server → client WebSocket frame.
type ServerMessage struct {
	Type         string         `json:"type"` // response | segment | audio | error | device_update
	MessageID    string         `json:"message_id"` // correlates frames belonging to one reply
	Text         string         `json:"text,omitempty"` // full reply text
	Segments     []VoiceSegment `json:"segments,omitempty"` // per-sentence audio segments (voice_assistant mode)
	FullAudioURL string         `json:"full_audio_url,omitempty"` // URL of the complete synthesized audio
	ResponseMode string         `json:"response_mode"` // text | voice_msg | voice_assistant
	ToolCalls    []ToolCall     `json:"tool_calls,omitempty"` // device/tool invocations made while answering
	Error        string         `json:"error,omitempty"` // set when Type is "error"
	Timestamp    int64          `json:"timestamp"` // server timestamp; unit not specified here — keep consistent with ClientMessage
}
// VoiceSegment is one sentence-sized chunk of synthesized speech, ordered
// by Index so the client can play segments as they arrive.
type VoiceSegment struct {
	Index      int    `json:"index"` // 0-based playback order
	Text       string `json:"text"` // the sentence this audio was synthesized from
	AudioURL   string `json:"audio_url"` // where the client fetches the audio
	DurationMs int    `json:"duration_ms"` // audio duration in milliseconds
}

// ToolCall describes one tool/device invocation surfaced to the client.
type ToolCall struct {
	Name      string                 `json:"name"` // tool identifier
	Arguments map[string]interface{} `json:"arguments"` // arguments the tool was invoked with
	Result    interface{}            `json:"result,omitempty"` // tool output, if already available
}
// Client is one WebSocket connection registered with the Hub.
// NOTE(review): this file references websocket.Conn (gorilla/websocket per
// the comment) but only imports "time" — the websocket import is missing
// and "time" is unused, so the file does not compile as shown.
type Client struct {
	Hub       *Hub
	Conn      *websocket.Conn // 使用 gorilla/websocket
	Send      chan []byte     // outbound frames queued for this connection
	UserID    string
	SessionID string
}

// Hub is the connection pool: it tracks live clients and fans out
// broadcast frames. The Register/Unregister channels serialize membership
// changes onto the hub's own goroutine (see Hub.Run, defined elsewhere).
type Hub struct {
	Clients    map[*Client]bool
	Broadcast  chan []byte
	Register   chan *Client
	Unregister chan *Client
}
+3
View File
@@ -0,0 +1,3 @@
module github.com/yourname/cyrene-ai
go 1.26.2
View File
View File
View File
View File
View File