feat: add MCP diary tools and id search shortcuts

This commit is contained in:
president
2026-02-07 17:01:13 +09:00
parent 0c5ea5269e
commit 943dbd6ef5
2 changed files with 844 additions and 10 deletions

View File

@@ -177,6 +177,70 @@ func handleToolCall(ctx context.Context, tools *mcp.ToolSet, raw json.RawMessage
}
res, err := tools.DevLogSearch(ctx, in)
return formatResult(res, err)
case "dev_log_search_by_id":
var in mcp.DevLogSearchByIDInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DevLogSearchByID(ctx, in)
return formatResult(res, err)
case "diary_upsert":
var in mcp.DiaryUpsertInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DiaryUpsert(ctx, in)
return formatResult(res, err)
case "diary_search":
var in mcp.DiarySearchInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DiarySearch(ctx, in)
return formatResult(res, err)
case "diary_search_by_id":
var in mcp.DiarySearchByIDInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DiarySearchByID(ctx, in)
return formatResult(res, err)
case "dev_log_delete":
var in mcp.DevLogDeleteInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DevLogDelete(ctx, in)
return formatResult(res, err)
case "trade_event_delete":
var in mcp.TradeEventDeleteInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.TradeEventDelete(ctx, in)
return formatResult(res, err)
case "doc_delete":
var in mcp.DocDeleteInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DocDelete(ctx, in)
return formatResult(res, err)
case "security_delete":
var in mcp.SecurityDeleteInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.SecurityDelete(ctx, in)
return formatResult(res, err)
case "trade_event_upsert":
var in mcp.TradeEventUpsertInput
b, _ := marshal(params.Arguments)
@@ -193,6 +257,14 @@ func handleToolCall(ctx context.Context, tools *mcp.ToolSet, raw json.RawMessage
}
res, err := tools.TradeEventSearch(ctx, in)
return formatResult(res, err)
case "trade_event_search_by_id":
var in mcp.TradeEventSearchByIDInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.TradeEventSearchByID(ctx, in)
return formatResult(res, err)
case "doc_upsert":
var in mcp.DocUpsertInput
b, _ := marshal(params.Arguments)
@@ -209,6 +281,14 @@ func handleToolCall(ctx context.Context, tools *mcp.ToolSet, raw json.RawMessage
}
res, err := tools.DocSearch(ctx, in)
return formatResult(res, err)
case "doc_search_by_id":
var in mcp.DocSearchByIDInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.DocSearchByID(ctx, in)
return formatResult(res, err)
case "security_upsert":
var in mcp.SecurityUpsertInput
b, _ := marshal(params.Arguments)
@@ -225,6 +305,14 @@ func handleToolCall(ctx context.Context, tools *mcp.ToolSet, raw json.RawMessage
}
res, err := tools.SecuritySearch(ctx, in)
return formatResult(res, err)
case "security_search_by_id":
var in mcp.SecuritySearchByIDInput
b, _ := marshal(params.Arguments)
if err := unmarshal(b, &in); err != nil {
return toolsCallResult{}, err
}
res, err := tools.SecuritySearchByID(ctx, in)
return formatResult(res, err)
default:
return toolsCallResult{}, fmt.Errorf("unknown tool: %s", params.Name)
}
@@ -272,14 +360,14 @@ func toolList() []toolDef {
"id": schemaString("UUID"),
"title": schemaString("Title"),
"body": schemaString("Body"),
"ts": schemaString("ISO8601 timestamp"),
"ts": schemaString("ISO8601 timestamp (optional; default now)"),
"tags": schemaArray(schemaString("Tag")),
"project": schemaString("Project name"),
"source": schemaString("Source (manual/git/cursor/terminal)"),
"author": schemaString("Author"),
"visibility": schemaString("public/private/confidential"),
"content_hash": schemaString("sha256:<hex>"),
}, []string{"id", "title", "body", "ts"}),
"author": schemaString("Author (optional; default env/USER)"),
"visibility": schemaString("public/private/confidential (default private)"),
"content_hash": schemaString("sha256:<hex> (optional; auto-generated if omitted)"),
}, []string{"id", "title", "body"}),
},
{
Name: "dev_log_search",
@@ -294,6 +382,124 @@ func toolList() []toolDef {
},
}, []string{"query"}),
},
{
Name: "dev_log_search_by_id",
Title: "Dev Log Search By ID",
Description: "Search dev_log entry by metadata.id",
InputSchema: schemaObject(map[string]any{
"id": schemaString("UUID"),
"top_k": schemaNumber("Top K (default 1)"),
}, []string{"id"}),
},
{
Name: "diary_upsert",
Title: "Diary Upsert",
Description: "Upsert a dev_diary or chitchat_diary entry",
InputSchema: schemaObject(map[string]any{
"id": schemaString("UUID"),
"collection": schemaString("dev_diary or chitchat_diary"),
"content": schemaString("Body text"),
"source": schemaString("source (optional; default by collection)"),
"author": schemaString("author (optional; default env/USER)"),
"topic": schemaString("topic (optional; default general)"),
"ts": schemaString("ISO8601 timestamp (optional; default now)"),
"tags": schemaArray(schemaString("Tag")),
"visibility": schemaString("public/private/confidential (default private)"),
"content_hash": schemaString("sha256:<hex> (optional; auto-generated if omitted)"),
"metadata": map[string]any{
"type": "object",
"description": "Optional metadata; missing fields are auto-filled",
},
}, []string{"id", "collection", "content"}),
},
{
Name: "diary_search",
Title: "Diary Search",
Description: "Search dev_diary or chitchat_diary entries",
InputSchema: schemaObject(map[string]any{
"collection": schemaString("dev_diary or chitchat_diary"),
"query": schemaString("Query"),
"top_k": schemaNumber("Top K (default 5)"),
"filter": map[string]any{
"type": "object",
"description": "Optional metadata filter (MCP filter DSL)",
},
}, []string{"collection", "query"}),
},
{
Name: "diary_search_by_id",
Title: "Diary Search By ID",
Description: "Search dev_diary or chitchat_diary entry by metadata.id",
InputSchema: schemaObject(map[string]any{
"collection": schemaString("dev_diary or chitchat_diary"),
"id": schemaString("UUID"),
"top_k": schemaNumber("Top K (default 1)"),
}, []string{"collection", "id"}),
},
{
Name: "dev_log_delete",
Title: "Dev Log Delete",
Description: "Delete dev_log entries by id/doc_id, time range, or metadata filter",
InputSchema: schemaObject(map[string]any{
"id": schemaString("Chunk ID (uuid)"),
"doc_id": schemaString("Parent doc ID (uuid)"),
"before_ts": schemaString("Delete entries with metadata.ts before this ISO8601 timestamp"),
"before_days": schemaNumber("Delete entries older than N days (uses current time)"),
"dry_run": map[string]any{"type": "boolean", "description": "If true, only returns count and does not delete"},
"filter": map[string]any{
"type": "object",
"description": "Required unless id is provided. Also required when using before_ts/before_days.",
},
}, nil),
},
{
Name: "trade_event_delete",
Title: "Trade Event Delete",
Description: "Delete trade_event entries by id/doc_id, date range, or metadata filter",
InputSchema: schemaObject(map[string]any{
"id": schemaString("Chunk ID (uuid)"),
"doc_id": schemaString("Parent doc ID (uuid)"),
"before_date": schemaString("Delete entries with metadata.event_date before this YYYY-MM-DD"),
"before_days": schemaNumber("Delete entries older than N days (uses current date)"),
"dry_run": map[string]any{"type": "boolean", "description": "If true, only returns count and does not delete"},
"filter": map[string]any{
"type": "object",
"description": "Required unless id is provided. Also required when using before_date/before_days.",
},
}, nil),
},
{
Name: "doc_delete",
Title: "Doc Delete",
Description: "Delete doc entries by id/doc_id, time range, or metadata filter",
InputSchema: schemaObject(map[string]any{
"id": schemaString("Chunk ID (uuid)"),
"doc_id": schemaString("Parent doc ID (uuid)"),
"before_ts": schemaString("Delete entries with metadata.ts before this ISO8601 timestamp"),
"before_days": schemaNumber("Delete entries older than N days (uses current time)"),
"dry_run": map[string]any{"type": "boolean", "description": "If true, only returns count and does not delete"},
"filter": map[string]any{
"type": "object",
"description": "Required unless id is provided. Also required when using before_ts/before_days.",
},
}, nil),
},
{
Name: "security_delete",
Title: "Security Delete",
Description: "Delete security entries by id/doc_id, time range, or metadata filter",
InputSchema: schemaObject(map[string]any{
"id": schemaString("Chunk ID (uuid)"),
"doc_id": schemaString("Parent doc ID (uuid)"),
"before_ts": schemaString("Delete entries with metadata.ts before this ISO8601 timestamp"),
"before_days": schemaNumber("Delete entries older than N days (uses current time)"),
"dry_run": map[string]any{"type": "boolean", "description": "If true, only returns count and does not delete"},
"filter": map[string]any{
"type": "object",
"description": "Required unless id is provided. Also required when using before_ts/before_days.",
},
}, nil),
},
{
Name: "trade_event_upsert",
Title: "Trade Event Upsert",
@@ -327,6 +533,15 @@ func toolList() []toolDef {
},
}, []string{"query"}),
},
{
Name: "trade_event_search_by_id",
Title: "Trade Event Search By ID",
Description: "Search trade_event entry by metadata.id",
InputSchema: schemaObject(map[string]any{
"id": schemaString("UUID"),
"top_k": schemaNumber("Top K (default 1)"),
}, []string{"id"}),
},
{
Name: "doc_upsert",
Title: "Doc Upsert",
@@ -354,6 +569,15 @@ func toolList() []toolDef {
},
}, []string{"query"}),
},
{
Name: "doc_search_by_id",
Title: "Doc Search By ID",
Description: "Search doc entry by metadata.id",
InputSchema: schemaObject(map[string]any{
"id": schemaString("UUID"),
"top_k": schemaNumber("Top K (default 1)"),
}, []string{"id"}),
},
{
Name: "security_upsert",
Title: "Security Upsert",
@@ -381,6 +605,15 @@ func toolList() []toolDef {
},
}, []string{"query"}),
},
{
Name: "security_search_by_id",
Title: "Security Search By ID",
Description: "Search security entry by metadata.id",
InputSchema: schemaObject(map[string]any{
"id": schemaString("UUID"),
"top_k": schemaNumber("Top K (default 1)"),
}, []string{"id"}),
},
}
}

View File

@@ -2,6 +2,13 @@ package mcp
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"os"
"sort"
"strings"
"time"
"pgvecterapi/internal/pgvecter"
@@ -34,6 +41,47 @@ type DevLogSearchInput struct {
Filter map[string]any `json:"filter"`
}
// DevLogSearchByIDInput selects a single dev_log entry by metadata.id.
type DevLogSearchByIDInput struct {
	ID   string `json:"id"`    // entry UUID; required
	TopK int    `json:"top_k"` // max results; 0 means default (1)
}

// DiaryUpsertInput upserts one dev_diary or chitchat_diary entry.
// Optional fields left empty are auto-filled by DiaryUpsert
// (source, author, topic, ts, tags, visibility, content_hash).
type DiaryUpsertInput struct {
	ID          string         `json:"id"`           // entry UUID; required
	Collection  string         `json:"collection"`   // "dev_diary" or "chitchat_diary"; required
	Content     string         `json:"content"`      // body text; required
	Source      string         `json:"source"`       // optional; defaulted per collection
	Author      string         `json:"author"`       // optional; defaulted from env
	Topic       string         `json:"topic"`        // optional; default "general"
	TS          string         `json:"ts"`           // ISO8601; optional, default now
	Tags        []string       `json:"tags"`         // optional; stored as [] when empty
	Visibility  string         `json:"visibility"`   // optional; default "private"
	ContentHash string         `json:"content_hash"` // optional; auto sha256 of content
	Metadata    map[string]any `json:"metadata"`     // optional extras; merged, then auto-filled
}

// DiarySearchInput is a semantic search over one diary collection.
type DiarySearchInput struct {
	Collection string         `json:"collection"` // "dev_diary" or "chitchat_diary"; required
	Query      string         `json:"query"`      // search text; required
	TopK       int            `json:"top_k"`      // max results; 0 means default (5)
	Filter     map[string]any `json:"filter"`     // optional metadata filter (MCP filter DSL)
}

// DiarySearchByIDInput selects a single diary entry by metadata.id.
type DiarySearchByIDInput struct {
	Collection string `json:"collection"` // "dev_diary" or "chitchat_diary"; required
	ID         string `json:"id"`         // entry UUID; required
	TopK       int    `json:"top_k"`      // max results; 0 means default (1)
}

// DevLogDeleteInput describes a dev_log deletion request.
// Either ID or Filter must be set; BeforeTS/BeforeDays additionally
// require Filter (see DevLogDelete).
type DevLogDeleteInput struct {
	ID         string         `json:"id"`          // chunk ID (UUID)
	DocID      string         `json:"doc_id"`      // parent doc ID (UUID); not sufficient alone
	BeforeTS   string         `json:"before_ts"`   // delete entries with metadata.ts before this ISO8601 time
	BeforeDays int            `json:"before_days"` // delete entries older than N days from now
	Filter     map[string]any `json:"filter"`      // metadata filter (MCP filter DSL)
	DryRun     bool           `json:"dry_run"`     // count only; do not delete
}
type TradeEventUpsertInput struct {
ID string `json:"id"`
EventDate string `json:"event_date"`
@@ -56,6 +104,20 @@ type TradeEventSearchInput struct {
Filter map[string]any `json:"filter"`
}
// TradeEventSearchByIDInput selects a single trade_event entry by metadata.id.
type TradeEventSearchByIDInput struct {
	ID   string `json:"id"`    // entry UUID; required
	TopK int    `json:"top_k"` // max results; 0 means default (1)
}

// TradeEventDeleteInput describes a trade_event deletion request.
// Either ID or Filter must be set; BeforeDate/BeforeDays additionally
// require Filter (see TradeEventDelete).
type TradeEventDeleteInput struct {
	ID         string         `json:"id"`          // chunk ID (UUID)
	DocID      string         `json:"doc_id"`      // parent doc ID (UUID); not sufficient alone
	BeforeDate string         `json:"before_date"` // delete entries with metadata.event_date before this YYYY-MM-DD
	BeforeDays int            `json:"before_days"` // delete entries older than N days from today
	Filter     map[string]any `json:"filter"`      // metadata filter (MCP filter DSL)
	DryRun     bool           `json:"dry_run"`     // count only; do not delete
}
type DocUpsertInput struct {
ID string `json:"id"`
Title string `json:"title"`
@@ -72,6 +134,20 @@ type DocSearchInput struct {
Filter map[string]any `json:"filter"`
}
// DocSearchByIDInput selects a single doc entry by metadata.id.
type DocSearchByIDInput struct {
	ID   string `json:"id"`    // entry UUID; required
	TopK int    `json:"top_k"` // max results; 0 means default (1)
}

// DocDeleteInput describes a doc deletion request.
// Either ID or Filter must be set; BeforeTS/BeforeDays additionally
// require Filter (see DocDelete).
type DocDeleteInput struct {
	ID         string         `json:"id"`          // chunk ID (UUID)
	DocID      string         `json:"doc_id"`      // parent doc ID (UUID); not sufficient alone
	BeforeTS   string         `json:"before_ts"`   // delete entries with metadata.ts before this ISO8601 time
	BeforeDays int            `json:"before_days"` // delete entries older than N days from now
	Filter     map[string]any `json:"filter"`      // metadata filter (MCP filter DSL)
	DryRun     bool           `json:"dry_run"`     // count only; do not delete
}
type SecurityUpsertInput struct {
ID string `json:"id"`
TS string `json:"ts"`
@@ -88,17 +164,48 @@ type SecuritySearchInput struct {
Filter map[string]any `json:"filter"`
}
// SecuritySearchByIDInput selects a single security entry by metadata.id.
type SecuritySearchByIDInput struct {
	ID   string `json:"id"`    // entry UUID; required
	TopK int    `json:"top_k"` // max results; 0 means default (1)
}

// SecurityDeleteInput describes a security deletion request.
// Either ID or Filter must be set; BeforeTS/BeforeDays additionally
// require Filter (see SecurityDelete).
type SecurityDeleteInput struct {
	ID         string         `json:"id"`          // chunk ID (UUID)
	DocID      string         `json:"doc_id"`      // parent doc ID (UUID); not sufficient alone
	BeforeTS   string         `json:"before_ts"`   // delete entries with metadata.ts before this ISO8601 time
	BeforeDays int            `json:"before_days"` // delete entries older than N days from now
	Filter     map[string]any `json:"filter"`      // metadata filter (MCP filter DSL)
	DryRun     bool           `json:"dry_run"`     // count only; do not delete
}
func (t *ToolSet) DevLogUpsert(ctx context.Context, in DevLogUpsertInput) (*pgvecter.UpsertResponse, error) {
content := "【" + in.Title + "】\n" + in.Body
contentHash := in.ContentHash
if contentHash == "" {
contentHash = buildContentHash(in.Title, in.Body)
}
ts := in.TS
if ts == "" {
ts = NowISO()
}
author := in.Author
if author == "" {
author = defaultAuthor()
}
visibility := in.Visibility
if visibility == "" {
visibility = "private"
}
metadata := map[string]any{
"id": in.ID,
"title": in.Title,
"ts": in.TS,
"ts": ts,
"tags": in.Tags,
"project": in.Project,
"source": in.Source,
"author": in.Author,
"visibility": in.Visibility,
"content_hash": in.ContentHash,
"author": author,
"visibility": visibility,
"content_hash": contentHash,
}
return t.Client.Upsert(ctx, pgvecter.UpsertRequest{
ID: in.ID,
@@ -112,17 +219,208 @@ func (t *ToolSet) DevLogSearch(ctx context.Context, in DevLogSearchInput) (*pgve
if in.TopK == 0 {
in.TopK = 5
}
visibilityFilter := map[string]any{
"eq": map[string]any{
"metadata.visibility": "private",
},
}
filter := in.Filter
if filter == nil {
filter = visibilityFilter
} else {
filter = map[string]any{
"and": []any{visibilityFilter, filter},
}
}
return t.Client.Search(ctx, pgvecter.SearchRequest{
Query: in.Query,
TopK: in.TopK,
Collection: "dev_log",
Filter: filter,
})
}
// DevLogSearchByID looks up a dev_log entry whose metadata.id equals in.ID.
// It delegates to DevLogSearch with a fixed "id_lookup" query, so the
// default private-visibility filter is applied as well.
func (t *ToolSet) DevLogSearchByID(ctx context.Context, in DevLogSearchByIDInput) (*pgvecter.SearchResponse, error) {
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 1 // an id lookup normally yields a single entry
	}
	idFilter := map[string]any{
		"eq": map[string]any{"metadata.id": in.ID},
	}
	return t.DevLogSearch(ctx, DevLogSearchInput{
		Query:  "id_lookup",
		TopK:   topK,
		Filter: idFilter,
	})
}
// DiaryUpsert validates and stores one diary entry in dev_diary or
// chitchat_diary. Every metadata field is resolved with the same
// precedence: explicit input field, then the caller-supplied metadata
// map, then a default (env-derived author, current time, etc.).
func (t *ToolSet) DiaryUpsert(ctx context.Context, in DiaryUpsertInput) (*pgvecter.UpsertResponse, error) {
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	if in.Content == "" {
		return nil, errors.New("content is required")
	}
	if !isDiaryCollection(in.Collection) {
		return nil, errors.New("collection must be dev_diary or chitchat_diary")
	}

	// Copy caller metadata so the input map is never mutated.
	meta := map[string]any{}
	for k, v := range in.Metadata {
		meta[k] = v
	}

	// pick returns the explicit input value, falling back to meta[key].
	pick := func(explicit, key string) string {
		if explicit != "" {
			return explicit
		}
		return getStringMeta(meta, key)
	}

	source := pick(in.Source, "source")
	if source == "" {
		// Default source depends on which diary is being written.
		if in.Collection == "chitchat_diary" {
			source = "custom_gpt"
		} else {
			source = "codex"
		}
	}
	author := pick(in.Author, "author")
	if author == "" {
		author = defaultAuthor()
	}
	visibility := pick(in.Visibility, "visibility")
	if visibility == "" {
		visibility = "private"
	}
	topic := pick(in.Topic, "topic")
	if topic == "" {
		topic = "general"
	}
	ts := pick(in.TS, "ts")
	if ts == "" {
		ts = NowISO()
	}
	contentHash := pick(in.ContentHash, "content_hash")
	if contentHash == "" {
		contentHash = buildContentHashFromContent(in.Content)
	}
	tags := in.Tags
	if len(tags) == 0 {
		tags = getStringSliceMeta(meta, "tags")
	}
	if len(tags) == 0 {
		tags = []string{} // store an empty array, not null
	}

	meta["id"] = in.ID
	meta["source"] = source
	meta["author"] = author
	meta["topic"] = topic
	meta["ts"] = ts
	meta["tags"] = tags
	meta["visibility"] = visibility
	meta["content_hash"] = contentHash

	return t.Client.Upsert(ctx, pgvecter.UpsertRequest{
		ID:         in.ID,
		Collection: in.Collection,
		Content:    in.Content,
		Metadata:   meta,
	})
}
// DiarySearch runs a semantic search over one diary collection
// (dev_diary or chitchat_diary), defaulting TopK to 5.
func (t *ToolSet) DiarySearch(ctx context.Context, in DiarySearchInput) (*pgvecter.SearchResponse, error) {
	if !isDiaryCollection(in.Collection) {
		return nil, errors.New("collection must be dev_diary or chitchat_diary")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 5
	}
	req := pgvecter.SearchRequest{
		Query:      in.Query,
		TopK:       topK,
		Collection: in.Collection,
		Filter:     in.Filter,
	}
	return t.Client.Search(ctx, req)
}
// DiarySearchByID looks up one diary entry whose metadata.id equals in.ID
// within the given collection, using a fixed "id_lookup" query string.
func (t *ToolSet) DiarySearchByID(ctx context.Context, in DiarySearchByIDInput) (*pgvecter.SearchResponse, error) {
	if !isDiaryCollection(in.Collection) {
		return nil, errors.New("collection must be dev_diary or chitchat_diary")
	}
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 1 // an id lookup normally yields a single entry
	}
	return t.Client.Search(ctx, pgvecter.SearchRequest{
		Query:      "id_lookup",
		TopK:       topK,
		Collection: in.Collection,
		Filter: map[string]any{
			"eq": map[string]any{"metadata.id": in.ID},
		},
	})
}
// DevLogDelete removes dev_log entries by chunk id or by metadata filter,
// optionally narrowed to entries whose metadata.ts is older than a cutoff.
// A bare doc_id is rejected to avoid accidental mass deletion, and
// before_ts/before_days require an explicit filter for the same reason.
// With DryRun set, only the match count is returned.
func (t *ToolSet) DevLogDelete(ctx context.Context, in DevLogDeleteInput) (*pgvecter.DeleteResponse, error) {
	if in.ID == "" && in.Filter == nil {
		return nil, errors.New("id or filter is required (doc_id alone is not allowed)")
	}
	filter := in.Filter
	if in.BeforeTS != "" || in.BeforeDays > 0 {
		if filter == nil {
			return nil, errors.New("before_ts/before_days requires filter")
		}
		cutoff := in.BeforeTS
		if cutoff == "" {
			cutoff = time.Now().AddDate(0, 0, -in.BeforeDays).Format(time.RFC3339)
		}
		tsFilter := map[string]any{
			"lt": map[string]any{
				"metadata.ts": cutoff,
			},
		}
		// filter is guaranteed non-nil here (checked above), so the cutoff is
		// always ANDed in. The previous `if filter == nil` assignment branch
		// was dead code; this now matches TradeEventDelete/DocDelete/SecurityDelete.
		filter = map[string]any{
			"and": []any{filter, tsFilter},
		}
	}
	return t.Client.Delete(ctx, pgvecter.DeleteRequest{
		ID:         in.ID,
		DocID:      in.DocID,
		Collection: "dev_log",
		Filter:     filter,
		DryRun:     in.DryRun,
	})
}
func (t *ToolSet) TradeEventUpsert(ctx context.Context, in TradeEventUpsertInput) (*pgvecter.UpsertResponse, error) {
content := "【" + in.EventType + "】" + in.PartnerName + " " + in.EventDate + "\n" + in.Summary
content := buildTradeEventContent(in.EventType, in.PartnerName, in.EventDate, in.Summary, in.Detail)
metadata := map[string]any{
"id": in.ID,
"event_date": in.EventDate,
"partner_name": in.PartnerName,
"event_type": in.EventType,
@@ -156,9 +454,61 @@ func (t *ToolSet) TradeEventSearch(ctx context.Context, in TradeEventSearchInput
})
}
// TradeEventSearchByID looks up a trade_event entry whose metadata.id
// equals in.ID, using a fixed "id_lookup" query string.
func (t *ToolSet) TradeEventSearchByID(ctx context.Context, in TradeEventSearchByIDInput) (*pgvecter.SearchResponse, error) {
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 1 // an id lookup normally yields a single entry
	}
	return t.Client.Search(ctx, pgvecter.SearchRequest{
		Query:      "id_lookup",
		TopK:       topK,
		Collection: "trade_event",
		Filter: map[string]any{
			"eq": map[string]any{"metadata.id": in.ID},
		},
	})
}
// TradeEventDelete removes trade_event entries by chunk id or by metadata
// filter, optionally restricted to events dated before a cutoff
// (metadata.event_date, YYYY-MM-DD). A bare doc_id is rejected, and
// before_date/before_days require an explicit filter.
func (t *ToolSet) TradeEventDelete(ctx context.Context, in TradeEventDeleteInput) (*pgvecter.DeleteResponse, error) {
	if in.ID == "" && in.Filter == nil {
		return nil, errors.New("id or filter is required (doc_id alone is not allowed)")
	}
	filter := in.Filter
	if in.BeforeDate != "" || in.BeforeDays > 0 {
		if filter == nil {
			return nil, errors.New("before_date/before_days requires filter")
		}
		cutoff := in.BeforeDate
		if cutoff == "" {
			// Derive the cutoff date from "N days ago".
			cutoff = time.Now().AddDate(0, 0, -in.BeforeDays).Format("2006-01-02")
		}
		filter = map[string]any{
			"and": []any{
				filter,
				map[string]any{"lt": map[string]any{"metadata.event_date": cutoff}},
			},
		}
	}
	return t.Client.Delete(ctx, pgvecter.DeleteRequest{
		ID:         in.ID,
		DocID:      in.DocID,
		Collection: "trade_event",
		Filter:     filter,
		DryRun:     in.DryRun,
	})
}
func (t *ToolSet) DocUpsert(ctx context.Context, in DocUpsertInput) (*pgvecter.UpsertResponse, error) {
content := "【" + in.Title + "】\n" + in.Body
metadata := map[string]any{
"id": in.ID,
"title": in.Title,
"ts": in.TS,
"tags": in.Tags,
@@ -185,9 +535,61 @@ func (t *ToolSet) DocSearch(ctx context.Context, in DocSearchInput) (*pgvecter.S
})
}
// DocSearchByID looks up a doc entry whose metadata.id equals in.ID,
// using a fixed "id_lookup" query string.
func (t *ToolSet) DocSearchByID(ctx context.Context, in DocSearchByIDInput) (*pgvecter.SearchResponse, error) {
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 1 // an id lookup normally yields a single entry
	}
	return t.Client.Search(ctx, pgvecter.SearchRequest{
		Query:      "id_lookup",
		TopK:       topK,
		Collection: "doc",
		Filter: map[string]any{
			"eq": map[string]any{"metadata.id": in.ID},
		},
	})
}
// DocDelete removes doc entries by chunk id or by metadata filter,
// optionally restricted to entries whose metadata.ts is before a cutoff.
// A bare doc_id is rejected, and before_ts/before_days require an
// explicit filter to avoid wiping the collection by accident.
func (t *ToolSet) DocDelete(ctx context.Context, in DocDeleteInput) (*pgvecter.DeleteResponse, error) {
	if in.ID == "" && in.Filter == nil {
		return nil, errors.New("id or filter is required (doc_id alone is not allowed)")
	}
	filter := in.Filter
	if in.BeforeTS != "" || in.BeforeDays > 0 {
		if filter == nil {
			return nil, errors.New("before_ts/before_days requires filter")
		}
		cutoff := in.BeforeTS
		if cutoff == "" {
			cutoff = time.Now().AddDate(0, 0, -in.BeforeDays).Format(time.RFC3339)
		}
		filter = map[string]any{
			"and": []any{
				filter,
				map[string]any{"lt": map[string]any{"metadata.ts": cutoff}},
			},
		}
	}
	return t.Client.Delete(ctx, pgvecter.DeleteRequest{
		ID:         in.ID,
		DocID:      in.DocID,
		Collection: "doc",
		Filter:     filter,
		DryRun:     in.DryRun,
	})
}
func (t *ToolSet) SecurityUpsert(ctx context.Context, in SecurityUpsertInput) (*pgvecter.UpsertResponse, error) {
content := in.Body
metadata := map[string]any{
"id": in.ID,
"ts": in.TS,
"tags": in.Tags,
"source": in.Source,
@@ -214,6 +616,205 @@ func (t *ToolSet) SecuritySearch(ctx context.Context, in SecuritySearchInput) (*
})
}
// SecuritySearchByID looks up a security entry whose metadata.id equals
// in.ID, using a fixed "id_lookup" query string.
func (t *ToolSet) SecuritySearchByID(ctx context.Context, in SecuritySearchByIDInput) (*pgvecter.SearchResponse, error) {
	if in.ID == "" {
		return nil, errors.New("id is required")
	}
	topK := in.TopK
	if topK == 0 {
		topK = 1 // an id lookup normally yields a single entry
	}
	return t.Client.Search(ctx, pgvecter.SearchRequest{
		Query:      "id_lookup",
		TopK:       topK,
		Collection: "security",
		Filter: map[string]any{
			"eq": map[string]any{"metadata.id": in.ID},
		},
	})
}
// SecurityDelete removes security entries by chunk id or by metadata
// filter, optionally restricted to entries whose metadata.ts is before a
// cutoff. A bare doc_id is rejected, and before_ts/before_days require an
// explicit filter.
func (t *ToolSet) SecurityDelete(ctx context.Context, in SecurityDeleteInput) (*pgvecter.DeleteResponse, error) {
	if in.ID == "" && in.Filter == nil {
		return nil, errors.New("id or filter is required (doc_id alone is not allowed)")
	}
	filter := in.Filter
	if in.BeforeTS != "" || in.BeforeDays > 0 {
		if filter == nil {
			return nil, errors.New("before_ts/before_days requires filter")
		}
		cutoff := in.BeforeTS
		if cutoff == "" {
			cutoff = time.Now().AddDate(0, 0, -in.BeforeDays).Format(time.RFC3339)
		}
		filter = map[string]any{
			"and": []any{
				filter,
				map[string]any{"lt": map[string]any{"metadata.ts": cutoff}},
			},
		}
	}
	return t.Client.Delete(ctx, pgvecter.DeleteRequest{
		ID:         in.ID,
		DocID:      in.DocID,
		Collection: "security",
		Filter:     filter,
		DryRun:     in.DryRun,
	})
}
// NowISO returns the current time formatted as RFC3339, used as the
// default ISO8601 timestamp for upserted entries.
func NowISO() string {
	return time.Now().Format(time.RFC3339)
}
// buildContentHash derives a stable "sha256:<hex>" fingerprint from a
// title/body pair, normalizing both parts first (line endings unified,
// surrounding whitespace trimmed) so cosmetic differences hash equally.
func buildContentHash(title, body string) string {
	payload := normalizeText(title) + "\n" + normalizeText(body)
	digest := sha256.Sum256([]byte(payload))
	return "sha256:" + hex.EncodeToString(digest[:])
}
// buildContentHashFromContent derives a "sha256:<hex>" fingerprint from
// normalized content; used when the caller supplies no content_hash.
func buildContentHashFromContent(content string) string {
	digest := sha256.Sum256([]byte(normalizeText(content)))
	return "sha256:" + hex.EncodeToString(digest[:])
}
// normalizeText canonicalizes line endings (CRLF and bare CR become LF)
// and strips leading/trailing whitespace.
func normalizeText(s string) string {
	unified := strings.NewReplacer("\r\n", "\n", "\r", "\n").Replace(s)
	return strings.TrimSpace(unified)
}
// defaultAuthor resolves the author for entries that do not specify one:
// PGVECTER_DEFAULT_AUTHOR, then USER, then USERNAME (Windows), falling
// back to "unknown". Whitespace-only values are treated as unset.
func defaultAuthor() string {
	for _, key := range []string{"PGVECTER_DEFAULT_AUTHOR", "USER", "USERNAME"} {
		if v := strings.TrimSpace(os.Getenv(key)); v != "" {
			return v
		}
	}
	return "unknown"
}
// isDiaryCollection reports whether name is one of the two diary
// collections accepted by the diary tools.
func isDiaryCollection(name string) bool {
	switch name {
	case "dev_diary", "chitchat_diary":
		return true
	default:
		return false
	}
}
// getStringMeta returns the trimmed string at meta[key], or "" when the
// map is nil, the key is absent, or the value is not a string.
func getStringMeta(meta map[string]any, key string) string {
	// Indexing a nil map yields the zero value, so no explicit nil check
	// is needed; a failed type assertion covers every non-string case.
	if s, ok := meta[key].(string); ok {
		return strings.TrimSpace(s)
	}
	return ""
}
// getStringSliceMeta returns the string slice at meta[key]. A []string is
// returned as-is; a []any (typical after JSON decoding) is filtered down
// to its string elements. Any other case yields nil.
func getStringSliceMeta(meta map[string]any, key string) []string {
	value, present := meta[key] // nil-map indexing safely yields (nil, false)
	if !present {
		return nil
	}
	if ss, ok := value.([]string); ok {
		return ss
	}
	items, ok := value.([]any)
	if !ok {
		return nil
	}
	result := make([]string, 0, len(items))
	for _, item := range items {
		if s, ok := item.(string); ok {
			result = append(result, s)
		}
	}
	return result
}
// buildTradeEventContent renders the embeddable text for a trade event:
// a "【type】partner date" headline, an optional summary line, and a
// "detail:" bullet list with preferred keys first and the rest sorted.
func buildTradeEventContent(eventType, partnerName, eventDate, summary string, detail map[string]any) string {
	parts := []string{"【" + eventType + "】" + partnerName + " " + eventDate}
	if strings.TrimSpace(summary) != "" {
		parts = append(parts, summary)
	}
	if len(detail) > 0 {
		parts = append(parts, "detail:")
		preferred := []string{
			"reference",
			"owner",
			"notes",
			"status",
			"amount",
			"currency",
			"due_date",
		}
		for _, key := range orderDetailKeys(detail, preferred) {
			parts = append(parts, "- "+key+": "+formatAny(detail[key]))
		}
	}
	return strings.TrimSpace(strings.Join(parts, "\n"))
}
// formatAny renders a detail value as display text: strings pass through
// unchanged, string slices join with ", ", []any formats each element
// recursively, and everything else is JSON-encoded ("null" if encoding
// fails).
func formatAny(v any) string {
	switch t := v.(type) {
	case string:
		return t
	case []string:
		return strings.Join(t, ", ")
	case []any:
		parts := make([]string, 0, len(t))
		for _, item := range t {
			parts = append(parts, formatAny(item))
		}
		return strings.Join(parts, ", ")
	default:
		// Maps and all remaining types share the JSON fallback; the
		// original's map[string]any case was byte-identical to this
		// branch, so it has been folded in.
		raw, err := json.Marshal(t)
		if err != nil {
			return "null"
		}
		return string(raw)
	}
}
// orderDetailKeys returns detail's keys with the preferred keys first (in
// their given order, when present), followed by all remaining keys sorted
// alphabetically for deterministic output.
func orderDetailKeys(detail map[string]any, preferred []string) []string {
	ordered := make([]string, 0, len(detail))
	taken := make(map[string]struct{}, len(detail))
	for _, key := range preferred {
		if _, exists := detail[key]; exists {
			ordered = append(ordered, key)
			taken[key] = struct{}{}
		}
	}
	remaining := make([]string, 0, len(detail))
	for key := range detail {
		if _, done := taken[key]; !done {
			remaining = append(remaining, key)
		}
	}
	sort.Strings(remaining)
	return append(ordered, remaining...)
}