🎨 Supports cleaning up unreferenced databases https://github.com/siyuan-note/siyuan/issues/11569

Signed-off-by: Daniel <845765@qq.com>
This commit is contained in:
Daniel 2026-01-27 22:58:56 +08:00
parent a77e0820ad
commit 1fa674e430
No known key found for this signature in database
GPG key ID: 86211BA83DF03017
6 changed files with 319 additions and 90 deletions

View file

@ -38,12 +38,108 @@ import (
"github.com/siyuan-note/siyuan/kernel/av"
"github.com/siyuan-note/siyuan/kernel/cache"
"github.com/siyuan-note/siyuan/kernel/filesys"
"github.com/siyuan-note/siyuan/kernel/search"
"github.com/siyuan-note/siyuan/kernel/sql"
"github.com/siyuan-note/siyuan/kernel/treenode"
"github.com/siyuan-note/siyuan/kernel/util"
"github.com/xrash/smetrics"
)
// UnusedAttributeViews returns the IDs of attribute views (databases) that
// exist under data/storage/av but are no longer referenced by any document
// tree or by any template file.
func UnusedAttributeViews() (ret []string) {
	defer logging.Recover()

	ret = []string{}
	allAvIDs, err := getAllAvIDs()
	if nil != err {
		return
	}

	used := map[string]bool{}
	luteEngine := util.NewLute()
	for _, box := range Conf.GetBoxes() {
		for _, batch := range pagedPaths(filepath.Join(util.DataDir, box.ID), 32) {
			for _, localPath := range batch {
				tree, loadErr := loadTree(localPath, luteEngine)
				if nil != loadErr {
					continue
				}
				for _, avID := range getAvIDs(tree, allAvIDs) {
					used[avID] = true
				}
			}
		}
	}

	// Templates may embed database references as well, so scan them too.
	for _, avID := range search.FindAllMatchedTargets(filepath.Join(util.DataDir, "templates"), allAvIDs) {
		used[avID] = true
	}

	// Whatever was never marked as used is an orphan.
	for _, avID := range allAvIDs {
		if !used[avID] {
			ret = append(ret, avID)
		}
	}
	ret = gulu.Str.RemoveDuplicatedElem(ret)
	return
}
// getAvIDs collects the attribute view IDs referenced within tree: the IDs
// of attribute view nodes themselves, plus any known IDs embedded inside
// kramdown IAL attribute values.
func getAvIDs(tree *parse.Tree, allAvIDs []string) (ret []string) {
	ast.Walk(tree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
		if !entering {
			return ast.WalkContinue
		}

		if n.Type == ast.NodeAttributeView {
			ret = append(ret, n.AttributeViewID)
		}

		// IAL values (e.g. custom attributes) may contain database IDs.
		for _, kv := range n.KramdownIAL {
			if matched := util.GetContainsSubStrs(kv[1], allAvIDs); 0 < len(matched) {
				ret = append(ret, matched...)
			}
		}
		return ast.WalkContinue
	})
	ret = gulu.Str.RemoveDuplicatedElem(ret)
	return
}
// getAllAvIDs lists the IDs of all attribute view JSON files stored under
// data/storage/av. A missing or unreadable directory is returned as an error.
func getAllAvIDs() (ret []string, err error) {
	ret = []string{}
	avDir := filepath.Join(util.DataDir, "storage", "av")
	entries, err := os.ReadDir(avDir)
	if nil != err {
		return
	}

	for _, entry := range entries {
		name := entry.Name()
		id := strings.TrimSuffix(name, ".json")
		if id == name {
			// Unchanged after trimming means it is not a .json file.
			continue
		}
		if !ast.IsNodeIDPattern(id) {
			continue
		}
		ret = append(ret, id)
	}
	ret = gulu.Str.RemoveDuplicatedElem(ret)
	return
}
func GetAttributeViewItemIDs(avID string, blockIDs []string) (ret map[string]string) {
ret = map[string]string{}
for _, blockID := range blockIDs {

View file

@ -17,17 +17,13 @@
package model
import (
"bufio"
"bytes"
"errors"
"fmt"
"io/fs"
"os"
"path"
"path/filepath"
"runtime"
"strings"
"sync"
"time"
"github.com/88250/lute"
@ -38,6 +34,7 @@ import (
"github.com/siyuan-note/logging"
"github.com/siyuan-note/siyuan/kernel/av"
"github.com/siyuan-note/siyuan/kernel/filesys"
"github.com/siyuan-note/siyuan/kernel/search"
"github.com/siyuan-note/siyuan/kernel/sql"
"github.com/siyuan-note/siyuan/kernel/task"
"github.com/siyuan-note/siyuan/kernel/treenode"
@ -317,7 +314,7 @@ func findUnindexedTreePathInAllBoxes(id string) (ret string) {
boxes := Conf.GetBoxes()
for _, box := range boxes {
root := filepath.Join(util.DataDir, box.ID)
paths := findAllOccurrences(root, id)
paths := search.FindAllMatchedPaths(root, []string{id})
var rootIDs []string
rootIDPaths := map[string]string{}
for _, p := range paths {
@ -335,88 +332,3 @@ func findUnindexedTreePathInAllBoxes(id string) (ret string) {
}
return
}
// findAllOccurrences walks every regular file under root and returns the
// paths of the files whose content contains target. Files are scanned
// concurrently by a worker pool sized to the number of CPU cores.
func findAllOccurrences(root string, target string) []string {
	if "" == root || "" == target {
		return nil
	}

	needle := []byte(target)
	fileCh := make(chan string, 256)  // paths waiting to be scanned
	matchCh := make(chan string, 256) // paths that contained the target

	// Drain matches in a dedicated goroutine so workers never block on an
	// unread result; done signals the slice is fully assembled.
	var hits []string
	done := make(chan struct{})
	go func() {
		defer close(done)
		for p := range matchCh {
			hits = append(hits, p)
		}
	}()

	// Scanner pool, one worker per CPU core.
	var workers sync.WaitGroup
	for i := runtime.NumCPU(); i > 0; i-- {
		workers.Add(1)
		go func() {
			defer workers.Done()
			for p := range fileCh {
				if containsTarget(p, needle) {
					matchCh <- p
				}
			}
		}()
	}

	// Feed every regular file to the pool; walk errors are best-effort skipped.
	_ = filepath.WalkDir(root, func(p string, d os.DirEntry, walkErr error) error {
		if walkErr == nil && d.Type().IsRegular() {
			fileCh <- p
		}
		return nil
	})

	close(fileCh)   // no more work
	workers.Wait()  // all scans finished
	close(matchCh)  // no more results
	<-done          // collector finished appending
	return hits
}
// containsTarget 针对大文件优化的字节流匹配函数
func containsTarget(path string, target []byte) bool {
f, err := os.Open(path)
if err != nil {
return false
}
defer f.Close()
// 1MB 缓冲区
reader := bufio.NewReaderSize(f, 1024*1024)
for {
// 使用 ReadSlice 实现零拷贝读取
line, err := reader.ReadSlice('\n')
if len(line) > 0 && bytes.Contains(line, target) {
return true
}
if err != nil {
if err == bufio.ErrBufferFull {
// 处理超过 1MB 的超长行,直接跳过当前行剩余部分
for err == bufio.ErrBufferFull {
_, err = reader.ReadSlice('\n')
}
continue
}
break // EOF 或其他错误
}
}
return false
}