Merge remote-tracking branch 'origin/dev' into dev

This commit is contained in:
Vanessa 2026-01-30 00:24:39 +08:00
commit 56e76f98d1
19 changed files with 104 additions and 40 deletions

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "هل تؤكد مسح جميع قواعد البيانات غير المشار إليها؟",
"unreferencedAV": "قاعدة بيانات غير مشار إليها",
"includeSubDocs": "تضمين المستندات الفرعية",
"includeSubDocsTip": "عند التفعيل، سيتم تضمين جميع المستندات الفرعية للمستند الحالي أثناء التصدير",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Bestätigen Sie das Löschen aller nicht referenzierten Datenbanken?",
"unreferencedAV": "Nicht referenzierte Datenbank",
"includeSubDocs": "Unterdokumente einbeziehen",
"includeSubDocsTip": "Wenn aktiviert, werden beim Export alle Unterdokumente des aktuellen Dokuments einbezogen",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Confirm clearing all unreferenced databases?",
"unreferencedAV": "Unreferenced database",
"includeSubDocs": "Include sub-documents",
"includeSubDocsTip": "When enabled, all sub-documents of the current document will be included during export",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "¿Confirmar la limpieza de todas las bases de datos no referenciadas?",
"unreferencedAV": "Base de datos no referenciada",
"includeSubDocs": "Incluir subdocumentos",
"includeSubDocsTip": "Al activar, se incluirán todos los subdocumentos del documento actual en la exportación",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Confirmer le nettoyage de toutes les bases de données non référencées ?",
"unreferencedAV": "Base de données non référencée",
"includeSubDocs": "Inclure les sous-documents",
"includeSubDocsTip": "Si activé, tous les sous-documents du document courant seront inclus lors de l'export",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "האם לאשר ניקוי של כל מסדי הנתונים שאינם בשימוש?",
"unreferencedAV": "מאגר נתונים ללא הפניות",
"includeSubDocs": "כלול מסמכים משניים",
"includeSubDocsTip": "בעת הפעלה, בעת ייצוא יכלול את כל תת‑המסמכים של המסמך הנוכחי",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Confermare la pulizia di tutti i database non referenziati?",
"unreferencedAV": "Database non referenziato",
"includeSubDocs": "Includi sottodocumenti",
"includeSubDocsTip": "Se abilitato, durante l'esportazione verranno inclusi tutti i sottodocumenti del documento corrente",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "すべての参照されていないデータベースを削除してもよろしいですか?",
"unreferencedAV": "参照されていないデータベース",
"includeSubDocs": "子ドキュメントを含める",
"includeSubDocsTip": "有効にすると、エクスポート時に現在のドキュメントのすべての子ドキュメントが含まれます",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "모든 참조되지 않은 데이터베이스를 정리하시겠습니까?",
"unreferencedAV": "참조되지 않은 데이터베이스",
"includeSubDocs": "하위 문서 포함",
"includeSubDocsTip": "활성화하면 내보낼 때 현재 문서의 모든 하위 문서를 포함합니다",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Potwierdzić wyczyszczenie wszystkich niepowiązanych baz danych?",
"unreferencedAV": "Baza danych bez odwołań",
"includeSubDocs": "Uwzględnij poddokumenty",
"includeSubDocsTip": "Po włączeniu podczas eksportu zostaną uwzględnione wszystkie poddokumenty bieżącego dokumentu",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Confirmar a limpeza de todos os bancos de dados não referenciados?",
"unreferencedAV": "Banco de dados não referenciado",
"includeSubDocs": "Incluir subdocumentos",
"includeSubDocsTip": "Ao ativar, todos os subdocumentos do documento atual serão incluídos na exportação",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Подтвердить очистку всех неиспользуемых баз данных?",
"unreferencedAV": "Неиспользуемая база данных",
"includeSubDocs": "Включать дочерние документы",
"includeSubDocsTip": "При включении при экспорте будут добавлены все дочерние документы текущего документа",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "Tüm atıfta bulunulmayan veritabanlarını temizlemek istediğinize emin misiniz?",
"unreferencedAV": "Başvurulmayan veritabanı",
"includeSubDocs": "Alt belgeleri dahil et",
"includeSubDocsTip": "Etkinleştirildiğinde, dışa aktarım sırasında mevcut belgenin tüm alt belgeleri dahil edilir",

View file

@ -1,4 +1,5 @@
{
"clearAllAV": "確認清理所有未引用的資料庫?",
"unreferencedAV": "未引用的資料庫",
"includeSubDocs": "包含子文檔",
"includeSubDocsTip": "啟用後將在匯出時包含當前文檔的所有子文檔",

View file

@ -18,6 +18,7 @@
package av
import (
"bytes"
"errors"
"fmt"
"os"
@ -444,6 +445,52 @@ func GetAttributeViewNameByPath(avJSONPath string) (ret string, err error) {
return
}
// GetAttributeViewContent returns the searchable plain-text content of the
// attribute view identified by avID. It returns the empty string when avID
// is empty or when the attribute view cannot be parsed (the parse error is
// logged, not propagated).
func GetAttributeViewContent(avID string) (content string) {
	if avID == "" {
		return
	}

	if attrView, parseErr := ParseAttributeView(avID); parseErr == nil {
		content = getAttributeViewContent0(attrView)
	} else {
		logging.LogErrorf("parse attribute view [%s] failed: %s", avID, parseErr)
	}
	return
}
// GetAttributeViewContentByPath builds the searchable plain-text content for
// the attribute view stored at avJSONPath. A parse failure is logged and
// yields the empty string.
func GetAttributeViewContentByPath(avJSONPath string) (content string) {
	attrView, parseErr := ParseAttributeViewByPath(avJSONPath)
	if nil != parseErr {
		logging.LogErrorf("parse attribute view [%s] failed: %s", avJSONPath, parseErr)
		return
	}
	content = getAttributeViewContent0(attrView)
	return
}
// getAttributeViewContent0 flattens an attribute view into one
// space-separated string: the view's name, every layout (view) name, every
// key name, and the textual form of every non-nil value. Leading/trailing
// whitespace is trimmed from the result.
func getAttributeViewContent0(attrView *AttributeView) (content string) {
	var b strings.Builder
	b.WriteString(attrView.Name)
	b.WriteByte(' ')
	for _, view := range attrView.Views {
		b.WriteString(view.Name)
		b.WriteByte(' ')
	}
	for _, kv := range attrView.KeyValues {
		b.WriteString(kv.Key.Name)
		b.WriteByte(' ')
		for _, val := range kv.Values {
			if val == nil {
				continue
			}
			b.WriteString(val.String(true))
			b.WriteByte(' ')
		}
	}
	return strings.TrimSpace(b.String())
}
func IsAttributeViewExist(avID string) bool {
avJSONPath := GetAttributeViewDataPath(avID)
return filelock.IsExist(avJSONPath)
@ -451,11 +498,17 @@ func IsAttributeViewExist(avID string) bool {
// ParseAttributeView loads and parses the attribute view identified by avID
// from its JSON file under the data directory.
func ParseAttributeView(avID string) (ret *AttributeView, err error) {
	return ParseAttributeViewByPath(GetAttributeViewDataPath(avID))
}
func ParseAttributeViewByPath(avJSONPath string) (ret *AttributeView, err error) {
if !filelock.IsExist(avJSONPath) {
err = ErrViewNotFound
return
}
avID := filepath.Base(avJSONPath)
avID = strings.TrimSuffix(avID, filepath.Ext(avID))
data, readErr := filelock.ReadFile(avJSONPath)
if nil != readErr {
logging.LogErrorf("read attribute view [%s] failed: %s", avID, readErr)

View file

@ -1561,7 +1561,7 @@ func removeDoc(box *Box, p string, luteEngine *lute.Lute) {
return
}
generateAvHistory(tree, historyDir)
generateAvHistoryInTree(tree, historyDir)
copyDocAssetsToDataAssets(box.ID, p)
removeIDs := treenode.RootChildIDs(tree.ID)

View file

@ -39,6 +39,7 @@ import (
"github.com/siyuan-note/eventbus"
"github.com/siyuan-note/filelock"
"github.com/siyuan-note/logging"
"github.com/siyuan-note/siyuan/kernel/av"
"github.com/siyuan-note/siyuan/kernel/cache"
"github.com/siyuan-note/siyuan/kernel/conf"
"github.com/siyuan-note/siyuan/kernel/search"
@ -527,7 +528,7 @@ func buildSearchHistoryQueryFilter(query, op, box, table string, typ int) (stmt
case HistoryTypeAsset:
stmt += table + " MATCH '{title content}:(" + query + ")'"
case HistoryTypeDatabase:
stmt += " id = '" + query + "'"
stmt += table + " MATCH '{content}:(" + query + ")'"
}
} else {
stmt += "1=1"
@ -673,7 +674,7 @@ func (box *Box) generateDocHistory0() {
if nil != loadErr {
logging.LogErrorf("load tree [%s] failed: %s", file, loadErr)
} else {
generateAvHistory(tree, historyDir)
generateAvHistoryInTree(tree, historyDir)
}
}
}
@ -779,6 +780,34 @@ func recentModifiedAssets() (ret []string) {
return
}
var attributeViewLatestHistoryTime = time.Now().Unix()
// recentModifiedAttributeViews returns the absolute paths of attribute view
// JSON files under storage/av whose modification time is newer than the
// watermark recorded by the previous call (attributeViewLatestHistoryTime).
// Directories and non-.json entries are ignored; per-entry stat failures are
// logged and skipped.
func recentModifiedAttributeViews() (ret []string) {
	// Capture the new watermark BEFORE scanning. The original code assigned
	// time.Now() after the scan, so a file modified while the scan was in
	// progress could fall below the new watermark without having been seen,
	// and would then never be reported.
	now := time.Now().Unix()

	avDir := filepath.Join(util.DataDir, "storage", "av")
	entries, err := os.ReadDir(avDir)
	if nil != err {
		logging.LogErrorf("read attribute view dir failed: %s", err)
		return
	}

	for _, entry := range entries {
		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") {
			continue
		}

		info, err := entry.Info()
		if nil != err {
			logging.LogErrorf("read attribute view file info failed: %s", err)
			continue
		}

		if info.ModTime().Unix() > attributeViewLatestHistoryTime {
			ret = append(ret, filepath.Join(avDir, entry.Name()))
		}
	}
	attributeViewLatestHistoryTime = now
	return
}
const (
HistoryOpClean = "clean"
HistoryOpUpdate = "update"
@ -813,12 +842,12 @@ func generateOpTypeHistory(tree *parse.Tree, opType string) {
return
}
generateAvHistory(tree, historyDir)
generateAvHistoryInTree(tree, historyDir)
indexHistoryDir(filepath.Base(historyDir), util.NewLute())
}
func generateAvHistory(tree *parse.Tree, historyDir string) {
func generateAvHistoryInTree(tree *parse.Tree, historyDir string) {
avNodes := tree.Root.ChildrenByType(ast.NodeAttributeView)
for _, avNode := range avNodes {
srcAvPath := filepath.Join(util.DataDir, "storage", "av", avNode.AttributeViewID+".json")
@ -958,11 +987,13 @@ func indexHistoryDir(name string, luteEngine *lute.Lute) {
}
p := strings.TrimPrefix(database, util.HistoryDir)
p = filepath.ToSlash(p[1:])
content := av.GetAttributeViewContentByPath(database)
histories = append(histories, &sql.History{
ID: id,
Type: HistoryTypeDatabase,
Op: op,
Title: id,
Content: content,
Path: p,
Created: created,
})

View file

@ -741,40 +741,6 @@ func FillAttributeViewNilValue(value *av.Value, typ av.KeyType) {
}
}
// getAttributeViewContent flattens the attribute view identified by avID into
// a single space-separated string (view name, layout names, key names, and
// non-nil value texts). It returns the empty string when avID is empty or
// parsing fails; parse errors are logged rather than returned.
func getAttributeViewContent(avID string) (content string) {
	if avID == "" {
		return
	}

	attrView, parseErr := av.ParseAttributeView(avID)
	if nil != parseErr {
		logging.LogErrorf("parse attribute view [%s] failed: %s", avID, parseErr)
		return
	}

	var b bytes.Buffer
	b.WriteString(attrView.Name)
	b.WriteByte(' ')
	for _, view := range attrView.Views {
		b.WriteString(view.Name)
		b.WriteByte(' ')
	}
	for _, kv := range attrView.KeyValues {
		b.WriteString(kv.Key.Name)
		b.WriteByte(' ')
		for _, val := range kv.Values {
			if val == nil {
				continue
			}
			b.WriteString(val.String(true))
			b.WriteByte(' ')
		}
	}
	content = strings.TrimSpace(b.String())
	return
}
func getBlockValue(keyValues []*av.KeyValues) (ret *av.Value) {
for _, kv := range keyValues {
if av.KeyTypeBlock == kv.Key.Type && 0 < len(kv.Values) {

View file

@ -146,7 +146,7 @@ func NodeStaticContent(node *ast.Node, excludeTypes []string, includeTextMarkATi
if ast.NodeAttributeView == node.Type {
if fullAttrView {
return getAttributeViewContent(node.AttributeViewID)
return av.GetAttributeViewContent(node.AttributeViewID)
}
ret, _ := av.GetAttributeViewName(node.AttributeViewID)