Merge remote-tracking branch 'origin/dev' into dev

Vanessa 2023-11-02 11:20:19 +08:00
commit 05a4fc839e
3 changed files with 219 additions and 6 deletions

View file

@@ -44,7 +44,7 @@ func importSyncProviderWebDAV(c *gin.Context) {
return
}
- files := form.File["file[]"]
+ files := form.File["file"]
if 1 != len(files) {
ret.Code = -1
ret.Msg = "invalid upload file"
@@ -69,11 +69,43 @@ func importSyncProviderWebDAV(c *gin.Context) {
return
}
tmpDir := filepath.Join(util.TempDir, "import")
if err = os.MkdirAll(tmpDir, 0755); nil != err {
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
tmp := filepath.Join(tmpDir, f.Filename)
if err = os.WriteFile(tmp, data, 0644); nil != err {
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = gulu.Zip.Unzip(tmp, tmpDir); nil != err {
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
tmp = filepath.Join(tmpDir, f.Filename[:len(f.Filename)-4])
data, err = os.ReadFile(tmp)
if nil != err {
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
data = util.AESDecrypt(string(data))
data, _ = hex.DecodeString(string(data))
webdav := &conf.WebDAV{}
if err = gulu.JSON.UnmarshalJSON(data, webdav); nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
@@ -81,7 +113,7 @@ func importSyncProviderWebDAV(c *gin.Context) {
err = model.SetSyncProviderWebDAV(webdav)
if nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
logging.LogErrorf("import WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
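
These importSyncProviderWebDAV hunks rename the multipart form field from "file[]" to "file" and unpack the uploaded archive before decrypting it, so callers must send the settings archive under the new field name. A minimal client sketch follows; the endpoint URL, port, and filename are illustrative assumptions, not taken from this commit.

```go
// Minimal client sketch for the import endpoint above. The URL and the filename
// are assumptions for illustration; the relevant detail is the multipart field
// name, which is now "file" (previously "file[]").
package main

import (
	"bytes"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

func main() {
	body := &bytes.Buffer{}
	w := multipart.NewWriter(body)

	// Field name must match what the handler reads: form.File["file"].
	part, err := w.CreateFormFile("file", "webdav-provider.zip")
	if err != nil {
		panic(err)
	}
	f, err := os.Open("webdav-provider.zip")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if _, err = io.Copy(part, f); err != nil {
		panic(err)
	}
	w.Close()

	req, err := http.NewRequest("POST", "http://127.0.0.1:6806/api/sync/importSyncProviderWebDAV", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```
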
@@ -123,7 +155,29 @@ func exportSyncProviderWebDAV(c *gin.Context) {
return
}
- zipPath := "/export/" + name
zipFile, err := gulu.Zip.Create(tmp + ".zip")
if nil != err {
logging.LogErrorf("export WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = zipFile.AddEntry(name, tmp); nil != err {
logging.LogErrorf("export WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = zipFile.Close(); nil != err {
logging.LogErrorf("export WebDAV provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
+ zipPath := "/export/" + name + ".zip"
ret.Data = map[string]interface{}{
"name": name,
"zip": zipPath,
@@ -142,7 +196,7 @@ func importSyncProviderS3(c *gin.Context) {
return
}
- files := form.File["file[]"]
+ files := form.File["file"]
if 1 != len(files) {
ret.Code = -1
ret.Msg = "invalid upload file"
@@ -167,6 +221,38 @@ func importSyncProviderS3(c *gin.Context) {
return
}
tmpDir := filepath.Join(util.TempDir, "import")
if err = os.MkdirAll(tmpDir, 0755); nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
tmp := filepath.Join(tmpDir, f.Filename)
if err = os.WriteFile(tmp, data, 0644); nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = gulu.Zip.Unzip(tmp, tmpDir); nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
tmp = filepath.Join(tmpDir, f.Filename[:len(f.Filename)-4])
data, err = os.ReadFile(tmp)
if nil != err {
logging.LogErrorf("import S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
data = util.AESDecrypt(string(data))
data, _ = hex.DecodeString(string(data))
s3 := &conf.S3{}
@@ -221,7 +307,29 @@ func exportSyncProviderS3(c *gin.Context) {
return
}
- zipPath := "/export/" + name
zipFile, err := gulu.Zip.Create(tmp + ".zip")
if nil != err {
logging.LogErrorf("export S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = zipFile.AddEntry(name, tmp); nil != err {
logging.LogErrorf("export S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
if err = zipFile.Close(); nil != err {
logging.LogErrorf("export S3 provider failed: %s", err)
ret.Code = -1
ret.Msg = err.Error()
return
}
+ zipPath := "/export/" + name + ".zip"
ret.Data = map[string]interface{}{
"name": name,
"zip": zipPath,

View file

@@ -25,6 +25,7 @@ import (
"path/filepath"
"strconv"
"strings"
"sync"
"time"
"github.com/88250/gulu"
@@ -32,6 +33,7 @@ import (
"github.com/siyuan-note/filelock"
"github.com/siyuan-note/logging"
"github.com/siyuan-note/siyuan/kernel/util"
"github.com/vmihailenco/msgpack/v5"
"golang.org/x/text/language"
"golang.org/x/text/message"
)
@@ -584,6 +586,95 @@ func NewAttributeView(id string) (ret *AttributeView) {
return
}
var (
attributeViewBlocksLock = sync.Mutex{}
)
func RemoveBlockRel(avID, blockID string) {
attributeViewBlocksLock.Lock()
defer attributeViewBlocksLock.Unlock()
blocks := filepath.Join(util.DataDir, "storage", "av", "blocks.msgpack")
if !gulu.File.IsExist(blocks) {
return
}
data, err := filelock.ReadFile(blocks)
if nil != err {
logging.LogErrorf("read attribute view blocks failed: %s", err)
return
}
avBlocks := map[string][]string{}
if err = msgpack.Unmarshal(data, &avBlocks); nil != err {
logging.LogErrorf("unmarshal attribute view blocks failed: %s", err)
return
}
blockIDs := avBlocks[avID]
if nil == blockIDs {
return
}
var newBlockIDs []string
for _, v := range blockIDs {
if v != blockID {
newBlockIDs = append(newBlockIDs, v)
}
}
avBlocks[avID] = newBlockIDs
data, err = msgpack.Marshal(avBlocks)
if nil != err {
logging.LogErrorf("marshal attribute view blocks failed: %s", err)
return
}
if err = filelock.WriteFile(blocks, data); nil != err {
logging.LogErrorf("write attribute view blocks failed: %s", err)
return
}
}
func UpsertBlockRel(avID, blockID string) {
attributeViewBlocksLock.Lock()
defer attributeViewBlocksLock.Unlock()
avBlocks := map[string][]string{}
blocks := filepath.Join(util.DataDir, "storage", "av", "blocks.msgpack")
if !gulu.File.IsExist(blocks) {
if err := os.MkdirAll(filepath.Dir(blocks), 0755); nil != err {
logging.LogErrorf("create attribute view dir failed: %s", err)
return
}
} else {
data, err := filelock.ReadFile(blocks)
if nil != err {
logging.LogErrorf("read attribute view blocks failed: %s", err)
return
}
if err = msgpack.Unmarshal(data, &avBlocks); nil != err {
logging.LogErrorf("unmarshal attribute view blocks failed: %s", err)
return
}
}
blockIDs := avBlocks[avID]
blockIDs = append(blockIDs, blockID)
blockIDs = gulu.Str.RemoveDuplicatedElem(blockIDs)
avBlocks[avID] = blockIDs
data, err := msgpack.Marshal(avBlocks)
if nil != err {
logging.LogErrorf("marshal attribute view blocks failed: %s", err)
return
}
if err = filelock.WriteFile(blocks, data); nil != err {
logging.LogErrorf("write attribute view blocks failed: %s", err)
return
}
}
func ParseAttributeView(avID string) (ret *AttributeView, err error) {
avJSONPath := GetAttributeViewDataPath(avID)
if !gulu.File.IsExist(avJSONPath) {
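
RemoveBlockRel and UpsertBlockRel persist a plain map from attribute view ID to the IDs of the blocks that reference it, serialized with msgpack under storage/av/blocks.msgpack and guarded by attributeViewBlocksLock. Below is a standalone sketch of that round trip; the IDs and the output path are illustrative, and the filelock wrapper used above is omitted.

```go
// Standalone sketch of the blocks.msgpack round trip used by the functions
// above. IDs and the output path are illustrative; locking is omitted.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/vmihailenco/msgpack/v5"
)

func main() {
	// avID -> IDs of the attribute-view blocks that reference it.
	avBlocks := map[string][]string{
		"20231102112019-abcdefg": {"20231102112020-hijklmn"},
	}

	data, err := msgpack.Marshal(avBlocks)
	if err != nil {
		log.Fatalf("marshal attribute view blocks failed: %s", err)
	}
	if err = os.WriteFile("blocks.msgpack", data, 0644); err != nil {
		log.Fatalf("write attribute view blocks failed: %s", err)
	}

	data, err = os.ReadFile("blocks.msgpack")
	if err != nil {
		log.Fatalf("read attribute view blocks failed: %s", err)
	}
	loaded := map[string][]string{}
	if err = msgpack.Unmarshal(data, &loaded); err != nil {
		log.Fatalf("unmarshal attribute view blocks failed: %s", err)
	}
	fmt.Println(loaded)
}
```
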

View file

@@ -712,6 +712,10 @@ func (tx *Transaction) doDelete(operation *Operation) (ret *TxErr) {
}
syncDelete2AttributeView(node)
if ast.NodeAttributeView == node.Type {
avID := node.AttributeViewID
av.RemoveBlockRel(avID, node.ID)
}
return
}
@@ -898,6 +902,11 @@ func (tx *Transaction) doInsert(operation *Operation) (ret *TxErr) {
return &TxErr{code: TxErrCodeWriteTree, msg: err.Error(), id: block.ID}
}
if ast.NodeAttributeView == insertedNode.Type {
avID := insertedNode.AttributeViewID
av.UpsertBlockRel(avID, insertedNode.ID)
}
operation.ID = insertedNode.ID
operation.ParentID = insertedNode.Parent.ID
return
@@ -986,6 +995,11 @@ func (tx *Transaction) doUpdate(operation *Operation) (ret *TxErr) {
if err = tx.writeTree(tree); nil != err {
return &TxErr{code: TxErrCodeWriteTree, msg: err.Error(), id: id}
}
if ast.NodeAttributeView == updatedNode.Type {
avID := updatedNode.AttributeViewID
av.UpsertBlockRel(avID, updatedNode.ID)
}
return
}
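
The three transaction hooks share one shape: doDelete drops the relation for an attribute view node, while doInsert and doUpdate upsert it, always keyed by the node's AttributeViewID and its own ID. A hypothetical consolidation is sketched below; the helper name is invented, it is not part of this commit, and it assumes the lute ast and kernel av imports already present in the transaction file.

```go
// Hypothetical helper, not part of this commit: one place for the three call
// sites above (doDelete, doInsert, doUpdate).
func syncAttributeViewBlockRel(node *ast.Node, deleted bool) {
	if ast.NodeAttributeView != node.Type {
		return
	}
	avID := node.AttributeViewID
	if deleted {
		av.RemoveBlockRel(avID, node.ID)
		return
	}
	av.UpsertBlockRel(avID, node.ID)
}
```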