A privacy-first, self-hosted, fully open-source personal knowledge management system, written in TypeScript and Go. (PERSONAL FORK)
// SiYuan - Refactor your thinking
// Copyright (c) 2020-present, b3log.org
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

package model

import (
	"bytes"
	"fmt"
	"path"
	"sort"
	"strconv"
	"strings"

	"github.com/88250/gulu"
	"github.com/88250/lute"
	"github.com/88250/lute/ast"
	"github.com/88250/lute/parse"
	"github.com/emirpasic/gods/sets/hashset"
	"github.com/siyuan-note/logging"
	"github.com/siyuan-note/siyuan/kernel/filesys"
	"github.com/siyuan-note/siyuan/kernel/search"
	"github.com/siyuan-note/siyuan/kernel/sql"
	"github.com/siyuan-note/siyuan/kernel/task"
	"github.com/siyuan-note/siyuan/kernel/treenode"
	"github.com/siyuan-note/siyuan/kernel/util"
)

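// RefreshBacklink flushes the pending transaction queue and then refreshes all refs that point to the block [id].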
func RefreshBacklink(id string) {
	FlushTxQueue()
	refreshRefsByDefID(id)
}

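// refreshRefsByDefID re-indexes the refs of every tree that references [defID] and schedules ref-count refresh tasks.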
func refreshRefsByDefID(defID string) {
	refs := sql.QueryRefsByDefID(defID, true)
	var rootIDs []string
	for _, ref := range refs {
		rootIDs = append(rootIDs, ref.RootID)
		task.AppendAsyncTaskWithDelay(task.SetDefRefCount, util.SQLFlushInterval, refreshRefCount, ref.DefBlockID)
	}
	rootIDs = gulu.Str.RemoveDuplicatedElem(rootIDs)
	trees := filesys.LoadTrees(rootIDs)
	for _, tree := range trees {
		sql.UpdateRefsTreeQueue(tree)
		task.AppendAsyncTaskWithDelay(task.SetDefRefCount, util.SQLFlushInterval, refreshRefCount, tree.ID)
	}
	if bt := treenode.GetBlockTree(defID); nil != bt {
		task.AppendAsyncTaskWithDelay(task.SetDefRefCount, util.SQLFlushInterval, refreshRefCount, defID)
	}
}

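// Backlink is a single rendered backlink (or backmention) item returned to the frontend: its DOM, its breadcrumb paths,
// and whether it should be expanded.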
type Backlink struct {
	DOM        string       `json:"dom"`
	BlockPaths []*BlockPath `json:"blockPaths"`
	Expand     bool         `json:"expand"`

	node *ast.Node // only used for sorting by position in the document
}

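// GetBackmentionDoc returns the rendered backmentions of the block [defID] that are located in the tree [refTreeID],
// optionally filtered by [keyword] and highlighted.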
func GetBackmentionDoc(defID, refTreeID, keyword string, containChildren, highlight bool) (ret []*Backlink, keywords []string) {
	keyword = strings.TrimSpace(keyword)
	if "" != keyword {
		keywords = strings.Split(keyword, " ")
	}
	ret = []*Backlink{}
	beforeLen := 12
	sqlBlock := sql.GetBlock(defID)
	if nil == sqlBlock {
		return
	}
	rootID := sqlBlock.RootID

	refs := sql.QueryRefsByDefID(defID, containChildren)
	refs = removeDuplicatedRefs(refs)

	linkRefs, _, excludeBacklinkIDs, originalRefBlockIDs := buildLinkRefs(rootID, refs, keywords)
	tmpMentions, mentionKeywords := buildTreeBackmention(sqlBlock, linkRefs, keyword, excludeBacklinkIDs, beforeLen)
	luteEngine := util.NewLute()
	var mentions []*Block
	for _, mention := range tmpMentions {
		if mention.RootID == refTreeID {
			mentions = append(mentions, mention)
		}
	}
	var mentionBlockIDs []string
	for _, mention := range mentions {
		mentionBlockIDs = append(mentionBlockIDs, mention.ID)
	}
	mentionBlockIDs = gulu.Str.RemoveDuplicatedElem(mentionBlockIDs)

	if "" != keyword {
		mentionKeywords = append(mentionKeywords, strings.Split(keyword, " ")...)
	}
	mentionKeywords = gulu.Str.RemoveDuplicatedElem(mentionKeywords)
	keywords = append(keywords, mentionKeywords...)
	keywords = gulu.Str.RemoveDuplicatedElem(keywords)
	if 1 > len(keywords) {
		keywords = []string{}
	}

	var refTree *parse.Tree
	trees := filesys.LoadTrees(mentionBlockIDs)
	for id, tree := range trees {
		backlink := buildBacklink(id, tree, originalRefBlockIDs, mentionKeywords, highlight, luteEngine)
		if nil != backlink {
			ret = append(ret, backlink)
		}
		if nil != tree && nil == refTree {
			refTree = tree
		}
	}

	if 0 < len(trees) {
		sortBacklinks(ret, refTree)
		filterBlockPaths(ret)
	}
	return
}

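// GetBacklinkDoc returns the rendered backlinks of the block [defID] that are located in the tree [refTreeID],
// optionally filtered by [keyword] and highlighted.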
func GetBacklinkDoc(defID, refTreeID, keyword string, containChildren, highlight bool) (ret []*Backlink, keywords []string) {
	keyword = strings.TrimSpace(keyword)
	if "" != keyword {
		keywords = strings.Split(keyword, " ")
	}
	keywords = gulu.Str.RemoveDuplicatedElem(keywords)
	if 1 > len(keywords) {
		keywords = []string{}
	}

	ret = []*Backlink{}
	sqlBlock := sql.GetBlock(defID)
	if nil == sqlBlock {
		return
	}
	rootID := sqlBlock.RootID

	tmpRefs := sql.QueryRefsByDefID(defID, containChildren)
	var refs []*sql.Ref
	for _, ref := range tmpRefs {
		if ref.RootID == refTreeID {
			refs = append(refs, ref)
		}
	}
	refs = removeDuplicatedRefs(refs)

	linkRefs, _, _, originalRefBlockIDs := buildLinkRefs(rootID, refs, keywords)
	refTree, err := LoadTreeByBlockID(refTreeID)
	if err != nil {
		logging.LogWarnf("load ref tree [%s] failed: %s", refTreeID, err)
		return
	}

	luteEngine := util.NewLute()
	for _, linkRef := range linkRefs {
		backlink := buildBacklink(linkRef.ID, refTree, originalRefBlockIDs, keywords, highlight, luteEngine)
		if nil != backlink {
			ret = append(ret, backlink)
		}
	}

	sortBacklinks(ret, refTree)
	filterBlockPaths(ret)
	return
}

func filterBlockPaths(blockLinks []*Backlink) {
	for _, b := range blockLinks {
		if 2 == len(b.BlockPaths) {
			// Only one level below the document root, so do not show the breadcrumb
			b.BlockPaths = []*BlockPath{}
		}
	}
	return
}

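// sortBacklinks orders the backlinks by the position of their nodes in the referencing document [tree].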
func sortBacklinks(backlinks []*Backlink, tree *parse.Tree) {
	contentSorts := map[string]int{}
	sortVal := 0
	ast.Walk(tree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
		if !entering || !n.IsBlock() {
			return ast.WalkContinue
		}

		contentSorts[n.ID] = sortVal
		sortVal++
		return ast.WalkContinue
	})

	sort.Slice(backlinks, func(i, j int) bool {
		s1 := contentSorts[backlinks[i].node.ID]
		s2 := contentSorts[backlinks[j].node.ID]
		return s1 < s2
	})
}

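// buildBacklink renders the ref node [refID] in [refTree] (together with its surrounding context nodes) into a Backlink,
// optionally marking keyword hits.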
func buildBacklink(refID string, refTree *parse.Tree, originalRefBlockIDs map[string]string, keywords []string, highlight bool, luteEngine *lute.Lute) (ret *Backlink) {
	node := treenode.GetNodeInTree(refTree, refID)
	if nil == node {
		return
	}

	renderNodes, expand := getBacklinkRenderNodes(node, originalRefBlockIDs)

	if highlight && 0 < len(keywords) {
		for _, renderNode := range renderNodes {
			var unlinks []*ast.Node

			ast.Walk(renderNode, func(n *ast.Node, entering bool) ast.WalkStatus {
				if !entering {
					return ast.WalkContinue
				}

				if n.IsBlock() {
					return ast.WalkContinue
				}

				markReplaceSpan(n, &unlinks, keywords, search.MarkDataType, luteEngine)
				return ast.WalkContinue
			})

			for _, unlink := range unlinks {
				unlink.Unlink()
			}
		}
	}

	// Display reference counts in the backlink panel https://github.com/siyuan-note/siyuan/issues/13618
	fillBlockRefCount(renderNodes)

	dom := renderBlockDOMByNodes(renderNodes, luteEngine)
	var blockPaths []*BlockPath
	if (nil != node.Parent && ast.NodeDocument != node.Parent.Type) || (ast.NodeHeading != node.Type && 0 < treenode.HeadingLevel(node)) {
		blockPaths = buildBlockBreadcrumb(node, nil, false)
	}
	if 1 > len(blockPaths) {
		blockPaths = []*BlockPath{}
	}
	ret = &Backlink{DOM: dom, BlockPaths: blockPaths, Expand: expand, node: node}
	return
}

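// getBacklinkRenderNodes returns the nodes to render for the ref node [n] (for example a list item together with its
// sub-list, or a heading together with its children) and reports whether the rendered item should be expanded.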
func getBacklinkRenderNodes(n *ast.Node, originalRefBlockIDs map[string]string) (ret []*ast.Node, expand bool) {
	expand = true
	if ast.NodeListItem == n.Type {
		if nil == n.FirstChild {
			return
		}

		c := n.FirstChild
		if 3 == n.ListData.Typ {
			c = n.FirstChild.Next
		}

		if c != n.LastChild { // a sub-list exists
			for ; nil != c; c = c.Next {
				if originalRefBlockIDs[n.ID] != c.ID {
					continue
				}

				for liFirstBlockSpan := c.FirstChild; nil != liFirstBlockSpan; liFirstBlockSpan = liFirstBlockSpan.Next {
					if treenode.IsBlockRef(liFirstBlockSpan) {
						continue
					}
					if "" != strings.TrimSpace(liFirstBlockSpan.Text()) {
						expand = false
						break
					}
				}
			}
		}

		ret = append(ret, n)
	} else if ast.NodeHeading == n.Type {
		c := n.FirstChild
		if nil == c {
			return
		}

		for headingFirstSpan := c; nil != headingFirstSpan; headingFirstSpan = headingFirstSpan.Next {
			if treenode.IsBlockRef(headingFirstSpan) {
				continue
			}
			if "" != strings.TrimSpace(headingFirstSpan.Text()) {
				expand = false
				break
			}
		}

		ret = append(ret, n)
		cc := treenode.HeadingChildren(n)
		ret = append(ret, cc...)
	} else {
		ret = append(ret, n)
	}
	return
}

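// GetBacklink2 returns the backlink and backmention paths of the block [id] for the backlink panel,
// sorted by [sortMode] and [mentionSortMode], together with their counts.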
func GetBacklink2(id, keyword, mentionKeyword string, sortMode, mentionSortMode int, containChildren bool) (boxID string, backlinks, backmentions []*Path, linkRefsCount, mentionsCount int) {
	keyword = strings.TrimSpace(keyword)
	var keywords []string
	if "" != keyword {
		keywords = strings.Split(keyword, " ")
	}
	mentionKeyword = strings.TrimSpace(mentionKeyword)
	backlinks, backmentions = []*Path{}, []*Path{}

	sqlBlock := sql.GetBlock(id)
	if nil == sqlBlock {
		return
	}
	rootID := sqlBlock.RootID
	boxID = sqlBlock.Box

	refs := sql.QueryRefsByDefID(id, containChildren)
	refs = removeDuplicatedRefs(refs)

	linkRefs, linkRefsCount, excludeBacklinkIDs, _ := buildLinkRefs(rootID, refs, keywords)
	tmpBacklinks := toFlatTree(linkRefs, 0, "backlink", nil)
	for _, l := range tmpBacklinks {
		l.Blocks = nil
		backlinks = append(backlinks, l)
	}

	sort.Slice(backlinks, func(i, j int) bool {
		switch sortMode {
		case util.SortModeUpdatedDESC:
			return backlinks[i].Updated > backlinks[j].Updated
		case util.SortModeUpdatedASC:
			return backlinks[i].Updated < backlinks[j].Updated
		case util.SortModeCreatedDESC:
			return backlinks[i].Created > backlinks[j].Created
		case util.SortModeCreatedASC:
			return backlinks[i].Created < backlinks[j].Created
		case util.SortModeNameDESC:
			return util.PinYinCompare(backlinks[j].Name, backlinks[i].Name)
		case util.SortModeNameASC:
			return util.PinYinCompare(backlinks[i].Name, backlinks[j].Name)
		case util.SortModeAlphanumDESC:
			return util.NaturalCompare(backlinks[j].Name, backlinks[i].Name)
		case util.SortModeAlphanumASC:
			return util.NaturalCompare(backlinks[i].Name, backlinks[j].Name)
		}
		return backlinks[i].ID > backlinks[j].ID
	})

	mentionRefs, _ := buildTreeBackmention(sqlBlock, linkRefs, mentionKeyword, excludeBacklinkIDs, 12)
	tmpBackmentions := toFlatTree(mentionRefs, 0, "backlink", nil)
	for _, l := range tmpBackmentions {
		l.Blocks = nil
		backmentions = append(backmentions, l)
	}

	sort.Slice(backmentions, func(i, j int) bool {
		switch mentionSortMode {
		case util.SortModeUpdatedDESC:
			return backmentions[i].Updated > backmentions[j].Updated
		case util.SortModeUpdatedASC:
			return backmentions[i].Updated < backmentions[j].Updated
		case util.SortModeCreatedDESC:
			return backmentions[i].Created > backmentions[j].Created
		case util.SortModeCreatedASC:
			return backmentions[i].Created < backmentions[j].Created
		case util.SortModeNameDESC:
			return util.PinYinCompare(backmentions[j].Name, backmentions[i].Name)
		case util.SortModeNameASC:
			return util.PinYinCompare(backmentions[i].Name, backmentions[j].Name)
		case util.SortModeAlphanumDESC:
			return util.NaturalCompare(backmentions[j].Name, backmentions[i].Name)
		case util.SortModeAlphanumASC:
			return util.NaturalCompare(backmentions[i].Name, backmentions[j].Name)
		}
		return backmentions[i].ID > backmentions[j].ID
	})

	for _, backmention := range backmentions {
		mentionsCount += backmention.Count
	}

	// Prepend notebook names to the human-readable paths
	var boxIDs []string
	for _, l := range backlinks {
		boxIDs = append(boxIDs, l.Box)
	}
	for _, l := range backmentions {
		boxIDs = append(boxIDs, l.Box)
	}
	boxIDs = gulu.Str.RemoveDuplicatedElem(boxIDs)
	boxNames := Conf.BoxNames(boxIDs)
	for _, l := range backlinks {
		name := boxNames[l.Box]
		l.HPath = name + l.HPath
	}
	for _, l := range backmentions {
		name := boxNames[l.Box]
		l.HPath = name + l.HPath
	}
	return
}

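// GetBacklink returns the backlink paths of the block [id] as a sub-tree and its backmention paths as a flat tree,
// together with their counts.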
func GetBacklink(id, keyword, mentionKeyword string, beforeLen int, containChildren bool) (boxID string, linkPaths, mentionPaths []*Path, linkRefsCount, mentionsCount int) {
	linkPaths = []*Path{}
	mentionPaths = []*Path{}

	sqlBlock := sql.GetBlock(id)
	if nil == sqlBlock {
		return
	}
	rootID := sqlBlock.RootID
	boxID = sqlBlock.Box

	var links []*Block
	refs := sql.QueryRefsByDefID(id, containChildren)
	refs = removeDuplicatedRefs(refs)

	// To reduce the number of queries, assemble the IDs and fetch the blocks in one batch
	defSQLBlockIDs, refSQLBlockIDs := map[string]bool{}, map[string]bool{}
	var queryBlockIDs []string
	for _, ref := range refs {
		defSQLBlockIDs[ref.DefBlockID] = true
		refSQLBlockIDs[ref.BlockID] = true
		queryBlockIDs = append(queryBlockIDs, ref.DefBlockID)
		queryBlockIDs = append(queryBlockIDs, ref.BlockID)
	}
	querySQLBlocks := sql.GetBlocks(queryBlockIDs)
	defSQLBlocksCache := map[string]*sql.Block{}
	for _, defSQLBlock := range querySQLBlocks {
		if nil != defSQLBlock && defSQLBlockIDs[defSQLBlock.ID] {
			defSQLBlocksCache[defSQLBlock.ID] = defSQLBlock
		}
	}
	refSQLBlocksCache := map[string]*sql.Block{}
	for _, refSQLBlock := range querySQLBlocks {
		if nil != refSQLBlock && refSQLBlockIDs[refSQLBlock.ID] {
			refSQLBlocksCache[refSQLBlock.ID] = refSQLBlock
		}
	}

	excludeBacklinkIDs := hashset.New()
	for _, ref := range refs {
		defSQLBlock := defSQLBlocksCache[ref.DefBlockID]
		if nil == defSQLBlock {
			continue
		}

		refSQLBlock := refSQLBlocksCache[ref.BlockID]
		if nil == refSQLBlock {
			continue
		}
		refBlock := fromSQLBlock(refSQLBlock, "", beforeLen)
		if rootID == refBlock.RootID { // exclude refs and mentions within the current document
			excludeBacklinkIDs.Add(refBlock.RootID, refBlock.ID)
		}
		defBlock := fromSQLBlock(defSQLBlock, "", beforeLen)
		if defBlock.RootID == rootID { // definition block within the current document
			links = append(links, defBlock)
			if ref.DefBlockID == defBlock.ID {
				defBlock.Refs = append(defBlock.Refs, refBlock)
			}
		}
	}

	for _, link := range links {
		for _, ref := range link.Refs {
			excludeBacklinkIDs.Add(ref.RootID, ref.ID)
		}
		linkRefsCount += len(link.Refs)
	}

	var linkRefs []*Block
	processedParagraphs := hashset.New()
	var paragraphParentIDs []string
	for _, link := range links {
		for _, ref := range link.Refs {
			if "NodeParagraph" == ref.Type {
				paragraphParentIDs = append(paragraphParentIDs, ref.ParentID)
			}
		}
	}
	paragraphParents := sql.GetBlocks(paragraphParentIDs)
	for _, p := range paragraphParents {
		if nil == p {
			continue
		}

		if "i" == p.Type || "h" == p.Type {
			linkRefs = append(linkRefs, fromSQLBlock(p, keyword, beforeLen))
			processedParagraphs.Add(p.ID)
		}
	}
	for _, link := range links {
		for _, ref := range link.Refs {
			if "NodeParagraph" == ref.Type {
				if processedParagraphs.Contains(ref.ParentID) {
					continue
				}
			}

			ref.DefID = link.ID
			ref.DefPath = link.Path

			content := ref.Content
			if "" != keyword {
				_, content = search.MarkText(content, keyword, beforeLen, Conf.Search.CaseSensitive)
				ref.Content = content
			}
			linkRefs = append(linkRefs, ref)
		}
	}
	linkPaths = toSubTree(linkRefs, keyword)

	mentions, _ := buildTreeBackmention(sqlBlock, linkRefs, mentionKeyword, excludeBacklinkIDs, beforeLen)
	mentionsCount = len(mentions)
	mentionPaths = toFlatTree(mentions, 0, "backlink", nil)
	return
}

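// buildLinkRefs collects the blocks that reference the document [defRootID], filters them by [keywords], and returns
// the matched ref blocks, the total ref count, the IDs to exclude from backmention search, and a mapping from rendered
// parent blocks to the original ref block IDs.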
func buildLinkRefs(defRootID string, refs []*sql.Ref, keywords []string) (ret []*Block, refsCount int, excludeBacklinkIDs *hashset.Set, originalRefBlockIDs map[string]string) {
	// To reduce the number of queries, assemble the IDs and fetch the blocks in one batch
	defSQLBlockIDs, refSQLBlockIDs := map[string]bool{}, map[string]bool{}
	var queryBlockIDs []string
	for _, ref := range refs {
		defSQLBlockIDs[ref.DefBlockID] = true
		refSQLBlockIDs[ref.BlockID] = true
		queryBlockIDs = append(queryBlockIDs, ref.DefBlockID)
		queryBlockIDs = append(queryBlockIDs, ref.BlockID)
	}
	queryBlockIDs = gulu.Str.RemoveDuplicatedElem(queryBlockIDs)
	querySQLBlocks := sql.GetBlocks(queryBlockIDs)
	defSQLBlocksCache := map[string]*sql.Block{}
	for _, defSQLBlock := range querySQLBlocks {
		if nil != defSQLBlock && defSQLBlockIDs[defSQLBlock.ID] {
			defSQLBlocksCache[defSQLBlock.ID] = defSQLBlock
		}
	}
	refSQLBlocksCache := map[string]*sql.Block{}
	for _, refSQLBlock := range querySQLBlocks {
		if nil != refSQLBlock && refSQLBlockIDs[refSQLBlock.ID] {
			refSQLBlocksCache[refSQLBlock.ID] = refSQLBlock
		}
	}

	var links []*Block
	excludeBacklinkIDs = hashset.New()
	for _, ref := range refs {
		defSQLBlock := defSQLBlocksCache[ref.DefBlockID]
		if nil == defSQLBlock {
			continue
		}

		refSQLBlock := refSQLBlocksCache[ref.BlockID]
		if nil == refSQLBlock {
			continue
		}
		refBlock := fromSQLBlock(refSQLBlock, "", 12)
		if defRootID == refBlock.RootID { // exclude refs and mentions within the current document
			excludeBacklinkIDs.Add(refBlock.RootID, refBlock.ID)
		}
		defBlock := fromSQLBlock(defSQLBlock, "", 12)
		if defBlock.RootID == defRootID { // definition block within the current document
			links = append(links, defBlock)
			if ref.DefBlockID == defBlock.ID {
				defBlock.Refs = append(defBlock.Refs, refBlock)
			}
		}
	}

	for _, link := range links {
		for _, ref := range link.Refs {
			excludeBacklinkIDs.Add(ref.RootID, ref.ID)
		}
		refsCount += len(link.Refs)
	}

	parentRefParagraphs := map[string]*Block{}
	var paragraphParentIDs []string
	for _, link := range links {
		for _, ref := range link.Refs {
			if "NodeParagraph" == ref.Type {
				parentRefParagraphs[ref.ParentID] = ref
				paragraphParentIDs = append(paragraphParentIDs, ref.ParentID)
			}
		}
	}
	refsCountDelta := len(paragraphParentIDs)
	paragraphParentIDs = gulu.Str.RemoveDuplicatedElem(paragraphParentIDs)
	refsCountDelta -= len(paragraphParentIDs)
	refsCount -= refsCountDelta
	sqlParagraphParents := sql.GetBlocks(paragraphParentIDs)
	paragraphParents := fromSQLBlocks(&sqlParagraphParents, "", 12)

	luteEngine := util.NewLute()
	originalRefBlockIDs = map[string]string{}
	processedParagraphs := hashset.New()
	for _, parent := range paragraphParents {
		if nil == parent {
			continue
		}

		if "NodeListItem" == parent.Type || "NodeBlockquote" == parent.Type || "NodeSuperBlock" == parent.Type {
			refBlock := parentRefParagraphs[parent.ID]
			if nil == refBlock {
				continue
			}

			paragraphUseParentLi := true
			if "NodeListItem" == parent.Type && parent.FContent != refBlock.Content {
				if inlineTree := parse.Inline("", []byte(refBlock.Markdown), luteEngine.ParseOptions); nil != inlineTree {
					for c := inlineTree.Root.FirstChild.FirstChild; c != nil; c = c.Next {
						if treenode.IsBlockRef(c) {
							continue
						}

						if "" != strings.TrimSpace(c.Text()) {
							paragraphUseParentLi = false
							break
						}
					}
				}
			}

			if paragraphUseParentLi {
				processedParagraphs.Add(parent.ID)
			}

			originalRefBlockIDs[parent.ID] = refBlock.ID
			if !matchBacklinkKeyword(parent, keywords) {
				refsCount--
				continue
			}

			if paragraphUseParentLi {
				ret = append(ret, parent)
			}
		}
	}
	for _, link := range links {
		for _, ref := range link.Refs {
			if "NodeParagraph" == ref.Type {
				if processedParagraphs.Contains(ref.ParentID) {
					continue
				}
			}

			if !matchBacklinkKeyword(ref, keywords) {
				refsCount--
				continue
			}

			ref.DefID = link.ID
			ref.DefPath = link.Path
			ret = append(ret, ref)
		}
	}

	if 0 < len(keywords) {
		// Improve backlink filtering below the heading https://github.com/siyuan-note/siyuan/issues/14929
		headingRefChildren := map[string]*Block{}
		var headingIDs []string
		for _, link := range links {
			for _, ref := range link.Refs {
				if "NodeHeading" == ref.Type {
					headingRefChildren[ref.ID] = ref
					headingIDs = append(headingIDs, ref.ID)
				}
			}
		}
		var headingChildren []*Block
		for _, headingID := range headingIDs {
			sqlChildren := sql.GetChildBlocks(headingID, "", -1)
			children := fromSQLBlocks(&sqlChildren, "", 12)
			headingChildren = append(headingChildren, children...)
		}
		for _, child := range headingChildren {
			if nil == child {
				continue
			}

			if matchBacklinkKeyword(child, keywords) {
				heading := headingRefChildren[child.ParentID]
				if nil != heading && !existBlock(heading, ret) {
					ret = append(ret, heading)
				}
			}
		}
	}
	return
}

func existBlock(block *Block, blocks []*Block) bool {
	for _, b := range blocks {
		if block.ID == b.ID {
			return true
		}
	}
	return false
}

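// matchBacklinkKeyword reports whether the block matches any of the filter keywords by content, document name,
// name, alias, memo or tag (case-insensitive). An empty keyword list matches every block.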
func matchBacklinkKeyword(block *Block, keywords []string) bool {
	if 1 > len(keywords) {
		return true
	}

	for _, k := range keywords {
		k = strings.ToLower(k)
		if strings.Contains(strings.ToLower(block.Content), k) ||
			strings.Contains(strings.ToLower(path.Base(block.HPath)), k) ||
			strings.Contains(strings.ToLower(block.Name), k) ||
			strings.Contains(strings.ToLower(block.Alias), k) ||
			strings.Contains(strings.ToLower(block.Memo), k) ||
			strings.Contains(strings.ToLower(block.Tag), k) {
			return true
		}
	}
	return false
}

func removeDuplicatedRefs(refs []*sql.Ref) (ret []*sql.Ref) {
	// De-duplication of backlinks after referencing multiple blocks in the same block
	// https://github.com/siyuan-note/siyuan/issues/12147

	for _, ref := range refs {
		contain := false
		for _, r := range ret {
			if ref.BlockID == r.BlockID {
				contain = true
				break
			}
		}
		if !contain {
			ret = append(ret, ref)
		}
	}
	return
}

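// buildTreeBackmention collects the mention keywords for the definition block (document name, block name, aliases and
// anchor texts, depending on the search settings) and searches other documents for backmentions.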
func buildTreeBackmention(defSQLBlock *sql.Block, refBlocks []*Block, keyword string, excludeBacklinkIDs *hashset.Set, beforeLen int) (ret []*Block, mentionKeywords []string) {
	ret = []*Block{}

	var names, aliases []string
	var fName, rootID string
	if "d" == defSQLBlock.Type {
		if Conf.Search.BacklinkMentionName {
			names = sql.QueryBlockNamesByRootID(defSQLBlock.ID)
		}
		if Conf.Search.BacklinkMentionAlias {
			aliases = sql.QueryBlockAliases(defSQLBlock.ID)
		}
		if Conf.Search.BacklinkMentionDoc {
			fName = path.Base(defSQLBlock.HPath)
		}
		rootID = defSQLBlock.ID
	} else {
		if Conf.Search.BacklinkMentionName {
			if "" != defSQLBlock.Name {
				names = append(names, defSQLBlock.Name)
			}
		}
		if Conf.Search.BacklinkMentionAlias {
			if "" != defSQLBlock.Alias {
				aliases = strings.Split(defSQLBlock.Alias, ",")
			}
		}
		root := treenode.GetBlockTree(defSQLBlock.RootID)
		rootID = root.ID
	}

	set := hashset.New()
	for _, name := range names {
		set.Add(name)
	}
	for _, alias := range aliases {
		set.Add(alias)
	}
	if "" != fName {
		set.Add(fName)
	}

	if Conf.Search.BacklinkMentionAnchor {
		for _, refBlock := range refBlocks {
			refs := sql.QueryRefsByDefIDRefID(refBlock.DefID, refBlock.ID)
			for _, ref := range refs {
				set.Add(ref.Content)
			}
		}
	}

	for _, v := range set.Values() {
		mentionKeywords = append(mentionKeywords, v.(string))
	}
	mentionKeywords = prepareMarkKeywords(mentionKeywords)
	mentionKeywords, ret = searchBackmention(mentionKeywords, keyword, excludeBacklinkIDs, rootID, beforeLen)
	return
}

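// searchBackmention runs a full-text query for the mention keywords (excluding the definition block's own document),
// keeps only blocks whose text, name, alias or memo actually hits a keyword, and returns the hit keywords and blocks.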
func searchBackmention(mentionKeywords []string, keyword string, excludeBacklinkIDs *hashset.Set, rootID string, beforeLen int) (retMentionKeywords []string, ret []*Block) {
	ret = []*Block{}
	if 1 > len(mentionKeywords) {
		return
	}

	table := "blocks_fts" // case-sensitive
	if !Conf.Search.CaseSensitive {
		table = "blocks_fts_case_insensitive"
	}

	buf := bytes.Buffer{}
	buf.WriteString("SELECT * FROM " + table + " WHERE " + table + " MATCH '" + columnFilter() + ":(")
	for i, mentionKeyword := range mentionKeywords {
		if Conf.Search.BacklinkMentionKeywordsLimit < i {
			util.PushMsg(fmt.Sprintf(Conf.Language(38), len(mentionKeywords)), 5000)
			mentionKeyword = strings.ReplaceAll(mentionKeyword, "\"", "\"\"")
			buf.WriteString("\"" + mentionKeyword + "\"")
			break
		}

		mentionKeyword = strings.ReplaceAll(mentionKeyword, "\"", "\"\"")
		buf.WriteString("\"" + mentionKeyword + "\"")
		if i < len(mentionKeywords)-1 {
			buf.WriteString(" OR ")
		}
	}
	buf.WriteString(")")
	if "" != keyword {
		keyword = strings.ReplaceAll(keyword, "\"", "\"\"")
		buf.WriteString(" AND (\"" + keyword + "\")")
	}
	buf.WriteString("'")
	buf.WriteString(" AND root_id != '" + rootID + "'") // do not search within the document that contains the definition block
	buf.WriteString(" AND type IN ('d', 'h', 'p', 't')")
	buf.WriteString(" ORDER BY id DESC LIMIT " + strconv.Itoa(Conf.Search.Limit))
	query := buf.String()

	sqlBlocks := sql.SelectBlocksRawStmt(query, 1, Conf.Search.Limit)
	terms := mentionKeywords
	if "" != keyword {
		terms = append(terms, keyword)
	}
	blocks := fromSQLBlocks(&sqlBlocks, strings.Join(terms, search.TermSep), beforeLen)

	luteEngine := util.NewLute()
	var tmp []*Block
	for _, b := range blocks {
		tree := parse.Parse("", gulu.Str.ToBytes(b.Markdown), luteEngine.ParseOptions)
		if nil == tree {
			continue
		}

		textBuf := &bytes.Buffer{}
		ast.Walk(tree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
			if !entering || n.IsBlock() {
				return ast.WalkContinue
			}
			if ast.NodeText == n.Type /* NodeText also covers tag hits */ || ast.NodeLinkText == n.Type {
				textBuf.Write(n.Tokens)
			}
			return ast.WalkContinue
		})

		text := textBuf.String()
		text = strings.TrimSpace(text)
		if "" == text {
			continue
		}

		newText := markReplaceSpanWithSplit(text, mentionKeywords, search.GetMarkSpanStart(search.MarkDataType), search.GetMarkSpanEnd())
		if text != newText {
			tmp = append(tmp, b)

			k := gulu.Str.SubstringsBetween(newText, search.GetMarkSpanStart(search.MarkDataType), search.GetMarkSpanEnd())
			retMentionKeywords = append(retMentionKeywords, k...)
		} else {
			// Name, alias and memo hits via columnFilter
			// Add name, alias and memo to the backlink mention search scope https://github.com/siyuan-note/siyuan/issues/7639
			if gulu.Str.Contains(trimMarkTags(b.Name), mentionKeywords) ||
				gulu.Str.Contains(trimMarkTags(b.Alias), mentionKeywords) ||
				gulu.Str.Contains(trimMarkTags(b.Memo), mentionKeywords) {
				tmp = append(tmp, b)
			}
		}
	}
	blocks = tmp
	retMentionKeywords = gulu.Str.RemoveDuplicatedElem(retMentionKeywords)
	mentionKeywords = retMentionKeywords

	mentionBlockMap := map[string]*Block{}
	for _, block := range blocks {
		mentionBlockMap[block.ID] = block

		refText := getContainStr(block.Content, mentionKeywords)
		block.RefText = refText
	}

	for _, mentionBlock := range mentionBlockMap {
		if !excludeBacklinkIDs.Contains(mentionBlock.ID) {
			ret = append(ret, mentionBlock)
		}
	}

	sort.SliceStable(ret, func(i, j int) bool {
		return ret[i].ID > ret[j].ID
	})
	return
}

func trimMarkTags(str string) string {
	return strings.TrimSuffix(strings.TrimPrefix(str, "<mark>"), "</mark>")
}

func getContainStr(str string, strs []string) string {
	str = strings.ToLower(str)
	for _, s := range strs {
		if strings.Contains(str, strings.ToLower(s)) {
			return s
		}
	}
	return ""
}

// buildFullLinks builds the forward link and backlink lists.
// forwardlinks: forward link relations (refs)
// backlinks: backlink relations (defs)
func buildFullLinks(condition string) (forwardlinks, backlinks []*Block) {
	forwardlinks, backlinks = []*Block{}, []*Block{}
	defs := buildDefsAndRefs(condition)
	backlinks = append(backlinks, defs...)
	for _, def := range defs {
		for _, ref := range def.Refs {
			forwardlinks = append(forwardlinks, ref)
		}
	}
	return
}

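// buildDefsAndRefs queries def-ref pairs matching [condition] and assembles the def blocks with their Refs and Defs
// relations for the graph.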
func buildDefsAndRefs(condition string) (defBlocks []*Block) {
	defBlockMap := map[string]*Block{}
	refBlockMap := map[string]*Block{}
	defRefs := sql.DefRefs(condition, Conf.Graph.MaxBlocks)

	// Convert sql blocks to blocks
	for _, row := range defRefs {
		for def, ref := range row {
			if nil == ref {
				continue
			}

			refBlock := refBlockMap[ref.ID]
			if nil == refBlock {
				refBlock = fromSQLBlock(ref, "", 0)
				refBlockMap[ref.ID] = refBlock
			}

			// The ref block itself also needs to act as a def block, otherwise it has no node in the graph
			if defBlock := defBlockMap[ref.ID]; nil == defBlock {
				defBlockMap[ref.ID] = refBlock
			}

			if defBlock := defBlockMap[def.ID]; nil == defBlock {
				defBlock = fromSQLBlock(def, "", 0)
				defBlockMap[def.ID] = defBlock
			}
		}
	}

	// Assemble the block.Defs and block.Refs fields
	for _, row := range defRefs {
		for def, ref := range row {
			if nil == ref {
				defBlock := fromSQLBlock(def, "", 0)
				defBlockMap[def.ID] = defBlock
				continue
			}

			refBlock := refBlockMap[ref.ID]
			defBlock := defBlockMap[def.ID]
			if refBlock.ID == defBlock.ID { // self reference
				continue
			}

			refBlock.Defs = append(refBlock.Defs, defBlock)
			defBlock.Refs = append(defBlock.Refs, refBlock)
		}
	}

	for _, def := range defBlockMap {
		defBlocks = append(defBlocks, def)
	}
	return
}