mirror of
https://github.com/toeverything/AFFiNE.git
synced 2024-12-23 20:11:43 +03:00
feat(core): support block links on Bi-Directional Links (#8169)
Closes [AF-1348](https://linear.app/affine-design/issue/AF-1348/修复-bi-directional-links-里面的链接地址) * Links to the current document should be ignored on `Backlinks` * Links to the current document should be ignored on `Outgoing links` https://github.com/user-attachments/assets/dbc43cea-5aca-4c6f-886a-356e3a91c1f1
This commit is contained in:
parent
b7d05d2078
commit
b74dd1c92e
@ -35,9 +35,8 @@ export interface PageReferenceRendererOptions {
|
||||
journalHelper: ReturnType<typeof useJournalHelper>;
|
||||
t: ReturnType<typeof useI18n>;
|
||||
docMode?: DocMode;
|
||||
// linking doc with block or element
|
||||
blockIds?: string[];
|
||||
elementIds?: string[];
|
||||
// Link to block or element
|
||||
linkToNode?: boolean;
|
||||
}
|
||||
// use a function to be rendered in the lit renderer
|
||||
export function pageReferenceRenderer({
|
||||
@ -46,8 +45,7 @@ export function pageReferenceRenderer({
|
||||
journalHelper,
|
||||
t,
|
||||
docMode,
|
||||
blockIds,
|
||||
elementIds,
|
||||
linkToNode = false,
|
||||
}: PageReferenceRendererOptions) {
|
||||
const { isPageJournal, getLocalizedJournalDateString } = journalHelper;
|
||||
const referencedPage = pageMetaHelper.getDocMeta(pageId);
|
||||
@ -62,7 +60,7 @@ export function pageReferenceRenderer({
|
||||
} else {
|
||||
Icon = LinkedPageIcon;
|
||||
}
|
||||
if (blockIds?.length || elementIds?.length) {
|
||||
if (linkToNode) {
|
||||
Icon = BlockLinkIcon;
|
||||
}
|
||||
}
|
||||
@ -89,33 +87,33 @@ export function AffinePageReference({
|
||||
docCollection,
|
||||
wrapper: Wrapper,
|
||||
mode = 'page',
|
||||
params = {},
|
||||
params,
|
||||
}: {
|
||||
pageId: string;
|
||||
docCollection: DocCollection;
|
||||
wrapper?: React.ComponentType<PropsWithChildren>;
|
||||
mode?: DocMode;
|
||||
params?: {
|
||||
mode?: DocMode;
|
||||
blockIds?: string[];
|
||||
elementIds?: string[];
|
||||
};
|
||||
params?: URLSearchParams;
|
||||
}) {
|
||||
const pageMetaHelper = useDocMetaHelper(docCollection);
|
||||
const journalHelper = useJournalHelper(docCollection);
|
||||
const t = useI18n();
|
||||
|
||||
const { mode: linkedWithMode, blockIds, elementIds } = params;
|
||||
let linkWithMode: DocMode | null = null;
|
||||
let linkToNode = false;
|
||||
if (params) {
|
||||
linkWithMode = params.get('mode') as DocMode;
|
||||
linkToNode = params.has('blockIds') || params.has('elementIds');
|
||||
}
|
||||
|
||||
const el = pageReferenceRenderer({
|
||||
docMode: linkedWithMode ?? mode,
|
||||
docMode: linkWithMode ?? mode,
|
||||
pageId,
|
||||
pageMetaHelper,
|
||||
journalHelper,
|
||||
docCollection,
|
||||
t,
|
||||
blockIds,
|
||||
elementIds,
|
||||
linkToNode,
|
||||
});
|
||||
|
||||
const ref = useRef<HTMLAnchorElement>(null);
|
||||
@ -154,20 +152,11 @@ export function AffinePageReference({
|
||||
|
||||
const query = useMemo(() => {
|
||||
// A block/element reference link
|
||||
const search = new URLSearchParams();
|
||||
if (linkedWithMode) {
|
||||
search.set('mode', linkedWithMode);
|
||||
}
|
||||
if (blockIds?.length) {
|
||||
search.set('blockIds', blockIds.join(','));
|
||||
}
|
||||
if (elementIds?.length) {
|
||||
search.set('elementIds', elementIds.join(','));
|
||||
}
|
||||
search.set('refreshKey', refreshKey);
|
||||
|
||||
return search.size > 0 ? `?${search.toString()}` : '';
|
||||
}, [blockIds, elementIds, linkedWithMode, refreshKey]);
|
||||
let str = params?.toString() ?? '';
|
||||
if (str.length) str += '&';
|
||||
str += `refreshKey=${refreshKey}`;
|
||||
return '?' + str;
|
||||
}, [params, refreshKey]);
|
||||
|
||||
return (
|
||||
<WorkbenchLink
|
||||
|
@ -63,10 +63,14 @@ export const BiDirectionalLinkPanel = () => {
|
||||
{t['com.affine.page-properties.outgoing-links']()} ·{' '}
|
||||
{links.length}
|
||||
</div>
|
||||
{links.map(link => (
|
||||
<div key={link.docId} className={styles.link}>
|
||||
{links.map((link, i) => (
|
||||
<div
|
||||
key={`${link.docId}-${link.params?.toString()}-${i}`}
|
||||
className={styles.link}
|
||||
>
|
||||
<AffinePageReference
|
||||
pageId={link.docId}
|
||||
params={link.params}
|
||||
docCollection={workspaceService.workspace.docCollection}
|
||||
/>
|
||||
</div>
|
||||
|
@ -7,6 +7,7 @@ import { useJournalInfoHelper } from '@affine/core/hooks/use-journal';
|
||||
import { EditorService } from '@affine/core/modules/editor';
|
||||
import { EditorSettingService } from '@affine/core/modules/editor-settting';
|
||||
import { PeekViewService } from '@affine/core/modules/peek-view';
|
||||
import { toURLSearchParams } from '@affine/core/utils';
|
||||
import type { DocMode } from '@blocksuite/blocks';
|
||||
import { DocTitle, EdgelessEditor, PageEditor } from '@blocksuite/presets';
|
||||
import type { Doc } from '@blocksuite/store';
|
||||
@ -90,12 +91,14 @@ const usePatchSpecs = (page: Doc, shared: boolean, mode: DocMode) => {
|
||||
const pageId = data.pageId;
|
||||
if (!pageId) return <span />;
|
||||
|
||||
const params = toURLSearchParams(data.params);
|
||||
|
||||
return (
|
||||
<AffinePageReference
|
||||
docCollection={page.collection}
|
||||
pageId={pageId}
|
||||
mode={mode}
|
||||
params={data.params}
|
||||
params={params}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
@ -6,6 +6,7 @@ import type { DocsSearchService } from '../../docs-search';
|
||||
export interface Link {
|
||||
docId: string;
|
||||
title: string;
|
||||
params?: URLSearchParams;
|
||||
}
|
||||
|
||||
export class DocLinks extends Entity {
|
||||
|
@ -36,7 +36,7 @@ export class DocsIndexer extends Entity {
|
||||
/**
|
||||
* increase this number to re-index all docs
|
||||
*/
|
||||
static INDEXER_VERSION = 1;
|
||||
static INDEXER_VERSION = 2;
|
||||
|
||||
private readonly jobQueue: JobQueue<IndexerJobPayload> =
|
||||
new IndexedDBJobQueue<IndexerJobPayload>(
|
||||
|
@ -11,8 +11,13 @@ export const blockIndexSchema = defineSchema({
|
||||
blockId: 'String',
|
||||
content: 'FullText',
|
||||
flavour: 'String',
|
||||
ref: 'String',
|
||||
blob: 'String',
|
||||
// reference doc id
|
||||
// ['xxx','yyy']
|
||||
refDocId: 'String',
|
||||
// reference info
|
||||
// [{"docId":"xxx","mode":"page","blockIds":["gt5Yfq1maYvgNgpi13rIq"]},{"docId":"yyy","mode":"edgeless","blockIds":["k5prpOlDF-9CzfatmO0W7"]}]
|
||||
ref: 'String',
|
||||
});
|
||||
|
||||
export type BlockIndexSchema = typeof blockIndexSchema;
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { toURLSearchParams } from '@affine/core/utils';
|
||||
import type { WorkspaceService } from '@toeverything/infra';
|
||||
import {
|
||||
fromPromise,
|
||||
@ -5,6 +6,7 @@ import {
|
||||
Service,
|
||||
WorkspaceEngineBeforeStart,
|
||||
} from '@toeverything/infra';
|
||||
import { isEmpty, omit } from 'lodash-es';
|
||||
import { type Observable, switchMap } from 'rxjs';
|
||||
|
||||
import { DocsIndexer } from '../entities/docs-indexer';
|
||||
@ -250,36 +252,64 @@ export class DocsSearchService extends Service {
|
||||
field: 'docId',
|
||||
match: docId,
|
||||
},
|
||||
// Ignore if it is a link to the current document.
|
||||
{
|
||||
type: 'boolean',
|
||||
occur: 'must_not',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'refDocId',
|
||||
match: docId,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'exists',
|
||||
field: 'ref',
|
||||
field: 'refDocId',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
fields: ['ref'],
|
||||
fields: ['refDocId', 'ref'],
|
||||
pagination: {
|
||||
limit: 100,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const docIds = new Set(
|
||||
nodes.flatMap(node => {
|
||||
const refs = node.fields.ref;
|
||||
return typeof refs === 'string' ? [refs] : refs;
|
||||
})
|
||||
const refs: {
|
||||
docId: string;
|
||||
mode?: string;
|
||||
blockIds?: string[];
|
||||
elementIds?: string[];
|
||||
}[] = nodes.flatMap(node => {
|
||||
const { ref } = node.fields;
|
||||
return typeof ref === 'string'
|
||||
? [JSON.parse(ref)]
|
||||
: ref.map(item => JSON.parse(item));
|
||||
});
|
||||
|
||||
const docData = await this.indexer.docIndex.getAll(
|
||||
Array.from(new Set(refs.map(ref => ref.docId)))
|
||||
);
|
||||
|
||||
const docData = await this.indexer.docIndex.getAll(Array.from(docIds));
|
||||
return refs
|
||||
.flatMap(ref => {
|
||||
const doc = docData.find(doc => doc.id === ref.docId);
|
||||
if (!doc) return null;
|
||||
|
||||
return docData.map(doc => {
|
||||
const title = doc.get('title');
|
||||
return {
|
||||
docId: doc.id,
|
||||
title: title ? (typeof title === 'string' ? title : title[0]) : '',
|
||||
};
|
||||
});
|
||||
const titles = doc.get('title');
|
||||
const title = (Array.isArray(titles) ? titles[0] : titles) ?? '';
|
||||
const params = omit(ref, ['docId']);
|
||||
|
||||
return {
|
||||
title,
|
||||
docId: doc.id,
|
||||
params: isEmpty(params) ? undefined : toURLSearchParams(params),
|
||||
};
|
||||
})
|
||||
.filter(ref => !!ref);
|
||||
}
|
||||
|
||||
watchRefsFrom(docId: string) {
|
||||
@ -294,14 +324,26 @@ export class DocsSearchService extends Service {
|
||||
field: 'docId',
|
||||
match: docId,
|
||||
},
|
||||
// Ignore if it is a link to the current document.
|
||||
{
|
||||
type: 'boolean',
|
||||
occur: 'must_not',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'refDocId',
|
||||
match: docId,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'exists',
|
||||
field: 'ref',
|
||||
field: 'refDocId',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
fields: ['ref'],
|
||||
fields: ['refDocId', 'ref'],
|
||||
pagination: {
|
||||
limit: 100,
|
||||
},
|
||||
@ -310,28 +352,41 @@ export class DocsSearchService extends Service {
|
||||
.pipe(
|
||||
switchMap(({ nodes }) => {
|
||||
return fromPromise(async () => {
|
||||
const docIds = new Set(
|
||||
nodes.flatMap(node => {
|
||||
const refs = node.fields.ref;
|
||||
return typeof refs === 'string' ? [refs] : refs;
|
||||
})
|
||||
);
|
||||
const refs: {
|
||||
docId: string;
|
||||
mode?: string;
|
||||
blockIds?: string[];
|
||||
elementIds?: string[];
|
||||
}[] = nodes.flatMap(node => {
|
||||
const { ref } = node.fields;
|
||||
return typeof ref === 'string'
|
||||
? [JSON.parse(ref)]
|
||||
: ref.map(item => JSON.parse(item));
|
||||
});
|
||||
|
||||
const docData = await this.indexer.docIndex.getAll(
|
||||
Array.from(docIds)
|
||||
Array.from(new Set(refs.map(ref => ref.docId)))
|
||||
);
|
||||
|
||||
return docData.map(doc => {
|
||||
const title = doc.get('title');
|
||||
return {
|
||||
docId: doc.id,
|
||||
title: title
|
||||
? typeof title === 'string'
|
||||
? title
|
||||
: title[0]
|
||||
: '',
|
||||
};
|
||||
});
|
||||
return refs
|
||||
.flatMap(ref => {
|
||||
const doc = docData.find(doc => doc.id === ref.docId);
|
||||
if (!doc) return null;
|
||||
|
||||
const titles = doc.get('title');
|
||||
const title =
|
||||
(Array.isArray(titles) ? titles[0] : titles) ?? '';
|
||||
const params = omit(ref, ['docId']);
|
||||
|
||||
return {
|
||||
title,
|
||||
docId: doc.id,
|
||||
params: isEmpty(params)
|
||||
? undefined
|
||||
: toURLSearchParams(params),
|
||||
};
|
||||
})
|
||||
.filter(ref => !!ref);
|
||||
});
|
||||
})
|
||||
);
|
||||
@ -346,9 +401,27 @@ export class DocsSearchService extends Service {
|
||||
> {
|
||||
const { buckets } = await this.indexer.blockIndex.aggregate(
|
||||
{
|
||||
type: 'match',
|
||||
field: 'ref',
|
||||
match: docId,
|
||||
type: 'boolean',
|
||||
occur: 'must',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'refDocId',
|
||||
match: docId,
|
||||
},
|
||||
// Ignore if it is a link to the current document.
|
||||
{
|
||||
type: 'boolean',
|
||||
occur: 'must_not',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'docId',
|
||||
match: docId,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
'docId',
|
||||
{
|
||||
@ -384,9 +457,27 @@ export class DocsSearchService extends Service {
|
||||
return this.indexer.blockIndex
|
||||
.aggregate$(
|
||||
{
|
||||
type: 'match',
|
||||
field: 'ref',
|
||||
match: docId,
|
||||
type: 'boolean',
|
||||
occur: 'must',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'refDocId',
|
||||
match: docId,
|
||||
},
|
||||
// Ignore if it is a link to the current document.
|
||||
{
|
||||
type: 'boolean',
|
||||
occur: 'must_not',
|
||||
queries: [
|
||||
{
|
||||
type: 'match',
|
||||
field: 'docId',
|
||||
match: docId,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
'docId',
|
||||
{
|
||||
|
@ -3,7 +3,7 @@ import type { DeltaInsert } from '@blocksuite/inline';
|
||||
import { Document } from '@toeverything/infra';
|
||||
import { toHexString } from 'lib0/buffer.js';
|
||||
import { digest as lib0Digest } from 'lib0/hash/sha256';
|
||||
import { difference } from 'lodash-es';
|
||||
import { difference, uniq } from 'lodash-es';
|
||||
import {
|
||||
applyUpdate,
|
||||
Array as YArray,
|
||||
@ -130,18 +130,25 @@ async function crawlingDocData({
|
||||
}
|
||||
|
||||
const deltas: DeltaInsert<AffineTextAttributes>[] = text.toDelta();
|
||||
const ref = deltas
|
||||
.map(delta => {
|
||||
if (
|
||||
delta.attributes &&
|
||||
delta.attributes.reference &&
|
||||
delta.attributes.reference.pageId
|
||||
) {
|
||||
return delta.attributes.reference.pageId;
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((link): link is string => !!link);
|
||||
const refs = uniq(
|
||||
deltas
|
||||
.flatMap(delta => {
|
||||
if (
|
||||
delta.attributes &&
|
||||
delta.attributes.reference &&
|
||||
delta.attributes.reference.pageId
|
||||
) {
|
||||
const { pageId: refDocId, params = {} } =
|
||||
delta.attributes.reference;
|
||||
return {
|
||||
refDocId,
|
||||
ref: JSON.stringify({ docId: refDocId, ...params }),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter(ref => !!ref)
|
||||
);
|
||||
|
||||
blockDocuments.push(
|
||||
Document.from<BlockIndexSchema>(`${docId}:${blockId}`, {
|
||||
@ -149,7 +156,14 @@ async function crawlingDocData({
|
||||
flavour,
|
||||
blockId,
|
||||
content: text.toString(),
|
||||
ref,
|
||||
...refs.reduce<{ refDocId: string[]; ref: string[] }>(
|
||||
(prev, curr) => {
|
||||
prev.refDocId.push(curr.refDocId);
|
||||
prev.ref.push(curr.ref);
|
||||
return prev;
|
||||
},
|
||||
{ refDocId: [], ref: [] }
|
||||
),
|
||||
})
|
||||
);
|
||||
}
|
||||
@ -160,12 +174,15 @@ async function crawlingDocData({
|
||||
) {
|
||||
const pageId = block.get('prop:pageId');
|
||||
if (typeof pageId === 'string') {
|
||||
// reference info
|
||||
const params = block.get('prop:params') ?? {};
|
||||
blockDocuments.push(
|
||||
Document.from<BlockIndexSchema>(`${docId}:${blockId}`, {
|
||||
docId,
|
||||
flavour,
|
||||
blockId,
|
||||
ref: pageId,
|
||||
refDocId: [pageId],
|
||||
ref: [JSON.stringify({ docId: pageId, ...params })],
|
||||
})
|
||||
);
|
||||
}
|
||||
|
@ -55,25 +55,28 @@ export class DocsQuickSearchSession
|
||||
if (!query) {
|
||||
out = of([] as QuickSearchItem<'docs', DocsPayload>[]);
|
||||
} else {
|
||||
const resolvedDoc = resolveLinkToDoc(query);
|
||||
const resolvedDocId = resolvedDoc?.docId;
|
||||
const resolvedBlockId = resolvedDoc?.blockIds?.[0];
|
||||
|
||||
out = this.docsSearchService.search$(query).pipe(
|
||||
map(docs => {
|
||||
const resolvedDoc = resolveLinkToDoc(query);
|
||||
if (
|
||||
resolvedDoc &&
|
||||
!docs.some(doc => doc.docId === resolvedDoc.docId)
|
||||
resolvedDocId &&
|
||||
!docs.some(doc => doc.docId === resolvedDocId)
|
||||
) {
|
||||
return [
|
||||
{
|
||||
docId: resolvedDoc.docId,
|
||||
docId: resolvedDocId,
|
||||
score: 100,
|
||||
blockId: resolvedDoc.blockIds?.[0],
|
||||
blockId: resolvedBlockId,
|
||||
blockContent: '',
|
||||
},
|
||||
...docs,
|
||||
];
|
||||
} else {
|
||||
return docs;
|
||||
}
|
||||
|
||||
return docs;
|
||||
}),
|
||||
map(docs =>
|
||||
docs
|
||||
|
@ -31,3 +31,13 @@ export function buildAppUrl(path: string, opts: AppUrlOptions = {}) {
|
||||
return new URL(path, webBase).toString();
|
||||
}
|
||||
}
|
||||
|
||||
export function toURLSearchParams(params?: Record<string, string | string[]>) {
|
||||
if (!params) return;
|
||||
return new URLSearchParams(
|
||||
Object.entries(params).map(([k, v]) => [
|
||||
k,
|
||||
Array.isArray(v) ? v.join(',') : v,
|
||||
])
|
||||
);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user