Merge branch 'pathfinder'

This commit is contained in:
DarkSky 2022-12-17 23:21:34 +08:00
commit ebe029adad
1737 changed files with 7696 additions and 118417 deletions

View File

@ -1,372 +0,0 @@
{
"projectName": "AFFiNE",
"projectOwner": "toeverything",
"repoType": "github",
"repoHost": "https://github.com",
"files": [
"README.md"
],
"imageSize": 50,
"commit": false,
"commitConvention": "angular",
"contributorsPerLine": 7,
"badgeTemplate": "\n[all-contributors-badge]: https://img.shields.io/badge/all_contributors-<%= contributors.length %>-orange.svg?style=flat-square\n",
"contributors": [
{
"login": "doodlewind",
"name": "Yifeng Wang",
"avatar_url": "https://avatars.githubusercontent.com/u/7312949?v=4",
"profile": "https://github.com/doodlewind",
"contributions": [
"code",
"doc"
]
},
{
"login": "darkskygit",
"name": "DarkSky",
"avatar_url": "https://avatars.githubusercontent.com/u/25152247?v=4",
"profile": "https://darksky.eu.org/",
"contributions": [
"code",
"doc"
]
},
{
"login": "tzhangchi",
"name": "Chi Zhang",
"avatar_url": "https://avatars.githubusercontent.com/u/5910926?v=4",
"profile": "http://zhangchi.page/",
"contributions": [
"code",
"doc"
]
},
{
"login": "alt1o",
"name": "wang xinglong",
"avatar_url": "https://avatars.githubusercontent.com/u/21084335?v=4",
"profile": "https://github.com/alt1o",
"contributions": [
"code",
"doc"
]
},
{
"login": "DiamondThree",
"name": "DiamondThree",
"avatar_url": "https://avatars.githubusercontent.com/u/24630517?v=4",
"profile": "https://github.com/DiamondThree",
"contributions": [
"code",
"doc"
]
},
{
"login": "lawvs",
"name": "Whitewater",
"avatar_url": "https://avatars.githubusercontent.com/u/18554747?v=4",
"profile": "https://lawvs.github.io/profile/",
"contributions": [
"code",
"doc"
]
},
{
"login": "zuoxiaodong0815",
"name": "xiaodong zuo",
"avatar_url": "https://avatars.githubusercontent.com/u/53252747?v=4",
"profile": "https://github.com/zuoxiaodong0815",
"contributions": [
"code",
"doc"
]
},
{
"login": "SaikaSakura",
"name": "MingLIang Wang",
"avatar_url": "https://avatars.githubusercontent.com/u/11530942?v=4",
"profile": "https://github.com/SaikaSakura",
"contributions": [
"code",
"doc"
]
},
{
"login": "QiShaoXuan",
"name": "Qi",
"avatar_url": "https://avatars.githubusercontent.com/u/22772830?v=4",
"profile": "https://github.com/QiShaoXuan",
"contributions": [
"code",
"doc"
]
},
{
"login": "mitsuhatu",
"name": "mitsuhatu",
"avatar_url": "https://avatars.githubusercontent.com/u/110213079?v=4",
"profile": "https://github.com/mitsuhatu",
"contributions": [
"code",
"doc"
]
},
{
"login": "Austaras",
"name": "Austaras",
"avatar_url": "https://avatars.githubusercontent.com/u/15013925?v=4",
"profile": "https://shockwave.me/",
"contributions": [
"code",
"doc"
]
},
{
"login": "uptonking",
"name": "Jin Yao",
"avatar_url": "https://avatars.githubusercontent.com/u/11391549?v=4",
"profile": "https://github.com/uptonking",
"contributions": [
"code",
"doc"
]
},
{
"login": "HeJiachen-PM",
"name": "HeJiachen-PM",
"avatar_url": "https://avatars.githubusercontent.com/u/79301703?v=4",
"profile": "https://github.com/HeJiachen-PM",
"contributions": [
"doc"
]
},
{
"login": "Yipei-Operation",
"name": "Yipei Wei",
"avatar_url": "https://avatars.githubusercontent.com/u/79373028?v=4",
"profile": "https://github.com/Yipei-Operation",
"contributions": [
"doc"
]
},
{
"login": "fanjing22",
"name": "fanjing22",
"avatar_url": "https://avatars.githubusercontent.com/u/109729699?v=4",
"profile": "https://github.com/fanjing22",
"contributions": [
"design"
]
},
{
"login": "Svaney-ssman",
"name": "Svaney",
"avatar_url": "https://avatars.githubusercontent.com/u/110808979?v=4",
"profile": "https://github.com/Svaney-ssman",
"contributions": [
"design"
]
},
{
"login": "xell",
"name": "Guozhu Liu",
"avatar_url": "https://avatars.githubusercontent.com/u/132558?v=4",
"profile": "http://xell.me/",
"contributions": [
"design"
]
},
{
"login": "fyZheng07",
"name": "fyZheng07",
"avatar_url": "https://avatars.githubusercontent.com/u/63830919?v=4",
"profile": "https://github.com/fyZheng07",
"contributions": [
"eventOrganizing",
"userTesting"
]
},
{
"login": "CJSS",
"name": "CJSS",
"avatar_url": "https://avatars.githubusercontent.com/u/4605025?v=4",
"profile": "https://github.com/CJSS",
"contributions": [
"doc"
]
},
{
"login": "JimmFly",
"name": "JimmFly",
"avatar_url": "https://avatars.githubusercontent.com/u/102217452?v=4",
"profile": "https://github.com/JimmFly",
"contributions": [
"code"
]
},
{
"login": "CarlosZoft",
"name": "Carlos Rafael ",
"avatar_url": "https://avatars.githubusercontent.com/u/62192072?v=4",
"profile": "https://github.com/CarlosZoft",
"contributions": [
"code"
]
},
{
"login": "caleboleary",
"name": "Caleb OLeary",
"avatar_url": "https://avatars.githubusercontent.com/u/12816579?v=4",
"profile": "https://github.com/caleboleary",
"contributions": [
"code"
]
},
{
"login": "westongraham",
"name": "Weston Graham",
"avatar_url": "https://avatars.githubusercontent.com/u/89493023?v=4",
"profile": "https://github.com/westongraham",
"contributions": [
"doc"
]
},
{
"login": "pointmax",
"name": "pointmax",
"avatar_url": "https://avatars.githubusercontent.com/u/49361135?v=4",
"profile": "https://github.com/pointmax",
"contributions": [
"doc"
]
},
{
"login": "liby",
"name": "Bryan Lee",
"avatar_url": "https://avatars.githubusercontent.com/u/38807139?v=4",
"profile": "https://liby.github.io/notes",
"contributions": [
"code"
]
},
{
"login": "chenmoonmo",
"name": "Simon Li",
"avatar_url": "https://avatars.githubusercontent.com/u/36295999?v=4",
"profile": "https://github.com/chenmoonmo",
"contributions": [
"code"
]
},
{
"login": "githbq",
"name": "Bob Hu",
"avatar_url": "https://avatars.githubusercontent.com/u/10009709?v=4",
"profile": "https://github.com/githbq",
"contributions": [
"code"
]
},
{
"login": "lucky-chap",
"name": "Quavo",
"avatar_url": "https://avatars.githubusercontent.com/u/67266933?v=4",
"profile": "https://quavo.vercel.app/",
"contributions": [
"doc"
]
},
{
"login": "LuciNyan",
"name": "子瞻 Luci",
"avatar_url": "https://avatars.githubusercontent.com/u/22126563?v=4",
"profile": "https://github.com/LuciNyan",
"contributions": [
"code"
]
},
{
"login": "m1911star",
"name": "Horus",
"avatar_url": "https://avatars.githubusercontent.com/u/4948120?v=4",
"profile": "http://blog.ipili.me/",
"contributions": [
"code",
"platform"
]
},
{
"login": "fanshyiis",
"name": "Super.x",
"avatar_url": "https://avatars.githubusercontent.com/u/15103283?v=4",
"profile": "https://segmentfault.com/u/qzuser_584786517d31a",
"contributions": [
"code"
]
},
{
"login": "wangyu-1999",
"name": "Wang Yu",
"avatar_url": "https://avatars.githubusercontent.com/u/80874770?v=4",
"profile": "https://wangyu-1999.github.io/",
"contributions": [
"code"
]
},
{
"login": "felixonmars",
"name": "Felix Yan",
"avatar_url": "https://avatars.githubusercontent.com/u/1006477?v=4",
"profile": "https://felixc.at/",
"contributions": [
"code"
]
},
{
"login": "lynettelopez",
"name": "Lynette Lopez",
"avatar_url": "https://avatars.githubusercontent.com/u/32908859?v=4",
"profile": "https://github.com/lynettelopez",
"contributions": [
"code"
]
},
{
"login": "Zheaoli",
"name": "Manjusaka",
"avatar_url": "https://avatars.githubusercontent.com/u/7054676?v=4",
"profile": "http://manjusaka.itscoder.com/",
"contributions": [
"code"
]
},
{
"login": "sudongyuer",
"name": "Frozen FIsh",
"avatar_url": "https://avatars.githubusercontent.com/u/76603360?v=4",
"profile": "https://juejin.cn/user/2867982785579102/posts?sort=popular",
"contributions": [
"code"
]
},
{
"login": "MuhammedFaraz",
"name": "Mohammed Faraz",
"avatar_url": "https://avatars.githubusercontent.com/u/92734739?v=4",
"profile": "https://github.com/MuhammedFaraz",
"contributions": [
"doc"
]
},
{
"login": "Pranav4399",
"name": "Pranav Sriram ",
"avatar_url": "https://avatars.githubusercontent.com/u/28348429?v=4",
"profile": "https://pranavsriram.dev/",
"contributions": [
"code"
]
}
]
}

View File

@ -1,66 +0,0 @@
// cz-customizable configuration: defines the commit-type menu, scopes and
// prompt messages used by the interactive `git cz` commit wizard.
module.exports = {
// commit types offered in the first prompt; `value` is written into the
// commit message, `name` is the label shown in the picker
types: [
{ value: 'feat', name: 'feat 🍄: add new features' },
{ value: 'fix', name: 'fix 🐛: fix bug' },
{ value: 'docs', name: 'docs 📄: modify documentation, comments' },
{
value: 'refactor',
name: 'refactor 🎸: code refactoring, pay attention to distinguish it from features and fixes',
},
{ value: 'perf', name: 'perf ⚡: improve performance' },
{ value: 'test', name: 'test 👀: add a test' },
{
value: 'tool',
name: 'tool 🚗: Development tool changes (build, scaffolding tools, etc.)',
},
{
value: 'style',
name: 'style ✂: Modifications to code formatting do not affect logic',
},
{ value: 'revert', name: 'revert 🌝: version rollback' },
{
value: 'editor',
name: 'editor 🔧: editor configuration modification',
},
{ value: 'update', name: 'update ⬆: third-party library upgrade' },
],
// scopes offered in the second prompt (free-form custom scopes are also
// allowed via allowCustomScopes below)
scopes: [
{ name: 'selection' },
{ name: 'edgeless' },
{ name: 'point' },
{ name: 'group' },
{ name: 'page' },
{ name: 'component' },
{ name: 'config' },
{ name: 'others' },
],
// it needs to match the value for field type. Eg.: 'fix'
/*
scopeOverrides: {
fix: [
{name: 'merge'},
{name: 'style'}
]
},
*/
// override the prompt messages; defaults are as follows
messages: {
type: 'Choose a type of your submission:',
scope: 'Choose a scope (optional):',
// used if allowCustomScopes is true
customScope: 'Denote the SCOPE of this change:',
subject: 'Brief description:\n',
body: 'Detailed description, use "|" newline (optional):\n',
breaking: 'Incompatibility specification (optional):\n',
footer: 'Associate closed issues, for example: #31, #34 (optional):\n',
confirmCommit: 'Are you sure to commit?',
},
allowCustomScopes: true,
// only these commit types may carry a BREAKING CHANGE section
allowBreakingChanges: ['Added', 'Repair'],
// limit subject length
subjectLimit: 100,
};

View File

@ -1,13 +0,0 @@
# Editor configuration, see http://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
[*.md]
max_line_length = off
trim_trailing_whitespace = false

5
.env
View File

@ -1,5 +0,0 @@
# use for download icon from figma
FIGMA_TOKEN
NODE_ENV
AFFINE_FEATURE_FLAG_TOKEN

14
.eslintrc.js Normal file
View File

@ -0,0 +1,14 @@
// Minimal ESLint config: delegate formatting to Prettier and surface
// formatting differences as lint warnings.
// https://eslint.org/docs/latest/user-guide/configuring
// "off" or 0 - turn the rule off
// "warn" or 1 - turn the rule on as a warning (doesn't affect exit code)
// "error" or 2 - turn the rule on as an error (exit code will be 1)
/** @type { import('eslint').Linter.Config } */
module.exports = {
extends: ['plugin:prettier/recommended'],
rules: {
'prettier/prettier': 'warn',
},
// flag `eslint-disable` comments that no longer suppress anything
reportUnusedDisableDirectives: true,
};

View File

@ -1,295 +0,0 @@
{
"root": true,
"ignorePatterns": ["**/*"],
"plugins": ["@nrwl/nx", "react", "filename-rules", "import", "prettier"],
"parserOptions": {
"project": ["./tsconfig.base.json"]
},
"overrides": [
{
"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
"rules": {
"prettier/prettier": "warn",
"@nrwl/nx/enforce-module-boundaries": [
"error",
{
"enforceBuildableLibDependency": true,
"allow": [],
"depConstraints": [
{
"sourceTag": "library:utils",
"onlyDependOnLibsWithTags": ["library:utils"]
},
{
"sourceTag": "datasource:jwt",
"onlyDependOnLibsWithTags": [
"library:utils",
"datasource:remote-kv",
"datasource:jwt-rpc"
]
},
{
"sourceTag": "datasource:db-services",
"onlyDependOnLibsWithTags": [
"library:utils",
"datasource:jwt"
]
},
{
"sourceTag": "datasource:hooks",
"onlyDependOnLibsWithTags": [
"library:utils",
"datasource:jwt",
"datasource:db-services"
]
},
{
"sourceTag": "datasource:http",
"onlyDependOnLibsWithTags": ["library:utils"]
},
{
"sourceTag": "datasource:state",
"onlyDependOnLibsWithTags": [
"library:utils",
"library:feature-flags",
"datasource:http",
"datasource:jwt"
]
},
{
"sourceTag": "components:common",
"onlyDependOnLibsWithTags": [
"library:utils",
"components:icons",
"components:ui",
"library:feature-flags"
]
},
{
"sourceTag": "components:editor-core",
"onlyDependOnLibsWithTags": [
"library:utils",
"library:feature-flags",
"datasource:db-services",
"datasource:state",
"datasource:commands",
"datasource:jwt",
"components:ui",
"components:common",
"components:icons"
]
},
{
"sourceTag": "components:editor-blocks",
"onlyDependOnLibsWithTags": [
"library:utils",
"library:feature-flags",
"components:common",
"components:editor-core",
"framework:editor",
"datasource:db-services",
"components:ui",
"components:icons"
]
},
{
"sourceTag": "components:editor-plugins",
"onlyDependOnLibsWithTags": [
"library:utils",
"components:common",
"components:editor-core",
"framework:editor",
"components:editor-blocks",
"datasource:db-services",
"components:ui",
"components:icons",
"library:feature-flags",
"datasource:i18n"
]
},
{
"sourceTag": "components:ui",
"onlyDependOnLibsWithTags": [
"components:icons",
"library:utils"
]
},
{
"sourceTag": "framework:editor",
"onlyDependOnLibsWithTags": [
"components:editor-core"
]
},
{
"sourceTag": "*",
"onlyDependOnLibsWithTags": ["*"]
}
],
"allowCircularSelfDependency": false
}
],
"filename-rules/match": [
"warn",
{
".tsx": "PascalCase",
".ts": "kebab-case",
".json": "kebab-case",
"": "kebab-case"
}
],
"no-restricted-imports": [
"error",
{
"patterns": [
{
"group": ["lodash"],
"message": "Forbid direct import of lodash, use @toeverything/utils"
},
{
"group": ["lodash-es"],
"message": "Forbid direct import of lodash-es, use @toeverything/utils"
},
{
"group": ["@mui/material", "@mui/material/*"],
"message": "Forbid direct import of @mui/material, use @toeverything/components/ui"
}
]
}
],
"@typescript-eslint/naming-convention": [
"warn",
{
"selector": [
"property",
"parameterProperty",
"accessor",
"enumMember"
],
"format": ["strictCamelCase"]
},
{
"selector": ["property", "accessor"],
"modifiers": ["private"],
"format": ["strictCamelCase"],
"leadingUnderscore": "require"
},
{
"selector": ["method"],
"modifiers": ["public"],
"format": ["strictCamelCase"]
},
// Private methods
{
"selector": ["method"],
"modifiers": ["private"],
"format": ["strictCamelCase"],
"leadingUnderscore": "require"
},
{
"selector": ["method"],
"modifiers": ["protected"],
"format": ["strictCamelCase"],
"leadingUnderscore": "require"
},
// Top Level Methods
// const func: Function
// allow PascalCase for react components
{
"selector": ["variable"],
"modifiers": ["global"],
"types": ["function"],
"format": ["strictCamelCase", "StrictPascalCase"],
"leadingUnderscore": "allow"
},
// function something() { }
{
"selector": ["function"],
"modifiers": ["global"],
"format": ["strictCamelCase"],
"leadingUnderscore": "require"
},
// export const func: Function
{
"selector": ["variable"],
"modifiers": ["exported"],
"types": ["function"],
"format": ["strictCamelCase", "StrictPascalCase"],
"leadingUnderscore": "forbid"
},
// export function something() { }
{
"selector": ["function"],
"modifiers": ["exported"],
"format": ["strictCamelCase", "StrictPascalCase"],
"leadingUnderscore": "forbid"
},
// Top Level Variables
{
"selector": ["variable"],
"modifiers": ["global", "const"],
"types": ["boolean", "string", "number"],
"format": ["UPPER_CASE"]
},
{
"selector": ["variable"],
"modifiers": ["global"],
"format": ["strictCamelCase"],
"leadingUnderscore": "require"
},
{
"selector": ["variable"],
"modifiers": ["exported"],
"format": ["strictCamelCase"]
},
// types, enums
{
"selector": "typeLike",
"format": ["PascalCase"]
},
{
"selector": "variableLike",
"format": ["strictCamelCase"]
}
],
"react/self-closing-comp": "warn",
"no-restricted-syntax": [
"warn",
{
"selector": ":matches(PropertyDefinition)[accessibility!='private'][accessibility!='protected'][key.name!='constructor']",
"message": "Use private instead, please implement getter or setXxx for external read and write requirements"
}
]
}
},
{
"files": ["index.tsx", "index.*.tsx"],
"rules": {
"filename-rules/match": "off"
}
},
{
"files": ["*.ts", "*.tsx"],
"extends": ["plugin:@nrwl/nx/typescript"],
"rules": {
"prefer-const": "warn",
"no-console": ["warn", { "allow": ["warn", "error"] }],
"@typescript-eslint/ban-ts-comment": "warn",
"@typescript-eslint/no-empty-interface": "warn",
"@typescript-eslint/no-empty-function": "warn",
// https://github.com/nrwl/nx/issues/10445
"react/jsx-key": "error",
"import/no-default-export": "warn",
"import/no-duplicates": "warn",
"max-lines": [
"warn",
{ "max": 300, "skipComments": true, "skipBlankLines": true }
]
}
},
{
"files": ["*.js", "*.jsx"],
"extends": ["plugin:@nrwl/nx/javascript"],
"rules": {}
}
]
}

47
.github/CODEOWNERS vendored
View File

@ -1,47 +0,0 @@
# About code owners
# You can use a CODEOWNERS file to define individuals or teams that are responsible for code in a repository.
# See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
# =================
# configs
# =================
.* @darkskygit
babel.config.json @darkskygit
nx.json @darkskygit
tsconfig.base.json @darkskygit
**/project.json @darkskygit
**/tsconfig.json @darkskygit
**/tsconfig.*.json @darkskygit
**/.babelrc @darkskygit
**/babel.config.js @darkskygit
**/.* @darkskygit
# =================
# components
# =================
# editor-core
libs/components/editor-core @lawvs
# editor-blocks
# group block
libs/components/editor-blocks/src/blocks/group @lawvs
# todo block
libs/components/editor-blocks/src/blocks/todo @lawvs
libs/framework/virgo @SaikaSakura
# =================
# datasource
# =================
# feature flags
libs/datasource/feature-flags @lawvs
# jwt
libs/datasource/jwt @darkskygit

View File

@ -1,7 +1,7 @@
name: 🐛 Bug report name: 🐛 Bug report (Alpha)
description: Report a reproducible bug or regression description: "Report a reproducible bug or regression for https://pathfinder.affine.pro"
title: "[bug]: " title: "[bug]: "
labels: ["bug"] labels: ["bug", "alpha"]
body: body:
- type: markdown - type: markdown
attributes: attributes:
@ -43,4 +43,3 @@ body:
attributes: attributes:
label: Additional context label: Additional context
placeholder: Add any other context about the problem here. placeholder: Add any other context about the problem here.

View File

@ -0,0 +1,45 @@
name: 🐛 Bug report (Pre-Alpha)
description: "Report a reproducible bug or regression for https://livedemo.affine.pro"
title: "[bug]: "
labels: ["bug", "pre-alpha"]
body:
- type: markdown
attributes:
value: Thanks for taking the time to fill out this bug report!
- type: input
id: description
attributes:
label: Describe the bug
placeholder: A clear and concise description of what the bug is.
- type: textarea
id: reproduce
attributes:
label: To Reproduce
placeholder: "Steps to reproduce the behavior\n1. Go to '...'\n2. Click on '....'\n3. Scroll down to '....'\n4. See error"
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
placeholder: If applicable, add screenshots to help explain your problem.
- type: textarea
id: expected
attributes:
label: Expected behavior
placeholder: A clear and concise description of what you expected to happen.
- type: input
id: platform
attributes:
label: Platform
placeholder: e.g. MacOS, Windows10...
- type: input
id: browser
attributes:
label: Browser
placeholder: e.g. Chrome, Safari
- type: textarea
id: additional
attributes:
label: Additional context
placeholder: Add any other context about the problem here.

View File

@ -1,28 +0,0 @@
:3000 {
root /* ./dist
file_server {
precompressed br
}
encode {
zstd
gzip 9
}
@notStatic {
not path /*.css
not path /*.js
not path /*.png
not path /*.jpg
not path /*.svg
not path /*.ttf
not path /*.eot
not path /*.woff
not path /*.woff2
}
handle @notStatic {
try_files {path} /index.html
}
}

View File

@ -1,39 +0,0 @@
:80 {
reverse_proxy /api/* keck:3001
@websockets {
path /collaboration/*
}
reverse_proxy @websockets keck:3000
reverse_proxy /* lisa:3001 {
header_up Host lisa:3001
}
}
http://lisa:3001 {
root /* ./dist
file_server {
precompressed br
}
encode {
zstd
gzip 9
}
@notStatic {
not path /*.css
not path /*.js
not path /*.png
not path /*.jpg
not path /*.svg
not path /*.ttf
not path /*.eot
not path /*.woff
not path /*.woff2
}
handle @notStatic {
try_files {path} /index.html
}
}

View File

@ -1,28 +0,0 @@
:80 {
root /* ./dist
file_server {
precompressed br
}
encode {
zstd
gzip 9
}
@notStatic {
not path /*.css
not path /*.js
not path /*.png
not path /*.jpg
not path /*.svg
not path /*.ttf
not path /*.eot
not path /*.woff
not path /*.woff2
}
handle @notStatic {
try_files {path} /index.html
}
}

5
.github/deployment/Dockerfile vendored Normal file
View File

@ -0,0 +1,5 @@
FROM nginx:alpine
COPY ./packages/app/out /usr/share/nginx/html
COPY ./.github/deployment/nginx.conf /etc/nginx/conf.d/default.conf

View File

@ -1,23 +0,0 @@
FROM node:16-alpine as builder
ARG AFFINE_EMBED_HEADER
WORKDIR /app
RUN apk add git && npm i -g pnpm@7
COPY . .
# RUN apk add g++ make python3 git libpng-dev
RUN --mount=type=cache,target=/app/node_modules,rw,sharing=private pnpm i --frozen-lockfile --store=node_modules/.pnpm-store --filter "!ligo-virgo-e2e" --filter "!keck" --filter "!venus" && pnpm run build:local --skip-nx-cache
FROM node:16-alpine as relocate
WORKDIR /app
COPY --from=builder /app/dist/apps/ligo-virgo ./dist
COPY --from=builder /app/.github/deployment/Caddyfile-affine ./Caddyfile
RUN rm ./dist/*.txt
# =============
# AFFiNE image
# =============
FROM caddy:2.4.6-alpine as AFFiNE
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 3000
CMD ["caddy", "run"]

View File

@ -1,22 +0,0 @@
FROM node:16-alpine as builder
WORKDIR /app
RUN apk add git && npm i -g pnpm@7
COPY . .
# RUN apk add g++ make python3 git libpng-dev
RUN --mount=type=cache,target=/app/node_modules,rw,sharing=private pnpm i --frozen-lockfile --store=node_modules/.pnpm-store --filter "!ligo-virgo-e2e" --filter "!keck" --filter "!venus" && pnpm run build:local-keck --skip-nx-cache
FROM node:16-alpine as relocate
WORKDIR /app
COPY --from=builder /app/dist/apps/ligo-virgo ./dist
COPY --from=builder /app/.github/deployment/Caddyfile-affine ./Caddyfile
RUN rm ./dist/*.txt
# =============
# AFFiNE image
# =============
FROM caddy:2.4.6-alpine as AFFiNE
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 3000
CMD ["caddy", "run"]

View File

@ -1,13 +0,0 @@
ARG BASE_IMAGE=localhost:5000/toeverything/relocate:latest
FROM ${BASE_IMAGE} as relocate
# =============
# AFFiNE image
# =============
FROM caddy:2.4.6-alpine as AFFiNE
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 3000
CMD ["caddy", "run"]

View File

@ -1,33 +0,0 @@
FROM node:16-alpine as builder
WORKDIR /app
RUN apk add g++ make python3 git libpng-dev && npm i -g pnpm@7
COPY . .
RUN --mount=type=cache,target=/app/node_modules,rw,sharing=private pnpm i --frozen-lockfile --store=node_modules/.pnpm-store && pnpm run build:keck
FROM node:16-alpine as node_modules
WORKDIR /app
COPY --from=builder /app/dist/apps/keck .
COPY --from=builder /app/apps/keck/package.prod.json ./package.json
RUN npm i
# =============
# keck image
# =============
FROM node:16-alpine as keck
WORKDIR /app
COPY --from=node_modules /app .
ENV FIREBASE_ACCOUNT=
ENV FIREBASE_CERT=
ENV FIREBASE_PROJECT=
ENV HOST=0.0.0.0
ENV PORT=3000
# coding
ENV TZ 'Asia/Shanghai'
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
EXPOSE 3000
CMD ["node", "main.js"]

View File

@ -1,22 +0,0 @@
FROM node:16-alpine as builder
WORKDIR /app
RUN apk add git && npm i -g pnpm@7
COPY . .
# RUN apk add g++ make python3 git libpng-dev
RUN --mount=type=cache,target=/app/node_modules,rw,sharing=private pnpm i --frozen-lockfile --store=node_modules/.pnpm-store --filter "!ligo-virgo-e2e" --filter "!keck" --filter "!venus" && pnpm run build
FROM node:16-alpine as relocate
WORKDIR /app
COPY --from=builder /app/dist/apps/ligo-virgo ./dist
COPY --from=builder /app/.github/deployment/Caddyfile-lisa ./Caddyfile
RUN rm ./dist/*.txt
# =============
# lisa image
# =============
FROM caddy:2.4.6-alpine as lisa
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 3000
CMD ["caddy", "run"]

View File

@ -1,21 +0,0 @@
FROM node:16-alpine as builder
WORKDIR /app
RUN apk add g++ make python3 git libpng-dev && npm i -g pnpm@7
COPY . .
RUN --mount=type=cache,target=/app/node_modules,rw,sharing=private pnpm i --frozen-lockfile --store=node_modules/.pnpm-store && pnpm run build:venus
FROM node:16-alpine as relocate
WORKDIR /app
COPY --from=builder /app/dist/apps/venus ./dist
COPY --from=builder /app/.github/deployment/Caddyfile-venus ./Caddyfile
RUN rm ./dist/*.txt
# =============
# venus image
# =============
FROM caddy:2.4.6-alpine as venus
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 80
CMD ["caddy", "run"]

29
.github/deployment/nginx.conf vendored Normal file
View File

@ -0,0 +1,29 @@
server {
listen 80;
listen [::]:80;
server_name localhost;
#access_log /var/log/nginx/host.access.log main;
root /usr/share/nginx/html;
index index.html index.htm;
location /_next/static {
# 1 day (86400 seconds)
add_header Cache-Control "max-age=86400";
}
location / {
try_files $uri $uri.html /$uri /index.html;
add_header Cache-Control "no-cache";
}
#error_page 404 /404.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
}

View File

@ -1,2 +0,0 @@
NX_LOCAL=true
NX_E2E=true

View File

@ -1 +0,0 @@
NX_LOCAL=true

View File

@ -1,2 +0,0 @@
NX_LOCAL=true
NX_KECK=true

View File

@ -1,82 +0,0 @@
name: Build AFFiNE-Local-Keck
on:
workflow_dispatch:
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
AFFINE_IMAGE_NAME: AFFiNE
IMAGE_TAG_LATEST: nightly-keck
LOCAL_CACHE: localhost:5000/toeverything/relocate:latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
services:
registry:
image: registry:2
ports:
- 5000:5000
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
driver-opts: network=host
- name: Extract metadata (tags, labels) for Docker (AFFiNE-Local)
id: meta_affine
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.AFFINE_IMAGE_NAME }}
tags: ${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile-affine-keck
tags: ${{ env.LOCAL_CACHE }}
target: AFFiNE
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64,linux/arm64/v8
file: ./.github/deployment/Dockerfile-affine-multiplatform
push: ${{ github.ref == 'refs/heads/master' && true || false }}
tags: ${{ steps.meta_affine.outputs.tags }}
labels: ${{ steps.meta_affine.outputs.labels }}
target: AFFiNE
build-args: |
BASE_IMAGE=${{ env.LOCAL_CACHE }}

View File

@ -1,85 +0,0 @@
name: Build AFFiNE-Local
on:
push:
branches: [master]
# pull_request:
# branches: [master]
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
AFFINE_IMAGE_NAME: AFFiNE
IMAGE_TAG_LATEST: nightly-latest
LOCAL_CACHE: localhost:5000/toeverything/relocate:latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
services:
registry:
image: registry:2
ports:
- 5000:5000
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
driver-opts: network=host
- name: Extract metadata (tags, labels) for Docker (AFFiNE-Local)
id: meta_affine
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.AFFINE_IMAGE_NAME }}
tags: ${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile-affine
tags: ${{ env.LOCAL_CACHE }}
target: AFFiNE
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64,linux/arm64/v8
file: ./.github/deployment/Dockerfile-affine-multiplatform
push: ${{ github.ref == 'refs/heads/master' && true || false }}
tags: ${{ steps.meta_affine.outputs.tags }}
labels: ${{ steps.meta_affine.outputs.labels }}
target: AFFiNE
build-args: |
BASE_IMAGE=${{ env.LOCAL_CACHE }}

120
.github/workflows/build-community.yml vendored Normal file
View File

@ -0,0 +1,120 @@
name: Build Pathfinder Community Version
on:
push:
branches: [pathfinder]
pull_request:
branches: [pathfinder]
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
jobs:
build:
name: Lint and Build
runs-on: self-hosted
steps:
- uses: actions/checkout@v2
- uses: pnpm/action-setup@v2
with:
version: 'latest'
- name: Use Node.js
uses: actions/setup-node@v2
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_GITHUB_AUTH_TOKEN }}
with:
node-version: 18.x
cache: 'pnpm'
- name: Restore cache
uses: actions/cache@v3
with:
path: |
.next/cache
# Generate a new cache whenever packages or source files change.
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
# If source files changed but packages didn't, rebuild from a prior cache.
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-
- name: Install dependencies
run: pnpm install
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_GITHUB_AUTH_TOKEN }}
- name: Lint
run: |
pnpm lint --max-warnings=0
# - name: Test
# run: pnpm test
- name: Build
run: pnpm build
- name: Export
run: pnpm export
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
path: ./packages/app/out
push_to_registry:
# See https://docs.github.com/en/actions/publishing-packages/publishing-docker-images
name: Push Docker image to Docker Hub
if: github.ref == 'refs/heads/pathfinder'
runs-on: ubuntu-latest
needs: build
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: 'toeverything/affine-pathfinder-community'
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: artifact
path: packages/app/out/
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

122
.github/workflows/build-livedemo.yml vendored Normal file
View File

@ -0,0 +1,122 @@
name: Build Pathfinder LiveDemo Version
on:
push:
branches: [pathfinder]
pull_request:
branches: [pathfinder]
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
jobs:
build:
name: Lint and Build
runs-on: self-hosted
steps:
- uses: actions/checkout@v2
- uses: pnpm/action-setup@v2
with:
version: 'latest'
- name: Use Node.js
uses: actions/setup-node@v2
with:
node-version: 18.x
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'pnpm'
- run: node scripts/module-resolve/ci.js
- name: Restore cache
uses: actions/cache@v3
with:
path: |
.next/cache
# Generate a new cache whenever packages or source files change.
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
# If source files changed but packages didn't, rebuild from a prior cache.
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-
- name: Install dependencies
run: pnpm install --no-frozen-lockfile
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_GITHUB_AUTH_TOKEN }}
- name: Lint
run: |
pnpm lint --max-warnings=0
# - name: Test
# run: pnpm test
- name: Build
run: pnpm build
- name: Export
run: pnpm export
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
path: ./packages/app/out
push_to_registry:
# See https://docs.github.com/en/actions/publishing-packages/publishing-docker-images
    name: Push Docker image to GitHub Container Registry
if: github.ref == 'refs/heads/pathfinder'
runs-on: ubuntu-latest
needs: build
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: 'toeverything/affine-pathfinder'
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: artifact
path: packages/app/out/
      - name: Log in to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

123
.github/workflows/build-test-version.yml vendored Normal file
View File

@ -0,0 +1,123 @@
name: Build Pathfinder LiveDemo Test Version
on:
workflow_dispatch:
inputs:
tag:
        description: "Custom tag. Setting nightly-latest will publish to the development environment."
required: true
type: string
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
jobs:
build:
name: Lint and Build
runs-on: self-hosted
steps:
- uses: actions/checkout@v2
- uses: pnpm/action-setup@v2
with:
version: 'latest'
- name: Use Node.js
uses: actions/setup-node@v2
with:
node-version: 18.x
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'pnpm'
- run: node scripts/module-resolve/ci.js
- name: Restore cache
uses: actions/cache@v3
with:
path: |
.next/cache
# Generate a new cache whenever packages or source files change.
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
# If source files changed but packages didn't, rebuild from a prior cache.
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/pnpm-lock.yaml') }}-
- name: Install dependencies
run: pnpm install --no-frozen-lockfile
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_GITHUB_AUTH_TOKEN }}
- name: Lint
run: |
pnpm lint --max-warnings=0
# - name: Test
# run: pnpm test
- name: Build
run: pnpm build
- name: Export
run: pnpm export
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
path: ./packages/app/out
push_to_registry:
# See https://docs.github.com/en/actions/publishing-packages/publishing-docker-images
    name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
needs: build
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: 'toeverything/affine-pathfinder-testing'
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: artifact
path: packages/app/out/
      - name: Log in to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ inputs.tag }}
- name: Build and push Docker image
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@ -1,34 +0,0 @@
name: standard check
on:
push:
branches: [ "develop", "master" ]
pull_request:
branches: [ "develop", "master" ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.number || github.ref }}
cancel-in-progress: true
jobs:
main:
name: Nx Cloud - Main Job
uses: ./.github/workflows/nx-cloud-main.yml
secrets: inherit
with:
main-branch-name: develop
# number-of-agents: 2
parallel-commands: |
pnpm exec nx-cloud record -- pnpm exec nx format:check
pnpm e2e:ci ${{ github.ref == 'refs/heads/develop' && '--record' || '' }}
pnpm exec nx affected --target=lint --parallel=2 --exclude=components-common,keck,theme
pnpm exec nx affected --target=build --parallel=2
# parallel-commands-on-agents: |
# pnpm exec nx affected --target=lint --parallel=2 --exclude=components-common,keck,theme
# pnpm exec nx affected --target=build --parallel=2
# agents:
# name: Nx Cloud - Agents
# uses: nrwl/ci/.github/workflows/nx-cloud-agents.yml@v0.6
# with:
# number-of-agents: 2

View File

@ -1,72 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "develop", master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "develop" ]
schedule:
- cron: '27 1 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
    #    and modify them (or add more) to build your code if your project uses a compiled language.
    #    Please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

69
.github/workflows/codeql.yml vendored Normal file
View File

@ -0,0 +1,69 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
on:
push:
branches: [pathfinder]
pull_request:
# The branches below must be a subset of the branches above
branches: [pathfinder]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
      #    and modify them (or add more) to build your code if your project uses a compiled language.
      #    Please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@ -1,69 +0,0 @@
name: Build Keck
on:
workflow_dispatch:
push:
branches: [master]
paths:
- 'apps/keck/**'
- '.github/deployment'
- '.github/workflows/keck.yml'
# pull_request:
# branches: [master]
# paths:
# - 'apps/keck/**'
# - '.github/deployment'
# - '.github/workflows/keck.yml'
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
KECK_IMAGE_NAME: keck
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker (keck)
id: meta_keck
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.KECK_IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (keck)
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
context: .
file: ./.github/deployment/Dockerfile-keck
push: ${{ github.ref == 'refs/heads/field' && true || false }}
tags: ${{ steps.meta_keck.outputs.tags }}
labels: ${{ steps.meta_keck.outputs.labels }}
target: keck

View File

@ -1,89 +0,0 @@
name: Download Languages Resources
on:
schedule:
- cron: "0 0 * * 5" # At 00:00(UTC) on Friday.
workflow_dispatch:
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
# This action need write permission to create pull requests
# See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
permissions:
contents: write
pull-requests: write
jobs:
main:
strategy:
matrix:
node-version: [18]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Use pnpm
uses: pnpm/action-setup@v2
with:
version: 7
- name: Use Node.js ${{ matrix.node-version }}
# https://github.com/actions/setup-node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'pnpm'
- name: Install node modules
run: pnpm install
- name: Sync Languages
working-directory: ./libs/datasource/i18n
run: pnpm run download-resources
env:
TOLGEE_API_KEY: ${{ secrets.TOLGEE_API_KEY }}
- name: Push Branch
id: push
run: |
git add libs/datasource/i18n
# Do not proceed if there are no file differences
COMMIT=$(git rev-parse --verify origin/$TARGET_BRANCH || echo HEAD)
FILES_CHANGED=$(git diff-index --name-only --cached $COMMIT | wc -l)
if [[ "$FILES_CHANGED" = "0" ]]; then
echo "No file changes detected."
echo "::set-output name=skipPR::true"
exit 0
fi
git config user.name 'github-actions[bot]'
git config user.email 'github-actions[bot]@users.noreply.github.com'
git commit --message 'feat(i18n): new translations' --no-verify
git remote set-url origin "https://$GITHUB_ACTOR:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY"
git push --force origin HEAD:$TARGET_BRANCH
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TARGET_BRANCH: bot/new-translations
- name: Get current date
id: date
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
# see https://github.com/repo-sync/pull-request
- name: Create Pull Request
if: steps.push.outputs.skipPR != 'true'
uses: repo-sync/pull-request@v2
with:
source_branch: 'bot/new-translations' # If blank, default: triggered branch
destination_branch: "develop"
pr_title: Update i18n (${{ steps.date.outputs.date }}) # Title of pull request
pr_label: 'data,bot' # Comma-separated list (no spaces)
github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,63 +0,0 @@
name: Languages Sync
on:
push:
branches: [ "develop", "master" ]
paths:
- 'libs/datasource/i18n/**'
- '.github/workflows/languages-sync.yml'
pull_request:
branches: [ "develop", "master" ]
paths:
- 'libs/datasource/i18n/**'
- '.github/workflows/languages-sync.yml'
workflow_dispatch:
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
jobs:
main:
strategy:
matrix:
node-version: [18]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Use pnpm
uses: pnpm/action-setup@v2
with:
version: 7
- name: Use Node.js ${{ matrix.node-version }}
# https://github.com/actions/setup-node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'pnpm'
- name: Install node modules
run: pnpm install
- name: Check Language Key
if: github.ref != 'refs/heads/develop' && github.ref != 'refs/heads/master'
working-directory: ./libs/datasource/i18n
run: pnpm run sync-languages:check
env:
TOLGEE_API_KEY: ${{ secrets.TOLGEE_API_KEY }}
- name: Sync Languages
if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master'
working-directory: ./libs/datasource/i18n
run: pnpm run sync-languages
env:
TOLGEE_API_KEY: ${{ secrets.TOLGEE_API_KEY }}

View File

@ -1,60 +0,0 @@
name: Build Lisa
on:
push:
branches: [master]
# pull_request:
# branches: [master]
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
LISA_IMAGE_NAME: lisa
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker (lisa)
id: meta_lisa
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.LISA_IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (lisa)
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
context: .
file: ./.github/deployment/Dockerfile-lisa
push: ${{ github.ref == 'refs/heads/master' && true || false }}
tags: ${{ steps.meta_lisa.outputs.tags }}
labels: ${{ steps.meta_lisa.outputs.labels }}
target: lisa

View File

@ -1,87 +0,0 @@
name: Build AFFiNE-Livedemo
on:
push:
branches: [master]
# pull_request:
# branches: [master]
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
AFFINE_IMAGE_NAME: LIVEDEMO
IMAGE_TAG_LATEST: nightly-latest
LOCAL_CACHE: localhost:5000/toeverything/relocate:latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
services:
registry:
image: registry:2
ports:
- 5000:5000
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
driver-opts: network=host
- name: Extract metadata (tags, labels) for Docker (AFFiNE-Local)
id: meta_affine
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.AFFINE_IMAGE_NAME }}
tags: ${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
push: true
file: ./.github/deployment/Dockerfile-affine
tags: ${{ env.LOCAL_CACHE }}
target: AFFiNE
build-args: |
AFFINE_EMBED_HEADER=${{ secrets.AFFINE_EMBED_HEADER }}
- name: Build and push Docker image (AFFINE-Local)
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64,linux/arm64/v8
file: ./.github/deployment/Dockerfile-affine-multiplatform
push: ${{ github.ref == 'refs/heads/master' && true || false }}
tags: ${{ steps.meta_affine.outputs.tags }}
labels: ${{ steps.meta_affine.outputs.labels }}
target: AFFiNE
build-args: |
BASE_IMAGE=${{ env.LOCAL_CACHE }}

View File

@ -1,300 +0,0 @@
name: Nx Cloud Main
on:
workflow_call:
secrets:
NX_CLOUD_ACCESS_TOKEN:
required: false
NX_CLOUD_AUTH_TOKEN:
required: false
NX_CYPRESS_KEY:
required: false
inputs:
number-of-agents:
required: false
type: number
environment-variables:
required: false
type: string
init-commands:
required: false
type: string
final-commands:
required: false
type: string
parallel-commands:
required: false
type: string
parallel-commands-on-agents:
required: false
type: string
node-version:
required: false
type: string
yarn-version:
required: false
type: string
npm-version:
required: false
type: string
pnpm-version:
required: false
type: string
install-commands:
required: false
type: string
main-branch-name:
required: false
type: string
default: main
runs-on:
required: false
type: string
default: ubuntu-latest
# We needed this input in order to be able to configure out integration tests for this repo, it is not documented
# so as to not cause confusion/add noise, but technically any consumer of the workflow can use it if they want to.
working-directory:
required: false
type: string
env:
NX_CLOUD_DISTRIBUTED_EXECUTION: true
NX_CLOUD_DISTRIBUTED_EXECUTION_AGENT_COUNT: ${{ inputs.number-of-agents }}
NX_BRANCH: ${{ github.event.number || github.ref_name }}
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
NX_CLOUD_AUTH_TOKEN: ${{ secrets.NX_CLOUD_AUTH_TOKEN }}
CYPRESS_RECORD_KEY: ${{ secrets.NX_CYPRESS_KEY }}
jobs:
main:
runs-on: ${{ inputs.runs-on }}
# The name of the job which will invoke this one is expected to be "Nx Cloud - Main Job", and whatever we call this will be appended
# to that one after a forward slash, so we keep this one intentionally short to produce "Nx Cloud - Main Job / Run" in the Github UI
name: Run
defaults:
run:
working-directory: ${{ inputs.working-directory || github.workspace }}
# Specify shell to help normalize across different operating systems
shell: bash
steps:
- uses: actions/checkout@v2
name: Checkout [Pull Request]
if: ${{ github.event_name == 'pull_request' }}
with:
# By default, PRs will be checked-out based on the Merge Commit, but we want the actual branch HEAD.
ref: ${{ github.event.pull_request.head.sha }}
# We need to fetch all branches and commits so that Nx affected has a base to compare against.
fetch-depth: 0
- uses: actions/checkout@v2
name: Checkout [Default Branch]
if: ${{ github.event_name != 'pull_request' }}
with:
# We need to fetch all branches and commits so that Nx affected has a base to compare against.
fetch-depth: 0
- name: Derive appropriate SHAs for base and head for `nx affected` commands
uses: nrwl/nx-set-shas@v2
with:
main-branch-name: ${{ inputs.main-branch-name }}
- name: Detect package manager
id: package_manager
shell: bash
run: |
echo "::set-output name=name::$([[ -f ./yarn.lock ]] && echo "yarn" || ([[ -f ./pnpm-lock.yaml ]] && echo "pnpm") || echo "npm")"
# Set node/npm/yarn versions using volta, with optional overrides provided by the consumer
- uses: volta-cli/action@v3
with:
node-version: "${{ inputs.node-version }}"
npm-version: "${{ inputs.npm-version }}"
yarn-version: "${{ inputs.yarn-version }}"
# Install pnpm with exact version provided by consumer or fallback to latest
- name: Install PNPM
if: steps.package_manager.outputs.name == 'pnpm'
uses: pnpm/action-setup@v2.2.1
with:
version: ${{ inputs.pnpm-version || 'latest' }}
- name: Print node/npm/yarn versions
id: versions
run: |
node_ver=$( node --version )
yarn_ver=$( yarn --version || true )
pnpm_ver=$( pnpm --version || true )
echo "Node: ${node_ver:1}"
echo "NPM: $( npm --version )"
if [[ $yarn_ver != '' ]]; then echo "Yarn: $yarn_ver"; fi
if [[ $pnpm_ver != '' ]]; then echo "PNPM: $pnpm_ver"; fi
echo "::set-output name=node_version::${node_ver:1}"
- name: Use the node_modules cache if available [npm]
if: steps.package_manager.outputs.name == 'npm'
uses: actions/cache@v2
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-
- name: Use the node_modules cache if available [pnpm]
if: steps.package_manager.outputs.name == 'pnpm'
uses: actions/cache@v2
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-
- name: Get yarn cache directory path
if: steps.package_manager.outputs.name == 'yarn'
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Use the node_modules cache if available [yarn]
if: steps.package_manager.outputs.name == 'yarn'
uses: actions/cache@v2
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-node-${{ steps.versions.outputs.node_version }}-yarn-
- name: Process environment-variables
if: ${{ inputs.environment-variables != '' }}
uses: actions/github-script@v6
env:
ENV_VARS: ${{ inputs.environment-variables }}
with:
script: |
const { appendFileSync } = require('fs');
// trim spaces and escape quotes
const cleanStr = str => str
.trim()
.replaceAll(/`/g, "\`");
// parse variable to correct type
const parseStr = str =>
str === 'true' || str === 'TRUE'
? true
: str === 'false' || str === 'FALSE'
? false
: isNaN(str)
? str
: parseFloat(str);
const varsStr = process.env.ENV_VARS || '';
const vars = varsStr
.split('\n')
.map(variable => variable.trim())
.filter(variable => variable.indexOf('=') > 0)
.map(variable => ({
name: cleanStr(variable.split('=')[0]),
value: cleanStr(variable.slice(variable.indexOf('=') + 1))
}));
for (const v of vars) {
console.log(`Appending environment variable \`${v.name}\` with value \`${v.value}\` to ${process.env.GITHUB_ENV}`);
appendFileSync(process.env.GITHUB_ENV, `${v.name}=${parseStr(v.value)}\n`);
}
- name: Run any configured install-commands
if: ${{ inputs.install-commands != '' }}
run: |
${{ inputs.install-commands }}
- name: Install dependencies
if: ${{ inputs.install-commands == '' }}
run: |
if [ "${{ steps.package_manager.outputs.name == 'yarn' }}" == "true" ]; then
echo "Running yarn install --frozen-lockfile"
yarn install --frozen-lockfile
elif [ "${{ steps.package_manager.outputs.name == 'pnpm' }}" == "true" ]; then
echo "Running pnpm install --frozen-lockfile"
pnpm install --frozen-lockfile
else
echo "Running npm ci"
npm ci
fi
# An unfortunate side-effect of the way reusable workflows work is that by the time they are pulled into the "caller"
# repo, they are effectively completely embedded in that context. This means that we cannot reference any files which
# are local to this repo which defines the workflow, and we therefore need to work around this by embedding the contents
# of the shell utilities for executing commands into the workflow directly.
- name: Create command utils
uses: actions/github-script@v6
with:
script: |
const { writeFileSync } = require('fs');
const runCommandsInParallelScript = `
# Extract the provided commands from the stringified JSON array.
IFS=$'\n' read -d '' -a userCommands < <((jq -c -r '.[]') <<<"$1")
# Invoke the provided commands in parallel and collect their exit codes.
pids=()
for userCommand in "\${userCommands[@]}"; do
eval "$userCommand" & pids+=($!)
done
# If any one of the invoked commands exited with a non-zero exit code, exit the whole thing with code 1.
for pid in \${pids[*]}; do
if ! wait $pid; then
exit 1
fi
done
# All the invoked commands must have exited with code zero.
exit 0
`;
writeFileSync('./.github/workflows/run-commands-in-parallel.sh', runCommandsInParallelScript);
- name: Prepare command utils
# We need to escape the workspace path to be consistent cross-platform: https://github.com/actions/runner/issues/1066
run: chmod +x ${GITHUB_WORKSPACE//\\//}/.github/workflows/run-commands-in-parallel.sh
- name: Initialize the Nx Cloud distributed CI run
run: npx nx-cloud start-ci-run
# The good thing about the multi-line string input for sequential commands is that we can simply forward it on as is to the bash shell and it will behave
# how we want it to in terms of quote escaping, variable assignment etc
- name: Run any configured init-commands sequentially
if: ${{ inputs.init-commands != '' }}
run: |
${{ inputs.init-commands }}
- name: Process parallel commands configuration
uses: actions/github-script@v6
id: parallel_commands_config
env:
PARALLEL_COMMANDS: ${{ inputs.parallel-commands }}
PARALLEL_COMMANDS_ON_AGENTS: ${{ inputs.parallel-commands-on-agents }}
with:
# For the ones configured for main, explicitly set NX_CLOUD_DISTRIBUTED_EXECUTION to false, taking into account commands chained with &&
# within the strings. In order to properly escape single quotes we need to do some manual replacing and escaping so that the commands
# are forwarded onto the run-commands-in-parallel.sh script appropriately.
script: |
// Commands for the main job and for the agents arrive as newline-separated
// multi-line workflow inputs (forwarded via env to avoid injection issues).
const parallelCommandsOnMainStr = process.env.PARALLEL_COMMANDS || '';
const parallelCommandsOnAgentsStr = process.env.PARALLEL_COMMANDS_ON_AGENTS || '';
// Split into individual commands, dropping blank lines; single quotes are
// temporarily encoded as %27 so they survive the shell quoting applied below.
const parallelCommandsOnMain = parallelCommandsOnMainStr
    .split('\n')
    .map(command => command.trim())
    .filter(command => command.length > 0)
    .map(s => s.replace(/'/g, '%27'));
const parallelCommandsOnAgents = parallelCommandsOnAgentsStr
    .split('\n')
    .map(command => command.trim())
    .filter(command => command.length > 0)
    .map(s => s.replace(/'/g, '%27'));
// Main-job commands must not be distributed by Nx Cloud, so prefix every
// &&-chained segment with NX_CLOUD_DISTRIBUTED_EXECUTION=false.
const formattedArrayOfCommands = [
    ...parallelCommandsOnMain.map(s => s
        .split(' && ')
        .map(s => `NX_CLOUD_DISTRIBUTED_EXECUTION=false ${s}`)
        .join(' && ')
    ),
    ...parallelCommandsOnAgents,
];
// JSON-encode the array and turn the %27 placeholders back into
// shell-escaped single quotes ('\'') for the single-quoted argument of the
// run-commands-in-parallel.sh step that consumes this output.
const stringifiedEncodedArrayOfCommands = JSON.stringify(formattedArrayOfCommands)
    .replace(/%27/g, "'\\''");
return stringifiedEncodedArrayOfCommands
result-encoding: string
- name: Run any configured parallel commands on main and agent jobs
# We need to escape the workspace path to be consistent cross-platform: https://github.com/actions/runner/issues/1066
run: ${GITHUB_WORKSPACE//\\//}/.github/workflows/run-commands-in-parallel.sh '${{ steps.parallel_commands_config.outputs.result }}'
# The good thing about the multi-line string input for sequential commands is that we can simply forward it on as is to the bash shell and it will behave
# how we want it to in terms of quote escaping, variable assignment etc
- name: Run any configured final-commands sequentially
if: ${{ inputs.final-commands != '' }}
run: |
${{ inputs.final-commands }}
- name: Stop all running agents for this CI run
# It's important that we always run this step, otherwise in the case of any failures in preceding non-Nx steps, the agents will keep running and waste billable minutes
if: ${{ always() }}
run: npx nx-cloud stop-all-agents

40
.github/workflows/tests.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: E2E & Unit Tests
on:
push:
branches: [pathfinder]
pull_request:
branches: [pathfinder]
jobs:
test:
timeout-minutes: 60
runs-on: self-hosted
steps:
- uses: actions/checkout@v3
- uses: pnpm/action-setup@v2
with:
version: 'latest'
- uses: actions/setup-node@v3
with:
node-version: 16
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'pnpm'
- run: node scripts/module-resolve/ci.js
- name: Install dependencies
run: pnpm install --no-frozen-lockfile
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_GITHUB_AUTH_TOKEN }}
- name: Install Playwright Browsers
run: npx playwright install --with-deps
- name: Run E2E tests
run: pnpm run test:e2e
- name: Run Unit tests
run: pnpm run test:unit

View File

@ -1,67 +0,0 @@
name: Build Venus
on:
push:
branches: [master]
paths:
- 'apps/venus/**'
- '.github/deployment'
- '.github/workflows/venus.yml'
# pull_request:
# branches: [master]
# paths:
# - 'apps/venus/**'
# - '.github/workflows/venus.yml'
# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
NAMESPACE: toeverything
VENUS_IMAGE_NAME: venus
IMAGE_TAG: canary-${{ github.sha }}
IMAGE_TAG_LATEST: nightly-latest
jobs:
ligo-virgo:
runs-on: self-hosted
environment: development
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker (venus)
id: meta_venus
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
images: ${{ env.REGISTRY }}/${{ env.NAMESPACE }}/${{ env.VENUS_IMAGE_NAME }}
tags: |
${{ env.IMAGE_TAG }}
${{ env.IMAGE_TAG_LATEST }}
- name: Build and push Docker image (venus)
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
context: .
file: ./.github/deployment/Dockerfile-venus
push: ${{ github.ref == 'refs/heads/master' && true || false }}
tags: ${{ steps.meta_venus.outputs.tags }}
labels: ${{ steps.meta_venus.outputs.labels }}
target: venus

8
.gitignore vendored
View File

@ -43,3 +43,11 @@ Thumbs.db
*.env.local *.env.local
*.local.env *.local.env
.history .history
.next
out/
module-resolve.js
/test-results/
/playwright-report/
/playwright/.cache/

View File

@ -1,6 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# npx lint-staged
pnpm run format:ci

View File

@ -1,14 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Run the (slow) build check only on the default branch; every other branch
# skips it so feature-branch pushes stay fast.
# Show just the current branch in Git
# See https://stackoverflow.com/questions/1417957/show-just-the-current-branch-in-git/1418022#1418022
current_branch=$(git rev-parse --abbrev-ref HEAD)
default_branch="master"

# Quote both operands: an unquoted `test $a != $b` fails with a syntax error
# (missing operand / word splitting) if either value is empty or contains
# whitespace, e.g. in a detached-HEAD state.
if [ "$current_branch" != "$default_branch" ]; then
    exit 0
fi

pnpm run build:check

3
.npmrc
View File

@ -1,3 +0,0 @@
registry=https://registry.npmjs.org
engine-strict=true
auto-install-peers=true

28
.pnpmfile.cjs Normal file
View File

@ -0,0 +1,28 @@
// .pnpmfile.cjs — pnpm hook file.
// Enables local dependency overrides: if a developer drops a (gitignored)
// `module-resolve.js` in the repo root, its exported `resolve` function is
// used to rewrite each package manifest before pnpm resolves dependencies.
const fs = require('fs');

// Load the optional local override script.
// Returns its `resolve` function, or null when no override file exists
// (or when the script exports nothing usable).
function getCustomize() {
    const customed = fs.existsSync('./module-resolve.js');
    if (!customed) {
        return null;
    }
    const script = require('./module-resolve.js');
    return script && script.resolve;
}

// Resolved once at load time; pnpm invokes the hook many times per install.
const customize = getCustomize();

// pnpm `readPackage` hook: pass each package manifest through the local
// override when one is configured; otherwise return it untouched.
function readPackage(pkg) {
    if (!customize) {
        return pkg;
    }
    const customizedPkg = customize(pkg);
    return customizedPkg;
}

module.exports = {
    hooks: {
        readPackage,
    },
};

View File

@ -1,13 +0,0 @@
# Add files here to ignore them from prettier formatting
/dist
/coverage
pnpm-lock.yaml
# Automatically generated from Figma
libs/components/icons/src/auto-icons
libs/components/common/src/lib/icon
# DevOps
.github/**

View File

@ -1,6 +1,6 @@
{ {
"singleQuote": true, "singleQuote": true,
"trailingComma": "es5", "trailingComma": "es5",
"tabWidth": 4, "tabWidth": 2,
"arrowParens": "avoid" "arrowParens": "avoid"
} }

View File

@ -1,11 +0,0 @@
{
"recommendations": [
"mikestead.dotenv",
"esbenp.prettier-vscode",
"visualstudioexptteam.vscodeintellicode",
"nrwl.angular-console",
"firsttris.vscode-jest-runner",
"dbaeumer.vscode-eslint",
"streetsidesoftware.code-spell-checker"
]
}

50
.vscode/settings.json vendored
View File

@ -1,47 +1,7 @@
{ {
"editor.defaultFormatter": "esbenp.prettier-vscode", "eslint.packageManager": "pnpm",
"editor.formatOnSave": true, "editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSaveMode": "file", "editor.formatOnSave": true,
"editor.codeActionsOnSave": ["source.fixAll", "source.organizeImports"], "editor.formatOnSaveMode": "file",
"prettier.prettierPath": "./node_modules/prettier", "cSpell.words": ["testid"]
"cSpell.words": [
"aboutus",
"AUTOINCREMENT",
"Backlinks",
"blockdb",
"booktitle",
"Buildx",
"Cascader",
"clsx",
"cssmodule",
"datasource",
"fflate",
"fstore",
"groq",
"howpublished",
"immer",
"inbook",
"incollection",
"inproceedings",
"Kanban",
"keyval",
"ligo",
"livedemo",
"lozad",
"mastersthesis",
"nrwl",
"phdthesis",
"pnpm",
"reindex",
"ROOTNODE",
"techreport",
"tldr",
"tldraw",
"tldtaw",
"toeverything",
"Unstyled",
"unversioned",
"uuidv",
"webm"
]
} }

124
README.md
View File

@ -1,18 +1,17 @@
<h1 align="center" style="border-bottom: none"> <div align="center">
<b>
<a href="https://affine.pro">AFFiNE.PRO</a><br> <h1 style="border-bottom: none">
</b> <b><a href="https://affine.pro">AFFiNE.PRO</a></b><br />
The Next-Gen Collaborative Knowledge Base The Next-Gen Collaborative Knowledge Base
<br> <br>
</h1> </h1>
<p align="center"> <p>
Open-source and privacy-first. <br /> AFFiNE is a next-gen knowledge base that brings planning, sorting and creating all together.<br />
A free replacement for Notion & Miro. Privacy first, open-source, customizable and ready to use - a free replacement for Notion & Miro. <br />
</p> </p>
<div align="center"> <div>
<!-- <!--
Make New Badge Pattern badges inline Make New Badge Pattern badges inline
See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066 See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
@ -26,15 +25,13 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
[![affine.pro](https://img.shields.io/static/v1?label=live%20demo&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAhpJREFUWEdjZEACtnl3MxgY/0YzMjAaMzAwcCLLUYH9/T/D/7MM/5mXHp6kPANmHiOI4Zx9Xfg3C+tKBob/zlSwiAgjGPey/vkdvneq5luwA+zy7+yhn+Vwv+89NFHFhREU7IyM/6YT4WyqK/n/nymT0Tb/1mFGBkYbqptOhIH/Gf4fYbTLv/2NBgmOCOvBSr6DHPCfWNW0UEe2A2x1uRlakiXBbtpx6jND+7KXZLmPbAdURokzeJjxwi31rrzH8OX7P5IdQbYDtnUoMXBzMMEt7Fj2imH7qU/0cQBy8MNsPHL5K0P13Of0cQB68MNsJScaSI4CHk4mhq3tSnCf3n36k0FZmh3Mn7L+DcPqgx9ICgWSHeBpxsdQESUGtgRk+eqDH+H8O09/MiR3P6atA1qTJRlsdLnhPgYlPOQQCW96wPDi3R+iHUFSCKAHP8wydEeREg0kOQA9+JOgwR1qL8CQEygC9jWp0UCSA+aVysIT3JqDHxgmr38DtlRCiIVhZZ0CPNhB6QDkEGIA0Q4gZAkuxxFyBNEOQA7ml+/+MIQ1PUAxG1kelAhB6YMYQLQDCPmQUAjhcgxRDiDWcEKOxOYIohyQGyjCEGIvANaPLfhhBiNHA6hmBBXNhABRDgCV/aBQAAFQpYMrn4PUgNTCACiXEMoNRDmAkC8okR8UDhjYRumAN8sHvGMCSkAD2jUDOWDAO6ewbDQQ3XMAy/oxKownQR0AAAAASUVORK5CYII=&color=orange&message=→)](https://livedemo.affine.pro) [![affine.pro](https://img.shields.io/static/v1?label=live%20demo&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAhpJREFUWEdjZEACtnl3MxgY/0YzMjAaMzAwcCLLUYH9/T/D/7MM/5mXHp6kPANmHiOI4Zx9Xfg3C+tKBob/zlSwiAgjGPey/vkdvneq5luwA+zy7+yhn+Vwv+89NFHFhREU7IyM/6YT4WyqK/n/nymT0Tb/1mFGBkYbqptOhIH/Gf4fYbTLv/2NBgmOCOvBSr6DHPCfWNW0UEe2A2x1uRlakiXBbtpx6jND+7KXZLmPbAdURokzeJjxwi31rrzH8OX7P5IdQbYDtnUoMXBzMMEt7Fj2imH7qU/0cQBy8MNsPHL5K0P13Of0cQB68MNsJScaSI4CHk4mhq3tSnCf3n36k0FZmh3Mn7L+DcPqgx9ICgWSHeBpxsdQESUGtgRk+eqDH+H8O09/MiR3P6atA1qTJRlsdLnhPgYlPOQQCW96wPDi3R+iHUFSCKAHP8wydEeREg0kOQA9+JOgwR1qL8CQEygC9jWp0UCSA+aVysIT3JqDHxgmr38DtlRCiIVhZZ0CPNhB6QDkEGIA0Q4gZAkuxxFyBNEOQA7ml+/+MIQ1PUAxG1kelAhB6YMYQLQDCPmQUAjhcgxRDiDWcEKOxOYIohyQGyjCEGIvANaPLfhhBiNHA6hmBBXNhABRDgCV/aBQAAFQpYMrn4PUgNTCACiXEMoNRDmAkC8okR8UDhjYRumAN8sHvGMCSkAD2jUDOWDAO6ewbDQQ3XMAy/oxKownQR0AAAAASUVORK5CYII=&color=orange&message=→)](https://livedemo.affine.pro)
[![stars](https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars)](https://github.com/toeverything/AFFiNE) [![stars](https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars)](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors) [![All Contributors][all-contributors-badge]](#contributors)
<br/>
[![Node](https://img.shields.io/badge/node->=16.0-success)](https://www.typescriptlang.org/) [![Node](https://img.shields.io/badge/node->=16.0-success)](https://www.typescriptlang.org/)
[![React](https://img.shields.io/badge/TypeScript-4.7-3178c6)](https://www.typescriptlang.org/) [![React](https://img.shields.io/badge/TypeScript-4.7-3178c6)](https://www.typescriptlang.org/)
[![React](https://img.shields.io/badge/React-18-61dafb)](https://reactjs.org/) [![React](https://img.shields.io/badge/React-18-61dafb)](https://reactjs.org/)
[![Rust](https://img.shields.io/badge/Rust-1.62-dea584)](https://www.rust-lang.org/) [![Rust](https://img.shields.io/badge/Rust-1.62-dea584)](https://www.rust-lang.org/)
</div> </div>
<p align="center"> <div>
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzib11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a> <a href="http://affine.pro"><img 
src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzib11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a>
&nbsp; &nbsp;
<a href="https://discord.com/invite/yz6tGVsf5p"><img src="https://img.shields.io/badge/-Discord-424549?style=social&logo=discord" height=25></a> <a href="https://discord.com/invite/yz6tGVsf5p"><img src="https://img.shields.io/badge/-Discord-424549?style=social&logo=discord" height=25></a>
@ -44,82 +41,63 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
<a href="https://twitter.com/AffineOfficial"><img src="https://img.shields.io/badge/-Twitter-red?style=social&logo=twitter" height=25></a> <a href="https://twitter.com/AffineOfficial"><img src="https://img.shields.io/badge/-Twitter-red?style=social&logo=twitter" height=25></a>
&nbsp; &nbsp;
<a href="https://medium.com/@affineworkos"><img src="https://img.shields.io/badge/-Medium-red?style=social&logo=medium" height=25></a> <a href="https://medium.com/@affineworkos"><img src="https://img.shields.io/badge/-Medium-red?style=social&logo=medium" height=25></a>
</p> </div>
<br />
<div>
<em>AFFiNE - just like the word affine (əˈɪn | a-fine).</em>
</div>
<br /> <br />
<p align="center"><img width="1920" alt="affine_screen" src="https://user-images.githubusercontent.com/21084335/182552060-972cac0e-6258-4ccb-85bd-3bb466c30ccd.png"><p/> <img width="1920" alt="affine_screen" src="https://user-images.githubusercontent.com/4605025/198174913-d4f90da8-ae2a-4eb6-8424-55c94de432a1.jpeg"></div>
# :star: Support Us and Keep Updated :star: ## Join our community
![952cd7a5-70fe-48ab-b74f-23981d94d2c5](https://user-images.githubusercontent.com/79301703/182365526-df074c64-cee4-45f6-b8e0-b912f17332c6.gif) Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://docs.affine.pro/affine/community-links/official-communities)! Once youre familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://docs.affine.pro/affine/affine-ambassadors/welcome) to help spread AFFiNE to the world.
# Getting Started ## Features
**Privacy focussed** — AFFiNE is built with your privacy in mind and is one of our key concerns. We want you to keep control of your data, allowing you to store it as you like, where you like while still being able to freely edit and view your data on-demand.
**Offline-first** - With your privacy in mind we also decided to go offline-first. This means that AFFiNE can be used offline, whether you want to view or edit, with support for conflict-free merging when you are back online.
**Clean, intuitive design** — With AFFiNE you can concentrate on editing with a clean and modern interface. Which is responsive, so it looks great on tablets too, and mobile support is coming in the future.
**Seamless transitions** — However you want your data displayed, whichever viewing mode you use, AFFiNE supports easy transitions to allow you to quickly and effortlessly view your data in the way you want.
**Markdown support** — When you write in AFFiNE you can use Markdown syntax which helps create an easier editing experience, that can be experienced with just a keyboard. And this allows you to export your data cleanly into Markdown.
<!--
**Collaboration** — Whether you want to collaborate with yourself across multiple devices, or work together with others, support for collaboration and multiplayer is out-of-the-box, which makes it easy for teams to get started with AFFiNE.
-->
**Choice of multiple languages** — Thanks to community contributions AFFiNE offers support for multiple languages. If you don't find your language or would like to suggest some changes we welcome your contributions.
## Getting started
⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️
[![affine.pro](https://img.shields.io/static/v1?label=Try%20it%20Online&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAhpJREFUWEdjZEACtnl3MxgY/0YzMjAaMzAwcCLLUYH9/T/D/7MM/5mXHp6kPANmHiOI4Zx9Xfg3C+tKBob/zlSwiAgjGPey/vkdvneq5luwA+zy7+yhn+Vwv+89NFHFhREU7IyM/6YT4WyqK/n/nymT0Tb/1mFGBkYbqptOhIH/Gf4fYbTLv/2NBgmOCOvBSr6DHPCfWNW0UEe2A2x1uRlakiXBbtpx6jND+7KXZLmPbAdURokzeJjxwi31rrzH8OX7P5IdQbYDtnUoMXBzMMEt7Fj2imH7qU/0cQBy8MNsPHL5K0P13Of0cQB68MNsJScaSI4CHk4mhq3tSnCf3n36k0FZmh3Mn7L+DcPqgx9ICgWSHeBpxsdQESUGtgRk+eqDH+H8O09/MiR3P6atA1qTJRlsdLnhPgYlPOQQCW96wPDi3R+iHUFSCKAHP8wydEeREg0kOQA9+JOgwR1qL8CQEygC9jWp0UCSA+aVysIT3JqDHxgmr38DtlRCiIVhZZ0CPNhB6QDkEGIA0Q4gZAkuxxFyBNEOQA7ml+/+MIQ1PUAxG1kelAhB6YMYQLQDCPmQUAjhcgxRDiDWcEKOxOYIohyQGyjCEGIvANaPLfhhBiNHA6hmBBXNhABRDgCV/aBQAAFQpYMrn4PUgNTCACiXEMoNRDmAkC8okR8UDhjYRumAN8sHvGMCSkAD2jUDOWDAO6ewbDQQ3XMAy/oxKownQR0AAAAASUVORK5CYII=&message=%E2%86%92&style=for-the-badge)](https://affine.pro) No installation or registration required! Head over to our website and try it out now. 
[![affine.pro](https://img.shields.io/static/v1?label=Try%20it%20Online&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAhpJREFUWEdjZEACtnl3MxgY/0YzMjAaMzAwcCLLUYH9/T/D/7MM/5mXHp6kPANmHiOI4Zx9Xfg3C+tKBob/zlSwiAgjGPey/vkdvneq5luwA+zy7+yhn+Vwv+89NFHFhREU7IyM/6YT4WyqK/n/nymT0Tb/1mFGBkYbqptOhIH/Gf4fYbTLv/2NBgmOCOvBSr6DHPCfWNW0UEe2A2x1uRlakiXBbtpx6jND+7KXZLmPbAdURokzeJjxwi31rrzH8OX7P5IdQbYDtnUoMXBzMMEt7Fj2imH7qU/0cQBy8MNsPHL5K0P13Of0cQB68MNsJScaSI4CHk4mhq3tSnCf3n36k0FZmh3Mn7L+DcPqgx9ICgWSHeBpxsdQESUGtgRk+eqDH+H8O09/MiR3P6atA1qTJRlsdLnhPgYlPOQQCW96wPDi3R+iHUFSCKAHP8wydEeREg0kOQA9+JOgwR1qL8CQEygC9jWp0UCSA+aVysIT3JqDHxgmr38DtlRCiIVhZZ0CPNhB6QDkEGIA0Q4gZAkuxxFyBNEOQA7ml+/+MIQ1PUAxG1kelAhB6YMYQLQDCPmQUAjhcgxRDiDWcEKOxOYIohyQGyjCEGIvANaPLfhhBiNHA6hmBBXNhABRDgCV/aBQAAFQpYMrn4PUgNTCACiXEMoNRDmAkC8okR8UDhjYRumAN8sHvGMCSkAD2jUDOWDAO6ewbDQQ3XMAy/oxKownQR0AAAAASUVORK5CYII=&message=%E2%86%92&style=for-the-badge)](https://affine.pro) No installation or registration required! Head over to our website and try it out now.
Want to deploy it yourself? AFFiNE can run just about anywhere. <br />
[AFFiNE Documentation](https://docs.affine.pro/affine/) - More detailed documentation on how to use and develop with AFFiNE [AFFiNE Documentation](https://docs.affine.pro/affine/) - More detailed documentation on how to use and develop with AFFiNE
AFFiNE Communities: [Discord](https://discord.gg/yz6tGVsf5p) | [Telegram](https://t.me/affineworkos) | [Twitter](https://twitter.com/AffineOfficial) [Our official communities](https://docs.affine.pro/affine/community-links/official-communities) - Join our friendly communities for more support and discussions
<br /><br />
⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️
[Install AFFiNE with docker](https://docs.affine.pro/affine/getting-started/install-affine-with-docker) - This guide assumes you already have an environment setup running docker ## Contributing
## Create your story Calling all developers, testers, tech writers and more! Contributions of all types are more than welcome, you can read more in [docs/types-of-contributions.md](docs/types-of-contributions.md). If you are interested in contributing code, read our [docs/CONTRIBUTING.md](docs/CONTRIBUTING.md) and feel free to check out our GitHub issues to get stuck in to show us what youre made of.
There can be more than Notion and Miro. AFFiNE is a next-gen knowledge base that brings planning, sorting and creating all together. Privacy first, open-source, customizable and ready to use, built with web technologies to ensure consistency and accessibility on Mac, Windows and Linux. We want your data always to be yours, without any sacrifice to your accessibility. Your data is always stored local first, with full support for real-time collaboration through peer-to-peer technology. We don't think "privacy-first" is a good excuse for not supporting modern web features. For **bug reports**, **feature requests** and other **suggestions** you can also [create a new issue](https://github.com/toeverything/AFFiNE/issues/new/choose) and choose the most appropriate template for your feedback.
And when it comes to collaboration, these features are not just necessarily for teams - you can take and insert pictures on your phone, edit them from your desktop, and then share them with your collaborators.
### Shape your page For **translation** and **language support** you can visit our docs for the [internationalization guide](https://docs.affine.pro/affine/internationalization/welcome)
![546163d6-4c39-4128-ae7f-55d59bc3b76b](https://user-images.githubusercontent.com/79301703/182365611-b0ba3690-21c0-4d9b-bfbc-0bc15da05aeb.gif) Looking for **others ways to contribute** and wondering where to start? Check out the [AFFiNE Ambassador program](https://docs.affine.pro/affine/affine-ambassadors/welcome), we work closely with passionate members of our community and provide them with a wide-range of support and resources.
### Plan your task If you have questions, join us across various [**social platforms**](https://docs.affine.pro/affine/community-links/official-communities) where our friendly community can help provide the answers.
![41a7b3a4-32f2-4d18-ac6b-57d1e1fda753](https://user-images.githubusercontent.com/79301703/182366553-1f6558a7-f17b-4611-ab95-aea3ec997154.gif) ## Thanks
### Sort your knowledge We would also like to give thanks to open-source projects that make AFFiNE possible:
![c9e1ff46-cec2-411b-b89d-6727a5e6f6c3](https://user-images.githubusercontent.com/79301703/182366602-08e44d28-a031-4097-9904-52fb9b1e9e17.gif)
# Contact Us
Feel free to send us an email: contact@toeverything.info
# The Philosophy of AFFiNE
Timothy Berners-Lee once taught us about the idea of the semantic web, where all the data can be interpreted in any form while the "truth" is kept. This gives our best image of an ideal knowledge base by far, that sorting of information, planning of project and goals as well as creating of knowledge can be all together.
We have witnessed waves of paradigm shift so many times. At first, everything was noted on office-like apps or DSL like LaTeX, then we found todo-list apps and WYSIWYG markdown editors better for writing and planning. Finally, here comes Notion and Miro, who take advantage of the idea of blocks to further liberate our creativity.
It is all perfect... without waste operations and redundant information. And, we insist that privacy first should always be given by default.
That's why we are making AFFiNE. Some of the most important features are:
- Transformable
- Every block can be transformed equally
- e.g. you can create a todo in Markdown in the text view and then later edit it in the kanban view.
- Every document can be turned into a whiteboard
- An always good-to-read, structured docs-form page is the best for your notes, but a boundless doodle surface is better for collaboration and creativity.
- Atomic
- The basic elements of AFFiNE are blocks, not pages.
- Blocks can be directly reused and synced between pages.
- Pages and blocks are searched and organized based on connected graphs, not tree-like paths.
- Dual-link and semantic search are fully supported.
- Collaborative and privacy-first
- Data is always stored locally by default
- CRDTs are applied so that peer-to-peer collaboration is possible.
We appreciate the ideas of Monday, Airtable, and Notion databases. They have inspired us and shaped our product, helping us get it right when it comes to task management. But we also do things differently. We don't like doing things again and again. It's easy to set a todo with Markdown, but then why do you need to repeat and recreate data for a kanban or other databases. This is the power of AFFiNE. With AFFiNE, every block group has infinite views, for you to keep your single source of data, a single source of truth.
We would like to give special thanks to the innovators and pioneers who greatly inspired us:
- Quip & Notion -- that docs can be organized as blocks
- Taskade & Monday -- brilliant multi-dimensional tables
- Height & Linear -- beautiful task management tool
We would also like to give thanks to open-source projects that make affine possible:
- [BlockSuite](https://github.com/toeverything/BlockSuite) - AFFiNE is built with and powered by BlockSuite.
- [Yjs](https://github.com/yjs/yjs) & [Yrs](https://github.com/y-crdt/y-crdt) -- Fundamental support of CRDTs for our implementation on state management and data sync. - [Yjs](https://github.com/yjs/yjs) & [Yrs](https://github.com/y-crdt/y-crdt) -- Fundamental support of CRDTs for our implementation on state management and data sync.
- [React](https://github.com/facebook/react) -- View layer support and web GUI framework. - [React](https://github.com/facebook/react) -- View layer support and web GUI framework.
- [Rust](https://github.com/rust-lang/rust) -- High performance language that extends the ability and availability of our real-time backend, JWST. - [Rust](https://github.com/rust-lang/rust) -- High performance language that extends the ability and availability of our real-time backend, JWST.
@ -137,8 +115,6 @@ We use the following open source projects to help us build a better development
Thanks a lot to the community for providing such powerful and simple libraries, so that we can focus more on the implementation of the product logic, and we hope that in the future our projects will also provide a more easy-to-use knowledge base for everyone. Thanks a lot to the community for providing such powerful and simple libraries, so that we can focus more on the implementation of the product logic, and we hope that in the future our projects will also provide a more easy-to-use knowledge base for everyone.
[Some amazing companies](./docs/jobs/summary.md), including AFFiNE are looking for developers! Are you interested in helping build with AFFiNE and/or its partners? Check out some of the latest [jobs available](./docs/jobs/summary.md).
# Contributors # Contributors
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section --> <!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
@ -168,7 +144,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,
<td align="center"><a href="https://github.com/Svaney-ssman"><img src="https://avatars.githubusercontent.com/u/110808979?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Svaney</b></sub></a><br /><a href="#design-Svaney-ssman" title="Design">🎨</a></td> <td align="center"><a href="https://github.com/Svaney-ssman"><img src="https://avatars.githubusercontent.com/u/110808979?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Svaney</b></sub></a><br /><a href="#design-Svaney-ssman" title="Design">🎨</a></td>
<td align="center"><a href="http://xell.me/"><img src="https://avatars.githubusercontent.com/u/132558?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Guozhu Liu</b></sub></a><br /><a href="#design-xell" title="Design">🎨</a></td> <td align="center"><a href="http://xell.me/"><img src="https://avatars.githubusercontent.com/u/132558?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Guozhu Liu</b></sub></a><br /><a href="#design-xell" title="Design">🎨</a></td>
<td align="center"><a href="https://github.com/fyZheng07"><img src="https://avatars.githubusercontent.com/u/63830919?v=4?s=50" width="50px;" alt=""/><br /><sub><b>fyZheng07</b></sub></a><br /><a href="#eventOrganizing-fyZheng07" title="Event Organizing">📋</a> <a href="#userTesting-fyZheng07" title="User Testing">📓</a></td> <td align="center"><a href="https://github.com/fyZheng07"><img src="https://avatars.githubusercontent.com/u/63830919?v=4?s=50" width="50px;" alt=""/><br /><sub><b>fyZheng07</b></sub></a><br /><a href="#eventOrganizing-fyZheng07" title="Event Organizing">📋</a> <a href="#userTesting-fyZheng07" title="User Testing">📓</a></td>
<td align="center"><a href="https://github.com/CJSS"><img src="https://avatars.githubusercontent.com/u/4605025?v=4?s=50" width="50px;" alt=""/><br /><sub><b>CJSS</b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=CJSS" title="Documentation">📖</a></td> <td align="center"><a href="https://github.com/ShortCipher5"><img src="https://avatars.githubusercontent.com/u/4605025?v=4?s=50" width="50px;" alt=""/><br /><sub><b>ShortCipher5</b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=ShortCipher5" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/JimmFly"><img src="https://avatars.githubusercontent.com/u/102217452?v=4?s=50" width="50px;" alt=""/><br /><sub><b>JimmFly</b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=JimmFly" title="Code">💻</a></td> <td align="center"><a href="https://github.com/JimmFly"><img src="https://avatars.githubusercontent.com/u/102217452?v=4?s=50" width="50px;" alt=""/><br /><sub><b>JimmFly</b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=JimmFly" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/CarlosZoft"><img src="https://avatars.githubusercontent.com/u/62192072?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Carlos Rafael </b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=CarlosZoft" title="Code">💻</a></td> <td align="center"><a href="https://github.com/CarlosZoft"><img src="https://avatars.githubusercontent.com/u/62192072?v=4?s=50" width="50px;" alt=""/><br /><sub><b>Carlos Rafael </b></sub></a><br /><a href="https://github.com/toeverything/AFFiNE/commits?author=CarlosZoft" title="Code">💻</a></td>
</tr> </tr>
@ -202,8 +178,10 @@ Thanks a lot to the community for providing such powerful and simple libraries,
<!-- ALL-CONTRIBUTORS-LIST:END --> <!-- ALL-CONTRIBUTORS-LIST:END -->
# License ## Jobs
AFFiNE is distributed under the terms of MIT license. Some amazing companies including AFFiNE are looking for developers! Are you interested in helping build with AFFiNE and/or its partners? Check out some of the latest [jobs available](./docs/jobs/summary.md).
## License
See [LICENSE](/LICENSE) for details. See [LICENSE](/LICENSE) for details.

View File

View File

@ -1,18 +0,0 @@
{
"extends": ["../../.eslintrc.json"],
"ignorePatterns": ["!**/*"],
"overrides": [
{
"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
"rules": {}
},
{
"files": ["*.ts", "*.tsx"],
"rules": {}
},
{
"files": ["*.js", "*.jsx"],
"rules": {}
}
]
}

View File

@ -1,16 +0,0 @@
/* eslint-disable */
/* eslint-disable */
// Jest configuration for the `keck` app (uses the shared Nx jest preset).
export default {
    displayName: 'keck',
    preset: '../../jest.preset.js',
    globals: {
        // ts-jest compiles tests against the app-specific spec tsconfig.
        'ts-jest': {
            tsconfig: '<rootDir>/tsconfig.spec.json',
        },
    },
    testEnvironment: 'node',
    transform: {
        // Run .ts and .js test sources through ts-jest.
        '^.+\\.[tj]s$': 'ts-jest',
    },
    moduleFileExtensions: ['ts', 'js', 'html'],
    coverageDirectory: '../../coverage/apps/keck',
};

View File

@ -1,22 +0,0 @@
{
"name": "@toeverything/keck",
"version": "0.0.1",
"license": "MIT",
"author": "DarkSky <darksky2048@gmail.com>",
"main": "jest.config.ts",
"dependencies": {
"authing-js-sdk": "^4.23.35",
"firebase-admin": "^11.0.1",
"lib0": "^0.2.52",
"lru-cache": "^7.13.2",
"nanoid": "^4.0.0",
"readable-stream": "^4.1.0",
"ws": "^8.8.1",
"y-protocols": "^1.0.5",
"yjs": "^13.5.41"
},
"devDependencies": {
"@types/readable-stream": "^2.3.13",
"@types/ws": "^8.5.3"
}
}

View File

@ -1,9 +0,0 @@
{
"name": "@toeverything/keck",
"license": "MIT",
"version": "0.0.1",
"dependencies": {
"level": "^8.0.0",
"level-read-stream": "1.1.0"
}
}

View File

@ -1,52 +0,0 @@
{
"$schema": "../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "apps/keck/src",
"projectType": "application",
"targets": {
"build": {
"executor": "@nrwl/node:webpack",
"outputs": ["{options.outputPath}"],
"options": {
"outputPath": "dist/apps/keck",
"main": "apps/keck/src/index.ts",
"tsConfig": "apps/keck/tsconfig.app.json",
"assets": []
},
"configurations": {
"production": {
"optimization": true,
"extractLicenses": true,
"inspect": false,
"fileReplacements": [
{
"replace": "apps/keck/src/environments/environment.ts",
"with": "apps/keck/src/environments/environment.prod.ts"
}
]
}
}
},
"serve": {
"executor": "@nrwl/node:node",
"options": {
"buildTarget": "keck:build"
}
},
"lint": {
"executor": "@nrwl/linter:eslint",
"outputs": ["{options.outputFile}"],
"options": {
"lintFilePatterns": ["apps/keck/**/*.ts"]
}
},
"test": {
"executor": "@nrwl/jest:jest",
"outputs": ["coverage/apps/keck"],
"options": {
"jestConfig": "apps/keck/jest.config.ts",
"passWithNoTests": true
}
}
},
"tags": []
}

View File

@ -1,3 +0,0 @@
// Production build flags; swapped in via the `fileReplacements` build option.
export const environment = {
    production: true,
};

View File

@ -1,3 +0,0 @@
// Development build flags; replaced with environment.prod.ts in production
// builds via the `fileReplacements` build option.
export const environment = {
    production: false,
};

View File

@ -1,115 +0,0 @@
import WebSocket = require('ws');
import http = require('http');
// import authing = require('authing-js-sdk');
import firebaseApp = require('firebase-admin/app');
import firebaseAuth = require('firebase-admin/auth');
import LRUCache = require('lru-cache');
import nanoid = require('nanoid');
import { URL } from 'url';
import { handleConnection } from './utils';
// Initialize the Firebase Admin SDK from environment-provided
// service-account credentials. Skipped in development, where the token
// check is bypassed entirely (see _checkAuth below).
if (process.env.NODE_ENV !== 'development') {
    firebaseApp.initializeApp({
        credential: firebaseApp.cert({
            clientEmail: process.env.FIREBASE_ACCOUNT,
            privateKey: process.env.FIREBASE_CERT,
            projectId: process.env.FIREBASE_PROJECT,
        }),
        projectId: process.env.FIREBASE_PROJECT,
    });
}
// Extract the workspace id from a path shaped like
// `…/collaboration/<workspace>/…`; yields undefined when the marker is absent.
const _getWorkspace = (path: string) => {
    const tail = path.split('/collaboration/')[1];
    if (tail === undefined) {
        return undefined;
    }
    return tail.split('/')[0];
};
// Workspace every authenticated user may join in addition to their own.
const AFFINE_COMMON_WORKSPACE = 'affine2vin277tcmafwq';
/**
 * Authorize an incoming token-exchange request.
 *
 * Resolves the target workspace from the request URL, verifies the Firebase
 * ID token in the `token` header, and invokes `callback` only when the
 * token's uid is allowed on that workspace (its own uid-named workspace or
 * the shared common one). Returns the callback's result, false otherwise.
 * In development the token verification is skipped entirely.
 */
const _checkAuth = async (
    request: http.IncomingMessage,
    response: http.ServerResponse,
    callback: (response: http.OutgoingMessage, workspace: string) => boolean
) => {
    // NOTE(review): request.url is optional in Node's types — presumably
    // always set for server requests, but worth confirming under strict TS.
    const url = new URL(request.url, `http://${request.headers.host}`);
    const workspace = _getWorkspace(url.pathname);
    if (process.env.NODE_ENV === 'development') {
        if (workspace) return callback(response, workspace);
        return false;
    }
    try {
        const decodedToken = await firebaseAuth
            .getAuth()
            .verifyIdToken(request.headers.token as string);
        const allowWorkspace = [AFFINE_COMMON_WORKSPACE, decodedToken.uid];
        if (allowWorkspace.includes(workspace)) {
            return callback(response, workspace);
        }
    } catch (error) {
        console.log(error);
    }
    return false;
};
const HOST = process.env.HOST || 'localhost';
const PORT = process.env.PORT || 3000;
// Short-lived one-time protocol strings handed out after auth; the client
// presents one as its WebSocket sub-protocol on upgrade. Bounded size,
// 5-minute TTL.
const _tokens = new LRUCache<string, string>({
    max: 1024 * 10,
    ttl: 1000 * 60 * 5,
});
// HTTP endpoint: a POST carrying a `token` header exchanges a Firebase ID
// token for a one-time websocket protocol string; any other request gets a
// plain 200 "okay" (usable as a liveness probe).
const _server = http.createServer((request, response) => {
    if (
        request.method === 'POST' &&
        typeof request.headers.token === 'string'
    ) {
        _checkAuth(request, response, (response, workspace) => {
            // Random 16-char protocol, remembered against the workspace.
            const protocol = nanoid.nanoid(16);
            _tokens.set(protocol, workspace);
            response.end(JSON.stringify({ protocol }));
            return true;
        })
            .then(responded => {
                if (!responded) {
                    response.writeHead(401).end();
                }
            })
            .catch(error => {
                console.log(error);
                response.writeHead(401).end();
            });
        return;
    }
    response.writeHead(200, { 'Content-Type': 'text/plain' });
    response.end('okay');
});
const _websocketServer = new WebSocket.Server({ noServer: true });
_websocketServer.on('connection', handleConnection);
// Accept only upgrade requests whose `Sec-WebSocket-Protocol` matches a
// protocol previously issued by the POST handler above; the mapped
// workspace id is forwarded to handleConnection as the doc name.
_server.on('upgrade', (request, socket, head) => {
    // You may check auth of request here..
    // See https://github.com/websockets/ws#client-authentication
    const protocol = request.headers['sec-websocket-protocol'];
    if (typeof protocol === 'string' && _tokens.get(protocol)) {
        _websocketServer.handleUpgrade(request, socket, head, ws => {
            _websocketServer.emit(
                'connection',
                ws,
                request,
                _tokens.get(protocol)
            );
        });
    } else {
        socket.write('HTTP/1.1 401 Unauthorized');
        socket.destroy();
    }
});
_server.listen(PORT, () => {
    console.log(`running at '${HOST}' on port ${PORT}`);
});

View File

@ -1,239 +0,0 @@
import WebSocket = require('ws');
import http = require('http');
import Y = require('yjs');
import lib0 = require('lib0');
import awarenessProtocol = require('y-protocols/awareness');
import syncProtocol = require('y-protocols/sync');
// import { getPersistenceStorage } from './persistence';
const { encoding, decoding, mutex, map } = lib0;
// ws readyState values treated as writable below.
const wsReadyStateConnecting = 0;
const wsReadyStateOpen = 1;
// disable gc when using snapshots!
const gcEnabled = process.env.GC !== 'false' && process.env.GC !== '0';
type Persistence =
    | ((arg0: string, arg1: WSSharedDoc) => Promise<any>)
    | undefined;
// Persistence is currently disabled; re-enable via getPersistenceStorage.
// NOTE(review): typed `(…) | undefined` but initialized with null — the
// `!== null` checks below depend on this; confirm under strict TS.
const persistence: Persistence = null; // getPersistenceStorage('./affine');
// All live shared docs, keyed by doc (workspace) name.
const docs: Map<string, WSSharedDoc> = new Map();
// y-protocols message type tags.
const messageSync = 0;
const messageAwareness = 1;
// const messageAuth = 2
// Broadcast a Yjs document update to every connection attached to `doc`,
// framed as a sync-protocol update message.
const updateHandler = (update: Uint8Array, origin: any, doc: WSSharedDoc) => {
    const enc = encoding.createEncoder();
    encoding.writeVarUint(enc, messageSync);
    syncProtocol.writeUpdate(enc, update);
    const payload = encoding.toUint8Array(enc);
    for (const conn of doc.conns.keys()) {
        send(doc, conn, payload);
    }
};
// Shape of y-protocols awareness change events.
type AwarenessEvent = {
    added: Array<number>;
    updated: Array<number>;
    removed: Array<number>;
};
/**
 * A Y.Doc shared over websockets: tracks its connections and mirrors
 * every awareness change to all of them.
 */
export class WSSharedDoc extends Y.Doc {
    name: string;
    mux: lib0.mutex.mutex;
    conns: Map<any, any>;
    awareness: awarenessProtocol.Awareness;
    /**
     * @param {string} name
     */
    constructor(name: string) {
        super({ gc: gcEnabled });
        this.name = name;
        this.mux = mutex.createMutex();
        /**
         * Maps from conn to set of controlled user ids. Delete all user ids from awareness when this conn is closed
         * @type {Map<Object, Set<number>>}
         */
        this.conns = new Map();
        /**
         * @type {awarenessProtocol.Awareness}
         */
        this.awareness = new awarenessProtocol.Awareness(this);
        this.awareness.setLocalState(null);
        const awarenessChangeHandler = (
            { added, updated, removed }: AwarenessEvent,
            conn: object | null
        ) => {
            const changedClients = added.concat(updated, removed);
            if (conn !== null) {
                // Keep this connection's set of controlled client ids current
                // so closeConn can remove the right awareness states later.
                const connControlledIds: Set<number> = this.conns.get(conn);
                if (connControlledIds !== undefined) {
                    added.forEach(clientId => {
                        connControlledIds.add(clientId);
                    });
                    removed.forEach(clientId => {
                        connControlledIds.delete(clientId);
                    });
                }
            }
            // broadcast awareness update
            const encoder = encoding.createEncoder();
            encoding.writeVarUint(encoder, messageAwareness);
            encoding.writeVarUint8Array(
                encoder,
                awarenessProtocol.encodeAwarenessUpdate(
                    this.awareness,
                    changedClients
                )
            );
            const buff = encoding.toUint8Array(encoder);
            this.conns.forEach((_, c) => {
                send(this, c, buff);
            });
        };
        this.awareness.on('update', awarenessChangeHandler);
        this.on('update', updateHandler);
    }
}
// Return the shared doc registered under `docname`, creating (and
// registering) it on first access. `gc` only applies to a newly created doc.
const getYDoc = (docname: string, gc = true): WSSharedDoc => {
    return map.setIfUndefined(docs, docname, () => {
        const created = new WSSharedDoc(docname);
        created.gc = gc;
        if (persistence !== null) {
            persistence(docname, created);
        }
        docs.set(docname, created);
        return created;
    });
};
/**
 * Decode one incoming websocket message for `doc` and dispatch on its
 * y-protocols type tag: sync messages may produce a direct reply, awareness
 * updates are applied to the doc's Awareness instance (whose own 'update'
 * handler rebroadcasts them).
 */
const messageListener = (conn: any, doc: WSSharedDoc, message: Uint8Array) => {
    try {
        const encoder = encoding.createEncoder();
        const decoder = decoding.createDecoder(message);
        const messageType = decoding.readVarUint(decoder);
        switch (messageType) {
            case messageSync:
                encoding.writeVarUint(encoder, messageSync);
                syncProtocol.readSyncMessage(decoder, encoder, doc, null);
                // Only reply when the sync handler wrote more than the tag.
                if (encoding.length(encoder) > 1) {
                    send(doc, conn, encoding.toUint8Array(encoder));
                }
                break;
            case messageAwareness: {
                awarenessProtocol.applyAwarenessUpdate(
                    doc.awareness,
                    decoding.readVarUint8Array(decoder),
                    conn
                );
                break;
            }
        }
    } catch (err) {
        console.error(err);
        doc.emit('error', [err]);
    }
};
/**
 * Detach `conn` from the doc, drop its controlled awareness states, and
 * close the socket. When the last connection leaves and persistence is
 * enabled, the doc is flushed to storage, destroyed, and evicted from the
 * registry.
 */
const closeConn = (doc: WSSharedDoc, conn: any) => {
    if (doc.conns.has(conn)) {
        const controlledIds: Set<number> = doc.conns.get(conn);
        doc.conns.delete(conn);
        awarenessProtocol.removeAwarenessStates(
            doc.awareness,
            Array.from(controlledIds),
            null
        );
        if (doc.conns.size === 0 && persistence !== null) {
            // if persisted, we store state and destroy ydocument
            persistence(doc.name, doc).then(() => {
                doc.destroy();
            });
            docs.delete(doc.name);
        }
    }
    conn.close();
};
/**
 * Write message `m` to `conn`, dropping the connection on any failure.
 * A socket that is neither CONNECTING nor OPEN is closed and skipped.
 */
const send = (doc: WSSharedDoc, conn: any, m: Uint8Array) => {
    if (
        conn.readyState !== wsReadyStateConnecting &&
        conn.readyState !== wsReadyStateOpen
    ) {
        closeConn(doc, conn);
        // Fix: the original fell through and still called conn.send on the
        // socket it had just closed.
        return;
    }
    try {
        conn.send(m, (err: any) => {
            // ws reports write failures via the callback's error argument.
            err != null && closeConn(doc, conn);
        });
    } catch (e) {
        closeConn(doc, conn);
    }
};
/**
 * Wire up a new websocket for the Yjs doc named `docName`: registers the
 * connection, relays messages, runs a 30-second ping/pong liveness check,
 * and initiates sync step 1 plus the current awareness snapshot.
 */
export const handleConnection = (
    socket: WebSocket.WebSocket,
    request: http.IncomingMessage,
    docName: string
) => {
    const gc = true;
    socket.binaryType = 'arraybuffer';
    // get doc, initialize if it does not exist yet
    const doc = getYDoc(docName, gc);
    doc.conns.set(socket, new Set());
    // listen and reply to events
    socket.on('message', (message: ArrayBuffer) =>
        messageListener(socket, doc, new Uint8Array(message))
    );
    // Check if connection is still alive: ping every 30s and drop the
    // connection when the previous ping went unanswered.
    let pongReceived = true;
    const pingInterval = setInterval(() => {
        if (!pongReceived) {
            if (doc.conns.has(socket)) {
                closeConn(doc, socket);
            }
            clearInterval(pingInterval);
        } else if (doc.conns.has(socket)) {
            pongReceived = false;
            try {
                socket.ping();
            } catch (e) {
                closeConn(doc, socket);
                clearInterval(pingInterval);
            }
        }
    }, 30 * 1000);
    socket.on('close', () => {
        closeConn(doc, socket);
        clearInterval(pingInterval);
    });
    socket.on('pong', () => {
        pongReceived = true;
    });
    // Scope the handshake encoders in a block so the interval handlers above
    // don't keep them alive via closure.
    {
        // send sync step 1
        const encoder = encoding.createEncoder();
        encoding.writeVarUint(encoder, messageSync);
        syncProtocol.writeSyncStep1(encoder, doc);
        send(doc, socket, encoding.toUint8Array(encoder));
        const awarenessStates = doc.awareness.getStates();
        if (awarenessStates.size > 0) {
            const encoder = encoding.createEncoder();
            encoding.writeVarUint(encoder, messageAwareness);
            encoding.writeVarUint8Array(
                encoder,
                awarenessProtocol.encodeAwarenessUpdate(
                    doc.awareness,
                    Array.from(awarenessStates.keys())
                )
            );
            send(doc, socket, encoding.toUint8Array(encoder));
        }
    }
};

View File

@ -1,139 +0,0 @@
import WebSocket = require('ws');
import http = require('http');
import Y = require('yjs');
import lib0 = require('lib0');
import syncProtocol = require('y-protocols/sync');
const { encoding, decoding, map } = lib0;
// ws readyState values treated as writable below.
const wsReadyStateConnecting = 0;
const wsReadyStateOpen = 1;
// disable gc when using snapshots!
const gcEnabled = process.env.GC !== 'false' && process.env.GC !== '0';
// All live shared docs, keyed by doc name.
const docs: Map<string, WSSharedDoc> = new Map();
// y-protocols sync message type tag.
const messageSync = 0;
// Broadcast a Yjs document update to every connection attached to `doc`,
// framed as a sync-protocol update message.
const updateHandler = (update: Uint8Array, origin: any, doc: WSSharedDoc) => {
    const enc = encoding.createEncoder();
    encoding.writeVarUint(enc, messageSync);
    syncProtocol.writeUpdate(enc, update);
    const payload = encoding.toUint8Array(enc);
    for (const conn of doc.conns.keys()) {
        send(doc, conn, payload);
    }
};
/**
 * A Y.Doc shared over websockets (sync-only variant, no awareness):
 * tracks its connections and broadcasts every update to them.
 */
export class WSSharedDoc extends Y.Doc {
    name: string;
    conns: Map<any, any>;
    constructor(name: string) {
        super({ gc: gcEnabled });
        this.name = name;
        // Maps from conn to set of controlled user ids. Delete all user ids from awareness when this conn is closed
        this.conns = new Map();
        this.on('update', updateHandler);
    }
}
// Fetch the shared doc registered under `docname`, creating and registering
// it on first access; `gc` only affects a newly created doc.
const getYDoc = (docname: string, gc = true): WSSharedDoc => {
    return map.setIfUndefined(docs, docname, () => {
        const created = new WSSharedDoc(docname);
        created.gc = gc;
        docs.set(docname, created);
        return created;
    });
};
// Detach `conn` from the doc's connection table and close the socket.
const closeConn = (doc: WSSharedDoc, conn: any) => {
    // Map.delete is a safe no-op when the key is absent, so no has() guard.
    doc.conns.delete(conn);
    conn.close();
};
/**
 * Write message `m` to `conn`, dropping the connection on any failure.
 * A socket that is neither CONNECTING nor OPEN is closed and skipped.
 */
const send = (doc: WSSharedDoc, conn: any, m: Uint8Array) => {
    if (
        conn.readyState !== wsReadyStateConnecting &&
        conn.readyState !== wsReadyStateOpen
    ) {
        closeConn(doc, conn);
        // Fix: the original fell through and still called conn.send on the
        // socket it had just closed.
        return;
    }
    try {
        conn.send(m, (err: any) => {
            // ws reports write failures via the callback's error argument.
            err != null && closeConn(doc, conn);
        });
    } catch (e) {
        closeConn(doc, conn);
    }
};
/**
 * Wire up a websocket for the sync-only doc `docName`: registers the
 * connection, answers sync messages inline, runs a 30-second ping/pong
 * liveness check, and initiates sync step 1.
 */
export const handleConnection = (
    socket: WebSocket.WebSocket,
    request: http.IncomingMessage,
    docName: string
) => {
    const gc = true;
    socket.binaryType = 'arraybuffer';
    // get doc, initialize if it does not exist yet
    const doc = getYDoc(docName, gc);
    doc.conns.set(socket, new Set());
    // listen and reply to events
    socket.on('message', (message: ArrayBuffer) => {
        try {
            const encoder = encoding.createEncoder();
            const decoder = decoding.createDecoder(new Uint8Array(message));
            const messageType = decoding.readVarUint(decoder);
            switch (messageType) {
                case messageSync:
                    encoding.writeVarUint(encoder, messageSync);
                    syncProtocol.readSyncMessage(decoder, encoder, doc, null);
                    // Only reply when more than the type tag was written.
                    if (encoding.length(encoder) > 1) {
                        send(doc, socket, encoding.toUint8Array(encoder));
                    }
                    break;
            }
        } catch (err) {
            console.error(err);
            doc.emit('error', [err]);
        }
    });
    // Check if connection is still alive: ping every 30s and drop the
    // connection when the previous ping went unanswered.
    let pongReceived = true;
    const pingInterval = setInterval(() => {
        if (!pongReceived) {
            if (doc.conns.has(socket)) {
                closeConn(doc, socket);
            }
            clearInterval(pingInterval);
        } else if (doc.conns.has(socket)) {
            pongReceived = false;
            try {
                socket.ping();
            } catch (e) {
                closeConn(doc, socket);
                clearInterval(pingInterval);
            }
        }
    }, 30 * 1000);
    socket.on('close', () => {
        closeConn(doc, socket);
        clearInterval(pingInterval);
    });
    socket.on('pong', () => {
        pongReceived = true;
    });
    // Scope the handshake encoder in a block so the interval handlers above
    // don't keep it alive via closure.
    {
        // send sync step 1
        const encoder = encoding.createEncoder();
        encoding.writeVarUint(encoder, messageSync);
        // NOTE(review): leftover debug logging below — consider removing.
        console.log('sync step 0', encoding.toUint8Array(encoder));
        syncProtocol.writeSyncStep1(encoder, doc);
        send(doc, socket, encoding.toUint8Array(encoder));
        console.log('sync step 1 sent', encoding.toUint8Array(encoder));
    }
};

View File

@ -1,10 +0,0 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "../../dist/out-tsc",
"module": "commonjs",
"types": ["node"]
},
"exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"],
"include": ["**/*.ts"]
}

View File

@ -1,13 +0,0 @@
{
"extends": "../../tsconfig.base.json",
"files": [],
"include": [],
"references": [
{
"path": "./tsconfig.app.json"
},
{
"path": "./tsconfig.spec.json"
}
]
}

View File

@ -1,9 +0,0 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "../../dist/out-tsc",
"module": "commonjs",
"types": ["jest", "node"]
},
"include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts"]
}

View File

@ -1,17 +0,0 @@
{
"extends": ["plugin:cypress/recommended", "../../.eslintrc.json"],
"ignorePatterns": ["!**/*"],
"overrides": [
{
"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
"rules": {}
},
{
"files": ["src/plugins/index.js"],
"rules": {
"@typescript-eslint/no-var-requires": "off",
"no-undef": "off"
}
}
]
}

View File

@ -1,18 +0,0 @@
import { defineConfig } from 'cypress';
module.exports = defineConfig({
projectId: 'r1wrqr',
e2e: {
supportFile: './src/support/index.ts',
specPattern: './src/integration',
setupNodeEvents(on, config) {
// implement node event listeners here
},
},
fileServerFolder: '.',
fixturesFolder: './src/fixtures',
video: false,
// videosFolder: '../../dist/cypress/apps/ligo-virgo-e2e/videos',
screenshotsFolder: '../../dist/cypress/apps/ligo-virgo-e2e/screenshots',
chromeWebSecurity: false,
});

View File

@ -1,11 +0,0 @@
{
"name": "ligo-virgo-e2e",
"version": "1.0.0",
"license": "MIT",
"description": "",
"author": "AFFiNE <developer@affine.pro>",
"dependencies": {},
"devDependencies": {
"cypress": "^10.4.0"
}
}

View File

@ -1,28 +0,0 @@
{
"$schema": "../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "apps/ligo-virgo-e2e/src",
"projectType": "application",
"targets": {
"e2e": {
"executor": "@nrwl/cypress:cypress",
"options": {
"cypressConfig": "apps/ligo-virgo-e2e/cypress.config.ts",
"devServerTarget": "ligo-virgo:serve"
},
"configurations": {
"production": {
"devServerTarget": "ligo-virgo:serve:production"
}
}
},
"lint": {
"executor": "@nrwl/linter:eslint",
"outputs": ["{options.outputFile}"],
"options": {
"lintFilePatterns": ["apps/ligo-virgo-e2e/**/*.{js,ts}"]
}
}
},
"tags": [],
"implicitDependencies": ["ligo-virgo"]
}

View File

@ -1,4 +0,0 @@
{
"name": "Using fixtures to represent data",
"email": "hello@cypress.io"
}

View File

@ -1,14 +0,0 @@
import { getBoard, getTitle } from '../support/app.po';
// Smoke test: the app loads, the default "Get Started" page renders, and
// switching to the Edgeless (board) view shows the same content.
describe('ligo-virgo', () => {
    beforeEach(() => cy.visit('/'));
    it('basic load check', () => {
        getTitle().contains('👋 Get Started with AFFiNE');
        cy.get('.block_container').contains('The Essentials');
        getBoard().click();
        cy.get('.tl-inner-div').contains('The Essentials');
    });
});

View File

@ -1,3 +0,0 @@
/** Page-object helpers for the ligo-virgo e2e specs. */
export function getTitle() {
    return cy.get('span[title]');
}
export function getDoc() {
    return cy.contains('Paper');
}
export function getBoard() {
    return cy.contains('Edgeless');
}

View File

@ -1,33 +0,0 @@
// ***********************************************
// This example commands.js shows you how to
// create various custom commands and overwrite
// existing commands.
//
// For more comprehensive examples of custom
// commands please read more here:
// https://on.cypress.io/custom-commands
// ***********************************************
// eslint-disable-next-line @typescript-eslint/no-namespace
declare namespace Cypress {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
interface Chainable<Subject> {
// login(email: string, password: string): void;
}
}
//
// -- This is a parent command --
// Cypress.Commands.add('login', (email, password) => {
// console.log('Custom command example: Login', email, password);
// });
//
// -- This is a child command --
// Cypress.Commands.add("drag", { prevSubject: 'element'}, (subject, options) => { ... })
//
//
// -- This is a dual command --
// Cypress.Commands.add("dismiss", { prevSubject: 'optional'}, (subject, options) => { ... })
//
//
// -- This will overwrite an existing command --
// Cypress.Commands.overwrite("visit", (originalFn, url, options) => { ... })

View File

@ -1,17 +0,0 @@
// ***********************************************************
// This example support/index.js is processed and
// loaded automatically before your test files.
//
// This is a great place to put global configuration and
// behavior that modifies Cypress.
//
// You can change the location of this file or turn off
// automatically serving support files with the
// 'supportFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/configuration
// ***********************************************************
// Import commands.js using ES2015 syntax:
import './commands';

View File

@ -1,10 +0,0 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"sourceMap": false,
"outDir": "../../dist/out-tsc",
"allowJs": true,
"types": ["cypress", "node"]
},
"include": ["src/**/*.ts", "src/**/*.js"]
}

View File

@ -1,11 +0,0 @@
{
"presets": [
[
"@nrwl/react/babel",
{
"runtime": "automatic"
}
]
],
"plugins": []
}

View File

@ -1,16 +0,0 @@
# This file is used by:
# 1. autoprefixer to adjust CSS to support the below specified browsers
# 2. babel preset-env to adjust included polyfills
#
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries
#
# If you need to support different browsers in production, you may tweak the list below.
last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 9-11 # For IE 9-11 support, remove 'not'.

View File

@ -1,18 +0,0 @@
{
"extends": ["plugin:@nrwl/nx/react", "../../.eslintrc.json"],
"ignorePatterns": ["!**/*"],
"overrides": [
{
"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
"rules": {}
},
{
"files": ["*.ts", "*.tsx"],
"rules": {}
},
{
"files": ["*.js", "*.jsx"],
"rules": {}
}
]
}

View File

@ -1,12 +0,0 @@
// Jest configuration for the ligo-virgo app (babel-jest + Nx React plugin).
module.exports = {
    displayName: 'ligo-virgo',
    preset: '../../jest.preset.js',
    transform: {
        // Transpile ESM-only node_modules for Jest's CJS runtime.
        'node_modules\\/.+\\.js$': 'jest-esm-transformer',
        // Non-JS assets go through the Nx React asset transform.
        '^(?!.*\\.(js|jsx|ts|tsx|css|json)$)': '@nrwl/react/plugins/jest',
        '^.+\\.[tj]sx?$': 'babel-jest',
    },
    moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx'],
    coverageDirectory: '../../coverage/apps/ligo-virgo',
    // Empty so the node_modules transform above actually runs.
    transformIgnorePatterns: [],
};

View File

@ -1,21 +0,0 @@
{
"name": "ligo-virgo",
"version": "1.0.0",
"license": "MIT",
"description": "",
"main": "jest.config.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "AFFiNE <developer@affine.pro>",
"dependencies": {
"@emotion/babel-plugin": "^11.10.2",
"@mui/icons-material": "^5.8.4"
},
"devDependencies": {
"firebase": "^9.9.3",
"mini-css-extract-plugin": "^2.6.1",
"webpack": "^5.74.0"
}
}

View File

@ -1,76 +0,0 @@
{
"sourceRoot": "apps/ligo-virgo/src",
"projectType": "application",
"targets": {
"build": {
"executor": "@nrwl/web:webpack",
"outputs": ["{options.outputPath}"],
"defaultConfiguration": "production",
"options": {
"compiler": "babel",
"outputPath": "dist/apps/ligo-virgo",
"index": "apps/ligo-virgo/src/index.html",
"baseHref": "/",
"main": "apps/ligo-virgo/src/index.tsx",
"polyfills": "apps/ligo-virgo/src/polyfills.ts",
"tsConfig": "apps/ligo-virgo/tsconfig.app.json",
"assets": ["apps/ligo-virgo/src/assets"],
"styles": [],
"scripts": [],
"webpackConfig": "apps/ligo-virgo/webpack.config.js"
},
"configurations": {
"production": {
"fileReplacements": [
{
"replace": "apps/ligo-virgo/src/environments/environment.ts",
"with": "apps/ligo-virgo/src/environments/environment.prod.ts"
}
],
"optimization": true,
"outputHashing": "all",
"sourceMap": false,
"namedChunks": true,
"extractLicenses": false,
"vendorChunk": false,
"generateIndexHtml": false
}
}
},
"serve": {
"executor": "@nrwl/web:dev-server",
"options": {
"buildTarget": "ligo-virgo:build:development",
"hmr": true,
"proxyConfig": "apps/ligo-virgo/proxy.conf.json",
"open": true
},
"configurations": {
"production": {
"buildTarget": "ligo-virgo:build:production",
"hmr": false,
"open": false
}
}
},
"lint": {
"executor": "@nrwl/linter:eslint",
"outputs": ["{options.outputFile}"],
"options": {
"lintFilePatterns": ["apps/ligo-virgo/**/*.{ts,tsx,js,jsx}"]
}
},
"test": {
"executor": "@nrwl/jest:jest",
"outputs": ["coverage/apps/ligo-virgo"],
"options": {
"jestConfig": "apps/ligo-virgo/jest.config.js",
"passWithNoTests": true
}
},
"check": {
"executor": "./tools/executors/tsCheck:tsCheck"
}
},
"tags": ["app:ligo-virgo"]
}

View File

@ -1,8 +0,0 @@
{
"/collaboration": {
"target": "http://127.0.0.1:3000/",
"ws": true,
"changeOrigin": true,
"secure": false
}
}

View File

@ -1,60 +0,0 @@
import { AsyncBlock } from '@toeverything/framework/virgo';
import { isDev } from '@toeverything/utils';
/**
* Ported from https://github.com/vuejs/core/blob/main/packages/runtime-core/src/customFormatter.ts
* See [Custom Object Formatters in Chrome DevTools](https://docs.google.com/document/d/1FTascZXT9cxfetuPRT2eXPQKXui4nWFivUnS_335T3U)
*/
// Narrowing guard: is this value an AsyncBlock instance?
const isAsyncBlock = (x: unknown): x is AsyncBlock => {
    return x instanceof AsyncBlock;
};
/**
 * Register a Chrome DevTools custom formatter that renders AsyncBlock
 * instances as a labelled banner (type + raw properties) in the console.
 * No-op outside development or in non-browser environments.
 */
export function initCustomFormatter() {
    if (!isDev || typeof window === 'undefined') {
        return;
    }
    const bannerStyle = {
        style: 'color: #eee; background: #3F6FDB; margin-right: 5px; padding: 2px; border-radius: 4px',
    };
    const typeStyle = {
        style: 'color: #eee; background: #DB6D56; margin-right: 5px; padding: 2px; border-radius: 4px',
    };
    // custom formatter for Chrome
    // https://www.mattzeunert.com/2016/02/19/custom-chrome-devtools-object-formatters.html
    const formatter = {
        header(obj: unknown, config = { expand: false }) {
            // Only take over collapsed AsyncBlock values; let DevTools render
            // everything else (and the expanded body) natively.
            if (!isAsyncBlock(obj) || config.expand) {
                return null;
            }
            return [
                'div',
                {},
                ['span', bannerStyle, 'AsyncBlock'],
                ['span', typeStyle, obj.type],
                // @ts-expect-error Debug at development environment
                `${JSON.stringify(obj.raw_data.properties)}`,
            ];
        },
        hasBody(obj: unknown) {
            return true;
        },
        body(obj: unknown) {
            return ['object', { object: obj, config: { expand: true } }];
        },
    };
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    if ((window as any).devtoolsFormatters) {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        (window as any).devtoolsFormatters.push(formatter);
    } else {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        (window as any).devtoolsFormatters = [formatter];
    }
}
// Side effect on import: install the formatter immediately.
initCustomFormatter();

View File

@ -1,3 +0,0 @@
// Production build flags; swapped in via the `fileReplacements` build option.
export const environment = {
    production: true,
};

View File

@ -1,6 +0,0 @@
// This file can be replaced during build by using the `fileReplacements` array.
// When building for production, this file is replaced with `environment.prod.ts`.
export const environment = {
production: false,
};

View File

@ -1,16 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<!-- local dev index.html -->
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link rel="icon" href="/favicon.ico" />
<title>AFFiNE - All In One Workos</title>
<script>
window.global = window;
</script>
</head>
<body>
<div id="root"></div>
</body>
</html>

View File

@ -1,24 +0,0 @@
import { createRoot } from 'react-dom/client';
import { BrowserRouter } from 'react-router-dom';
import { ThemeProvider } from '@toeverything/components/ui';
import { FeatureFlagsProvider } from '@toeverything/datasource/feature-flags';
import './custom-formatter';
import { LigoVirgoRoutes } from './pages';
import './styles.css';
// Application bootstrap: mount the router, theme, and feature-flag providers
// into the #root element declared in index.html.
const container = document.getElementById('root');
if (!container) {
    throw new Error('No root container found');
}
const root = createRoot(container);
root.render(
    <BrowserRouter>
        <ThemeProvider>
            <FeatureFlagsProvider>
                <LigoVirgoRoutes />
            </FeatureFlagsProvider>
        </ThemeProvider>
    </BrowserRouter>
);

View File

@ -1,40 +0,0 @@
import { css, Global } from '@emotion/react';
import { LayoutHeader, SettingsSidebar } from '@toeverything/components/layout';
import { styled } from '@toeverything/components/ui';
import { Outlet } from 'react-router-dom';
/**
 * Root layout shell: header plus routed main content, with the settings
 * sidebar docked beside it. The global style forces #root into a
 * full-height horizontal flex row.
 */
export function LigoVirgoRootContainer() {
    return (
        <>
            <Global
                styles={css`
                    #root {
                        display: flex;
                        flex-direction: row;
                        height: 100vh;
                    }
                `}
            />
            <StyledContentContainer>
                <LayoutHeader />
                <StyledMainContainer>
                    <Outlet />
                </StyledMainContainer>
            </StyledContentContainer>
            <SettingsSidebar />
        </>
    );
}
// Scrolling is delegated to the routed page; the shell itself never scrolls.
const StyledMainContainer = styled('div')({
    flex: 'auto',
    display: 'flex',
    overflowY: 'hidden',
});
const StyledContentContainer = styled('div')({
    flex: 'auto',
    display: 'flex',
    flexDirection: 'column',
    overflow: 'hidden',
});

View File

@ -1,49 +0,0 @@
import { Navigate, Route, Routes } from 'react-router-dom';
import { Login } from './account';
import { LigoVirgoRootContainer } from './AppContainer';
import { RoutePrivate } from './RoutePrivate';
import { RoutePublicAutoLogin } from './RoutePublicAutoLogin';
import { PageNotFound } from './status/page-not-found';
import { WorkspaceNotFound } from './status/workspace-not-found';
import { Tools } from './tools';
import { UIPage } from './ui';
import { WorkspaceContainer } from './workspace';
/**
 * Top-level route map. Routes under the first "/" group render inside the
 * app shell (header + sidebar); the second, shell-less group hosts public
 * pages (tools and login).
 */
export function LigoVirgoRoutes() {
    return (
        <Routes>
            <Route path="/" element={<LigoVirgoRootContainer />}>
                <Route path="/error/404" element={<PageNotFound />} />
                <Route
                    path="/error/workspace"
                    element={<WorkspaceNotFound />}
                />
                <Route path="/ui" element={<UIPage />} />
                <Route
                    path="/:workspaceId/*"
                    element={
                        <RoutePrivate>
                            <WorkspaceContainer />
                        </RoutePrivate>
                    }
                />
                <Route path="/" element={<Navigate to="/login" replace />} />
            </Route>
            {/* put public routes here; header and sidebar are disabled here */}
            <Route>
                <Route path="/tools/*" element={<Tools />} />
                <Route
                    path="/login"
                    element={
                        <RoutePublicAutoLogin>
                            <Login />
                        </RoutePublicAutoLogin>
                    }
                />
                <Route path="/" element={<Navigate to="/login" replace />} />
            </Route>
        </Routes>
    );
}

View File

@ -1,37 +0,0 @@
import { Navigate, useLocation } from 'react-router-dom';
import { PageLoading } from '@toeverything/components/account';
import { useUserAndSpaces } from '@toeverything/datasource/state';
export type RoutePrivateProps = {
    children: JSX.Element;
    // Destination for unauthenticated visitors (defaults to /login).
    unauthorizedRedirectTo?: string;
};
/**
 * Route guard for authenticated pages: shows a loading page while the user
 * is being resolved, redirects to `unauthorizedRedirectTo` when there is no
 * user or the path does not belong to the user's own workspace, and
 * otherwise renders `children`.
 */
export function RoutePrivate({
    children,
    unauthorizedRedirectTo = '/login',
}: RoutePrivateProps) {
    const { pathname } = useLocation();
    const { user, loading } = useUserAndSpaces();
    if (user == null && loading) {
        return <PageLoading />;
    }
    // NOTE(review): assumes private paths are always prefixed with the user
    // id (`/<userId>/…`) — confirm against the workspace routing.
    if (!user || !pathname.startsWith(`/${user.id}`)) {
        return (
            <Navigate
                to={unauthorizedRedirectTo}
                state={{ from: pathname }}
                replace={true}
            />
        );
    }
    return children;
}

View File

@ -1,33 +0,0 @@
import { Navigate, useLocation } from 'react-router-dom';
import { PageLoading } from '@toeverything/components/account';
import { useUserAndSpaces } from '@toeverything/datasource/state';
export type RouteUnauthorizedOnlyProps = {
    children: JSX.Element;
};
/**
 * Route guard for logged-out-only pages (e.g. /login): shows a loading page
 * while user state resolves; once a current space is known the visitor is
 * already signed in and gets redirected to that space; otherwise the public
 * page renders.
 */
export function RoutePublicAutoLogin({ children }: RouteUnauthorizedOnlyProps) {
    const { pathname } = useLocation();
    const { user, loading, currentSpaceId } = useUserAndSpaces();
    if (user == null && loading) {
        return <PageLoading />;
    }
    if (currentSpaceId) {
        return (
            <Navigate
                to={`/${currentSpaceId}`}
                state={{ from: pathname }}
                replace={true}
            />
        );
    }
    return children;
}

View File

@ -1,3 +0,0 @@
// Re-export the shared Login page so routes can import it from ./account.
import { Login } from '@toeverything/components/account';
export { Login };

View File

@ -1,3 +0,0 @@
/** Placeholder calendar view for the Agenda section. */
const AgendaCalendar = () => <span>AgendaCalendar</span>;
export default AgendaCalendar;

View File

@ -1,18 +0,0 @@
import { Outlet } from 'react-router-dom';
import style9 from 'style9';
import { MuiBox as Box } from '@toeverything/components/ui';
const styles = style9.create({
    container: {
        display: 'flex',
    },
});
// Layout wrapper for the Agenda sub-routes: a flex box hosting the nested
// route content via <Outlet/>.
export default function AgendaRootContainer() {
    return (
        <Box className={styles('container')}>
            <Outlet />
        </Box>
    );
}

View File

@ -1,3 +0,0 @@
/** Placeholder home view for the Agenda section. */
const AgendaHome = () => <span>AgendaHome</span>;
export default AgendaHome;

View File

@ -1,20 +0,0 @@
import { Route, Routes } from 'react-router-dom';
import Calendar from './calendar';
import Container from './container';
import Home from './home';
import Tasks from './tasks';
import Today from './today';
// Route table for the Agenda feature: every sub-page renders inside the
// shared Container layout.
export default function AgendaContainer() {
    return (
        <Routes>
            <Route path="/" element={<Container />}>
                <Route path="/calendar" element={<Calendar />} />
                <Route path="/tasks" element={<Tasks />} />
                <Route path="/today" element={<Today />} />
                <Route path="/" element={<Home />} />
            </Route>
        </Routes>
    );
}

View File

@ -1,3 +0,0 @@
/** Placeholder tasks view for the Agenda section. */
const AgendaTasks = () => <span>AgendaTasks</span>;
export default AgendaTasks;

View File

@ -1,3 +0,0 @@
/** Placeholder "today" view for the Agenda section. */
const AgendaToday = () => <span>AgendaToday</span>;
export default AgendaToday;

View File

@ -1 +0,0 @@
export { LigoVirgoRoutes } from './AppRoutes';

View File

@ -1,7 +0,0 @@
import { Error } from '@toeverything/components/account';
export function PageNotFound() {
return <Error clearOnClick={true} />;
}
export default PageNotFound;

View File

@ -1,13 +0,0 @@
import { Error } from '@toeverything/components/account';
// Shown when no workspace is accessible to the visitor; offers re-login.
export function WorkspaceNotFound() {
    return (
        <Error
            subTitle="No workspace is found, please contact the admin"
            action1Text="Login or Register"
            clearOnClick={true}
        />
    );
}
export default WorkspaceNotFound;

Some files were not shown because too many files have changed in this diff Show More