Enable build cache (#3446)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev 2023-06-21 12:33:00 +07:00 committed by GitHub
parent 2eab0fc71f
commit ef8dfdc006
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 51 additions and 24 deletions

View File

@@ -269,12 +269,21 @@ jobs:
       #   run: |
       #     cd ./tests
       #     ./update-snapshot-ci.sh ./tests/db_dump
+      - name: "Store docker logs"
+        run: |
+          cd ./tests/sanity
+          mkdir logs
+          docker logs $(docker ps | grep transactor | cut -f 1 -d ' ') > logs/transactor.log
+          docker logs $(docker ps | grep account | cut -f 1 -d ' ') > logs/account.log
+          docker logs $(docker ps | grep front | cut -f 1 -d ' ') > logs/front.log
       - name: Upload test results
         if: always()
         uses: actions/upload-artifact@v3
         with:
           name: playwright-results
-          path: ./tests/sanity/playwright-report/
+          path: |
+            ./tests/sanity/playwright-report/
+            ./tests/sanity/logs
       # - name: Upload DB snapshot
       #   if: always()
       #   uses: actions/upload-artifact@v3
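Note that the added log-collection step only runs when every earlier step succeeded, which is exactly when the logs matter least. A common variant (a sketch only, not part of this commit) guards the step with if: always() and continue-on-error: true, and tolerates containers that never started:

      # Sketch only: capture container logs even after a failed test step.
      - name: "Store docker logs"
        if: always()
        continue-on-error: true
        run: |
          cd ./tests/sanity
          mkdir -p logs
          for svc in transactor account front; do
            id=$(docker ps -a --filter "name=$svc" --format '{{.ID}}' | head -n 1)
            [ -n "$id" ] && docker logs "$id" > "logs/$svc.log" 2>&1 || true
          done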

View File

@@ -10,7 +10,7 @@
    *
    * See https://rushjs.io/pages/maintainer/build_cache/ for details about this experimental feature.
    */
-  "buildCacheEnabled": false,
+  "buildCacheEnabled": true,
   /**
    * (Required) Choose where project build outputs will be cached.
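Flipping buildCacheEnabled only turns the feature on; the same file must also name a cache provider. A minimal local-disk setup, sketched from the Rush documentation rather than copied from this repository, looks like:

{
  "$schema": "https://developer.microsoft.com/json-schemas/rush/v5/build-cache.schema.json",
  // Store cache tarballs on the local machine (typically under common/temp/build-cache)
  "buildCacheEnabled": true,
  "cacheProvider": "local-only"
}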

dev/prod/config/rig.json Normal file
View File

@@ -0,0 +1,4 @@
+{
+  "$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
+  "rigPackageName": "@hcengineering/platform-rig"
+}
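The new rig.json tells rig-aware tooling to look up build configuration in the shared @hcengineering/platform-rig package instead of duplicating it per project. With @rushstack/rig-package the lookup works roughly as sketched below; the project path and the probed config file are illustrative assumptions, not code from this repository:

import { RigConfig } from '@rushstack/rig-package'

// Reads <projectFolder>/config/rig.json if present
const rig = RigConfig.loadForProjectFolder({ projectFolderPath: 'dev/prod' })
if (rig.rigFound) {
  console.log(rig.rigPackageName, rig.relativeProfileFolderPath)
  // Prefer a config file in the project itself, otherwise fall back to the rig's profile folder
  const resolved = rig.tryResolveConfigFilePath('config/heft.json')
  console.log(resolved ?? 'not provided by project or rig')
}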

View File

@@ -17,6 +17,7 @@
     "deploy": "cp -p public/* dist && aws s3 sync dist s3://anticrm-platform --delete --acl public-read"
   },
   "devDependencies": {
+    "@hcengineering/platform-rig": "^0.6.0",
     "cross-env": "~7.0.3",
     "webpack-cli": "^5.0.1",
     "webpack": "^5.75.0",

View File

@@ -1,11 +1,11 @@
 {
-  "incrementalBuildIgnoredGlobs": ["temp/**"],
+  "incrementalBuildIgnoredGlobs": ["temp/**", "dist_cache/**", "lib/**", "dist/**"],
   "disableBuildCacheForProject": false,
   "operationSettings": [
     {
       "operationName": "build",
-      "outputFolderNames": ["lib", "dist"]
+      "outputFolderNames": ["dist", "lib", "dist_cache"]
     }
   ]
 }

View File

@@ -1,11 +1,11 @@
 {
-  "incrementalBuildIgnoredGlobs": ["temp/**", "dist_cache/**", "lib/**"],
+  "incrementalBuildIgnoredGlobs": ["temp/**", "dist_cache/**", "lib/**", "dist/**", "deploy/**"],
   "disableBuildCacheForProject": false,
   "operationSettings": [
     {
       "operationName": "build",
-      "outputFolderNames": ["lib", "dist"]
+      "outputFolderNames": ["lib", "dist", "dist_cache"]
     }
   ]
 }

View File

@@ -1,5 +1,5 @@
 {
-  "incrementalBuildIgnoredGlobs": ["temp/**", "dist_cache/**", "lib/**"],
+  "incrementalBuildIgnoredGlobs": ["temp/**", "dist_cache/**", "lib/**", "dist/**"],
   "disableBuildCacheForProject": false,
   "operationSettings": [

View File

@@ -84,6 +84,9 @@ class ElasticDataAdapter implements DbAdapter {
         }
         resp = await this.client.search(q)
         if (resp.statusCode !== 200) {
+          if (resp.body?.error?.type === 'index_not_found_exception') {
+            return undefined
+          }
           console.error('failed elastic query', q, resp)
           throw new PlatformError(unknownStatus(`failed to elastic query ${JSON.stringify(resp)}`))
         }
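With this guard, a workspace whose full-text index has not been created yet yields undefined instead of an exception, and a caller can treat the missing index as an empty result. The shape below is a hypothetical sketch, not the adapter's actual call site:

// Hypothetical caller: a missing Elasticsearch index simply means "nothing indexed yet"
interface SearchResult {
  hits: { hits: unknown[] }
}

async function searchOrEmpty (
  search: (q: object) => Promise<SearchResult | undefined>,
  q: object
): Promise<SearchResult> {
  const resp = await search(q)
  return resp ?? { hits: { hits: [] } }
}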

File diff suppressed because one or more lines are too long

View File

@@ -17,8 +17,14 @@ program
       cmd.source.split(',').map((it) => join(process.cwd(), it.trim())),
       process.cwd(),
       root,
-      cmd.include.split(','),
-      cmd.exclude.split(',')
+      cmd.include
+        .split(',')
+        .map((it) => it.trim())
+        .filter((it) => it.length > 0),
+      cmd.exclude
+        .split(',')
+        .map((it) => it.trim())
+        .filter((it) => it.length > 0)
     )
   })
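The same trim-and-filter chain is applied to both options; an equivalent helper (a sketch, not code from this repository) makes the intent explicit and keeps an empty --include or --exclude value from turning into a one-element [''] list:

// Split a comma-separated CLI option into trimmed, non-empty entries
function splitList (value: string): string[] {
  return value
    .split(',')
    .map((it) => it.trim())
    .filter((it) => it.length > 0)
}

// splitList('')             -> []
// splitList(' pods, tool ') -> ['pods', 'tool']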

View File

@@ -28,16 +28,17 @@ export async function syncRushFiles (
   for (const prj of [join(platformRoot, 'rush.json'), ...root]) {
     const sPath = path.dirname(path.resolve(prj)).split(path.sep)
     const diff = path.join(...sPath.slice(abs.length))
+    console.log('processing', diff)
     const rushJsonSource = (await parse((await readFile(prj)).toString())) as CommentObject
     const sprojects = rushJsonSource.projects as unknown as CommentArray<RushPackage>
     for (const [k, v] of Object.entries(rushJsonSource)) {
       platformJson[k] = v
     }
-    projects
-      .filter((it) => filterPackage(it, include, exclude))
-      .push(
-        ...sprojects.map((it) => ({
+    projects.push(
+      ...sprojects
+        .filter((it) => filterPackage(it, include, exclude))
+        .map((it) => ({
           ...it,
           projectFolder: join(diff, it.projectFolder),
           shouldPublish: diff === '.' ? it.shouldPublish : false
@@ -53,13 +54,16 @@ function filterPackage (it: RushPackage, include: string[], exclude: string[]):
   const pkgName = it.packageName
   for (const i of include) {
     if (pkgName.includes(i)) {
+      console.log('Include', pkgName, i)
       return true
     }
   }
   for (const i of exclude) {
     if (pkgName.includes(i)) {
+      console.log('Exclude', pkgName, i)
       return false
     }
   }
+  console.log('Default', pkgName)
   return true
 }
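filterPackage checks the include list first, then the exclude list, and keeps the package when neither matches; the package names and patterns below are made up purely to illustrate that precedence:

// Hypothetical inputs showing the precedence: include wins, then exclude, then default true
filterPackage({ packageName: '@hcengineering/pod-server' } as RushPackage, ['pod-'], ['server']) // true  (include matched first)
filterPackage({ packageName: '@hcengineering/tool' } as RushPackage, ['pod-'], ['tool'])         // false (only exclude matched)
filterPackage({ packageName: '@hcengineering/ui' } as RushPackage, ['pod-'], ['tool'])           // true  (no list matched)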