From c3bc1a7fe4c362decb818c64e1b587649d52be2e Mon Sep 17 00:00:00 2001
From: Xavier Deguillard
Date: Wed, 16 Oct 2019 14:19:36 -0700
Subject: [PATCH] remotefilelog: make fetchpacks the default

Summary:
This has been enabled for a while now. We won't be going back to using
loosefiles, so let's make fetchpacks the default in the code.

A future step will remove the code paths that are no longer exercised.

Reviewed By: quark-zju

Differential Revision: D17919275

fbshipit-source-id: 0614f5710b630690de92cdb43ec07d3a2888aa1e
---
 tests/integration/test-hash-validation.t      | 23 +------
 tests/integration/test-lfs-copytracing.t      |  2 +
 tests/integration/test-lfs-to-mononoke.t      | 66 +++----------------
 .../test-lfs-upload-alias-on-fetch.t          |  2 +
 tests/integration/test-lfs.t                  |  2 +
 tests/integration/test-linknodes.t            | 50 ++++++--------
 ...ononoke-hg-sync-job-generate-bundles-lfs.t |  6 ++
 7 files changed, 43 insertions(+), 108 deletions(-)

diff --git a/tests/integration/test-hash-validation.t b/tests/integration/test-hash-validation.t
index fcc4115953..a362b46995 100644
--- a/tests/integration/test-hash-validation.t
+++ b/tests/integration/test-hash-validation.t
@@ -60,29 +60,8 @@ Prefetch should fail with corruption error
   added 3 changesets with 0 changes to 0 files
   adding remote bookmark master_bookmark
   new changesets 426bada5c675:26805aba1e60
-  $ hgmn prefetch -r ":"
-  remote: Command failed
-  remote: Error:
-  remote:     Corrupt hg filenode returned: 005d992c5dcf32993668f7cede29d296c494a5d9 != a2e456504a5e61f763f1a0b36a6c247c7541b2b3
-  remote:   Root cause:
-  remote:     CorruptHgFileNode {
-  remote:         expected: HgFileNodeId(
-  remote:             HgNodeHash(
-  remote:                 Sha1(005d992c5dcf32993668f7cede29d296c494a5d9),
-  remote:             ),
-  remote:         ),
-  remote:         actual: HgFileNodeId(
-  remote:             HgNodeHash(
-  remote:                 Sha1(a2e456504a5e61f763f1a0b36a6c247c7541b2b3),
-  remote:             ),
-  remote:         ),
-  remote:     }
-  abort: error downloading file contents:
-  'connection closed early for filename * and node *' (glob)
-  [255]

-Same for getpackv1
-  $ hgmn prefetch -r ":" --config remotefilelog.fetchpacks=True
+  $ hgmn prefetch -r ":"
   remote: Command failed
   remote: Error:
   remote:     Corrupt hg filenode returned: 005d992c5dcf32993668f7cede29d296c494a5d9 != a2e456504a5e61f763f1a0b36a6c247c7541b2b3
diff --git a/tests/integration/test-lfs-copytracing.t b/tests/integration/test-lfs-copytracing.t
index 000008691c..efb2c9e8c6 100644
--- a/tests/integration/test-lfs-copytracing.t
+++ b/tests/integration/test-lfs-copytracing.t
@@ -62,6 +62,8 @@ Create a new repository, enable LFS there as well
   > [extensions]
   > pushrebase =
   > remotenames =
+  > [remotefilelog]
+  > getpackversion = 2
   > EOF

 Pull changes from Mononoke
diff --git a/tests/integration/test-lfs-to-mononoke.t b/tests/integration/test-lfs-to-mononoke.t
index 8a21117234..098770cdfe 100644
--- a/tests/integration/test-lfs-to-mononoke.t
+++ b/tests/integration/test-lfs-to-mononoke.t
@@ -121,16 +121,20 @@ Verify that if we fail to upload LFS blobs first, the push fails
   $ cd ..

-Create a new client repository
-  $ hgclone_treemanifest ssh://user@dummy/repo-hg-nolfs repo-hg-lfs2 --noupdate --config extensions.remotenames=
-  $ cd repo-hg-lfs2
+Create a new client repository, using getpack (with its own cachepath)
+  $ hgclone_treemanifest ssh://user@dummy/repo-hg-nolfs repo-hg-lfs3 --noupdate --config extensions.remotenames=
+  $ cd repo-hg-lfs3
   $ setup_hg_client
-  $ setup_hg_lfs "$lfs_uri" 1000B "$TESTTMP/lfs-cache2"
+  $ setup_hg_lfs "$lfs_uri" 1000B "$TESTTMP/lfs-cache3"

   $ cat >> .hg/hgrc <<EOF
   > [extensions]
   > pushrebase =
   > remotenames =
+  > [remotefilelog]
+  > fetchpacks = True
+  > getpackversion = 2
+  > cachepath=$TESTTMP/cachepath-alt
   > EOF

   $ hgmn pull -v
@@ -142,7 +146,7 @@ Create a new client repository
   adding file changes
   added 2 changesets with 0 changes to 0 files
   new changesets 99765c8d839c:c651f052c52d
-  
+
   $ hgmn update -r master_bookmark -v
   resolving manifests
   lfs: need to transfer 2 objects (3.92 KB)
@@ -205,55 +209,3 @@ Change "sha256:oid" to an another valid oid to check sha1 consisnency
   abort: stream ended unexpectedly (got 0 bytes, expected 4)
   [255]

-  $ cd ..
-
-
-Create a new client repository, using getpack (with its own cachepath)
-  $ hgclone_treemanifest ssh://user@dummy/repo-hg-nolfs repo-hg-lfs3 --noupdate --config extensions.remotenames=
-  $ cd repo-hg-lfs3
-  $ setup_hg_client
-  $ setup_hg_lfs "$lfs_uri" 1000B "$TESTTMP/lfs-cache3"
-
-  $ cat >> .hg/hgrc <<EOF
-  > [extensions]
-  > pushrebase =
-  > remotenames =
-  > [remotefilelog]
-  > fetchpacks = True
-  > getpackversion = 2
-  > cachepath=$TESTTMP/cachepath-alt
-  > EOF
-
-  $ hgmn pull -v
-  pulling from ssh://user@dummy/repo
-  searching for changes
-  all local heads known remotely
-  adding changesets
-  adding manifests
-  adding file changes
-  added 2 changesets with 0 changes to 0 files
-  new changesets 99765c8d839c:c651f052c52d
-
-  $ hgmn update -r master_bookmark -v
-  resolving manifests
-  lfs: need to transfer 2 objects (3.92 KB)
-  lfs: downloading d19bca751e178f8cce59e1b872e0fd5857951c2577a2318aefad3253c317d982 (1.96 KB)
-  lfs: processed: d19bca751e178f8cce59e1b872e0fd5857951c2577a2318aefad3253c317d982
-  lfs: downloading e2fff2ce58d585b4b0572e0a323f9e7e5f98cc641489e12c03c401d05d0e350d (1.95 KB)
-  lfs: processed: e2fff2ce58d585b4b0572e0a323f9e7e5f98cc641489e12c03c401d05d0e350d
-  getting lfs-largefile
-  getting lfs-largefile-renamed
-  getting smallfile
-  calling hook update.prefetch: edenscm.hgext.remotefilelog.wcpprefetch
-  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-  $ sha256sum lfs-largefile
-  e2fff2ce58d585b4b0572e0a323f9e7e5f98cc641489e12c03c401d05d0e350d  lfs-largefile
-
-  $ sha256sum lfs-largefile-renamed
-  d19bca751e178f8cce59e1b872e0fd5857951c2577a2318aefad3253c317d982  lfs-largefile-renamed
-
-  $ hgmn st --change . -C
-  A lfs-largefile-renamed
-    lfs-largefile-for-rename
-  R lfs-largefile-for-rename
diff --git a/tests/integration/test-lfs-upload-alias-on-fetch.t b/tests/integration/test-lfs-upload-alias-on-fetch.t
index fdd2799707..9815170d2b 100644
--- a/tests/integration/test-lfs-upload-alias-on-fetch.t
+++ b/tests/integration/test-lfs-upload-alias-on-fetch.t
@@ -93,6 +93,8 @@
   > [extensions]
   > pushrebase =
   > remotenames =
+  > [remotefilelog]
+  > getpackversion = 2
   > EOF

   $ hgmn pull
diff --git a/tests/integration/test-lfs.t b/tests/integration/test-lfs.t
index fcf1b89614..f7d621b8b7 100644
--- a/tests/integration/test-lfs.t
+++ b/tests/integration/test-lfs.t
@@ -107,6 +107,8 @@
   > threshold=1000B
   > usercache=$TESTTMP/lfs-cache2
   > url=$lfs_uri
+  > [remotefilelog]
+  > getpackversion = 2
   > EOF

   $ hg update master_bookmark -v
diff --git a/tests/integration/test-linknodes.t b/tests/integration/test-linknodes.t
index 30de588826..38318d0980 100644
--- a/tests/integration/test-linknodes.t
+++ b/tests/integration/test-linknodes.t
@@ -24,16 +24,14 @@ define a remotefilelog cache process that just logs when things are added
   >     cmd = sys.stdin.readline().strip()
   >     if cmd == 'exit':
   >         sys.exit(0)
-  >     elif cmd == 'get':
+  >     elif cmd == 'getdata' or cmd == 'gethistory':
   >         count = int(sys.stdin.readline())
   >         for _ in range(count):
   >             key = sys.stdin.readline()[:-1]
-  >             if '\0' in key:
-  >                 _, key = key.split('\0')
   >             sys.stdout.write(key + '\n')
   >         sys.stdout.write('0\n')
   >         sys.stdout.flush()
-  >     elif cmd == 'set':
+  >     elif cmd == 'setdata' or cmd == 'sethistory':
   >         count = int(sys.stdin.readline())
   >         for _ in range(count):
   >             key = sys.stdin.readline()[:-1]
@@ -115,16 +113,15 @@ pull the infinitepush commit

 the blob didn't get uploaded to the cache
   $ cat $TESTTMP/cachelog.log
+  cacheprocess: set $TESTTMP/cachepath/repo-pull1/packs/07bbbe5abb17b910e6011232ceba22b0c6b29b9a
+  cacheprocess: set $TESTTMP/cachepath/repo-pull1/packs/f361a1ed16f4b87bbe47e638d8c2cc9f1de8e06f

-  $ hg debugremotefilelog ../cachepath/repo-pull1/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  size: 9 bytes
-  path: ../cachepath/repo-pull1/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  key: b4aa7b980f00
-  filename: file
+  $ hg debughistorypack ../cachepath/repo-pull1/packs/f361a1ed16f4b87bbe47e638d8c2cc9f1de8e06f

-  node => p1            p2            linknode      copyfrom
-  b4aa7b980f00 => 599997c6080f  000000000000  000000000000
-  599997c6080f => 000000000000  000000000000  d998012a9c34
+  file
+  Node          P1 Node       P2 Node       Link Node     Copy From
+  b4aa7b980f00  599997c6080f  000000000000  000000000000
+  599997c6080f  000000000000  000000000000  d998012a9c34

   $ hg debughistorypack ../cachepath/repo-pull1/packs/manifests/0a557814daab121c2043c7ba26a89a0d60671de6.histpack

@@ -191,22 +188,20 @@ pull only the master branch into another repo

 the blob was uploaded to the cache
   $ cat $TESTTMP/cachelog.log
-  cacheprocess: set repo-pull2/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
+  cacheprocess: set $TESTTMP/cachepath/repo-pull2/packs/07bbbe5abb17b910e6011232ceba22b0c6b29b9a
+  cacheprocess: set $TESTTMP/cachepath/repo-pull2/packs/e5e1a8b81e9d2360fe54412f8370812c06c6cadb

   $ hg log -G -T '{node} {desc} ({remotenames})\n' -r "all()"
   @  6dbc3093b5955d7bb47512155149ec66791c277d master (default/master_bookmark)
   |
   o  d998012a9c34a2423757a3d40f8579c78af1b342 base ()

-  $ hg debugremotefilelog ../cachepath/repo-pull2/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  size: 9 bytes
-  path: ../cachepath/repo-pull2/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  key: b4aa7b980f00
-  filename: file
+  $ hg debughistorypack ../cachepath/repo-pull2/packs/e5e1a8b81e9d2360fe54412f8370812c06c6cadb

-  node => p1            p2            linknode      copyfrom
-  b4aa7b980f00 => 599997c6080f  000000000000  6dbc3093b595
-  599997c6080f => 000000000000  000000000000  d998012a9c34
+  file
+  Node          P1 Node       P2 Node       Link Node     Copy From
+  b4aa7b980f00  599997c6080f  000000000000  6dbc3093b595
+  599997c6080f  000000000000  000000000000  d998012a9c34

   $ hg debughistorypack ../cachepath/repo-pull2/packs/manifests/d4f69b796da6848a455a916d75afe6b27e774058.histpack

@@ -236,15 +231,12 @@ pull the infinitepush commit again in a new repo
   new changesets 60ab8a6c8e65
   $ hgmn up 60ab8a6c8e652ea968be7ffdb658b49de35d3621
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved

-  $ hg debugremotefilelog ../cachepath/repo-pull3/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  size: 9 bytes
-  path: ../cachepath/repo-pull3/97/1c419dd609331343dee105fffd0f4608dc0bf2/b4aa7b980f00bcd3ea58510798c1425dcdc511f3
-  key: b4aa7b980f00
-  filename: file
+  $ hg debughistorypack ../cachepath/repo-pull2/packs/e5e1a8b81e9d2360fe54412f8370812c06c6cadb

-  node => p1            p2            linknode      copyfrom
-  b4aa7b980f00 => 599997c6080f  000000000000  6dbc3093b595
-  599997c6080f => 000000000000  000000000000  d998012a9c34
+  file
+  Node          P1 Node       P2 Node       Link Node     Copy From
+  b4aa7b980f00  599997c6080f  000000000000  6dbc3093b595
+  599997c6080f  000000000000  000000000000  d998012a9c34

   $ hg debughistorypack ../cachepath/repo-pull3/packs/manifests/d4f69b796da6848a455a916d75afe6b27e774058.histpack
diff --git a/tests/integration/test-mononoke-hg-sync-job-generate-bundles-lfs.t b/tests/integration/test-mononoke-hg-sync-job-generate-bundles-lfs.t
index aa3f988278..56ef6add6a 100644
--- a/tests/integration/test-mononoke-hg-sync-job-generate-bundles-lfs.t
+++ b/tests/integration/test-mononoke-hg-sync-job-generate-bundles-lfs.t
@@ -149,6 +149,12 @@ Setup another client and update to latest commit from mercurial
   $ cd client-pull
   $ setup_hg_client
   $ setup_hg_lfs "$lfs_uri" 1000B "$TESTTMP/lfs-cache1"
+
+  $ cat >> .hg/hgrc <<EOF
+  > [remotefilelog]
+  > getpackversion=2
+  > EOF
+
   $ hg up 2 -v
   resolving manifests
   lfs: downloading c12949887b7d8c46e9fcc5d9cd4bd884de33c1d00e24d7ac56ed9200e07f31a1 (40 bytes)
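
For reviewers, a rough sketch of the client .hg/hgrc that the updated tests converge on, assembled only from the snippets in the hunks above ($TESTTMP paths are test-harness placeholders, and the LFS url/threshold/usercache keys are written separately by the setup_hg_lfs helper):

  [extensions]
  pushrebase =
  remotenames =

  [remotefilelog]
  # With this change fetchpacks defaults to on, so setting it here is
  # redundant; the tests keep it only to be explicit.
  fetchpacks = True
  getpackversion = 2
  cachepath=$TESTTMP/cachepath-alt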