mirror of
https://github.com/facebook/sapling.git
synced 2024-10-07 07:17:55 +03:00
clean up HgQueuedBackingStore
Summary: split functions up. Reviewed By: chadaustin Differential Revision: D20808045 fbshipit-source-id: 3160566deb763c888a0bf34557d934feccc5ae3b
This commit is contained in:
parent
cc880f9622
commit
5bf6b58928
@ -84,6 +84,11 @@ class HgImportRequest {
|
||||
return std::get_if<T>(&request_);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
bool isType() const noexcept {
|
||||
return std::holds_alternative<T>(request_);
|
||||
}
|
||||
|
||||
/**
 * Returns the zero-based index of the alternative currently held by the
 * request_ variant. Requests holding the same alternative report the same
 * index, so callers can group a batch of requests by kind.
 */
[[nodiscard]] size_t getType() const noexcept {
  return request_.index();
}
|
||||
|
@ -44,6 +44,39 @@ HgQueuedBackingStore::~HgQueuedBackingStore() {
|
||||
}
|
||||
}
|
||||
|
||||
void HgQueuedBackingStore::processBlobImportRequests(
|
||||
std::vector<HgImportRequest>&& requests) {
|
||||
for (auto& request : requests) {
|
||||
auto parameter = request.getRequest<HgImportRequest::BlobImport>();
|
||||
request.setWith<HgImportRequest::BlobImport>(
|
||||
[store = backingStore_.get(), hash = parameter->hash]() {
|
||||
return store->getBlob(hash).getTry();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
void HgQueuedBackingStore::processTreeImportRequests(
|
||||
std::vector<HgImportRequest>&& requests) {
|
||||
for (auto& request : requests) {
|
||||
auto parameter = request.getRequest<HgImportRequest::TreeImport>();
|
||||
request.setWith<HgImportRequest::TreeImport>(
|
||||
[store = backingStore_.get(), hash = parameter->hash]() {
|
||||
return store->getTree(hash).getTry();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
void HgQueuedBackingStore::processPrefetchRequests(
|
||||
std::vector<HgImportRequest>&& requests) {
|
||||
for (auto& request : requests) {
|
||||
auto parameter = request.getRequest<HgImportRequest::Prefetch>();
|
||||
request.setWith<HgImportRequest::Prefetch>(
|
||||
[store = backingStore_.get(), hashes = parameter->hashes]() {
|
||||
return store->prefetchBlobs(hashes).getTry();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
void HgQueuedBackingStore::processRequest() {
|
||||
for (;;) {
|
||||
auto requests = queue_.dequeue(FLAGS_hg_queue_batch_size);
|
||||
@ -52,25 +85,14 @@ void HgQueuedBackingStore::processRequest() {
|
||||
break;
|
||||
}
|
||||
|
||||
for (auto& request : requests) {
|
||||
if (auto parameter = request.getRequest<HgImportRequest::BlobImport>()) {
|
||||
request.setWith<HgImportRequest::BlobImport>(
|
||||
[store = backingStore_.get(), hash = parameter->hash]() {
|
||||
return store->getBlob(hash).getTry();
|
||||
});
|
||||
} else if (
|
||||
auto parameter = request.getRequest<HgImportRequest::TreeImport>()) {
|
||||
request.setWith<HgImportRequest::TreeImport>(
|
||||
[store = backingStore_.get(), hash = parameter->hash]() {
|
||||
return store->getTree(hash).getTry();
|
||||
});
|
||||
} else if (
|
||||
auto parameter = request.getRequest<HgImportRequest::Prefetch>()) {
|
||||
request.setWith<HgImportRequest::Prefetch>(
|
||||
[store = backingStore_.get(), hashes = parameter->hashes]() {
|
||||
return store->prefetchBlobs(hashes).getTry();
|
||||
});
|
||||
}
|
||||
const auto& first = requests.at(0);
|
||||
|
||||
if (first.isType<HgImportRequest::BlobImport>()) {
|
||||
processBlobImportRequests(std::move(requests));
|
||||
} else if (first.isType<HgImportRequest::TreeImport>()) {
|
||||
processTreeImportRequests(std::move(requests));
|
||||
} else if (first.isType<HgImportRequest::Prefetch>()) {
|
||||
processPrefetchRequests(std::move(requests));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -78,6 +78,10 @@ class HgQueuedBackingStore : public BackingStore {
|
||||
HgQueuedBackingStore(const HgQueuedBackingStore&) = delete;
|
||||
HgQueuedBackingStore& operator=(const HgQueuedBackingStore&) = delete;
|
||||
|
||||
void processBlobImportRequests(std::vector<HgImportRequest>&& requests);
|
||||
void processTreeImportRequests(std::vector<HgImportRequest>&& requests);
|
||||
void processPrefetchRequests(std::vector<HgImportRequest>&& requests);
|
||||
|
||||
/**
|
||||
* The worker runloop function.
|
||||
*/
|
||||
|
Loading…
Reference in New Issue
Block a user