Merge with stable

Matt Mackall 2010-08-06 12:59:13 -05:00
commit c0eb9c1315
5 changed files with 37 additions and 22 deletions

@@ -17,7 +17,9 @@ def zgenerator(f):
     zd = zlib.decompressobj()
     try:
         for chunk in util.filechunkiter(f):
-            yield zd.decompress(chunk)
+            while chunk:
+                yield zd.decompress(chunk, 2**18)
+                chunk = zd.unconsumed_tail
     except httplib.HTTPException:
         raise IOError(None, _('connection ended unexpectedly'))
     yield zd.flush()
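The old zgenerator handed each network chunk to decompress() in a single call, so a small but highly compressible chunk could inflate into one enormous string. The replacement caps each decompress() call at 2**18 output bytes and loops on unconsumed_tail until the chunk is drained. A minimal standalone sketch of the same pattern; the zlib calls match the hunk, the surrounding names are invented:

import zlib

def iter_decompress(chunks, max_out=2**18):
    # Decompress an iterable of byte chunks, yielding at most
    # max_out bytes of output at a time.
    zd = zlib.decompressobj()
    for chunk in chunks:
        while chunk:
            # max_length bounds the output; compressed input not yet
            # consumed is saved in zd.unconsumed_tail for the next pass.
            yield zd.decompress(chunk, max_out)
            chunk = zd.unconsumed_tail
    yield zd.flush()

# A single tiny input that inflates to 1 MB:
payload = zlib.compress(b'\0' * (1 << 20))
pieces = [len(p) for p in iter_decompress([payload])]
assert sum(pieces) == 1 << 20
assert max(pieces) <= 2**18   # no output piece exceeds the cap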

@@ -323,6 +323,7 @@ def applyupdates(repo, action, wctx, mctx, actx):
             repo.ui.note(_("getting %s\n") % f)
             t = mctx.filectx(f).data()
             repo.wwrite(f, t, flags)
+            t = None
             updated += 1
             if f == '.hgsubstate': # subrepo states need updating
                 subrepo.submerge(repo, wctx, mctx, wctx)
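By the time the added line runs, wwrite() has already flushed the file's contents to the working directory, so t = None drops the last reference to that data before the loop advances; otherwise the previous file's contents would still be alive while filectx(f).data() allocates the next one, roughly doubling peak memory during large updates. The shape of the fix, with hypothetical stand-in names:

def get_files(files, read_data, write_data):
    # read_data/write_data stand in for mctx.filectx(f).data()
    # and repo.wwrite(); both names are invented for illustration.
    for name in files:
        data = read_data(name)   # potentially a very large string
        write_data(name, data)
        data = None              # release it before the next iteration
                                 # allocates the next file's contents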

@@ -1041,6 +1041,9 @@ class revlog(object):
             base = self._cache[1]
             text = self._cache[2]
 
+            # drop cache to save memory
+            self._cache = None
+
         self._loadindex(base, rev + 1)
         self._chunkraw(base, rev)
         if text is None:
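revision() may seed delta reconstruction from revlog's one-entry cache; once base and text have been copied out, clearing self._cache means the cached full text is not pinned in memory while the new revision's text is assembled. A runnable toy model of the same single-entry cache discipline; the append-only delta storage here is invented for illustration:

class ChainReader:
    def __init__(self, deltas):
        self.deltas = deltas    # delta i turns revision i-1 into i
        self._cache = None      # (rev, text) of the last result

    def revision(self, rev):
        base, text = 0, ''
        if self._cache and self._cache[0] <= rev:
            base, text = self._cache
            base += 1
            # drop cache to save memory: without this, the old full
            # text and the new one would be alive at the same time
            self._cache = None
        for i in range(base, rev + 1):
            text += self.deltas[i]
        self._cache = (rev, text)
        return text

r = ChainReader(['a', 'b', 'c', 'd'])
assert r.revision(2) == 'abc'
assert r.revision(3) == 'abcd'   # extends the cached 'abc'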

@@ -925,30 +925,36 @@ class chunkbuffer(object):
                 else:
                     yield chunk
         self.iter = splitbig(in_iter)
-        self.buf = ''
+        self._queue = []
 
     def read(self, l):
         """Read L bytes of data from the iterator of chunks of data.
         Returns less than L bytes if the iterator runs dry."""
-        if l > len(self.buf) and self.iter:
-            # Clamp to a multiple of 2**16
-            targetsize = max(l, 2**16)
-            collector = [str(self.buf)]
-            collected = len(self.buf)
-            for chunk in self.iter:
-                collector.append(chunk)
-                collected += len(chunk)
-                if collected >= targetsize:
+        left = l
+        buf = ''
+        queue = self._queue
+        while left > 0:
+            # refill the queue
+            if not queue:
+                target = 2**18
+                for chunk in self.iter:
+                    queue.append(chunk)
+                    target -= len(chunk)
+                    if target <= 0:
+                        break
+                if not queue:
                     break
-            else:
-                self.iter = False
-            self.buf = ''.join(collector)
-        if len(self.buf) == l:
-            s, self.buf = str(self.buf), ''
-        else:
-            s, self.buf = self.buf[:l], buffer(self.buf, l)
-        return s
+
+            chunk = queue.pop(0)
+            left -= len(chunk)
+            if left < 0:
+                queue.insert(0, chunk[left:])
+                buf += chunk[:left]
+            else:
+                buf += chunk
+
+        return buf
 
 def filechunkiter(f, size=65536, limit=None):
     """Create a generator that produces the data in the file size
     (default 65536) bytes at a time, up to optional limit (default is
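The rewritten read() drops the old collect-join-slice scheme, which rebuilt self.buf with ''.join() and Python 2 buffer() objects on every call, in favor of a plain list used as a chunk queue: refill in roughly 2**18-byte batches, pop whole chunks, and slice only the single chunk that straddles the end of a request. A self-contained copy of the same algorithm that can be run directly:

class ChunkBuffer:
    # Same queue-based algorithm as the new chunkbuffer.read() above,
    # extracted so it can be exercised on its own.
    def __init__(self, in_iter, target=2**18):
        self.iter = iter(in_iter)
        self.target = target
        self._queue = []

    def read(self, l):
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            if not queue:            # refill the queue
                want = self.target
                for chunk in self.iter:
                    queue.append(chunk)
                    want -= len(chunk)
                    if want <= 0:
                        break
                if not queue:        # source ran dry
                    break
            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:             # chunk overshoots: split it
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk
        return buf

cb = ChunkBuffer(['hello', 'world'])
assert cb.read(7) == 'hellowo'
assert cb.read(7) == 'rld'   # shorter than asked: iterator exhausted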

@@ -48,6 +48,8 @@ def _verify(repo):
         if isinstance(inst, KeyboardInterrupt):
             ui.warn(_("interrupted"))
             raise
+        if not str(inst):
+            inst = repr(inst)
         err(linkrev, "%s: %s" % (msg, inst), filename)
 
     def warn(msg):
@@ -229,6 +231,7 @@ def _verify(repo):
         checklog(fl, f, lr)
         seen = {}
+        rp = None
         for i in fl:
             revisions += 1
             n = fl.node(i)
@@ -241,12 +244,12 @@ def _verify(repo):
             # verify contents
             try:
-                t = fl.read(n)
+                l = len(fl.read(n))
                 rp = fl.renamed(n)
-                if len(t) != fl.size(i):
+                if l != fl.size(i):
                     if len(fl.revision(n)) != fl.size(i):
                         err(lr, _("unpacked size is %s, %s expected") %
-                            (len(t), fl.size(i)), f)
+                            (l, fl.size(i)), f)
             except Exception, inst:
                 exc(lr, _("unpacking %s") % short(n), inst, f)
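Three small robustness fixes in _verify(): l = len(fl.read(n)) measures the unpacked revision without keeping the whole text alive for the rest of the try block; rp = None is initialized before the loop so the later rename handling cannot hit an unbound name when unpacking fails early; and the exc() helper now falls back to repr() for exceptions whose str() is empty, as happens when an exception is raised with no arguments. A quick sketch of that last fallback:

def describe(exc):
    # Same fallback as in exc() above: an exception constructed with
    # no arguments stringifies to '', which makes a useless message.
    return str(exc) or repr(exc)

assert describe(ValueError("bad size")) == "bad size"
assert describe(ValueError()) == "ValueError()"   # str() gives ''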