2005-08-28 01:21:25 +04:00
|
|
|
# httprepo.py - HTTP repository proxy classes for mercurial
|
|
|
|
#
|
2006-08-12 23:30:02 +04:00
|
|
|
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
|
|
|
|
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
|
2005-08-28 01:21:25 +04:00
|
|
|
#
|
2009-04-26 03:08:54 +04:00
|
|
|
# This software may be used and distributed according to the terms of the
|
|
|
|
# GNU General Public License version 2, incorporated herein by reference.
|
2005-08-28 01:21:25 +04:00
|
|
|
|
2008-10-23 00:41:32 +04:00
|
|
|
from node import bin, hex, nullid
|
2006-12-15 05:25:19 +03:00
|
|
|
from i18n import _
|
2009-04-28 19:40:46 +04:00
|
|
|
import repo, changegroup, statichttprepo, error, url, util
|
|
|
|
import os, urllib, urllib2, urlparse, zlib, httplib
|
|
|
|
import errno, socket
|
2009-11-16 15:35:36 +03:00
|
|
|
import encoding
|
2007-06-22 21:32:54 +04:00
|
|
|
|
2006-11-16 00:51:58 +03:00
|
|
|
def zgenerator(f):
    """Decompress a zlib stream read from the file-like object f.

    Yields decompressed chunks as they arrive; an HTTP-level failure
    while reading is reported as an IOError so callers see a
    truncated-connection error rather than an httplib exception.
    """
    decompressor = zlib.decompressobj()
    try:
        for piece in util.filechunkiter(f):
            yield decompressor.decompress(piece)
    except httplib.HTTPException:
        raise IOError(None, _('connection ended unexpectedly'))
    # emit whatever the decompressor still buffers
    yield decompressor.flush()
|
|
2008-03-20 19:12:35 +03:00
|
|
|
class httprepository(repo.repository):
|
2005-08-28 01:21:25 +04:00
|
|
|
def __init__(self, ui, path):
|
2006-07-26 00:50:32 +04:00
|
|
|
self.path = path
|
2006-06-16 04:07:30 +04:00
|
|
|
self.caps = None
|
2007-02-19 12:38:58 +03:00
|
|
|
self.handler = None
|
2006-05-23 02:42:49 +04:00
|
|
|
scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
|
|
|
|
if query or frag:
|
|
|
|
raise util.Abort(_('unsupported URL component: "%s"') %
|
|
|
|
(query or frag))
|
|
|
|
|
|
|
|
# urllib cannot handle URLs with embedded user or passwd
|
2008-10-27 23:50:01 +03:00
|
|
|
self._url, authinfo = url.getauthinfo(path)
|
|
|
|
|
2005-08-28 01:21:25 +04:00
|
|
|
self.ui = ui
|
2009-09-19 03:15:38 +04:00
|
|
|
self.ui.debug('using %s\n' % self._url)
|
2006-05-23 02:42:49 +04:00
|
|
|
|
2008-10-27 23:50:01 +03:00
|
|
|
self.urlopener = url.opener(ui, authinfo)
|
2007-06-06 22:22:52 +04:00
|
|
|
|
2009-01-29 05:06:59 +03:00
|
|
|
def __del__(self):
|
|
|
|
for h in self.urlopener.handlers:
|
|
|
|
h.close()
|
|
|
|
if hasattr(h, "close_all"):
|
|
|
|
h.close_all()
|
|
|
|
|
2006-07-26 00:50:32 +04:00
|
|
|
    def url(self):
        """Return the URL originally given, not any redirect target."""
        return self.path
|
2006-06-16 04:07:30 +04:00
|
|
|
# look up capabilities only when needed
|
|
|
|
|
|
|
|
def get_caps(self):
|
|
|
|
if self.caps is None:
|
|
|
|
try:
|
2009-04-22 02:55:32 +04:00
|
|
|
self.caps = set(self.do_read('capabilities').split())
|
2009-01-12 19:42:31 +03:00
|
|
|
except error.RepoError:
|
2009-04-22 02:55:32 +04:00
|
|
|
self.caps = set()
|
2009-09-19 03:15:38 +04:00
|
|
|
self.ui.debug('capabilities: %s\n' %
|
2006-06-21 02:17:28 +04:00
|
|
|
(' '.join(self.caps or ['none'])))
|
2006-06-16 04:07:30 +04:00
|
|
|
return self.caps
|
|
|
|
|
|
|
|
capabilities = property(get_caps)
|
|
|
|
|
2006-03-09 23:18:59 +03:00
|
|
|
    def lock(self):
        """Locking is meaningless on a remote HTTP repository; abort."""
        raise util.Abort(_('operation not supported over http'))
|
|
|
|
|
2005-08-28 01:21:25 +04:00
|
|
|
    def do_cmd(self, cmd, **args):
        """Send a wire-protocol command and return the open response.

        'data' (request body) and 'headers' keyword arguments are passed
        through to the HTTP request; every other keyword becomes a query
        parameter.  Follows redirects (remembering the real URL), and
        raises error.RepoError when the answer does not look like it
        came from an hg server or uses a newer protocol than we speak.
        """
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        try:
            if data:
                self.ui.debug("sending %s bytes\n" % len(data))
            resp = self.urlopener.open(urllib2.Request(cu, data, headers))
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug('http error while sending %s command\n' % cmd)
            self.ui.traceback()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))
        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url != resp_url:
            self.ui.status(_('real URL is %s\n') % resp_url)
            self._url = resp_url
        # older response objects lack getheader(); fall back to the dict
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers['content-type']

        # password-scrubbed URL for error messages
        safeurl = url.hidepassword(self._url)
        # accept old "text/plain" and "application/hg-changegroup" for now
        if not (proto.startswith('application/mercurial-') or
                proto.startswith('text/plain') or
                proto.startswith('application/hg-changegroup')):
            self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu))
            # include the body so proxy/server error pages are diagnosable
            raise error.RepoError(_("'%s' does not appear to be an hg repository:\n"
                                    "---%%<--- (%s)\n%s\n---%%<---\n")
                                  % (safeurl, proto, resp.read()))

        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise error.RepoError(_("'%s' sent a broken Content-Type "
                                        "header (%s)") % (safeurl, proto))
            if version_info > (0, 1):
                raise error.RepoError(_("'%s' uses newer protocol %s") %
                                      (safeurl, version))

        return resp
|
2006-06-15 23:57:59 +04:00
|
|
|
def do_read(self, cmd, **args):
|
|
|
|
fp = self.do_cmd(cmd, **args)
|
|
|
|
try:
|
|
|
|
return fp.read()
|
|
|
|
finally:
|
|
|
|
# if using keepalive, allow connection to be reused
|
|
|
|
fp.close()
|
|
|
|
|
2006-09-10 05:25:06 +04:00
|
|
|
def lookup(self, key):
|
2007-08-28 01:48:08 +04:00
|
|
|
self.requirecap('lookup', _('look up remote revision'))
|
2006-10-18 11:08:36 +04:00
|
|
|
d = self.do_cmd("lookup", key = key).read()
|
|
|
|
success, data = d[:-1].split(' ', 1)
|
|
|
|
if int(success):
|
|
|
|
return bin(data)
|
2009-01-12 19:42:31 +03:00
|
|
|
raise error.RepoError(data)
|
2006-09-10 05:25:06 +04:00
|
|
|
|
2005-08-28 01:21:25 +04:00
|
|
|
def heads(self):
|
2006-06-15 23:57:59 +04:00
|
|
|
d = self.do_read("heads")
|
2005-08-28 01:21:25 +04:00
|
|
|
try:
|
|
|
|
return map(bin, d[:-1].split(" "))
|
|
|
|
except:
|
2009-01-12 20:28:28 +03:00
|
|
|
raise error.ResponseError(_("unexpected response:"), d)
|
2005-08-28 01:21:25 +04:00
|
|
|
|
2009-05-23 19:03:51 +04:00
|
|
|
def branchmap(self):
|
|
|
|
d = self.do_read("branchmap")
|
|
|
|
try:
|
|
|
|
branchmap = {}
|
|
|
|
for branchpart in d.splitlines():
|
|
|
|
branchheads = branchpart.split(' ')
|
|
|
|
branchname = urllib.unquote(branchheads[0])
|
2009-11-18 17:20:08 +03:00
|
|
|
# Earlier servers (1.3.x) send branch names in (their) local
|
|
|
|
# charset. The best we can do is assume it's identical to our
|
|
|
|
# own local charset, in case it's not utf-8.
|
2009-11-16 15:35:36 +03:00
|
|
|
try:
|
2009-11-18 17:20:08 +03:00
|
|
|
branchname.decode('utf-8')
|
2009-11-16 15:35:36 +03:00
|
|
|
except UnicodeDecodeError:
|
2009-11-18 17:20:08 +03:00
|
|
|
branchname = encoding.fromlocal(branchname)
|
2009-05-23 19:03:51 +04:00
|
|
|
branchheads = [bin(x) for x in branchheads[1:]]
|
|
|
|
branchmap[branchname] = branchheads
|
|
|
|
return branchmap
|
|
|
|
except:
|
|
|
|
raise error.ResponseError(_("unexpected response:"), d)
|
|
|
|
|
2005-08-28 01:21:25 +04:00
|
|
|
def branches(self, nodes):
|
|
|
|
n = " ".join(map(hex, nodes))
|
2006-06-15 23:57:59 +04:00
|
|
|
d = self.do_read("branches", nodes=n)
|
2005-08-28 01:21:25 +04:00
|
|
|
try:
|
|
|
|
br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
|
|
|
|
return br
|
|
|
|
except:
|
2009-01-12 20:28:28 +03:00
|
|
|
raise error.ResponseError(_("unexpected response:"), d)
|
2005-08-28 01:21:25 +04:00
|
|
|
|
|
|
|
def between(self, pairs):
|
2008-11-08 00:30:25 +03:00
|
|
|
batch = 8 # avoid giant requests
|
|
|
|
r = []
|
|
|
|
for i in xrange(0, len(pairs), batch):
|
|
|
|
n = " ".join(["-".join(map(hex, p)) for p in pairs[i:i + batch]])
|
|
|
|
d = self.do_read("between", pairs=n)
|
|
|
|
try:
|
|
|
|
r += [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
|
|
|
|
except:
|
2009-01-12 20:28:28 +03:00
|
|
|
raise error.ResponseError(_("unexpected response:"), d)
|
2008-11-08 00:30:25 +03:00
|
|
|
return r
|
2005-08-28 01:21:25 +04:00
|
|
|
|
2006-02-17 19:26:21 +03:00
|
|
|
def changegroup(self, nodes, kind):
|
2005-08-28 01:21:25 +04:00
|
|
|
n = " ".join(map(hex, nodes))
|
|
|
|
f = self.do_cmd("changegroup", roots=n)
|
2006-11-16 00:51:58 +03:00
|
|
|
return util.chunkbuffer(zgenerator(f))
|
2006-09-10 05:25:06 +04:00
|
|
|
|
|
|
|
def changegroupsubset(self, bases, heads, source):
|
2007-08-28 01:48:08 +04:00
|
|
|
self.requirecap('changegroupsubset', _('look up remote changes'))
|
2006-09-10 05:25:06 +04:00
|
|
|
baselst = " ".join([hex(n) for n in bases])
|
|
|
|
headlst = " ".join([hex(n) for n in heads])
|
|
|
|
f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
|
2006-11-16 00:51:58 +03:00
|
|
|
return util.chunkbuffer(zgenerator(f))
|
2005-08-28 01:21:25 +04:00
|
|
|
|
2006-06-16 03:37:23 +04:00
|
|
|
    def unbundle(self, cg, heads, source):
        """Push changegroup cg to the server; return the server's int result.

        The server's textual output is written to the ui; connection
        resets are turned into a friendlier 'push failed' abort.
        """
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        type = ""
        types = self.capable('unbundle')
        # servers older than d1b16a746db6 will send 'unbundle' as a
        # boolean capability
        try:
            types = types.split(',')
        except AttributeError:
            types = [""]
        # pick the first advertised bundle type we know how to write
        if types:
            for x in types:
                if x in changegroup.bundletypes:
                    type = x
                    break

        tempname = changegroup.writebundle(cg, None, type)
        fp = url.httpsendfile(tempname, "rb")
        try:
            try:
                resp = self.do_read(
                    'unbundle', data=fp,
                    headers={'Content-Type': 'application/octet-stream'},
                    heads=' '.join(map(hex, heads)))
                # response format: "<result code>\n<server output>"
                resp_code, output = resp.split('\n', 1)
                try:
                    ret = int(resp_code)
                except ValueError, err:
                    raise error.ResponseError(
                        _('push failed (unexpected response):'), resp)
                self.ui.write(output)
                return ret
            except socket.error, err:
                if err[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err[1])
                raise util.Abort(err[1])
        finally:
            # always clean up the temporary bundle file
            fp.close()
            os.unlink(tempname)
|
|
2006-07-14 22:17:22 +04:00
|
|
|
    def stream_out(self):
        """Return the raw response stream for the 'stream_out' command."""
        return self.do_cmd('stream_out')
|
2005-08-28 01:21:25 +04:00
|
|
|
class httpsrepository(httprepository):
    # httprepository over SSL; only usable when Python was built with
    # HTTPS support (checked via the url module)
    def __init__(self, ui, path):
        if not url.has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)
|
2006-07-31 18:11:12 +04:00
|
|
|
|
|
|
|
def instance(ui, path, create):
    """Build an http(s) repository proxy for path.

    Creation is not supported over http.  If the server does not speak
    the hg protocol, fall back to a static-http repository.
    """
    if create:
        raise util.Abort(_('cannot create new http repository'))
    if path.startswith('https:'):
        repoclass = httpsrepository
    else:
        repoclass = httprepository
    try:
        inst = repoclass(ui, path)
        # probe the server with a trivial 'between' query
        inst.between([(nullid, nullid)])
        return inst
    except error.RepoError:
        ui.note('(falling back to static-http)\n')
        return statichttprepo.instance(ui, "static-" + path, create)
|