# url.py - HTTP handling for mercurial
#
# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
# Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
|
|
|
|
|
|
|
|
import base64
|
|
|
|
import os
|
|
|
|
import socket
|
|
|
|
|
|
|
|
from .i18n import _
|
|
|
|
from . import (
|
2017-01-15 10:47:05 +03:00
|
|
|
encoding,
|
2015-10-08 22:55:45 +03:00
|
|
|
error,
|
2015-08-09 06:14:50 +03:00
|
|
|
httpconnection as httpconnectionmod,
|
|
|
|
keepalive,
|
|
|
|
sslutil,
|
|
|
|
util,
|
|
|
|
)
|
2011-02-13 06:59:43 +03:00
|
|
|
|
2016-06-28 13:31:53 +03:00
|
|
|
httplib = util.httplib
|
|
|
|
stringio = util.stringio
|
2016-04-07 02:22:12 +03:00
|
|
|
urlerr = util.urlerr
|
|
|
|
urlreq = util.urlreq
|
|
|
|
|
2016-06-06 00:36:23 +03:00
|
|
|
class passwordmgr(object):
    """Password manager wrapping a urllib2-style password database.

    Resolution order in find_user_password: stored credentials, then
    [auth] configuration, then interactive prompting (or abort when the
    ui is non-interactive).
    """
    def __init__(self, ui, passwddb):
        self.ui = ui
        # passwddb is a urllib2-compatible password manager
        # (e.g. ui.httppasswordmgrdb); all storage is delegated to it.
        self.passwddb = passwddb

    def add_password(self, realm, uri, user, passwd):
        """Store credentials for (realm, uri) in the backing database."""
        return self.passwddb.add_password(realm, uri, user, passwd)

    def find_user_password(self, realm, authuri):
        """Return a (user, passwd) pair for authuri, prompting if needed.

        Raises error.Abort when credentials are required but the ui is
        not interactive. Successfully determined credentials are saved
        back into the password database before returning.
        """
        authinfo = self.passwddb.find_user_password(realm, authuri)
        user, passwd = authinfo
        if user and passwd:
            # fully stored credentials: use them without prompting
            self._writedebug(user, passwd)
            return (user, passwd)

        if not user or not passwd:
            # try the [auth] section of the configuration
            res = httpconnectionmod.readauthforuri(self.ui, authuri, user)
            if res:
                group, auth = res
                user, passwd = auth.get('username'), auth.get('password')
                self.ui.debug("using auth.%s.* for authentication\n" % group)
        if not user or not passwd:
            # still incomplete: fall back to prompting the user
            u = util.url(authuri)
            # strip the query string so the prompt shows a clean URL
            u.query = None
            if not self.ui.interactive():
                raise error.Abort(_('http authorization required for %s') %
                                  util.hidepassword(str(u)))

            self.ui.write(_("http authorization required for %s\n") %
                          util.hidepassword(str(u)))
            self.ui.write(_("realm: %s\n") % realm)
            if user:
                self.ui.write(_("user: %s\n") % user)
            else:
                user = self.ui.prompt(_("user:"), default=None)

            if not passwd:
                passwd = self.ui.getpass()

        # remember whatever we ended up with for subsequent requests
        self.passwddb.add_password(realm, authuri, user, passwd)
        self._writedebug(user, passwd)
        return (user, passwd)

    def _writedebug(self, user, passwd):
        # Never log the password itself; only its (masked) length.
        msg = _('http auth: user %s, password %s\n')
        self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))

    def find_stored_password(self, authuri):
        """Look up stored credentials for authuri without prompting."""
        return self.passwddb.find_user_password(None, authuri)
|
2011-08-05 23:05:41 +04:00
|
|
|
|
2016-04-07 02:22:12 +03:00
|
|
|
class proxyhandler(urlreq.proxyhandler):
    """Proxy handler configured from [http_proxy] and the environment."""

    def __init__(self, ui):
        # configuration wins over the http_proxy environment variable
        proxyurl = (ui.config("http_proxy", "host") or
                    encoding.environ.get('http_proxy'))
        # XXX proxyauthinfo = None

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            proxy = util.url(proxyurl)
            if not proxy.user:
                # only fall back to configured credentials when the URL
                # itself carries none
                proxy.user = ui.config("http_proxy", "user")
                proxy.passwd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = ["localhost", "127.0.0.1"]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in encoding.environ.get("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if ui.configbool("http_proxy", "always"):
                self.no_list = []
            else:
                self.no_list = no_list

            proxyurl = str(proxy)
            # same proxy serves both schemes
            proxies = {'http': proxyurl, 'https': proxyurl}
            ui.debug('proxying through http://%s:%s\n' %
                     (proxy.host, proxy.port))
        else:
            proxies = {}

        urlreq.proxyhandler.__init__(self, proxies)
        self.ui = ui

    def proxy_open(self, req, proxy, type_):
        """Bypass the proxy (return None) for hosts on the no-proxy list.

        Entries may be exact hostnames, '*.suffix' or '.suffix' patterns.
        """
        host = req.get_host().split(':')[0]
        for e in self.no_list:
            if host == e:
                return None
            if e.startswith('*.') and host.endswith(e[2:]):
                return None
            if e.startswith('.') and host.endswith(e[1:]):
                return None

        return urlreq.proxyhandler.proxy_open(self, req, proxy, type_)
|
2008-10-27 23:50:01 +03:00
|
|
|
|
2011-02-16 06:28:17 +03:00
|
|
|
def _gen_sendfile(orgsend):
|
2008-10-27 23:50:01 +03:00
|
|
|
def _sendfile(self, data):
|
|
|
|
# send a file
|
2011-05-06 19:22:08 +04:00
|
|
|
if isinstance(data, httpconnectionmod.httpsendfile):
|
2008-10-27 23:50:01 +03:00
|
|
|
# if auth required, some data sent twice, so rewind here
|
|
|
|
data.seek(0)
|
|
|
|
for chunk in util.filechunkiter(data):
|
2011-02-16 06:28:17 +03:00
|
|
|
orgsend(self, chunk)
|
2008-10-27 23:50:01 +03:00
|
|
|
else:
|
2011-02-16 06:28:17 +03:00
|
|
|
orgsend(self, data)
|
2008-10-27 23:50:01 +03:00
|
|
|
return _sendfile
|
|
|
|
|
2016-04-07 02:22:12 +03:00
|
|
|
# True when the urllib layer provides an HTTPS handler (i.e. Python was
# built with SSL support); gates the https classes defined further below.
has_https = util.safehasattr(urlreq, 'httpshandler')
|
2010-02-10 22:27:46 +03:00
|
|
|
|
2008-10-27 23:50:01 +03:00
|
|
|
class httpconnection(keepalive.HTTPConnection):
    """Keepalive HTTP connection that can stream large request bodies."""

    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection.send)

    def getresponse(self):
        """Return a stashed proxy CONNECT response once, if present,
        otherwise delegate to the keepalive implementation."""
        proxyres = getattr(self, 'proxyres', None)
        if not proxyres:
            return keepalive.HTTPConnection.getresponse(self)
        # Hand the tunnel response back exactly once; close first when
        # the proxy indicated the connection will not stay open.
        if proxyres.will_close:
            self.close()
        self.proxyres = None
        return proxyres
|
|
|
|
|
2009-11-13 08:29:49 +03:00
|
|
|
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
    """Record CONNECT-tunnel state on connection *h* for request *req*.

    Sets h.realhostport and h.headers when the request should be
    tunnelled through a proxy via CONNECT, or None/None otherwise.
    """
    # Python >= 2.6.3 exposes the tunnel target on the request itself
    tunnel_host = getattr(req, '_tunnel_host', None)
    if tunnel_host:
        # normalize a bare host to an https URL; note 'https:/' is 7
        # chars, matching the [:7] slice of an https URL
        if tunnel_host[:7] not in ['http://', 'https:/']:
            tunnel_host = 'https://' + tunnel_host
        new_tunnel = True
    else:
        # pre-2.6.3: the selector holds the full URL when proxied
        tunnel_host = req.get_selector()
        new_tunnel = False

    if new_tunnel or tunnel_host == req.get_full_url(): # has proxy
        u = util.url(tunnel_host)
        if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS
            h.realhostport = ':'.join([u.host, (u.port or '443')])
            h.headers = req.headers.copy()
            h.headers.update(handler.parent.addheaders)
            return

    # no tunnelling needed for this request
    h.realhostport = None
    h.headers = None
|
|
|
|
|
|
|
|
def _generic_proxytunnel(self):
    """Issue a CONNECT request over self.sock and parse the reply.

    Expects self.realhostport and self.headers to have been set by
    _generic_start_transaction(). Returns True when the proxy answered
    200; otherwise stashes the parsed response on self.proxyres (to be
    surfaced by getresponse()) and returns False.
    """
    # forward only the Proxy-* headers to the proxy itself
    proxyheaders = dict(
        [(x, self.headers[x]) for x in self.headers
         if x.lower().startswith('proxy-')])
    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in proxyheaders.iteritems():
        self.send('%s: %s\r\n' % header)
    self.send('\r\n')

    # majority of the following code is duplicated from
    # httplib.HTTPConnection as there are no adequate places to
    # override functions to provide the needed functionality
    res = self.response_class(self.sock,
                              strict=self.strict,
                              method=self._method)

    while True:
        version, status, reason = res._read_status()
        if status != httplib.CONTINUE:
            break
        # skip lines that are all whitespace
        list(iter(lambda: res.fp.readline().strip(), ''))
    res.status = status
    res.reason = reason.strip()

    if res.status == 200:
        # skip lines until we find a blank line
        list(iter(res.fp.readline, '\r\n'))
        return True

    if version == 'HTTP/1.0':
        res.version = 10
    elif version.startswith('HTTP/1.'):
        # treat any HTTP/1.x other than 1.0 as 1.1
        res.version = 11
    elif version == 'HTTP/0.9':
        res.version = 9
    else:
        raise httplib.UnknownProtocol(version)

    if res.version == 9:
        # HTTP/0.9 has no headers at all
        res.length = None
        res.chunked = 0
        res.will_close = 1
        res.msg = httplib.HTTPMessage(stringio())
        return False

    res.msg = httplib.HTTPMessage(res.fp)
    res.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    trenc = res.msg.getheader('transfer-encoding')
    if trenc and trenc.lower() == "chunked":
        res.chunked = 1
        res.chunk_left = None
    else:
        res.chunked = 0

    # will the connection close at the end of the response?
    res.will_close = res._check_close()

    # do we have a Content-Length?
    # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
    # transfer-encoding is "chunked"
    length = res.msg.getheader('content-length')
    if length and not res.chunked:
        try:
            res.length = int(length)
        except ValueError:
            res.length = None
        else:
            if res.length < 0: # ignore nonsensical negative lengths
                res.length = None
    else:
        res.length = None

    # does the body have a fixed length? (of zero)
    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
        100 <= status < 200 or # 1xx codes
        res._method == 'HEAD'):
        res.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not res.will_close and
        not res.chunked and
        res.length is None):
        res.will_close = 1

    # non-200 reply: remember it so getresponse() can report the error
    self.proxyres = res

    return False
|
|
|
|
|
2008-10-27 23:50:01 +03:00
|
|
|
class httphandler(keepalive.HTTPHandler):
    """Plain-HTTP handler backed by the streaming keepalive connection."""

    def http_open(self, req):
        # use our connection class so big request bodies are streamed
        return self.do_open(httpconnection, req)

    def _start_transaction(self, h, req):
        # record proxy CONNECT-tunnel state on h before delegating
        _generic_start_transaction(self, h, req)
        return keepalive.HTTPHandler._start_transaction(self, h, req)
|
|
|
|
|
2008-10-27 23:50:01 +03:00
|
|
|
if has_https:
    class httpsconnection(httplib.HTTPConnection):
        """HTTPS connection with keepalive, streaming send and optional
        CONNECT proxy tunnelling.

        The attributes ``ui`` and ``realhostport`` are set externally
        (by httpshandler._makeconnection and _generic_start_transaction
        respectively) before connect() is called.
        """
        response_class = keepalive.HTTPResponse
        default_port = httplib.HTTPS_PORT
        # must be able to send big bundle as stream.
        send = _gen_sendfile(keepalive.safesend)
        getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection)

        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     *args, **kwargs):
            httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs)
            # client certificate material for the TLS handshake
            self.key_file = key_file
            self.cert_file = cert_file

        def connect(self):
            """Open the TCP connection (tunnelling through a proxy when
            configured), wrap it in TLS and validate the certificate."""
            self.sock = socket.create_connection((self.host, self.port))

            host = self.host
            if self.realhostport: # use CONNECT proxy
                _generic_proxytunnel(self)
                # verify the certificate of the real host, not the proxy
                host = self.realhostport.rsplit(':', 1)[0]
            self.sock = sslutil.wrapsocket(
                self.sock, self.key_file, self.cert_file, ui=self.ui,
                serverhostname=host)
            sslutil.validatesocket(self.sock)

    class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
        """HTTPS handler combining keepalive with [auth] configuration."""

        def __init__(self, ui):
            keepalive.KeepAliveHandler.__init__(self)
            urlreq.httpshandler.__init__(self)
            self.ui = ui
            self.pwmgr = passwordmgr(self.ui,
                                     self.ui.httppasswordmgrdb)

        def _start_transaction(self, h, req):
            # record proxy CONNECT-tunnel state on h before delegating
            _generic_start_transaction(self, h, req)
            return keepalive.KeepAliveHandler._start_transaction(self, h, req)

        def https_open(self, req):
            # req.get_full_url() does not contain credentials and we may
            # need them to match the certificates.
            url = req.get_full_url()
            user, password = self.pwmgr.find_stored_password(url)
            res = httpconnectionmod.readauthforuri(self.ui, url, user)
            if res:
                group, auth = res
                self.auth = auth
                self.ui.debug("using auth.%s.* for authentication\n" % group)
            else:
                self.auth = None
            return self.do_open(self._makeconnection, req)

        def _makeconnection(self, host, port=None, *args, **kwargs):
            """Build an httpsconnection, preferring key/cert files from
            the matched [auth] group over any positional arguments."""
            keyfile = None
            certfile = None

            if len(args) >= 1: # key_file
                keyfile = args[0]
            if len(args) >= 2: # cert_file
                certfile = args[1]
            args = args[2:]

            # if the user has specified different key/cert files in
            # hgrc, we prefer these
            if self.auth and 'key' in self.auth and 'cert' in self.auth:
                keyfile = self.auth['key']
                certfile = self.auth['cert']

            conn = httpsconnection(host, port, keyfile, certfile, *args,
                                   **kwargs)
            # the connection's ui is needed by sslutil in connect()
            conn.ui = self.ui
            return conn
|
2008-10-27 23:50:01 +03:00
|
|
|
|
2016-04-07 02:22:12 +03:00
|
|
|
class httpdigestauthhandler(urlreq.httpdigestauthhandler):
    """Digest-auth handler whose retry counter resets once per request.

    Works around Python 2.6.5, which calls reset_retry_count() on every
    401/407 response and would otherwise retry forever.
    """

    def __init__(self, *args, **kwargs):
        urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
        self.retried_req = None

    def reset_retry_count(self):
        # Python 2.6.5 will call this on 401 or 407 errors and thus loop
        # forever. We disable reset_retry_count completely and reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # Reset the retry counter once for each request.
        if req is not self.retried_req:
            self.retried = 0
            self.retried_req = req
        return urlreq.httpdigestauthhandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)
|
2008-10-27 23:50:01 +03:00
|
|
|
|
2016-04-07 02:22:12 +03:00
|
|
|
class httpbasicauthhandler(urlreq.httpbasicauthhandler):
    """Basic-auth handler with per-request retry reset.

    Once credentials have been negotiated (self.auth) they are replayed
    on subsequent requests without waiting for another 401 challenge.
    """

    def __init__(self, *args, **kwargs):
        self.auth = None
        urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs)
        self.retried_req = None

    def http_request(self, request):
        # preemptively attach previously negotiated credentials
        if self.auth:
            request.add_unredirected_header(self.auth_header, self.auth)

        return request

    def https_request(self, request):
        # same preemptive treatment over TLS
        if self.auth:
            request.add_unredirected_header(self.auth_header, self.auth)

        return request

    def reset_retry_count(self):
        # Python 2.6.5 will call this on 401 or 407 errors and thus loop
        # forever. We disable reset_retry_count completely and reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # Reset the retry counter once for each request.
        if req is not self.retried_req:
            self.retried = 0
            self.retried_req = req
        return urlreq.httpbasicauthhandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)

    def retry_http_basic_auth(self, host, req, realm):
        """Retry *req* with Basic credentials, or give up (return None)
        when no password is known or the same header was already sent."""
        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if pw is None:
            return None
        raw = "%s:%s" % (user, pw)
        auth = 'Basic %s' % base64.b64encode(raw).strip()
        if req.get_header(self.auth_header, None) == auth:
            # identical header already sent: avoid an infinite retry loop
            return None
        self.auth = auth
        req.add_unredirected_header(self.auth_header, auth)
        return self.parent.open(req)
|
|
|
|
|
2017-03-10 09:40:52 +03:00
|
|
|
class cookiehandler(urlreq.basehandler):
    """Attach cookies from the ``auth.cookiefile`` config (a
    Mozilla-format cookie file) to outgoing HTTP(S) requests.

    A missing or unreadable cookie file is not fatal: a warning is
    printed and requests proceed without cookies.
    """

    def __init__(self, ui):
        self.cookiejar = None

        cookiefile = ui.config('auth', 'cookiefile')
        if not cookiefile:
            return

        cookiefile = util.expandpath(cookiefile)
        try:
            cookiejar = util.cookielib.MozillaCookieJar(cookiefile)
            cookiejar.load()
            self.cookiejar = cookiejar
        except util.cookielib.LoadError as e:
            # deliberate best-effort: keep going without cookies
            ui.warn(_('(error loading cookie file %s: %s; continuing without '
                      'cookies)\n') % (cookiefile, str(e)))

    def http_request(self, request):
        """Add a Cookie header to *request* when a jar was loaded."""
        if self.cookiejar:
            self.cookiejar.add_cookie_header(request)

        return request

    # https requests get exactly the same cookie treatment; alias the
    # method instead of duplicating its body (urllib2 looks the handler
    # up by name, so both names must exist).
    https_request = http_request
|
|
|
|
|
2009-08-12 00:45:38 +04:00
|
|
|
# Extension hook: factories with signature (ui, passwordmgr) -> handler,
# instantiated and appended to the handler chain by opener() below.
handlerfuncs = []
|
|
|
|
|
2008-10-27 23:50:01 +03:00
|
|
|
def opener(ui, authinfo=None):
    '''
    construct an opener suitable for urllib2
    authinfo will be added to the password manager
    '''
    # experimental config: ui.usehttp2
    if ui.configbool('ui', 'usehttp2'):
        handlers = [
            httpconnectionmod.http2handler(
                ui,
                passwordmgr(ui, ui.httppasswordmgrdb))
        ]
    else:
        handlers = [httphandler()]
        if has_https:
            handlers.append(httpshandler(ui))

    handlers.append(proxyhandler(ui))

    passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
    if authinfo is not None:
        realm, uris, user, passwd = authinfo
        saveduser, savedpass = passmgr.find_stored_password(uris[0])
        # only (re)register when the user changed or a password was given
        if user != saveduser or passwd:
            passmgr.add_password(realm, uris, user, passwd)
        ui.debug('http auth: user %s, password %s\n' %
                 (user, passwd and '*' * len(passwd) or 'not set'))

    handlers.extend((httpbasicauthhandler(passmgr),
                     httpdigestauthhandler(passmgr)))
    # extension-provided handlers (see handlerfuncs above)
    handlers.extend([h(ui, passmgr) for h in handlerfuncs])
    handlers.append(cookiehandler(ui))
    opener = urlreq.buildopener(*handlers)

    # The user agent should *NOT* be used by servers for e.g.
    # protocol detection or feature negotiation: there are other
    # facilities for that.
    #
    # "mercurial/proto-1.0" was the original user agent string and
    # exists for backwards compatibility reasons.
    #
    # The "(Mercurial %s)" string contains the distribution
    # name and version. Other client implementations should choose their
    # own distribution name. Since servers should not be using the user
    # agent string for anything, clients should be able to define whatever
    # user agent they deem appropriate.
    agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
    opener.addheaders = [('User-agent', agent)]

    # This header should only be needed by wire protocol requests. But it has
    # been sent on all requests since forever. We keep sending it for backwards
    # compatibility reasons. Modern versions of the wire protocol use
    # X-HgProto-<N> for advertising client support.
    opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
    return opener
|
|
|
|
|
2011-03-31 07:01:34 +04:00
|
|
|
def open(ui, url_, data=None):
    """Open *url_* (or a local path) through a configured opener.

    A URL with a scheme has the scheme lower-cased and its credentials
    split off for the password manager; anything without a scheme is
    treated as a local filesystem path and converted to a file:// URL.
    """
    u = util.url(url_)
    if not u.scheme:
        # plain path: normalize it and express it as a file:// URL
        abspath = util.normpath(os.path.abspath(url_))
        url_ = 'file://' + urlreq.pathname2url(abspath)
        authinfo = None
    else:
        u.scheme = u.scheme.lower()
        url_, authinfo = u.authinfo()
    return opener(ui, authinfo).open(url_, data)
|