# httpconnection.py - urllib2 handler for new http support
#
# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
# Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
# Copyright 2011 Google, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
|
2015-12-22 08:52:58 +03:00
|
|
|
|
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2011-05-06 19:22:08 +04:00
|
|
|
import logging
|
2015-12-22 08:52:58 +03:00
|
|
|
import os
|
2011-05-06 19:22:08 +04:00
|
|
|
import socket
|
|
|
|
|
2015-12-22 08:52:58 +03:00
|
|
|
from .i18n import _
|
|
|
|
from . import (
|
|
|
|
httpclient,
|
|
|
|
sslutil,
|
2017-10-01 19:14:21 +03:00
|
|
|
urllibcompat,
|
2015-12-22 08:52:58 +03:00
|
|
|
util,
|
|
|
|
)
|
2011-05-06 19:22:08 +04:00
|
|
|
|
2016-04-07 02:22:12 +03:00
|
|
|
# Aliases for the urllib error/request module wrappers provided by
# util, so the rest of this file does not depend on the Python 2 vs
# Python 3 module layout directly.
urlerr = util.urlerr
urlreq = util.urlreq
|
|
|
|
|
2011-05-06 19:22:08 +04:00
|
|
|
# moved here from url.py to avoid a cycle
|
|
|
|
class httpsendfile(object):
    """A thin wrapper around the file objects returned by python's "open".

    Its purpose is to send file-like objects via HTTP while reporting
    upload progress as the data is read.  It deliberately does not
    define a __len__ attribute because the length might be more than
    Py_ssize_t can handle.
    """

    def __init__(self, ui, *args, **kwargs):
        # All positional and keyword arguments are forwarded to open().
        self.ui = ui
        self._data = open(*args, **kwargs)
        # Plain file operations are delegated straight to the wrapped
        # file object.
        self.seek = self._data.seek
        self.close = self._data.close
        self.write = self._data.write
        self.length = os.fstat(self._data.fileno()).st_size
        self._pos = 0
        # Progress total is double the file size in kB; see read() for
        # why we lie to the user here.
        self._total = self.length // 1024 * 2

    def read(self, *args, **kwargs):
        """Read from the wrapped file, updating the ui progress bar."""
        chunk = self._data.read(*args, **kwargs)
        if chunk:
            self._pos += len(chunk)
            # We pass double the max for total because we currently have
            # to send the bundle twice in the case of a server that
            # requires authentication. Since we can't know until we try
            # once whether authentication will be required, just lie to
            # the user and maybe the push succeeds suddenly at 50%.
            self.ui.progress(_('sending'), self._pos // 1024,
                             unit=_('kb'), total=self._total)
        else:
            # End of file: tear the progress bar down.
            self.ui.progress(_('sending'), None)
        return chunk

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
|
|
|
|
|
2011-05-06 19:22:08 +04:00
|
|
|
# moved here from url.py to avoid a cycle
|
2011-08-05 23:05:41 +04:00
|
|
|
def readauthforuri(ui, uri, user):
    """Return the best matching [auth] config entry for *uri*.

    ui: config source providing the [auth] section (and warn()).
    uri: full URI including the scheme, e.g. 'https://host/path'.
    user: username taken from the URI, or None.  When set, only
        entries whose 'username' matches it (or is unset) are
        considered; a winning entry without a username inherits *user*.

    Returns a (groupname, settingsdict) tuple, or None if no entry
    matched.
    """
    # Read configuration
    groups = {}
    for key, val in ui.configitems('auth'):
        if key in ('cookiefile',):
            continue

        if '.' not in key:
            ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
            continue
        group, setting = key.rsplit('.', 1)
        gdict = groups.setdefault(group, {})
        if setting in ('username', 'cert', 'key'):
            # Expand ~ and environment variables in path-like values
            # (the username is expanded too, matching historic behavior).
            val = util.expandpath(val)
        gdict[setting] = val

    # Find the best match
    scheme, hostpath = uri.split('://', 1)
    bestuser = None
    bestlen = 0
    bestauth = None
    # Use items() instead of the Python 2-only iteritems() so this also
    # works on Python 3; the dict is small, so materializing the item
    # list on Python 2 is harmless and iteration order is irrelevant
    # given the best-match logic below.
    for group, auth in groups.items():
        if user and user != auth.get('username', user):
            # If a username was set in the URI, the entry username
            # must either match it or be unset
            continue
        prefix = auth.get('prefix')
        if not prefix:
            continue
        p = prefix.split('://', 1)
        if len(p) > 1:
            # The prefix carries its own scheme, which overrides any
            # auth.schemes setting.
            schemes, prefix = [p[0]], p[1]
        else:
            schemes = (auth.get('schemes') or 'https').split()
        # Prefer the longest matching prefix; on a tie, prefer an entry
        # that provides a username over one that does not.
        if ((prefix == '*' or hostpath.startswith(prefix))
            and (len(prefix) > bestlen
                 or (len(prefix) == bestlen
                     and not bestuser and 'username' in auth))
            and scheme in schemes):
            bestlen = len(prefix)
            bestauth = group, auth
            bestuser = auth.get('username')
            if user and not bestuser:
                auth['username'] = user
    return bestauth
|
|
|
|
|
|
|
|
# Mercurial (at least until we can remove the old codepath) requires
|
|
|
|
# that the http response object be sufficiently file-like, so we
|
|
|
|
# provide a close() method here.
|
|
|
|
class HTTPResponse(httpclient.HTTPResponse):
    """Response object that tolerates being close()d like a file.

    Mercurial's old codepath treats HTTP responses as file-like
    objects and calls close() on them, so we make that a no-op.
    """

    def close(self):
        # Intentionally do nothing: connection lifetime is managed by
        # the httpclient layer, not by the response consumer.
        pass
|
|
|
|
|
|
|
|
class HTTPConnection(httpclient.HTTPConnection):
    """httpclient connection that knows how to resend httpsendfile bodies."""

    # Hand out close()-tolerant response objects.
    response_class = HTTPResponse

    def request(self, method, uri, body=None, headers=None):
        """Issue a request, rewinding an httpsendfile body first."""
        headers = {} if headers is None else headers
        # The same body object may be sent more than once (for example
        # when a server demands authentication), so a file-backed body
        # must be read from the beginning again.
        if isinstance(body, httpsendfile):
            body.seek(0)
        httpclient.HTTPConnection.request(
            self, method, uri, body=body, headers=headers)
|
|
|
|
|
|
|
|
# Becomes True once http2handler has attached a handler to the
# 'mercurial.httpclient' logger; guards against installing duplicate
# log handlers when several handler instances are created.
_configuredlogging = False
# Line format for the developer-facing http2 debug log output.
LOGFMT = '%(levelname)s:%(name)s:%(lineno)d:%(message)s'
|
2011-05-06 19:22:08 +04:00
|
|
|
# Subclass BOTH of these because otherwise urllib2 "helpfully"
|
|
|
|
# reinserts them since it notices we don't include any subclasses of
|
|
|
|
# them.
|
2016-04-07 02:22:12 +03:00
|
|
|
class http2handler(urlreq.httphandler, urlreq.httpshandler):
    """urllib2-style handler that serves requests via httpclient.

    Open connections are cached in self._connections, keyed by
    (use_ssl, host, proxy), so they can be reused across requests.
    """

    def __init__(self, ui, pwmgr):
        # pwmgr: password manager; used in https_open() to look up a
        # stored username for [auth] entry matching.
        global _configuredlogging
        urlreq.abstracthttphandler.__init__(self)
        self.ui = ui
        self.pwmgr = pwmgr
        self._connections = {}
        # developer config: ui.http2debuglevel
        loglevel = ui.config('ui', 'http2debuglevel')
        # Configure the httpclient logger at most once per process;
        # _configuredlogging prevents stacking duplicate handlers.
        if loglevel and not _configuredlogging:
            _configuredlogging = True
            logger = logging.getLogger('mercurial.httpclient')
            # NOTE(review): an invalid level name raises AttributeError
            # here -- presumably acceptable for a developer option.
            logger.setLevel(getattr(logging, loglevel.upper()))
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter(LOGFMT))
            logger.addHandler(handler)

    def close_all(self):
        """Close and remove all connection objects being kept for reuse."""
        for openconns in self._connections.values():
            for conn in openconns:
                conn.close()
        self._connections = {}

    # shamelessly borrowed from urllib2.AbstractHTTPHandler
    def do_open(self, http_class, req, use_ssl):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object. It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code

        Raises urlerr.urlerror when no host is given or on socket
        errors while talking to the server.
        """
        # If using a proxy, the host returned by get_host() is
        # actually the proxy. On Python 2.6.1, the real destination
        # hostname is encoded in the URI in the urllib2 request
        # object. On Python 2.6.5, it's stored in the _tunnel_host
        # attribute which has no accessor.
        tunhost = getattr(req, '_tunnel_host', None)
        host = urllibcompat.gethost(req)
        if tunhost:
            proxyhost = host
            host = tunhost
        elif req.has_proxy():
            proxyhost = urllibcompat.gethost(req)
            # The real destination host is the authority part of the
            # request selector (a full URI when proxying).
            host = urllibcompat.getselector(
                req).split('://', 1)[1].split('/', 1)[0]
        else:
            proxyhost = None

        if proxyhost:
            if ':' in proxyhost:
                # Note: this means we'll explode if we try and use an
                # IPv6 http proxy. This isn't a regression, so we
                # won't worry about it for now.
                proxyhost, proxyport = proxyhost.rsplit(':', 1)
            else:
                # NOTE(review): proxyport stays an int here but is a
                # string in the branch above -- confirm http_class
                # accepts both.
                proxyport = 3128 # squid default
            proxy = (proxyhost, proxyport)
        else:
            proxy = None

        if not host:
            raise urlerr.urlerror('no host given')

        # Reuse an idle cached connection when one exists; otherwise
        # open (and cache) a new one.
        connkey = use_ssl, host, proxy
        allconns = self._connections.get(connkey, [])
        conns = [c for c in allconns if not c.busy()]
        if conns:
            h = conns[0]
        else:
            if allconns:
                self.ui.debug('all connections for %s busy, making a new '
                              'one\n' % host)
            timeout = None
            if req.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                timeout = req.timeout
            h = http_class(host, timeout=timeout, proxy_hostport=proxy)
            self._connections.setdefault(connkey, []).append(h)

        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # Normalize header names to Title-Case so names differing only
        # in case collapse to one entry.
        headers = dict(
            (name.title(), val) for name, val in headers.items())
        try:
            path = urllibcompat.getselector(req)
            # Strip scheme and authority if the selector is a full URI,
            # and make sure the path is absolute.
            if '://' in path:
                path = path.split('://', 1)[1].split('/', 1)[1]
            if path[0] != '/':
                path = '/' + path
            h.request(req.get_method(), path, req.data, headers)
            r = h.getresponse()
        except socket.error as err: # XXX what error?
            raise urlerr.urlerror(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        r.recv = r.read

        resp = urlreq.addinfourl(r, r.headers, urllibcompat.getfullurl(req))
        resp.code = r.status
        resp.msg = r.reason
        return resp

    # httplib always uses the given host/port as the socket connect
    # target, and then allows full URIs in the request path, which it
    # then observes and treats as a signal to do proxying instead.
    def http_open(self, req):
        """Handle a plain http request (delegating https to https_open)."""
        if urllibcompat.getfullurl(req).startswith('https'):
            return self.https_open(req)
        def makehttpcon(*args, **kwargs):
            # Copy kwargs so the caller's dict is not mutated.
            k2 = dict(kwargs)
            k2[r'use_ssl'] = False
            return HTTPConnection(*args, **k2)
        return self.do_open(makehttpcon, req, False)

    def https_open(self, req):
        """Handle an https request, selecting [auth] credentials first."""
        # urllibcompat.getfullurl(req) does not contain credentials and we may
        # need them to match the certificates.
        url = urllibcompat.getfullurl(req)
        # Only the username matters for [auth] matching; the stored
        # password is not used here.
        user, password = self.pwmgr.find_stored_password(url)
        res = readauthforuri(self.ui, url, user)
        if res:
            group, auth = res
            self.auth = auth
            self.ui.debug("using auth.%s.* for authentication\n" % group)
        else:
            self.auth = None
        return self.do_open(self._makesslconnection, req, True)

    def _makesslconnection(self, host, port=443, *args, **kwargs):
        """Build an SSL-wrapped HTTPConnection for host:port.

        Extra positional args mirror httplib's (key_file, cert_file);
        any [auth] key/cert settings selected in https_open() take
        precedence over them.
        """
        keyfile = None
        certfile = None

        # NOTE(review): *args is a tuple, which has no pop(); these
        # branches would raise AttributeError if positional key/cert
        # arguments were ever passed. Apparently they never are in
        # practice -- confirm before relying on them.
        if args: # key_file
            keyfile = args.pop(0)
        if args: # cert_file
            certfile = args.pop(0)

        # if the user has specified different key/cert files in
        # hgrc, we prefer these
        if self.auth and 'key' in self.auth and 'cert' in self.auth:
            keyfile = self.auth['key']
            certfile = self.auth['cert']

        # let host port take precedence
        if ':' in host and '[' not in host or ']:' in host:
            host, port = host.rsplit(':', 1)
            port = int(port)
            # Strip the brackets from an IPv6 literal.
            if '[' in host:
                host = host[1:-1]

        kwargs[r'keyfile'] = keyfile
        kwargs[r'certfile'] = certfile

        con = HTTPConnection(host, port, use_ssl=True,
                             ssl_wrap_socket=sslutil.wrapsocket,
                             ssl_validator=sslutil.validatesocket,
                             ui=self.ui,
                             **kwargs)
        return con
|